const-eval: full support for pointer fragments #144081

Status: Open · wants to merge 1 commit into base: master
11 changes: 6 additions & 5 deletions compiler/rustc_const_eval/messages.ftl
@@ -57,7 +57,7 @@ const_eval_const_context = {$kind ->
}

const_eval_const_heap_ptr_in_final = encountered `const_allocate` pointer in final value that was not made global
.note = use `const_make_global` to make allocated pointers immutable before returning
.note = use `const_make_global` to turn allocated pointers into immutable globals before returning

const_eval_const_make_global_ptr_already_made_global = attempting to call `const_make_global` twice on the same allocation {$alloc}

@@ -231,6 +231,9 @@ const_eval_mutable_borrow_escaping =

const_eval_mutable_ptr_in_final = encountered mutable pointer in final value of {const_eval_intern_kind}

const_eval_partial_pointer_in_final = encountered partial pointer in final value of {const_eval_intern_kind}
.note = while pointers can be broken apart into individual bytes during const-evaluation, only complete pointers (with all their bytes in the right order) are supported in the final value

const_eval_nested_static_in_thread_local = #[thread_local] does not support implicit nested statics, please create explicit static items and refer to them instead

const_eval_non_const_await =
@@ -299,10 +302,8 @@ const_eval_panic = evaluation panicked: {$msg}

const_eval_panic_non_str = argument to `panic!()` in a const context must have type `&str`

const_eval_partial_pointer_copy =
unable to copy parts of a pointer from memory at {$ptr}
const_eval_partial_pointer_overwrite =
unable to overwrite parts of a pointer in memory at {$ptr}
const_eval_partial_pointer_read =
unable to read parts of a pointer from memory at {$ptr}
const_eval_pointer_arithmetic_overflow =
overflowing pointer arithmetic: the total offset in bytes does not fit in an `isize`

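The two new messages above describe the one restriction that remains after this PR: pointer bytes may be shuffled around freely during evaluation, but the final value must only contain whole pointers. A minimal sketch of the kind of const the new `const_eval_partial_pointer_in_final` error targets (illustrative only, assuming a 64-bit target; not taken from the PR's test suite):

```rust
// Hypothetical example of a "partial pointer in final value": the pointer is
// split into bytes during evaluation (now supported), but only half of those
// bytes survive into the final value, so interning has to reject it.
const HALF_A_POINTER: [u8; 4] = {
    // `&42` is promoted, so the pointer itself stays valid.
    let ptr: *const i32 = &42;
    // Reinterpret the pointer as its raw bytes (8 on the assumed target).
    let bytes: [u8; 8] = unsafe { core::mem::transmute(ptr) };
    // Keeping only the first four bytes leaves fragment provenance behind,
    // which the new `const_eval_partial_pointer_in_final` error reports.
    [bytes[0], bytes[1], bytes[2], bytes[3]]
};
```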
7 changes: 7 additions & 0 deletions compiler/rustc_const_eval/src/const_eval/eval_queries.rs
@@ -117,6 +117,13 @@ fn eval_body_using_ecx<'tcx, R: InterpretationResult<'tcx>>(
ecx.tcx.dcx().emit_err(errors::ConstHeapPtrInFinal { span: ecx.tcx.span }),
)));
}
Err(InternError::PartialPointer) => {
throw_inval!(AlreadyReported(ReportedErrorInfo::non_const_eval_error(
ecx.tcx
.dcx()
.emit_err(errors::PartialPtrInFinal { span: ecx.tcx.span, kind: intern_kind }),
)));
}
}

interp_ok(R::make_result(ret, ecx))
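For context on the neighbouring `ConstHeapPtrInFinal` arm (whose `.note` wording this PR also rewords), here is a hedged sketch of the `const_allocate`/`const_make_global` flow that message refers to. The feature gates and exact intrinsic signatures are assumptions, not part of this diff:

```rust
#![feature(core_intrinsics, const_heap)]
use core::intrinsics::{const_allocate, const_make_global};

// Sketch only: heap memory allocated during const evaluation must be turned
// into an immutable global before it may appear in the final value;
// otherwise interning reports `const_eval_const_heap_ptr_in_final`.
const BYTES: &[u8; 4] = unsafe {
    let ptr = const_allocate(4, 4);
    let mut i = 0;
    while i < 4 {
        *ptr.add(i) = i as u8; // initialize every byte of the allocation
        i += 1;
    }
    // Skipping this call would trigger the "not made global" error above.
    let global = const_make_global(ptr);
    &*(global as *const [u8; 4])
};
```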
16 changes: 12 additions & 4 deletions compiler/rustc_const_eval/src/errors.rs
@@ -51,6 +51,15 @@ pub(crate) struct ConstHeapPtrInFinal {
pub span: Span,
}

#[derive(Diagnostic)]
#[diag(const_eval_partial_pointer_in_final)]
#[note]
pub(crate) struct PartialPtrInFinal {
#[primary_span]
pub span: Span,
pub kind: InternKind,
}

#[derive(Diagnostic)]
#[diag(const_eval_unstable_in_stable_exposed)]
pub(crate) struct UnstableInStableExposed {
@@ -832,8 +841,7 @@ impl ReportErrorExt for UnsupportedOpInfo {
UnsupportedOpInfo::Unsupported(s) => s.clone().into(),
UnsupportedOpInfo::ExternTypeField => const_eval_extern_type_field,
UnsupportedOpInfo::UnsizedLocal => const_eval_unsized_local,
UnsupportedOpInfo::OverwritePartialPointer(_) => const_eval_partial_pointer_overwrite,
UnsupportedOpInfo::ReadPartialPointer(_) => const_eval_partial_pointer_copy,
UnsupportedOpInfo::ReadPartialPointer(_) => const_eval_partial_pointer_read,
UnsupportedOpInfo::ReadPointerAsInt(_) => const_eval_read_pointer_as_int,
UnsupportedOpInfo::ThreadLocalStatic(_) => const_eval_thread_local_static,
UnsupportedOpInfo::ExternStatic(_) => const_eval_extern_static,
@@ -844,7 +852,7 @@ impl ReportErrorExt for UnsupportedOpInfo {
use UnsupportedOpInfo::*;

use crate::fluent_generated::*;
if let ReadPointerAsInt(_) | OverwritePartialPointer(_) | ReadPartialPointer(_) = self {
if let ReadPointerAsInt(_) | ReadPartialPointer(_) = self {
diag.help(const_eval_ptr_as_bytes_1);
diag.help(const_eval_ptr_as_bytes_2);
}
@@ -856,7 +864,7 @@ impl ReportErrorExt for UnsupportedOpInfo {
| UnsupportedOpInfo::ExternTypeField
| Unsupported(_)
| ReadPointerAsInt(_) => {}
OverwritePartialPointer(ptr) | ReadPartialPointer(ptr) => {
ReadPartialPointer(ptr) => {
diag.arg("ptr", ptr);
}
ThreadLocalStatic(did) | ExternStatic(did) => rustc_middle::ty::tls::with(|tcx| {
97 changes: 57 additions & 40 deletions compiler/rustc_const_eval/src/interpret/intern.rs
@@ -19,9 +19,12 @@ use rustc_data_structures::fx::{FxHashSet, FxIndexMap};
use rustc_hir as hir;
use rustc_hir::definitions::{DefPathData, DisambiguatorState};
use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrs;
use rustc_middle::mir::interpret::{ConstAllocation, CtfeProvenance, InterpResult};
use rustc_middle::mir::interpret::{
AllocBytes, ConstAllocation, CtfeProvenance, InterpResult, Provenance,
};
use rustc_middle::query::TyCtxtAt;
use rustc_middle::span_bug;
use rustc_middle::ty::TyCtxt;
use rustc_middle::ty::layout::TyAndLayout;
use rustc_span::def_id::LocalDefId;
use tracing::{instrument, trace};
@@ -52,39 +55,30 @@ impl HasStaticRootDefId for const_eval::CompileTimeMachine<'_> {
}
}

/// Intern an allocation. Returns `Err` if the allocation does not exist in the local memory.
///
/// `mutability` can be used to force immutable interning: if it is `Mutability::Not`, the
/// allocation is interned immutably; if it is `Mutability::Mut`, then the allocation *must be*
/// already mutable (as a sanity check).
///
/// Returns an iterator over all relocations referred to by this allocation.
fn intern_shallow<'tcx, M: CompileTimeMachine<'tcx>>(
ecx: &mut InterpCx<'tcx, M>,
alloc_id: AllocId,
fn prepare_alloc<'tcx, Prov: Provenance, Extra, Bytes: AllocBytes>(
tcx: TyCtxt<'tcx>,
kind: MemoryKind<const_eval::MemoryKind>,
alloc: &mut Allocation<Prov, Extra, Bytes>,
mutability: Mutability,
disambiguator: Option<&mut DisambiguatorState>,
) -> Result<impl Iterator<Item = CtfeProvenance> + 'tcx, InternError> {
trace!("intern_shallow {:?}", alloc_id);
// remove allocation
// FIXME(#120456) - is `swap_remove` correct?
let Some((kind, mut alloc)) = ecx.memory.alloc_map.swap_remove(&alloc_id) else {
return Err(InternError::DanglingPointer);
};

) -> Result<(), InternError> {
match kind {
MemoryKind::Machine(const_eval::MemoryKind::Heap { was_made_global }) => {
if !was_made_global {
// Attempting to intern a `const_allocate`d pointer that was not made global via
// `const_make_global`. We want to error here, but we have to first put the
// allocation back into the `alloc_map` to keep things in a consistent state.
ecx.memory.alloc_map.insert(alloc_id, (kind, alloc));
// `const_make_global`.
tcx.dcx().delayed_bug("non-global heap allocation in const value");
return Err(InternError::ConstAllocNotGlobal);
}
}
MemoryKind::Stack | MemoryKind::CallerLocation => {}
}

if !alloc.provenance_merge_bytes(&tcx) {
// Per-byte provenance is not supported by backends, so we cannot accept it here.
tcx.dcx().delayed_bug("partial pointer in const value");
return Err(InternError::PartialPointer);
}

// Set allocation mutability as appropriate. This is used by LLVM to put things into
// read-only memory, and also by Miri when evaluating other globals that
// access this one.
@@ -97,6 +91,36 @@ fn intern_shallow<'tcx, M: CompileTimeMachine<'tcx>>(
assert_eq!(alloc.mutability, Mutability::Mut);
}
}
Ok(())
}

/// Intern an allocation. Returns `Err` if the allocation does not exist in the local memory.
///
/// `mutability` can be used to force immutable interning: if it is `Mutability::Not`, the
/// allocation is interned immutably; if it is `Mutability::Mut`, then the allocation *must be*
/// already mutable (as a sanity check).
///
/// Returns an iterator over all relocations referred to by this allocation.
fn intern_shallow<'tcx, M: CompileTimeMachine<'tcx>>(
ecx: &mut InterpCx<'tcx, M>,
alloc_id: AllocId,
mutability: Mutability,
disambiguator: Option<&mut DisambiguatorState>,
) -> Result<impl Iterator<Item = CtfeProvenance> + 'tcx, InternError> {
trace!("intern_shallow {:?}", alloc_id);
// remove allocation
// FIXME(#120456) - is `swap_remove` correct?
let Some((kind, mut alloc)) = ecx.memory.alloc_map.swap_remove(&alloc_id) else {
return Err(InternError::DanglingPointer);
};

if let Err(err) = prepare_alloc(*ecx.tcx, kind, &mut alloc, mutability) {
// We want to error here, but we have to first put the
// allocation back into the `alloc_map` to keep things in a consistent state.
ecx.memory.alloc_map.insert(alloc_id, (kind, alloc));
return Err(err);
}

// link the alloc id to the actual allocation
let alloc = ecx.tcx.mk_const_alloc(alloc);
if let Some(static_id) = ecx.machine.static_def_id() {
@@ -166,6 +190,7 @@ pub enum InternError {
BadMutablePointer,
DanglingPointer,
ConstAllocNotGlobal,
PartialPointer,
}

/// Intern `ret` and everything it references.
@@ -221,21 +246,18 @@ pub fn intern_const_alloc_recursive<'tcx, M: CompileTimeMachine<'tcx>>(
let mut todo: Vec<_> = if is_static {
// Do not steal the root allocation, we need it later to create the return value of `eval_static_initializer`.
// But still change its mutability to match the requested one.
let alloc = ecx.memory.alloc_map.get_mut(&base_alloc_id).unwrap();
alloc.1.mutability = base_mutability;
alloc.1.provenance().ptrs().iter().map(|&(_, prov)| prov).collect()
let (kind, alloc) = ecx.memory.alloc_map.get_mut(&base_alloc_id).unwrap();
prepare_alloc(*ecx.tcx, *kind, alloc, base_mutability)?;
alloc.provenance().ptrs().iter().map(|&(_, prov)| prov).collect()
} else {
intern_shallow(ecx, base_alloc_id, base_mutability, Some(&mut disambiguator))
.unwrap()
.collect()
intern_shallow(ecx, base_alloc_id, base_mutability, Some(&mut disambiguator))?.collect()
};
// We need to distinguish "has just been interned" from "was already in `tcx`",
// so we track this in a separate set.
let mut just_interned: FxHashSet<_> = std::iter::once(base_alloc_id).collect();
// Whether we encountered a bad mutable pointer.
// We want to first report "dangling" and then "mutable", so we need to delay reporting these
// errors.
let mut result = Ok(());
let mut found_bad_mutable_ptr = false;

// Keep interning as long as there are things to intern.
@@ -310,28 +332,23 @@ pub fn intern_const_alloc_recursive<'tcx, M: CompileTimeMachine<'tcx>>(
// okay with losing some potential for immutability here. This can anyway only affect
// `static mut`.
just_interned.insert(alloc_id);
match intern_shallow(ecx, alloc_id, inner_mutability, Some(&mut disambiguator)) {
Ok(nested) => todo.extend(nested),
Err(err) => {
ecx.tcx.dcx().delayed_bug("error during const interning");
result = Err(err);
}
}
let next = intern_shallow(ecx, alloc_id, inner_mutability, Some(&mut disambiguator))?;
todo.extend(next);
}
if found_bad_mutable_ptr && result.is_ok() {
if found_bad_mutable_ptr {
// We found a mutable pointer inside a const where inner allocations should be immutable,
// and there was no other error. This should usually never happen! However, this can happen
// in unleash-miri mode, so report it as a normal error then.
if ecx.tcx.sess.opts.unstable_opts.unleash_the_miri_inside_of_you {
result = Err(InternError::BadMutablePointer);
return Err(InternError::BadMutablePointer);
} else {
span_bug!(
ecx.tcx.span,
"the static const safety checks accepted a mutable pointer they should not have accepted"
);
}
}
result
Ok(())
}

/// Intern `ret`. This function assumes that `ret` references no other allocation.
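The new `prepare_alloc` step calls `provenance_merge_bytes` to fold per-byte provenance back into whole pointers before an allocation is interned. As a standalone illustration of what such a merge has to verify, here is a simplified sketch with its own types; it is not the rustc data structure, and the real check also handles provenance that was never split:

```rust
use std::collections::BTreeMap;

const PTR_SIZE: usize = 8; // assumed pointer size for this sketch

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct AllocId(u64);

/// One byte of provenance: which allocation it points into and which byte
/// of the original pointer it was (0..PTR_SIZE).
#[derive(Clone, Copy, Debug)]
struct Fragment {
    alloc: AllocId,
    byte: usize,
}

/// Merge byte-level fragments (keyed by their offset in the allocation) back
/// into whole-pointer provenance. Returns `None` if any fragment is not part
/// of a complete, correctly ordered pointer; this is the analogue of
/// `provenance_merge_bytes` returning `false` in the diff above.
fn merge_fragments(fragments: &BTreeMap<usize, Fragment>) -> Option<Vec<(usize, AllocId)>> {
    let mut merged = Vec::new();
    let mut iter = fragments.iter();
    while let Some((&start, &first)) = iter.next() {
        // A complete pointer starts with byte 0 of some pointer...
        if first.byte != 0 {
            return None;
        }
        // ...followed by bytes 1..PTR_SIZE of the same pointer at the
        // immediately following offsets.
        for i in 1..PTR_SIZE {
            match iter.next() {
                Some((&off, &frag))
                    if off == start + i && frag.alloc == first.alloc && frag.byte == i => {}
                _ => return None,
            }
        }
        merged.push((start, first.alloc));
    }
    Some(merged)
}
```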
44 changes: 13 additions & 31 deletions compiler/rustc_const_eval/src/interpret/memory.rs
@@ -1310,29 +1310,20 @@ impl<'a, 'tcx, Prov: Provenance, Extra, Bytes: AllocBytes>
}

/// Mark the given sub-range (relative to this allocation reference) as uninitialized.
pub fn write_uninit(&mut self, range: AllocRange) -> InterpResult<'tcx> {
pub fn write_uninit(&mut self, range: AllocRange) {
let range = self.range.subrange(range);

self.alloc
.write_uninit(&self.tcx, range)
.map_err(|e| e.to_interp_error(self.alloc_id))
.into()
self.alloc.write_uninit(&self.tcx, range);
}

/// Mark the entire referenced range as uninitialized
pub fn write_uninit_full(&mut self) -> InterpResult<'tcx> {
self.alloc
.write_uninit(&self.tcx, self.range)
.map_err(|e| e.to_interp_error(self.alloc_id))
.into()
pub fn write_uninit_full(&mut self) {
self.alloc.write_uninit(&self.tcx, self.range);
}

/// Remove all provenance in the reference range.
pub fn clear_provenance(&mut self) -> InterpResult<'tcx> {
self.alloc
.clear_provenance(&self.tcx, self.range)
.map_err(|e| e.to_interp_error(self.alloc_id))
.into()
pub fn clear_provenance(&mut self) {
self.alloc.clear_provenance(&self.tcx, self.range);
}
}

@@ -1423,11 +1414,8 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {

// Side-step AllocRef and directly access the underlying bytes more efficiently.
// (We are staying inside the bounds here and all bytes do get overwritten so all is good.)
let alloc_id = alloc_ref.alloc_id;
let bytes = alloc_ref
.alloc
.get_bytes_unchecked_for_overwrite(&alloc_ref.tcx, alloc_ref.range)
.map_err(move |e| e.to_interp_error(alloc_id))?;
let bytes =
alloc_ref.alloc.get_bytes_unchecked_for_overwrite(&alloc_ref.tcx, alloc_ref.range);
// `zip` would stop when the first iterator ends; we want to definitely
// cover all of `bytes`.
for dest in bytes {
@@ -1509,10 +1497,8 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
// `get_bytes_mut` will clear the provenance, which is correct,
// since we don't want to keep any provenance at the target.
// This will also error if copying partial provenance is not supported.
let provenance = src_alloc
.provenance()
.prepare_copy(src_range, dest_offset, num_copies, self)
.map_err(|e| e.to_interp_error(src_alloc_id))?;
let provenance =
src_alloc.provenance().prepare_copy(src_range, dest_offset, num_copies, self);
// Prepare a copy of the initialization mask.
let init = src_alloc.init_mask().prepare_copy(src_range);

@@ -1530,10 +1516,8 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
dest_range,
)?;
// Yes we do overwrite all bytes in `dest_bytes`.
let dest_bytes = dest_alloc
.get_bytes_unchecked_for_overwrite_ptr(&tcx, dest_range)
.map_err(|e| e.to_interp_error(dest_alloc_id))?
.as_mut_ptr();
let dest_bytes =
dest_alloc.get_bytes_unchecked_for_overwrite_ptr(&tcx, dest_range).as_mut_ptr();

if init.no_bytes_init() {
// Fast path: If all bytes are `uninit` then there is nothing to copy. The target range
@@ -1542,9 +1526,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
// This also avoids writing to the target bytes so that the backing allocation is never
// touched if the bytes stay uninitialized for the whole interpreter execution. On contemporary
// operating system this can avoid physically allocating the page.
dest_alloc
.write_uninit(&tcx, dest_range)
.map_err(|e| e.to_interp_error(dest_alloc_id))?;
dest_alloc.write_uninit(&tcx, dest_range);
// `write_uninit` also resets the provenance, so we are done.
return interp_ok(());
}
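These `memory.rs` hunks make partial-provenance copies and overwrites infallible, since pointer fragments are now representable in memory. A hedged sketch of the expected user-visible consequence (not a test from this PR): overwriting a single byte of a pointer during evaluation should no longer abort with the removed "unable to overwrite parts of a pointer" error, as long as no partial pointer ends up in the final value:

```rust
// Sketch: mutate one byte of a pointer in place during const evaluation.
// Only a plain integer escapes into the final value, so interning is happy.
const LOW_BYTE: u8 = {
    let x = 0u32;
    let mut p: *const u32 = &x;
    unsafe {
        // View the pointer variable itself as raw bytes.
        let p_bytes = (&mut p) as *mut *const u32 as *mut u8;
        // Overwrite the pointer's first byte with an integer byte; before
        // fragment support this was rejected during evaluation.
        *p_bytes = 7;
        // Read that byte back; the rest of the pointer is never observed.
        *p_bytes
    }
};
```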
8 changes: 4 additions & 4 deletions compiler/rustc_const_eval/src/interpret/place.rs
@@ -700,7 +700,7 @@ where

match value {
Immediate::Scalar(scalar) => {
alloc.write_scalar(alloc_range(Size::ZERO, scalar.size()), scalar)
alloc.write_scalar(alloc_range(Size::ZERO, scalar.size()), scalar)?;
}
Immediate::ScalarPair(a_val, b_val) => {
let BackendRepr::ScalarPair(a, b) = layout.backend_repr else {
@@ -720,10 +720,10 @@ where
alloc.write_scalar(alloc_range(Size::ZERO, a_val.size()), a_val)?;
alloc.write_scalar(alloc_range(b_offset, b_val.size()), b_val)?;
// We don't have to reset padding here, `write_immediate` will anyway do a validation run.
interp_ok(())
}
Immediate::Uninit => alloc.write_uninit_full(),
}
interp_ok(())
}

pub fn write_uninit(
@@ -743,7 +743,7 @@ where
// Zero-sized access
return interp_ok(());
};
alloc.write_uninit_full()?;
alloc.write_uninit_full();
}
}
interp_ok(())
@@ -767,7 +767,7 @@ where
// Zero-sized access
return interp_ok(());
};
alloc.clear_provenance()?;
alloc.clear_provenance();
}
}
interp_ok(())