author    Daniel Baumann <daniel.baumann@progress-linux.org>  2024-06-19 09:26:03 +0000
committer Daniel Baumann <daniel.baumann@progress-linux.org>  2024-06-19 09:26:03 +0000
commit    9918693037dce8aa4bb6f08741b6812923486c18 (patch)
tree      21d2b40bec7e6a7ea664acee056eb3d08e15a1cf /compiler/rustc_const_eval/src/interpret/intern.rs
parent    Releasing progress-linux version 1.75.0+dfsg1-5~progress7.99u1. (diff)
Merging upstream version 1.76.0+dfsg1.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'compiler/rustc_const_eval/src/interpret/intern.rs')
-rw-r--r--  compiler/rustc_const_eval/src/interpret/intern.rs | 34
1 file changed, 18 insertions(+), 16 deletions(-)
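
Background (not part of the upstream patch itself): Rust 1.76 replaced the bare AllocId that the const-eval interpreter used as pointer provenance with a dedicated CtfeProvenance type, which still identifies an allocation but leaves room for extra per-pointer bits. The standalone Rust sketch below is a hypothetical simplification of that pattern; only the alloc_id() accessor appears in the diff, while the field layout and the From impl are illustrative assumptions.

// Hypothetical, simplified model of the CtfeProvenance pattern seen in this
// diff: a provenance value that still identifies an allocation, but can carry
// extra metadata. The real rustc type is more compact than this sketch.

#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
struct AllocId(u64);

#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
struct CtfeProvenance {
    alloc_id: AllocId,
    // Extra flag a bare `AllocId` could not express; illustrative only.
    immutable: bool,
}

impl CtfeProvenance {
    // The accessor the diff inserts at every former `AllocId` use site.
    fn alloc_id(self) -> AllocId {
        self.alloc_id
    }
}

impl From<AllocId> for CtfeProvenance {
    fn from(alloc_id: AllocId) -> Self {
        CtfeProvenance { alloc_id, immutable: false }
    }
}

fn main() {
    let prov: CtfeProvenance = AllocId(1).into();
    // Call sites that used to consume an `AllocId` directly now unwrap it:
    assert_eq!(prov.alloc_id(), AllocId(1));
}

This is why the diff that follows is almost entirely mechanical: every site that used to bind an AllocId out of a provenance value now binds a CtfeProvenance and calls .alloc_id() where the allocation id is actually needed.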
diff --git a/compiler/rustc_const_eval/src/interpret/intern.rs b/compiler/rustc_const_eval/src/interpret/intern.rs
index 3d90e95c0..7931789e4 100644
--- a/compiler/rustc_const_eval/src/interpret/intern.rs
+++ b/compiler/rustc_const_eval/src/interpret/intern.rs
@@ -18,7 +18,7 @@ use super::validity::RefTracking;
use rustc_data_structures::fx::{FxIndexMap, FxIndexSet};
use rustc_errors::ErrorGuaranteed;
use rustc_hir as hir;
-use rustc_middle::mir::interpret::InterpResult;
+use rustc_middle::mir::interpret::{CtfeProvenance, InterpResult};
use rustc_middle::ty::{self, layout::TyAndLayout, Ty};
use rustc_ast::Mutability;
@@ -34,7 +34,7 @@ pub trait CompileTimeMachine<'mir, 'tcx: 'mir, T> = Machine<
'mir,
'tcx,
MemoryKind = T,
- Provenance = AllocId,
+ Provenance = CtfeProvenance,
ExtraFnVal = !,
FrameExtra = (),
AllocExtra = (),
@@ -94,9 +94,9 @@ fn intern_shallow<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx, const_eval:
// If the pointer is dangling (neither in local nor global memory), we leave it
// to validation to error -- it has the much better error messages, pointing out where
// in the value the dangling reference lies.
- // The `delay_span_bug` ensures that we don't forget such a check in validation.
+ // The `span_delayed_bug` ensures that we don't forget such a check in validation.
if tcx.try_get_global_alloc(alloc_id).is_none() {
- tcx.sess.delay_span_bug(ecx.tcx.span, "tried to intern dangling pointer");
+ tcx.sess.span_delayed_bug(ecx.tcx.span, "tried to intern dangling pointer");
}
// treat dangling pointers like other statics
// just to stop trying to recurse into them
@@ -135,7 +135,7 @@ fn intern_shallow<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx, const_eval:
alloc.mutability = Mutability::Not;
};
// link the alloc id to the actual allocation
- leftover_allocations.extend(alloc.provenance().ptrs().iter().map(|&(_, alloc_id)| alloc_id));
+ leftover_allocations.extend(alloc.provenance().ptrs().iter().map(|&(_, prov)| prov.alloc_id()));
let alloc = tcx.mk_const_alloc(alloc);
tcx.set_alloc_id_memory(alloc_id, alloc);
None
@@ -178,20 +178,20 @@ impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx, const_eval::Memory
tcx.struct_tail_erasing_lifetimes(referenced_ty, self.ecx.param_env).kind()
{
let ptr = mplace.meta().unwrap_meta().to_pointer(&tcx)?;
- if let Some(alloc_id) = ptr.provenance {
+ if let Some(prov) = ptr.provenance {
// Explicitly choose const mode here, since vtables are immutable, even
// if the reference of the fat pointer is mutable.
- self.intern_shallow(alloc_id, InternMode::Const, None);
+ self.intern_shallow(prov.alloc_id(), InternMode::Const, None);
} else {
// Validation will error (with a better message) on an invalid vtable pointer.
// Let validation show the error message, but make sure it *does* error.
tcx.sess
- .delay_span_bug(tcx.span, "vtables pointers cannot be integer pointers");
+ .span_delayed_bug(tcx.span, "vtables pointers cannot be integer pointers");
}
}
// Check if we have encountered this pointer+layout combination before.
// Only recurse for allocation-backed pointers.
- if let Some(alloc_id) = mplace.ptr().provenance {
+ if let Some(prov) = mplace.ptr().provenance {
// Compute the mode with which we intern this. Our goal here is to make as many
// statics as we can immutable so they can be placed in read-only memory by LLVM.
let ref_mode = match self.mode {
@@ -234,7 +234,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx, const_eval::Memory
InternMode::Const
}
};
- match self.intern_shallow(alloc_id, ref_mode, Some(referenced_ty)) {
+ match self.intern_shallow(prov.alloc_id(), ref_mode, Some(referenced_ty)) {
// No need to recurse, these are interned already and statics may have
// cycles, so we don't want to recurse there
Some(IsStaticOrFn) => {}
@@ -259,7 +259,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx, const_eval::Memory
// to avoid could be expensive: on the potentially larger types, arrays and slices,
// rather than on all aggregates unconditionally.
if matches!(mplace.layout.ty.kind(), ty::Array(..) | ty::Slice(..)) {
- let Some((size, _align)) = self.ecx.size_and_align_of_mplace(&mplace)? else {
+ let Some((size, _align)) = self.ecx.size_and_align_of_mplace(mplace)? else {
// We do the walk if we can't determine the size of the mplace: we may be
// dealing with extern types here in the future.
return Ok(true);
@@ -353,7 +353,7 @@ pub fn intern_const_alloc_recursive<
leftover_allocations,
// The outermost allocation must exist, because we allocated it with
// `Memory::allocate`.
- ret.ptr().provenance.unwrap(),
+ ret.ptr().provenance.unwrap().alloc_id(),
base_intern_mode,
Some(ret.layout.ty),
);
@@ -375,7 +375,7 @@ pub fn intern_const_alloc_recursive<
match res {
Ok(()) => {}
Err(error) => {
- ecx.tcx.sess.delay_span_bug(
+ ecx.tcx.sess.span_delayed_bug(
ecx.tcx.span,
format!(
"error during interning should later cause validation failure: {}",
@@ -431,7 +431,8 @@ pub fn intern_const_alloc_recursive<
}
let alloc = tcx.mk_const_alloc(alloc);
tcx.set_alloc_id_memory(alloc_id, alloc);
- for &(_, alloc_id) in alloc.inner().provenance().ptrs().iter() {
+ for &(_, prov) in alloc.inner().provenance().ptrs().iter() {
+ let alloc_id = prov.alloc_id();
if leftover_allocations.insert(alloc_id) {
todo.push(alloc_id);
}
@@ -503,10 +504,11 @@ impl<'mir, 'tcx: 'mir, M: super::intern::CompileTimeMachine<'mir, 'tcx, !>>
// `allocate` picks a fresh AllocId that we will associate with its data below.
let dest = self.allocate(layout, MemoryKind::Stack)?;
f(self, &dest.clone().into())?;
- let mut alloc = self.memory.alloc_map.remove(&dest.ptr().provenance.unwrap()).unwrap().1;
+ let mut alloc =
+ self.memory.alloc_map.remove(&dest.ptr().provenance.unwrap().alloc_id()).unwrap().1;
alloc.mutability = Mutability::Not;
let alloc = self.tcx.mk_const_alloc(alloc);
- let alloc_id = dest.ptr().provenance.unwrap(); // this was just allocated, it must have provenance
+ let alloc_id = dest.ptr().provenance.unwrap().alloc_id(); // this was just allocated, it must have provenance
self.tcx.set_alloc_id_memory(alloc_id, alloc);
Ok(alloc_id)
}
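
A note on the recurring edit above: wherever the interpreter previously pattern-matched an AllocId directly out of ptr.provenance, it now binds a CtfeProvenance and unwraps the id only where the allocation itself is required. A minimal, self-contained sketch of that call-site shape, using hypothetical stand-in types rather than the real rustc ones:

#[derive(Copy, Clone, Debug, PartialEq, Eq)]
struct AllocId(u64);

#[derive(Copy, Clone, Debug, PartialEq, Eq)]
struct CtfeProvenance(AllocId);

impl CtfeProvenance {
    fn alloc_id(self) -> AllocId {
        self.0
    }
}

// Stand-in for a pointer whose provenance may be absent (an integer pointer).
struct Pointer {
    provenance: Option<CtfeProvenance>,
}

fn intern_target(ptr: &Pointer) -> Option<AllocId> {
    // Old shape:  if let Some(alloc_id) = ptr.provenance { ... }
    // New shape:  bind the provenance, then unwrap the id where needed.
    if let Some(prov) = ptr.provenance {
        Some(prov.alloc_id())
    } else {
        // Integer pointers carry no provenance; validation reports the error.
        None
    }
}

fn main() {
    let ptr = Pointer { provenance: Some(CtfeProvenance(AllocId(7))) };
    assert_eq!(intern_target(&ptr), Some(AllocId(7)));
}

Keeping the provenance value intact until the last possible moment is what lets the new type carry extra per-pointer information through the interpreter without further signature changes.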