Diffstat (limited to 'compiler/rustc_const_eval/src/interpret/intern.rs')
-rw-r--r--  compiler/rustc_const_eval/src/interpret/intern.rs | 135
1 file changed, 61 insertions(+), 74 deletions(-)
diff --git a/compiler/rustc_const_eval/src/interpret/intern.rs b/compiler/rustc_const_eval/src/interpret/intern.rs
index 7b11ad330..910c3ca5d 100644
--- a/compiler/rustc_const_eval/src/interpret/intern.rs
+++ b/compiler/rustc_const_eval/src/interpret/intern.rs
@@ -30,7 +30,7 @@ use super::{
 use crate::const_eval;
 use crate::errors::{DanglingPtrInFinal, UnsupportedUntypedPointer};
 
-pub trait CompileTimeMachine<'mir, 'tcx, T> = Machine<
+pub trait CompileTimeMachine<'mir, 'tcx: 'mir, T> = Machine<
     'mir,
     'tcx,
     MemoryKind = T,
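
The only functional change in the hunk above is the explicit `'tcx: 'mir` outlives bound on the trait alias. A minimal sketch of why such a bound has to be spelled out on the alias itself (hypothetical names; trait aliases require the nightly `trait_alias` feature):

    #![feature(trait_alias)]

    // The item type `&'a &'b u8` is only well-formed when `'b: 'a`, so the
    // alias must declare that bound, just as `CompileTimeMachine` now
    // declares `'tcx: 'mir`.
    trait NestedIter<'a, 'b: 'a> = Iterator<Item = &'a &'b u8>;

    fn first<'a, 'b: 'a>(mut it: impl NestedIter<'a, 'b>) -> Option<&'a &'b u8> {
        it.next()
    }
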
@@ -164,82 +164,13 @@ impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx, const_eval::Memory
         &self.ecx
     }
 
-    fn visit_aggregate(
-        &mut self,
-        mplace: &MPlaceTy<'tcx>,
-        fields: impl Iterator<Item = InterpResult<'tcx, Self::V>>,
-    ) -> InterpResult<'tcx> {
-        // We want to walk the aggregate to look for references to intern. While doing that we
-        // also need to take special care of interior mutability.
-        //
-        // As an optimization, however, if the allocation does not contain any references: we don't
-        // need to do the walk. It can be costly for big arrays for example (e.g. issue #93215).
-        let is_walk_needed = |mplace: &MPlaceTy<'tcx>| -> InterpResult<'tcx, bool> {
-            // ZSTs cannot contain pointers, we can avoid the interning walk.
-            if mplace.layout.is_zst() {
-                return Ok(false);
-            }
-
-            // Now, check whether this allocation could contain references.
-            //
-            // Note, this check may sometimes not be cheap, so we only do it when the walk we'd like
-            // to avoid could be expensive: on the potentially larger types, arrays and slices,
-            // rather than on all aggregates unconditionally.
-            if matches!(mplace.layout.ty.kind(), ty::Array(..) | ty::Slice(..)) {
-                let Some((size, align)) = self.ecx.size_and_align_of_mplace(&mplace)? else {
-                    // We do the walk if we can't determine the size of the mplace: we may be
-                    // dealing with extern types here in the future.
-                    return Ok(true);
-                };
-
-                // If there is no provenance in this allocation, it does not contain references
-                // that point to another allocation, and we can avoid the interning walk.
-                if let Some(alloc) = self.ecx.get_ptr_alloc(mplace.ptr, size, align)? {
-                    if !alloc.has_provenance() {
-                        return Ok(false);
-                    }
-                } else {
-                    // We're encountering a ZST here, and can avoid the walk as well.
-                    return Ok(false);
-                }
-            }
-
-            // In the general case, we do the walk.
-            Ok(true)
-        };
-
-        // If this allocation contains no references to intern, we avoid the potentially costly
-        // walk.
-        //
-        // We can do this before the checks for interior mutability below, because only references
-        // are relevant in that situation, and we're checking if there are any here.
-        if !is_walk_needed(mplace)? {
-            return Ok(());
-        }
-
-        if let Some(def) = mplace.layout.ty.ty_adt_def() {
-            if def.is_unsafe_cell() {
-                // We are crossing over an `UnsafeCell`, we can mutate again. This means that
-                // References we encounter inside here are interned as pointing to mutable
-                // allocations.
-                // Remember the `old` value to handle nested `UnsafeCell`.
-                let old = std::mem::replace(&mut self.inside_unsafe_cell, true);
-                let walked = self.walk_aggregate(mplace, fields);
-                self.inside_unsafe_cell = old;
-                return walked;
-            }
-        }
-
-        self.walk_aggregate(mplace, fields)
-    }
-
     fn visit_value(&mut self, mplace: &MPlaceTy<'tcx>) -> InterpResult<'tcx> {
         // Handle Reference types, as these are the only types with provenance supported by const eval.
         // Raw pointers (and boxes) are handled by the `leftover_allocations` logic.
         let tcx = self.ecx.tcx;
         let ty = mplace.layout.ty;
         if let ty::Ref(_, referenced_ty, ref_mutability) = *ty.kind() {
-            let value = self.ecx.read_immediate(&mplace.into())?;
+            let value = self.ecx.read_immediate(mplace)?;
             let mplace = self.ecx.ref_to_mplace(&value)?;
             assert_eq!(mplace.layout.ty, referenced_ty);
             // Handle trait object vtables.
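
The dropped `.into()` above suggests that `read_immediate` now accepts a memory place directly, rather than requiring conversion to an operand first. A hedged sketch of that API shape (the trait and type names below are illustrative stand-ins, not the actual rustc signatures):

    struct OpTy;
    struct MPlaceTy;

    // Hypothetical: a single bound covers both operands and places, so call
    // sites no longer need an explicit `.into()` conversion.
    trait Readable {
        fn as_operand(&self) -> OpTy;
    }
    impl Readable for OpTy {
        fn as_operand(&self) -> OpTy { OpTy }
    }
    impl Readable for MPlaceTy {
        fn as_operand(&self) -> OpTy { OpTy }
    }

    fn read_immediate(src: &impl Readable) -> OpTy {
        src.as_operand()
    }
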
@@ -315,7 +246,63 @@ impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx, const_eval::Memory
             }
             Ok(())
         } else {
-            // Not a reference -- proceed recursively.
+            // Not a reference. Check if we want to recurse.
+            let is_walk_needed = |mplace: &MPlaceTy<'tcx>| -> InterpResult<'tcx, bool> {
+                // ZSTs cannot contain pointers, we can avoid the interning walk.
+                if mplace.layout.is_zst() {
+                    return Ok(false);
+                }
+
+                // Now, check whether this allocation could contain references.
+                //
+                // Note, this check may sometimes not be cheap, so we only do it when the walk we'd like
+                // to avoid could be expensive: on the potentially larger types, arrays and slices,
+                // rather than on all aggregates unconditionally.
+                if matches!(mplace.layout.ty.kind(), ty::Array(..) | ty::Slice(..)) {
+                    let Some((size, align)) = self.ecx.size_and_align_of_mplace(&mplace)? else {
+                        // We do the walk if we can't determine the size of the mplace: we may be
+                        // dealing with extern types here in the future.
+                        return Ok(true);
+                    };
+
+                    // If there is no provenance in this allocation, it does not contain references
+                    // that point to another allocation, and we can avoid the interning walk.
+                    if let Some(alloc) = self.ecx.get_ptr_alloc(mplace.ptr, size, align)? {
+                        if !alloc.has_provenance() {
+                            return Ok(false);
+                        }
+                    } else {
+                        // We're encountering a ZST here, and can avoid the walk as well.
+                        return Ok(false);
+                    }
+                }
+
+                // In the general case, we do the walk.
+                Ok(true)
+            };
+
+            // If this allocation contains no references to intern, we avoid the potentially costly
+            // walk.
+            //
+            // We can do this before the checks for interior mutability below, because only references
+            // are relevant in that situation, and we're checking if there are any here.
+            if !is_walk_needed(mplace)? {
+                return Ok(());
+            }
+
+            if let Some(def) = mplace.layout.ty.ty_adt_def() {
+                if def.is_unsafe_cell() {
+                    // We are crossing over an `UnsafeCell`, we can mutate again. This means that
+                    // References we encounter inside here are interned as pointing to mutable
+                    // allocations.
+                    // Remember the `old` value to handle nested `UnsafeCell`.
+                    let old = std::mem::replace(&mut self.inside_unsafe_cell, true);
+                    let walked = self.walk_value(mplace);
+                    self.inside_unsafe_cell = old;
+                    return walked;
+                }
+            }
+
             self.walk_value(mplace)
         }
     }
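
The `UnsafeCell` branch above uses a save/restore idiom so the mutability flag unwinds correctly out of nested `UnsafeCell`s. The pattern in isolation (hypothetical visitor, same shape as the code above):

    struct Visitor {
        inside_unsafe_cell: bool,
    }

    impl Visitor {
        fn walk(&mut self, depth: usize) {
            if depth == 0 {
                return;
            }
            // Save the previous flag, set it for the nested walk, and put the
            // saved value back afterwards. Restoring `old` rather than
            // hardcoding `false` is what keeps nested mutable regions correct.
            let old = std::mem::replace(&mut self.inside_unsafe_cell, true);
            self.walk(depth - 1);
            self.inside_unsafe_cell = old;
        }
    }
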
@@ -371,7 +358,7 @@ pub fn intern_const_alloc_recursive<
         Some(ret.layout.ty),
     );
 
-    ref_tracking.track((*ret, base_intern_mode), || ());
+    ref_tracking.track((ret.clone(), base_intern_mode), || ());
 
     while let Some(((mplace, mode), _)) = ref_tracking.todo.pop() {
         let res = InternVisitor {
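
`*ret` becoming `ret.clone()` (and `dest.clone()` in the next hunk) indicates that `MPlaceTy` no longer implements `Copy`, so values pushed into the tracking queue must now be cloned explicitly. An illustrative reduction (types are hypothetical):

    #[derive(Clone)] // no longer `Copy`, e.g. because some field isn't `Copy`
    struct Place {
        meta: Vec<u8>,
    }

    fn track(todo: &mut Vec<Place>, ret: &Place) {
        todo.push(ret.clone()); // was `todo.push(*ret)` while `Place: Copy`
    }
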
@@ -477,7 +464,7 @@ impl<'mir, 'tcx: 'mir, M: super::intern::CompileTimeMachine<'mir, 'tcx, !>>
         ) -> InterpResult<'tcx, ()>,
     ) -> InterpResult<'tcx, ConstAllocation<'tcx>> {
         let dest = self.allocate(layout, MemoryKind::Stack)?;
-        f(self, &dest.into())?;
+        f(self, &dest.clone().into())?;
         let mut alloc = self.memory.alloc_map.remove(&dest.ptr.provenance.unwrap()).unwrap().1;
         alloc.mutability = Mutability::Not;
         Ok(self.tcx.mk_const_alloc(alloc))
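
For context, a possible call site for this helper: allocate a temporary, let the closure initialize it, and get back a frozen, interned `ConstAllocation`. The method name `intern_with_temp_alloc` is not visible in the hunk and the closure body is assumed; both are illustrative:

    // Assumed names: `intern_with_temp_alloc` and the `write_scalar` call
    // stand in for whatever the enclosing function and initializer are.
    let alloc = ecx.intern_with_temp_alloc(layout, |ecx, dest| {
        ecx.write_scalar(Scalar::from_u32(42), dest)
    })?;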