// rustc_const_eval/interpret/intern.rs

1//! This module specifies the type based interner for constants.
2//!
3//! After a const evaluation has computed a value, before we destroy the const evaluator's session
4//! memory, we need to extract all memory allocations to the global memory pool so they stay around.
5//!
6//! In principle, this is not very complicated: we recursively walk the final value, follow all the
7//! pointers, and move all reachable allocations to the global `tcx` memory. The only complication
8//! is picking the right mutability: the outermost allocation generally has a clear mutability, but
9//! what about the other allocations it points to that have also been created with this value? We
10//! don't want to do guesswork here. The rules are: `static`, `const`, and promoted can only create
11//! immutable allocations that way. `static mut` can be initialized with expressions like `&mut 42`,
12//! so all inner allocations are marked mutable. Some of them could potentially be made immutable,
13//! but that would require relying on type information, and given how many ways Rust has to lie
14//! about type information, we want to avoid doing that.
15
16use hir::def::DefKind;
17use rustc_ast::Mutability;
18use rustc_data_structures::fx::{FxHashSet, FxIndexMap};
19use rustc_hir as hir;
20use rustc_hir::definitions::{DefPathData, DisambiguatorState};
21use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrs;
22use rustc_middle::mir::interpret::{ConstAllocation, CtfeProvenance, InterpResult};
23use rustc_middle::query::TyCtxtAt;
24use rustc_middle::span_bug;
25use rustc_middle::ty::layout::TyAndLayout;
26use rustc_span::def_id::LocalDefId;
27use tracing::{instrument, trace};
28
29use super::{
30    AllocId, Allocation, InterpCx, MPlaceTy, Machine, MemoryKind, PlaceTy, err_ub, interp_ok,
31};
32use crate::const_eval;
33use crate::const_eval::DummyMachine;
34use crate::errors::NestedStaticInThreadLocal;
35
/// Trait alias gathering the [`Machine`] configuration the interner relies on: CTFE
/// provenance, an indexable memory map, and no extra function values or frame/alloc data.
/// The extra [`HasStaticRootDefId`] bound lets `intern_shallow` attach nested allocations
/// to the static currently being evaluated (if any).
pub trait CompileTimeMachine<'tcx, T> = Machine<
        'tcx,
        MemoryKind = T,
        Provenance = CtfeProvenance,
        ExtraFnVal = !,
        FrameExtra = (),
        AllocExtra = (),
        MemoryMap = FxIndexMap<AllocId, (MemoryKind<T>, Allocation)>,
    > + HasStaticRootDefId;
45
/// Access to the `DefId` of the static a machine is currently evaluating, if any.
/// Implemented by machines that want to support interning of nested statics.
pub trait HasStaticRootDefId {
    /// Returns the `DefId` of the static item that is currently being evaluated.
    /// Used for interning to be able to handle nested allocations.
    fn static_def_id(&self) -> Option<LocalDefId>;
}
51
52impl HasStaticRootDefId for const_eval::CompileTimeMachine<'_> {
53    fn static_def_id(&self) -> Option<LocalDefId> {
54        Some(self.static_root_ids?.1)
55    }
56}
57
/// Intern an allocation. Returns `Err` if the allocation does not exist in the local memory.
///
/// `mutability` can be used to force immutable interning: if it is `Mutability::Not`, the
/// allocation is interned immutably; if it is `Mutability::Mut`, then the allocation *must be*
/// already mutable (as a sanity check).
///
/// `disambiguator` must be `Some` whenever the machine reports a static root via
/// [`HasStaticRootDefId::static_def_id`], since in that case a nested static def is created
/// (see `intern_as_new_static`); otherwise it may be `None`.
///
/// Returns an iterator over all relocations referred to by this allocation.
fn intern_shallow<'tcx, T, M: CompileTimeMachine<'tcx, T>>(
    ecx: &mut InterpCx<'tcx, M>,
    alloc_id: AllocId,
    mutability: Mutability,
    disambiguator: Option<&mut DisambiguatorState>,
) -> Result<impl Iterator<Item = CtfeProvenance> + 'tcx, ()> {
    trace!("intern_shallow {:?}", alloc_id);
    // remove allocation
    // FIXME(#120456) - is `swap_remove` correct?
    let Some((_kind, mut alloc)) = ecx.memory.alloc_map.swap_remove(&alloc_id) else {
        return Err(());
    };
    // Set allocation mutability as appropriate. This is used by LLVM to put things into
    // read-only memory, and also by Miri when evaluating other globals that
    // access this one.
    match mutability {
        Mutability::Not => {
            alloc.mutability = Mutability::Not;
        }
        Mutability::Mut => {
            // This must be already mutable, we won't "un-freeze" allocations ever.
            assert_eq!(alloc.mutability, Mutability::Mut);
        }
    }
    // link the alloc id to the actual allocation
    let alloc = ecx.tcx.mk_const_alloc(alloc);
    if let Some(static_id) = ecx.machine.static_def_id() {
        // We are inside a static: register the allocation as a nested static of it.
        intern_as_new_static(
            ecx.tcx,
            static_id,
            alloc_id,
            alloc,
            disambiguator.expect("disambiguator needed"),
        );
    } else {
        ecx.tcx.set_alloc_id_memory(alloc_id, alloc);
    }
    // `alloc.0.0` peels the interning wrappers off `ConstAllocation` to reach the underlying
    // `Allocation`, whose provenance table lists every pointer stored inside it.
    Ok(alloc.0.0.provenance().ptrs().iter().map(|&(_, prov)| prov))
}
104
/// Creates a new `DefId` and feeds all the right queries to make this `DefId`
/// appear as if it were a user-written `static` (though it has no HIR).
fn intern_as_new_static<'tcx>(
    tcx: TyCtxtAt<'tcx>,
    static_id: LocalDefId,
    alloc_id: AllocId,
    alloc: ConstAllocation<'tcx>,
    disambiguator: &mut DisambiguatorState,
) {
    // `intern_const_alloc_recursive` is called once per static and it contains the `DisambiguatorState`.
    //  The `<static_id>::{{nested}}` path is thus unique to `intern_const_alloc_recursive` and the
    // `DisambiguatorState` ensures the generated path is unique for this call as we generate
    // `<static_id>::{{nested#n}}` where `n` is the `n`th `intern_as_new_static` call.
    let feed = tcx.create_def(
        static_id,
        None,
        // The nested static inherits the allocation's mutability, not the parent's.
        DefKind::Static { safety: hir::Safety::Safe, mutability: alloc.0.mutability, nested: true },
        Some(DefPathData::NestedStatic),
        disambiguator,
    );
    tcx.set_nested_alloc_id_static(alloc_id, feed.def_id());

    // Nested allocations are not supported inside thread-local statics; report that.
    if tcx.is_thread_local_static(static_id.into()) {
        tcx.dcx().emit_err(NestedStaticInThreadLocal { span: tcx.def_span(static_id) });
    }

    // These do not inherit the codegen attrs of the parent static allocation, since
    // it doesn't make sense for them to inherit their `#[no_mangle]` and `#[link_name = ..]`
    // and the like.
    feed.codegen_fn_attrs(CodegenFnAttrs::new());

    // The initializer of the nested static is exactly the interned allocation.
    feed.eval_static_initializer(Ok(alloc));
    // Mirror the parent's generics/spans/predicates so queries on the new def behave
    // like they would for the enclosing static.
    feed.generics_of(tcx.generics_of(static_id).clone());
    feed.def_ident_span(tcx.def_ident_span(static_id));
    feed.explicit_predicates_of(tcx.explicit_predicates_of(static_id));
    feed.feed_hir();
}
142
/// How a constant value should be interned.
#[derive(Copy, Clone, Debug, PartialEq, Hash, Eq)]
pub enum InternKind {
    /// The `mutability` of the static, ignoring the type which may have interior mutability.
    Static(hir::Mutability),
    /// A `const` item
    Constant,
    /// A promoted value; interned fully immutably, just like a `const`
    /// (see `intern_const_alloc_recursive` for why that is sound).
    Promoted,
}
152
/// Why interning failed. These are only surfaced to the user if validation
/// (which has better diagnostics) does not also fail; see `intern_const_alloc_recursive`.
#[derive(Debug)]
pub enum InternResult {
    /// A mutable pointer was found in a context where inner allocations must be immutable.
    FoundBadMutablePointer,
    /// A pointer referred to an allocation that exists neither locally nor globally.
    FoundDanglingPointer,
}
158
/// Intern `ret` and everything it references.
///
/// This *cannot raise an interpreter error*. Doing so is left to validation, which
/// tracks where in the value we are and thus can show much better error messages.
///
/// For `InternKind::Static` the root allocation will not be interned, but must be handled by the caller.
///
/// Returns `Err` if a dangling pointer, or a mutable pointer in an immutable context, was
/// encountered; the caller is expected to run validation afterwards and only report these
/// errors if validation passes (its messages are better, interning's coverage is wider).
#[instrument(level = "debug", skip(ecx))]
pub fn intern_const_alloc_recursive<'tcx, M: CompileTimeMachine<'tcx, const_eval::MemoryKind>>(
    ecx: &mut InterpCx<'tcx, M>,
    intern_kind: InternKind,
    ret: &MPlaceTy<'tcx>,
) -> Result<(), InternResult> {
    // One `DisambiguatorState` per interning session; `intern_as_new_static` draws from it
    // to give each nested static a unique `{{nested#n}}` def path.
    let mut disambiguator = DisambiguatorState::new();

    // We are interning recursively, and for mutability we are distinguishing the "root" allocation
    // that we are starting in, and all other allocations that we are encountering recursively.
    let (base_mutability, inner_mutability, is_static) = match intern_kind {
        InternKind::Constant | InternKind::Promoted => {
            // Completely immutable. Interning anything mutably here can only lead to unsoundness,
            // since all consts are conceptually independent values but share the same underlying
            // memory.
            (Mutability::Not, Mutability::Not, false)
        }
        InternKind::Static(Mutability::Not) => {
            (
                // Outermost allocation is mutable if `!Freeze`.
                if ret.layout.ty.is_freeze(*ecx.tcx, ecx.typing_env) {
                    Mutability::Not
                } else {
                    Mutability::Mut
                },
                // Inner allocations are never mutable. They can only arise via the "tail
                // expression" / "outer scope" rule, and we treat them consistently with `const`.
                Mutability::Not,
                true,
            )
        }
        InternKind::Static(Mutability::Mut) => {
            // Just make everything mutable. We accept code like
            // `static mut X = &mut [42]`, so even inner allocations need to be mutable.
            (Mutability::Mut, Mutability::Mut, true)
        }
    };

    // Intern the base allocation, and initialize todo list for recursive interning.
    let base_alloc_id = ret.ptr().provenance.unwrap().alloc_id();
    trace!(?base_alloc_id, ?base_mutability);
    // First we intern the base allocation, as it requires a different mutability.
    // This gives us the initial set of nested allocations, which will then all be processed
    // recursively in the loop below.
    let mut todo: Vec<_> = if is_static {
        // Do not steal the root allocation, we need it later to create the return value of `eval_static_initializer`.
        // But still change its mutability to match the requested one.
        let alloc = ecx.memory.alloc_map.get_mut(&base_alloc_id).unwrap();
        alloc.1.mutability = base_mutability;
        alloc.1.provenance().ptrs().iter().map(|&(_, prov)| prov).collect()
    } else {
        intern_shallow(ecx, base_alloc_id, base_mutability, Some(&mut disambiguator))
            .unwrap()
            .collect()
    };
    // We need to distinguish "has just been interned" from "was already in `tcx`",
    // so we track this in a separate set.
    let mut just_interned: FxHashSet<_> = std::iter::once(base_alloc_id).collect();
    // Whether we encountered a bad mutable pointer.
    // We want to first report "dangling" and then "mutable", so we need to delay reporting these
    // errors.
    let mut result = Ok(());

    // Keep interning as long as there are things to intern.
    // We show errors if there are dangling pointers, or mutable pointers in immutable contexts
    // (i.e., everything except for `static mut`). We only return these errors as a `Result`
    // so that the caller can run validation, and subsequently only report interning errors
    // if validation fails. Validation has the better error messages so we prefer those, but
    // interning has better coverage since it "sees" *all* pointers, including raw pointers and
    // references stored in unions.
    while let Some(prov) = todo.pop() {
        trace!(?prov);
        let alloc_id = prov.alloc_id();

        if base_alloc_id == alloc_id && is_static {
            // This is a pointer to the static itself. It's ok for a static to refer to itself,
            // even mutably. Whether that mutable pointer is legal at all is checked in validation.
            // See tests/ui/statics/recursive_interior_mut.rs for how such a situation can occur.
            // We also already collected all the nested allocations, so there's no need to do that again.
            continue;
        }

        // Ensure that this is derived from a shared reference. Crucially, we check this *before*
        // checking whether the `alloc_id` has already been interned. The point of this check is to
        // ensure that when there are multiple pointers to the same allocation, they are *all*
        // derived from a shared reference. Therefore it would be bad if we only checked the first
        // pointer to any given allocation.
        // (It is likely not possible to actually have multiple pointers to the same allocation,
        // so alternatively we could also check that and ICE if there are multiple such pointers.)
        // See <https://github.com/rust-lang/rust/pull/128543> for why we are checking for "shared
        // reference" and not "immutable", i.e., for why we are allowing interior-mutable shared
        // references: they can actually be created in safe code while pointing to apparently
        // "immutable" values, via promotion or tail expression lifetime extension of
        // `&None::<Cell<T>>`.
        // We also exclude promoteds from this as `&mut []` can be promoted, which is a mutable
        // reference pointing to an immutable (zero-sized) allocation. We rely on the promotion
        // analysis not screwing up to ensure that it is sound to intern promoteds as immutable.
        if intern_kind != InternKind::Promoted
            && inner_mutability == Mutability::Not
            && !prov.shared_ref()
        {
            let is_already_global = ecx.tcx.try_get_global_alloc(alloc_id).is_some();
            if is_already_global && !just_interned.contains(&alloc_id) {
                // This is a pointer to some memory from another constant. We encounter mutable
                // pointers to such memory since we do not always track immutability through
                // these "global" pointers. Allowing them is harmless; the point of these checks
                // during interning is to justify why we intern the *new* allocations immutably,
                // so we can completely ignore existing allocations.
                // We can also skip the rest of this loop iteration, since after all it is already
                // interned.
                continue;
            }
            // If this is a dangling pointer, that's actually fine -- the problematic case is
            // when there is memory there that someone might expect to be mutable, but we make it immutable.
            let dangling = !is_already_global && !ecx.memory.alloc_map.contains_key(&alloc_id);
            if !dangling {
                // Found a mutable pointer inside a const where inner allocations should be
                // immutable.
                if !ecx.tcx.sess.opts.unstable_opts.unleash_the_miri_inside_of_you {
                    // Without the unleash flag, earlier safety checks should have rejected this;
                    // reaching here is a compiler bug.
                    span_bug!(
                        ecx.tcx.span,
                        "the static const safety checks accepted a mutable pointer they should not have accepted"
                    );
                }
                // Prefer dangling pointer errors over mutable pointer errors
                if result.is_ok() {
                    result = Err(InternResult::FoundBadMutablePointer);
                }
            }
        }
        if ecx.tcx.try_get_global_alloc(alloc_id).is_some() {
            // Already interned.
            debug_assert!(!ecx.memory.alloc_map.contains_key(&alloc_id));
            continue;
        }
        // We always intern with `inner_mutability`, and furthermore we ensured above that if
        // that is "immutable", then there are *no* mutable pointers anywhere in the newly
        // interned memory -- justifying that we can indeed intern immutably. However this also
        // means we can *not* easily intern immutably here if `prov.immutable()` is true and
        // `inner_mutability` is `Mut`: there might be other pointers to that allocation, and
        // we'd have to somehow check that they are *all* immutable before deciding that this
        // allocation can be made immutable. In the future we could consider analyzing all
        // pointers before deciding which allocations can be made immutable; but for now we are
        // okay with losing some potential for immutability here. This can anyway only affect
        // `static mut`.
        just_interned.insert(alloc_id);
        match intern_shallow(ecx, alloc_id, inner_mutability, Some(&mut disambiguator)) {
            Ok(nested) => todo.extend(nested),
            Err(()) => {
                // Dangling pointer: the allocation exists neither locally nor globally.
                // Delay the ICE so validation gets a chance to produce its better error first.
                ecx.tcx.dcx().delayed_bug("found dangling pointer during const interning");
                result = Err(InternResult::FoundDanglingPointer);
            }
        }
    }
    result
}
321
322/// Intern `ret`. This function assumes that `ret` references no other allocation.
323#[instrument(level = "debug", skip(ecx))]
324pub fn intern_const_alloc_for_constprop<'tcx, T, M: CompileTimeMachine<'tcx, T>>(
325    ecx: &mut InterpCx<'tcx, M>,
326    alloc_id: AllocId,
327) -> InterpResult<'tcx, ()> {
328    if ecx.tcx.try_get_global_alloc(alloc_id).is_some() {
329        // The constant is already in global memory. Do nothing.
330        return interp_ok(());
331    }
332    // Move allocation to `tcx`.
333    if let Some(_) = intern_shallow(ecx, alloc_id, Mutability::Not, None)
334        .map_err(|()| err_ub!(DeadLocal))?
335        .next()
336    {
337        // We are not doing recursive interning, so we don't currently support provenance.
338        // (If this assertion ever triggers, we should just implement a
339        // proper recursive interning loop -- or just call `intern_const_alloc_recursive`.
340        panic!("`intern_const_alloc_for_constprop` called on allocation with nested provenance")
341    }
342    interp_ok(())
343}
344
impl<'tcx> InterpCx<'tcx, DummyMachine> {
    /// A helper function that allocates memory for the layout given and gives you access to mutate
    /// it. Once your own mutation code is done, the backing `Allocation` is removed from the
    /// current `Memory` and interned as read-only into the global memory.
    ///
    /// Panics if the mutation callback stored a pointer to an allocation that is not already
    /// globally interned (recursive interning is not supported here).
    pub fn intern_with_temp_alloc(
        &mut self,
        layout: TyAndLayout<'tcx>,
        f: impl FnOnce(
            &mut InterpCx<'tcx, DummyMachine>,
            &PlaceTy<'tcx, CtfeProvenance>,
        ) -> InterpResult<'tcx, ()>,
    ) -> InterpResult<'tcx, AllocId> {
        // `allocate` picks a fresh AllocId that we will associate with its data below.
        let dest = self.allocate(layout, MemoryKind::Stack)?;
        // Let the caller initialize the freshly allocated place.
        f(self, &dest.clone().into())?;
        let alloc_id = dest.ptr().provenance.unwrap().alloc_id(); // this was just allocated, it must have provenance
        for prov in intern_shallow(self, alloc_id, Mutability::Not, None).unwrap() {
            // We are not doing recursive interning, so we don't currently support provenance.
            // (If this assertion ever triggers, we should just implement a
            // proper recursive interning loop -- or just call `intern_const_alloc_recursive`.)
            if self.tcx.try_get_global_alloc(prov.alloc_id()).is_none() {
                panic!("`intern_with_temp_alloc` with nested allocations");
            }
        }
        interp_ok(alloc_id)
    }
}