rustc_const_eval/const_eval/valtrees.rs

use rustc_abi::{BackendRepr, FieldIdx, VariantIdx};
use rustc_data_structures::stack::ensure_sufficient_stack;
use rustc_middle::mir::interpret::{EvalToValTreeResult, GlobalId, ValTreeCreationError};
use rustc_middle::ty::layout::{LayoutCx, TyAndLayout};
use rustc_middle::ty::{self, Ty, TyCtxt};
use rustc_middle::{bug, mir};
use rustc_span::DUMMY_SP;
use tracing::{debug, instrument, trace};

use super::VALTREE_MAX_NODES;
use super::eval_queries::{mk_eval_cx_to_read_const_val, op_to_const};
use super::machine::CompileTimeInterpCx;
use crate::const_eval::CanAccessMutGlobal;
use crate::interpret::{
    ImmTy, Immediate, InternKind, MPlaceTy, MemPlaceMeta, MemoryKind, PlaceTy, Projectable, Scalar,
    intern_const_alloc_recursive,
};

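/// Builds the valtree for an aggregate (a tuple, struct, or enum variant) by converting each of
/// its `field_count` fields in turn. For enums, the selected `variant` is recorded as the first
/// branch so it can be recovered from the valtree alone.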
#[instrument(skip(ecx), level = "debug")]
fn branches<'tcx>(
    ecx: &CompileTimeInterpCx<'tcx>,
    place: &MPlaceTy<'tcx>,
    field_count: usize,
    variant: Option<VariantIdx>,
    num_nodes: &mut usize,
) -> EvalToValTreeResult<'tcx> {
    let place = match variant {
        Some(variant) => ecx.project_downcast(place, variant).unwrap(),
        None => place.clone(),
    };
    debug!(?place);

    let mut branches = Vec::with_capacity(field_count + variant.is_some() as usize);

    // For enums, we prepend their variant index before the variant's fields so we can figure out
    // the variant again when just seeing a valtree.
    if let Some(variant) = variant {
        branches.push(ty::ValTree::from_scalar_int(*ecx.tcx, variant.as_u32().into()));
    }

    for i in 0..field_count {
        let field = ecx.project_field(&place, FieldIdx::from_usize(i)).unwrap();
        let valtree = const_to_valtree_inner(ecx, &field, num_nodes)?;
        branches.push(valtree);
    }

    // Have to account for ZSTs here
    if branches.is_empty() {
        *num_nodes += 1;
    }

    Ok(ty::ValTree::from_branches(*ecx.tcx, branches))
}

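/// Builds the valtree for an array, slice, or `str` by converting each element in turn.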
#[instrument(skip(ecx), level = "debug")]
fn slice_branches<'tcx>(
    ecx: &CompileTimeInterpCx<'tcx>,
    place: &MPlaceTy<'tcx>,
    num_nodes: &mut usize,
) -> EvalToValTreeResult<'tcx> {
    let n = place.len(ecx).unwrap_or_else(|_| panic!("expected to use len of place {place:?}"));

    let mut elems = Vec::with_capacity(n as usize);
    for i in 0..n {
        let place_elem = ecx.project_index(place, i).unwrap();
        let valtree = const_to_valtree_inner(ecx, &place_elem, num_nodes)?;
        elems.push(valtree);
    }

    Ok(ty::ValTree::from_branches(*ecx.tcx, elems))
}

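/// Recursively converts the value stored in `place` into a valtree, tracking the number of
/// nodes created so far in `num_nodes` and erroring out once `VALTREE_MAX_NODES` is reached
/// or a type that cannot be represented as a valtree is encountered.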
#[instrument(skip(ecx), level = "debug")]
fn const_to_valtree_inner<'tcx>(
    ecx: &CompileTimeInterpCx<'tcx>,
    place: &MPlaceTy<'tcx>,
    num_nodes: &mut usize,
) -> EvalToValTreeResult<'tcx> {
    let tcx = *ecx.tcx;
    let ty = place.layout.ty;
    debug!("ty kind: {:?}", ty.kind());

    if *num_nodes >= VALTREE_MAX_NODES {
        return Err(ValTreeCreationError::NodesOverflow);
    }

    match ty.kind() {
        ty::FnDef(..) => {
            *num_nodes += 1;
            Ok(ty::ValTree::zst(tcx))
        }
        ty::Bool | ty::Int(_) | ty::Uint(_) | ty::Float(_) | ty::Char => {
            let val = ecx.read_immediate(place).report_err()?;
            let val = val.to_scalar_int().unwrap();
            *num_nodes += 1;

            Ok(ty::ValTree::from_scalar_int(tcx, val))
        }

        ty::Pat(base, ..) => {
            let mut place = place.clone();
            // The valtree of the base type is the same as the valtree of the pattern type.
            // Since the returned valtree does not contain the type or layout, we can just
            // switch to the base type.
            place.layout = ecx.layout_of(*base).unwrap();
            ensure_sufficient_stack(|| const_to_valtree_inner(ecx, &place, num_nodes))
        },

        ty::RawPtr(_, _) => {
            // Not all raw pointers are allowed, as we cannot properly test them for
            // equality at compile-time (see `ptr_guaranteed_cmp`).
            // However we allow those that are just integers in disguise.
            // First, get the pointer. Remember it might be wide!
            let val = ecx.read_immediate(place).report_err()?;
            // We could allow wide raw pointers where both sides are integers in the future,
            // but for now we reject them.
            if matches!(val.layout.backend_repr, BackendRepr::ScalarPair(..)) {
                return Err(ValTreeCreationError::NonSupportedType(ty));
            }
            let val = val.to_scalar();
            // We are in the CTFE machine, so ptr-to-int casts will fail.
            // This can only be `Ok` if `val` already is an integer.
            let Ok(val) = val.try_to_scalar_int() else {
                return Err(ValTreeCreationError::NonSupportedType(ty));
            };
            // It's just a ScalarInt!
            Ok(ty::ValTree::from_scalar_int(tcx, val))
        }

        // Technically we could allow function pointers (represented as `ty::Instance`), but this
        // is not guaranteed to agree with runtime equality tests.
        ty::FnPtr(..) => Err(ValTreeCreationError::NonSupportedType(ty)),

        ty::Ref(_, _, _) => {
            let derefd_place = ecx.deref_pointer(place).report_err()?;
            const_to_valtree_inner(ecx, &derefd_place, num_nodes)
        }

        ty::Str | ty::Slice(_) | ty::Array(_, _) => {
            slice_branches(ecx, place, num_nodes)
        }
        // Trait objects are not allowed in type level constants, as we have no concept for
        // resolving their backing type, even if we can do that at const eval time. We may
        // hypothetically be able to allow `dyn StructuralPartialEq` trait objects in the future,
        // but it is unclear if this is useful.
        ty::Dynamic(..) => Err(ValTreeCreationError::NonSupportedType(ty)),

        ty::Tuple(elem_tys) => {
            branches(ecx, place, elem_tys.len(), None, num_nodes)
        }

        ty::Adt(def, _) => {
            if def.is_union() {
                return Err(ValTreeCreationError::NonSupportedType(ty));
            } else if def.variants().is_empty() {
                bug!("uninhabited types should have errored and never gotten converted to valtree")
            }

            let variant = ecx.read_discriminant(place).report_err()?;
            branches(
                ecx,
                place,
                def.variant(variant).fields.len(),
                def.is_enum().then_some(variant),
                num_nodes,
            )
        }

        ty::Never
        | ty::Error(_)
        | ty::Foreign(..)
        | ty::Infer(ty::FreshIntTy(_))
        | ty::Infer(ty::FreshFloatTy(_))
        // FIXME(oli-obk): we could look behind opaque types
        | ty::Alias(..)
        | ty::Param(_)
        | ty::Bound(..)
        | ty::Placeholder(..)
        | ty::Infer(_)
        // FIXME(oli-obk): we can probably encode closures just like structs
        | ty::Closure(..)
        | ty::CoroutineClosure(..)
        | ty::Coroutine(..)
        | ty::CoroutineWitness(..)
        | ty::UnsafeBinder(_) => Err(ValTreeCreationError::NonSupportedType(ty)),
    }
}

/// Valtrees don't store the `MemPlaceMeta` that all dynamically sized values have in the interpreter.
/// This function reconstructs it.
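/// For example, for a `[u8]` or `str` tail the metadata is the length, which we recover here
/// by counting the branches of the valtree that corresponds to the unsized field.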
fn reconstruct_place_meta<'tcx>(
    layout: TyAndLayout<'tcx>,
    valtree: ty::ValTree<'tcx>,
    tcx: TyCtxt<'tcx>,
) -> MemPlaceMeta {
    if layout.is_sized() {
        return MemPlaceMeta::None;
    }

    let mut last_valtree = valtree;
    // Traverse the type, and update `last_valtree` as we go.
    let tail = tcx.struct_tail_raw(
        layout.ty,
        |ty| ty,
        || {
            let branches = last_valtree.unwrap_branch();
            last_valtree = *branches.last().unwrap();
            debug!(?branches, ?last_valtree);
        },
    );
    // Sanity-check that we got a tail we support.
    match tail.kind() {
        ty::Slice(..) | ty::Str => {}
        _ => bug!("unsized tail of a valtree must be Slice or Str"),
    };

    // Get the number of elements in the unsized field.
    let num_elems = last_valtree.unwrap_branch().len();
    MemPlaceMeta::Meta(Scalar::from_target_usize(num_elems as u64, &tcx))
}

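/// Allocates an (uninitialized) place with the given `layout`; for unsized types, the place
/// metadata is reconstructed from `valtree` so that the valtree can then be written into it.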
#[instrument(skip(ecx), level = "debug", ret)]
fn create_valtree_place<'tcx>(
    ecx: &mut CompileTimeInterpCx<'tcx>,
    layout: TyAndLayout<'tcx>,
    valtree: ty::ValTree<'tcx>,
) -> MPlaceTy<'tcx> {
    let meta = reconstruct_place_meta(layout, valtree, ecx.tcx.tcx);
    ecx.allocate_dyn(layout, MemoryKind::Stack, meta).unwrap()
}

/// Evaluates a constant and turns it into a type-level constant value.
pub(crate) fn eval_to_valtree<'tcx>(
    tcx: TyCtxt<'tcx>,
    typing_env: ty::TypingEnv<'tcx>,
    cid: GlobalId<'tcx>,
) -> EvalToValTreeResult<'tcx> {
    // Const eval always happens in PostAnalysis mode. See the comment in
    // `InterpCx::new` for more details.
    debug_assert_eq!(typing_env.typing_mode, ty::TypingMode::PostAnalysis);
    let const_alloc = tcx.eval_to_allocation_raw(typing_env.as_query_input(cid))?;

    // FIXME Need to provide a span to `eval_to_valtree`
    let ecx = mk_eval_cx_to_read_const_val(
        tcx,
        DUMMY_SP,
        typing_env,
        // It is absolutely crucial for soundness that
        // we do not read from mutable memory.
        CanAccessMutGlobal::No,
    );
    let place = ecx.raw_const_to_mplace(const_alloc).unwrap();
    debug!(?place);

    let mut num_nodes = 0;
    const_to_valtree_inner(&ecx, &place, &mut num_nodes)
}

/// Converts a `ValTree` to a `ConstValue`, which is needed after mir
/// construction has finished.
// FIXME(valtrees): Merge `valtree_to_const_value` and `valtree_into_mplace` into one function
#[instrument(skip(tcx), level = "debug", ret)]
pub fn valtree_to_const_value<'tcx>(
    tcx: TyCtxt<'tcx>,
    typing_env: ty::TypingEnv<'tcx>,
    cv: ty::Value<'tcx>,
) -> mir::ConstValue<'tcx> {
    // Basic idea: We directly construct `Scalar` values from trivial `ValTree`s
    // (those for constants with type bool, int, uint, float or char).
    // For all other types we create an `MPlace` and fill that by walking
    // the `ValTree` and using `place_projection` and `place_field` to
    // create inner `MPlace`s which are filled recursively.
    // FIXME: Does this need an example?
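    // Illustrative sketch: a constant `(3_u16, true)` of type `(u16, bool)` has a valtree that
    // is a branch with two leaves, `3` and `1`. The tuple's layout is a `ScalarPair` rather than
    // a `Scalar`, so we allocate an `MPlace`, write the two leaves into its fields, intern the
    // allocation, and convert the resulting place into the final `ConstValue`.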
    match *cv.ty.kind() {
        ty::FnDef(..) => {
            assert!(cv.valtree.is_zst());
            mir::ConstValue::ZeroSized
        }
        ty::Bool | ty::Int(_) | ty::Uint(_) | ty::Float(_) | ty::Char | ty::RawPtr(_, _) => {
            mir::ConstValue::Scalar(Scalar::Int(cv.valtree.unwrap_leaf()))
        }
        ty::Pat(ty, _) => {
            let cv = ty::Value { valtree: cv.valtree, ty };
            valtree_to_const_value(tcx, typing_env, cv)
        }
        ty::Ref(_, inner_ty, _) => {
            let mut ecx =
                mk_eval_cx_to_read_const_val(tcx, DUMMY_SP, typing_env, CanAccessMutGlobal::No);
            let imm = valtree_to_ref(&mut ecx, cv.valtree, inner_ty);
            let imm = ImmTy::from_immediate(
                imm,
                tcx.layout_of(typing_env.as_query_input(cv.ty)).unwrap(),
            );
            op_to_const(&ecx, &imm.into(), /* for diagnostics */ false)
        }
        ty::Tuple(_) | ty::Array(_, _) | ty::Adt(..) => {
            let layout = tcx.layout_of(typing_env.as_query_input(cv.ty)).unwrap();
            if layout.is_zst() {
                // Fast path to avoid some allocations.
                return mir::ConstValue::ZeroSized;
            }
            if layout.backend_repr.is_scalar()
                && (matches!(cv.ty.kind(), ty::Tuple(_))
                    || matches!(cv.ty.kind(), ty::Adt(def, _) if def.is_struct()))
            {
                // A Scalar tuple/struct; we can avoid creating an allocation.
                let branches = cv.valtree.unwrap_branch();
                // Find the non-ZST field. (There can be aligned ZST!)
                for (i, &inner_valtree) in branches.iter().enumerate() {
                    let field = layout.field(&LayoutCx::new(tcx, typing_env), i);
                    if !field.is_zst() {
                        let cv = ty::Value { valtree: inner_valtree, ty: field.ty };
                        return valtree_to_const_value(tcx, typing_env, cv);
                    }
                }
                bug!("could not find non-ZST field in {layout:#?}");
            }

            let mut ecx =
                mk_eval_cx_to_read_const_val(tcx, DUMMY_SP, typing_env, CanAccessMutGlobal::No);

            // Need to create a place for this valtree.
            let place = create_valtree_place(&mut ecx, layout, cv.valtree);

            valtree_into_mplace(&mut ecx, &place, cv.valtree);
            dump_place(&ecx, &place);
            intern_const_alloc_recursive(&mut ecx, InternKind::Constant, &place).unwrap();

            op_to_const(&ecx, &place.into(), /* for diagnostics */ false)
        }
        ty::Never
        | ty::Error(_)
        | ty::Foreign(..)
        | ty::Infer(ty::FreshIntTy(_))
        | ty::Infer(ty::FreshFloatTy(_))
        | ty::Alias(..)
        | ty::Param(_)
        | ty::Bound(..)
        | ty::Placeholder(..)
        | ty::Infer(_)
        | ty::Closure(..)
        | ty::CoroutineClosure(..)
        | ty::Coroutine(..)
        | ty::CoroutineWitness(..)
        | ty::FnPtr(..)
        | ty::Str
        | ty::Slice(_)
        | ty::Dynamic(..)
        | ty::UnsafeBinder(_) => {
            bug!("no ValTree should have been created for type {:?}", cv.ty.kind())
        }
    }
}

/// Put a valtree into memory and return a reference to that.
fn valtree_to_ref<'tcx>(
    ecx: &mut CompileTimeInterpCx<'tcx>,
    valtree: ty::ValTree<'tcx>,
    pointee_ty: Ty<'tcx>,
) -> Immediate {
    let pointee_place = create_valtree_place(ecx, ecx.layout_of(pointee_ty).unwrap(), valtree);
    debug!(?pointee_place);

    valtree_into_mplace(ecx, &pointee_place, valtree);
    dump_place(ecx, &pointee_place);
    intern_const_alloc_recursive(ecx, InternKind::Constant, &pointee_place).unwrap();

    pointee_place.to_ref(&ecx.tcx)
}

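/// Writes the contents of `valtree` into `place`, recursing into references, fields, array
/// elements, and enum variants as dictated by the type of `place`.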
#[instrument(skip(ecx), level = "debug")]
fn valtree_into_mplace<'tcx>(
    ecx: &mut CompileTimeInterpCx<'tcx>,
    place: &MPlaceTy<'tcx>,
    valtree: ty::ValTree<'tcx>,
) {
    // This will match on valtree and write the value(s) corresponding to the ValTree
    // inside the place recursively.

    let ty = place.layout.ty;

    match ty.kind() {
        ty::FnDef(_, _) => {
            // Zero-sized type, nothing to do.
        }
        ty::Bool | ty::Int(_) | ty::Uint(_) | ty::Float(_) | ty::Char | ty::RawPtr(..) => {
            let scalar_int = valtree.unwrap_leaf();
            debug!("writing trivial valtree {:?} to place {:?}", scalar_int, place);
            ecx.write_immediate(Immediate::Scalar(scalar_int.into()), place).unwrap();
        }
        ty::Ref(_, inner_ty, _) => {
            let imm = valtree_to_ref(ecx, valtree, *inner_ty);
            debug!(?imm);
            ecx.write_immediate(imm, place).unwrap();
        }
        ty::Adt(_, _) | ty::Tuple(_) | ty::Array(_, _) | ty::Str | ty::Slice(_) => {
            let branches = valtree.unwrap_branch();

            // Need to downcast place for enums
            let (place_adjusted, branches, variant_idx) = match ty.kind() {
                ty::Adt(def, _) if def.is_enum() => {
                    // First element of valtree corresponds to variant
                    let scalar_int = branches[0].unwrap_leaf();
                    let variant_idx = VariantIdx::from_u32(scalar_int.to_u32());
                    let variant = def.variant(variant_idx);
                    debug!(?variant);

                    (
                        ecx.project_downcast(place, variant_idx).unwrap(),
                        &branches[1..],
                        Some(variant_idx),
                    )
                }
                _ => (place.clone(), branches, None),
            };
            debug!(?place_adjusted, ?branches);

            // Create the places (by indexing into `place`) for the fields and fill
            // them recursively
            for (i, inner_valtree) in branches.iter().enumerate() {
                debug!(?i, ?inner_valtree);

                let place_inner = match ty.kind() {
                    ty::Str | ty::Slice(_) | ty::Array(..) => {
                        ecx.project_index(place, i as u64).unwrap()
                    }
                    _ => ecx.project_field(&place_adjusted, FieldIdx::from_usize(i)).unwrap(),
                };

                debug!(?place_inner);
                valtree_into_mplace(ecx, &place_inner, *inner_valtree);
                dump_place(ecx, &place_inner);
            }

            debug!("dump of place_adjusted:");
            dump_place(ecx, &place_adjusted);

            if let Some(variant_idx) = variant_idx {
                // Don't forget to fill the place with the enum's discriminant.
                ecx.write_discriminant(variant_idx, place).unwrap();
            }

            debug!("dump of place after writing discriminant:");
            dump_place(ecx, place);
        }
        _ => bug!("shouldn't have created a ValTree for {:?}", ty),
    }
}

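/// Tracing helper that logs the current contents of `place`.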
fn dump_place<'tcx>(ecx: &CompileTimeInterpCx<'tcx>, place: &MPlaceTy<'tcx>) {
    trace!("{:?}", ecx.dump_place(&PlaceTy::from(place.clone())));
}