1use rustc_abi::{BackendRepr, FieldIdx, VariantIdx};
2use rustc_data_structures::stack::ensure_sufficient_stack;
3use rustc_middle::mir::interpret::{EvalToValTreeResult, GlobalId, ValTreeCreationError};
4use rustc_middle::ty::layout::{LayoutCx, TyAndLayout};
5use rustc_middle::ty::{self, Ty, TyCtxt};
6use rustc_middle::{bug, mir};
7use rustc_span::DUMMY_SP;
8use tracing::{debug, instrument, trace};
9
10use super::VALTREE_MAX_NODES;
11use super::eval_queries::{mk_eval_cx_to_read_const_val, op_to_const};
12use super::machine::CompileTimeInterpCx;
13use crate::const_eval::CanAccessMutGlobal;
14use crate::interpret::{
15 ImmTy, Immediate, InternKind, MPlaceTy, MemPlaceMeta, MemoryKind, PlaceTy, Projectable, Scalar,
16 intern_const_alloc_recursive,
17};
18
19#[instrument(skip(ecx), level = "debug")]
20fn branches<'tcx>(
21 ecx: &CompileTimeInterpCx<'tcx>,
22 place: &MPlaceTy<'tcx>,
23 field_count: usize,
24 variant: Option<VariantIdx>,
25 num_nodes: &mut usize,
26) -> EvalToValTreeResult<'tcx> {
27 let place = match variant {
28 Some(variant) => ecx.project_downcast(place, variant).unwrap(),
29 None => place.clone(),
30 };
31 debug!(?place);
32
33 let mut branches = Vec::with_capacity(field_count + variant.is_some() as usize);
34
35 if let Some(variant) = variant {
38 branches.push(ty::ValTree::from_scalar_int(*ecx.tcx, variant.as_u32().into()));
39 }
40
41 for i in 0..field_count {
42 let field = ecx.project_field(&place, FieldIdx::from_usize(i)).unwrap();
43 let valtree = const_to_valtree_inner(ecx, &field, num_nodes)?;
44 branches.push(valtree);
45 }
46
47 if branches.len() == 0 {
49 *num_nodes += 1;
50 }
51
52 Ok(ty::ValTree::from_branches(*ecx.tcx, branches))
53}
54
55#[instrument(skip(ecx), level = "debug")]
56fn slice_branches<'tcx>(
57 ecx: &CompileTimeInterpCx<'tcx>,
58 place: &MPlaceTy<'tcx>,
59 num_nodes: &mut usize,
60) -> EvalToValTreeResult<'tcx> {
61 let n = place.len(ecx).unwrap_or_else(|_| panic!("expected to use len of place {place:?}"));
62
63 let mut elems = Vec::with_capacity(n as usize);
64 for i in 0..n {
65 let place_elem = ecx.project_index(place, i).unwrap();
66 let valtree = const_to_valtree_inner(ecx, &place_elem, num_nodes)?;
67 elems.push(valtree);
68 }
69
70 Ok(ty::ValTree::from_branches(*ecx.tcx, elems))
71}
72
/// Recursively converts the constant value at `place` into a `ValTree`.
///
/// Returns an error if the type is not representable as a valtree (e.g. raw
/// wide pointers, fn pointers, trait objects, unions) or if the total node
/// count exceeds `VALTREE_MAX_NODES`. `num_nodes` is the running node count
/// shared across the whole recursion.
#[instrument(skip(ecx), level = "debug")]
fn const_to_valtree_inner<'tcx>(
    ecx: &CompileTimeInterpCx<'tcx>,
    place: &MPlaceTy<'tcx>,
    num_nodes: &mut usize,
) -> EvalToValTreeResult<'tcx> {
    let tcx = *ecx.tcx;
    let ty = place.layout.ty;
    debug!("ty kind: {:?}", ty.kind());

    // Enforce the global budget before creating any further nodes.
    if *num_nodes >= VALTREE_MAX_NODES {
        return Err(ValTreeCreationError::NodesOverflow);
    }

    match ty.kind() {
        // FnDef is zero-sized; it carries no data beyond the type itself.
        ty::FnDef(..) => {
            *num_nodes += 1;
            Ok(ty::ValTree::zst(tcx))
        }
        // Primitive scalars become a single leaf node.
        ty::Bool | ty::Int(_) | ty::Uint(_) | ty::Float(_) | ty::Char => {
            let val = ecx.read_immediate(place).report_err()?;
            let val = val.to_scalar_int().unwrap();
            *num_nodes += 1;

            Ok(ty::ValTree::from_scalar_int(tcx, val))
        }

        // Pattern types are converted as their base type: reuse the place
        // with the base type's layout and recurse (with stack headroom, since
        // pattern types can nest).
        ty::Pat(base, ..) => {
            let mut place = place.clone();
            place.layout = ecx.layout_of(*base).unwrap();
            ensure_sufficient_stack(|| const_to_valtree_inner(ecx, &place, num_nodes))
        },


        ty::RawPtr(_, _) => {
            let val = ecx.read_immediate(place).report_err()?;
            // Wide raw pointers (ScalarPair repr) are not supported as valtrees.
            if matches!(val.layout.backend_repr, BackendRepr::ScalarPair(..)) {
                return Err(ValTreeCreationError::NonSupportedType(ty));
            }
            let val = val.to_scalar();
            // Only integer-valued (non-provenance) raw pointers can be leaves.
            let Ok(val) = val.try_to_scalar_int() else {
                return Err(ValTreeCreationError::NonSupportedType(ty));
            };
            Ok(ty::ValTree::from_scalar_int(tcx, val))
        }

        ty::FnPtr(..) => Err(ValTreeCreationError::NonSupportedType(ty)),

        // References are converted to the valtree of their pointee.
        ty::Ref(_, _, _) => {
            let derefd_place = ecx.deref_pointer(place).report_err()?;
            const_to_valtree_inner(ecx, &derefd_place, num_nodes)
        }

        // Sequence types become a branch with one child per element.
        ty::Str | ty::Slice(_) | ty::Array(_, _) => {
            slice_branches(ecx, place, num_nodes)
        }
        ty::Dynamic(..) => Err(ValTreeCreationError::NonSupportedType(ty)),

        ty::Tuple(elem_tys) => {
            branches(ecx, place, elem_tys.len(), None, num_nodes)
        }

        ty::Adt(def, _) => {
            if def.is_union() {
                // Unions have no unambiguous field interpretation.
                return Err(ValTreeCreationError::NonSupportedType(ty));
            } else if def.variants().is_empty() {
                bug!("uninhabited types should have errored and never gotten converted to valtree")
            }

            // Read the active variant (trivial for structs) and branch over
            // its fields; only enums encode the variant index in the tree.
            let variant = ecx.read_discriminant(place).report_err()?;
            branches(ecx, place, def.variant(variant).fields.len(), def.is_enum().then_some(variant), num_nodes)
        }

        // Types that should never reach valtree construction.
        ty::Never
        | ty::Error(_)
        | ty::Foreign(..)
        | ty::Infer(ty::FreshIntTy(_))
        | ty::Infer(ty::FreshFloatTy(_))
        | ty::Alias(..)
        | ty::Param(_)
        | ty::Bound(..)
        | ty::Placeholder(..)
        | ty::Infer(_)
        | ty::Closure(..)
        | ty::CoroutineClosure(..)
        | ty::Coroutine(..)
        | ty::CoroutineWitness(..)
        | ty::UnsafeBinder(_) => Err(ValTreeCreationError::NonSupportedType(ty)),
    }
}
183
184fn reconstruct_place_meta<'tcx>(
187 layout: TyAndLayout<'tcx>,
188 valtree: ty::ValTree<'tcx>,
189 tcx: TyCtxt<'tcx>,
190) -> MemPlaceMeta {
191 if layout.is_sized() {
192 return MemPlaceMeta::None;
193 }
194
195 let mut last_valtree = valtree;
196 let tail = tcx.struct_tail_raw(
198 layout.ty,
199 |ty| ty,
200 || {
201 let branches = last_valtree.unwrap_branch();
202 last_valtree = *branches.last().unwrap();
203 debug!(?branches, ?last_valtree);
204 },
205 );
206 match tail.kind() {
208 ty::Slice(..) | ty::Str => {}
209 _ => bug!("unsized tail of a valtree must be Slice or Str"),
210 };
211
212 let num_elems = last_valtree.unwrap_branch().len();
214 MemPlaceMeta::Meta(Scalar::from_target_usize(num_elems as u64, &tcx))
215}
216
217#[instrument(skip(ecx), level = "debug", ret)]
218fn create_valtree_place<'tcx>(
219 ecx: &mut CompileTimeInterpCx<'tcx>,
220 layout: TyAndLayout<'tcx>,
221 valtree: ty::ValTree<'tcx>,
222) -> MPlaceTy<'tcx> {
223 let meta = reconstruct_place_meta(layout, valtree, ecx.tcx.tcx);
224 ecx.allocate_dyn(layout, MemoryKind::Stack, meta).unwrap()
225}
226
227pub(crate) fn eval_to_valtree<'tcx>(
229 tcx: TyCtxt<'tcx>,
230 typing_env: ty::TypingEnv<'tcx>,
231 cid: GlobalId<'tcx>,
232) -> EvalToValTreeResult<'tcx> {
233 debug_assert_eq!(typing_env.typing_mode, ty::TypingMode::PostAnalysis);
236 let const_alloc = tcx.eval_to_allocation_raw(typing_env.as_query_input(cid))?;
237
238 let ecx = mk_eval_cx_to_read_const_val(
240 tcx,
241 DUMMY_SP,
242 typing_env,
243 CanAccessMutGlobal::No,
246 );
247 let place = ecx.raw_const_to_mplace(const_alloc).unwrap();
248 debug!(?place);
249
250 let mut num_nodes = 0;
251 const_to_valtree_inner(&ecx, &place, &mut num_nodes)
252}
253
254#[instrument(skip(tcx), level = "debug", ret)]
258pub fn valtree_to_const_value<'tcx>(
259 tcx: TyCtxt<'tcx>,
260 typing_env: ty::TypingEnv<'tcx>,
261 cv: ty::Value<'tcx>,
262) -> mir::ConstValue<'tcx> {
263 match *cv.ty.kind() {
270 ty::FnDef(..) => {
271 assert!(cv.valtree.is_zst());
272 mir::ConstValue::ZeroSized
273 }
274 ty::Bool | ty::Int(_) | ty::Uint(_) | ty::Float(_) | ty::Char | ty::RawPtr(_, _) => {
275 mir::ConstValue::Scalar(Scalar::Int(cv.valtree.unwrap_leaf()))
276 }
277 ty::Pat(ty, _) => {
278 let cv = ty::Value { valtree: cv.valtree, ty };
279 valtree_to_const_value(tcx, typing_env, cv)
280 }
281 ty::Ref(_, inner_ty, _) => {
282 let mut ecx =
283 mk_eval_cx_to_read_const_val(tcx, DUMMY_SP, typing_env, CanAccessMutGlobal::No);
284 let imm = valtree_to_ref(&mut ecx, cv.valtree, inner_ty);
285 let imm = ImmTy::from_immediate(
286 imm,
287 tcx.layout_of(typing_env.as_query_input(cv.ty)).unwrap(),
288 );
289 op_to_const(&ecx, &imm.into(), false)
290 }
291 ty::Tuple(_) | ty::Array(_, _) | ty::Adt(..) => {
292 let layout = tcx.layout_of(typing_env.as_query_input(cv.ty)).unwrap();
293 if layout.is_zst() {
294 return mir::ConstValue::ZeroSized;
296 }
297 if layout.backend_repr.is_scalar()
298 && (matches!(cv.ty.kind(), ty::Tuple(_))
299 || matches!(cv.ty.kind(), ty::Adt(def, _) if def.is_struct()))
300 {
301 let branches = cv.valtree.unwrap_branch();
303 for (i, &inner_valtree) in branches.iter().enumerate() {
305 let field = layout.field(&LayoutCx::new(tcx, typing_env), i);
306 if !field.is_zst() {
307 let cv = ty::Value { valtree: inner_valtree, ty: field.ty };
308 return valtree_to_const_value(tcx, typing_env, cv);
309 }
310 }
311 bug!("could not find non-ZST field during in {layout:#?}");
312 }
313
314 let mut ecx =
315 mk_eval_cx_to_read_const_val(tcx, DUMMY_SP, typing_env, CanAccessMutGlobal::No);
316
317 let place = create_valtree_place(&mut ecx, layout, cv.valtree);
319
320 valtree_into_mplace(&mut ecx, &place, cv.valtree);
321 dump_place(&ecx, &place);
322 intern_const_alloc_recursive(&mut ecx, InternKind::Constant, &place).unwrap();
323
324 op_to_const(&ecx, &place.into(), false)
325 }
326 ty::Never
327 | ty::Error(_)
328 | ty::Foreign(..)
329 | ty::Infer(ty::FreshIntTy(_))
330 | ty::Infer(ty::FreshFloatTy(_))
331 | ty::Alias(..)
332 | ty::Param(_)
333 | ty::Bound(..)
334 | ty::Placeholder(..)
335 | ty::Infer(_)
336 | ty::Closure(..)
337 | ty::CoroutineClosure(..)
338 | ty::Coroutine(..)
339 | ty::CoroutineWitness(..)
340 | ty::FnPtr(..)
341 | ty::Str
342 | ty::Slice(_)
343 | ty::Dynamic(..)
344 | ty::UnsafeBinder(_) => {
345 bug!("no ValTree should have been created for type {:?}", cv.ty.kind())
346 }
347 }
348}
349
350fn valtree_to_ref<'tcx>(
352 ecx: &mut CompileTimeInterpCx<'tcx>,
353 valtree: ty::ValTree<'tcx>,
354 pointee_ty: Ty<'tcx>,
355) -> Immediate {
356 let pointee_place = create_valtree_place(ecx, ecx.layout_of(pointee_ty).unwrap(), valtree);
357 debug!(?pointee_place);
358
359 valtree_into_mplace(ecx, &pointee_place, valtree);
360 dump_place(ecx, &pointee_place);
361 intern_const_alloc_recursive(ecx, InternKind::Constant, &pointee_place).unwrap();
362
363 pointee_place.to_ref(&ecx.tcx)
364}
365
/// Recursively writes the contents of `valtree` into the interpreter place
/// `place`, whose layout type determines how the tree is interpreted.
#[instrument(skip(ecx), level = "debug")]
fn valtree_into_mplace<'tcx>(
    ecx: &mut CompileTimeInterpCx<'tcx>,
    place: &MPlaceTy<'tcx>,
    valtree: ty::ValTree<'tcx>,
) {
    let ty = place.layout.ty;

    match ty.kind() {
        // FnDef is zero-sized: nothing to write.
        ty::FnDef(_, _) => {
        }
        // Scalar leaves are written directly as an immediate.
        ty::Bool | ty::Int(_) | ty::Uint(_) | ty::Float(_) | ty::Char | ty::RawPtr(..) => {
            let scalar_int = valtree.unwrap_leaf();
            debug!("writing trivial valtree {:?} to place {:?}", scalar_int, place);
            ecx.write_immediate(Immediate::Scalar(scalar_int.into()), place).unwrap();
        }
        // References materialize their pointee into a new allocation and
        // write the resulting pointer here.
        ty::Ref(_, inner_ty, _) => {
            let imm = valtree_to_ref(ecx, valtree, *inner_ty);
            debug!(?imm);
            ecx.write_immediate(imm, place).unwrap();
        }
        ty::Adt(_, _) | ty::Tuple(_) | ty::Array(_, _) | ty::Str | ty::Slice(_) => {
            let branches = valtree.unwrap_branch();

            // For enums, the first branch is the variant index (see
            // `branches` above): downcast the place to that variant and
            // write the remaining branches as its fields.
            let (place_adjusted, branches, variant_idx) = match ty.kind() {
                ty::Adt(def, _) if def.is_enum() => {
                    let scalar_int = branches[0].unwrap_leaf();
                    let variant_idx = VariantIdx::from_u32(scalar_int.to_u32());
                    let variant = def.variant(variant_idx);
                    debug!(?variant);

                    (
                        ecx.project_downcast(place, variant_idx).unwrap(),
                        &branches[1..],
                        Some(variant_idx),
                    )
                }
                _ => (place.clone(), branches, None),
            };
            debug!(?place_adjusted, ?branches);

            // Write each child: sequences project by index on the original
            // place, everything else projects fields on the (possibly
            // downcast) place.
            for (i, inner_valtree) in branches.iter().enumerate() {
                debug!(?i, ?inner_valtree);

                let place_inner = match ty.kind() {
                    ty::Str | ty::Slice(_) | ty::Array(..) => {
                        ecx.project_index(place, i as u64).unwrap()
                    }
                    _ => ecx.project_field(&place_adjusted, FieldIdx::from_usize(i)).unwrap(),
                };

                debug!(?place_inner);
                valtree_into_mplace(ecx, &place_inner, *inner_valtree);
                dump_place(ecx, &place_inner);
            }

            debug!("dump of place_adjusted:");
            dump_place(ecx, &place_adjusted);

            // The discriminant is written last, after all fields are in place.
            if let Some(variant_idx) = variant_idx {
                ecx.write_discriminant(variant_idx, place).unwrap();
            }

            debug!("dump of place after writing discriminant:");
            dump_place(ecx, place);
        }
        _ => bug!("shouldn't have created a ValTree for {:?}", ty),
    }
}
444
445fn dump_place<'tcx>(ecx: &CompileTimeInterpCx<'tcx>, place: &MPlaceTy<'tcx>) {
446 trace!("{:?}", ecx.dump_place(&PlaceTy::from(place.clone())));
447}