use std::assert_matches::assert_matches;

use rustc_abi::{FieldIdx, Integer};
use rustc_apfloat::ieee::{Double, Half, Quad, Single};
use rustc_apfloat::{Float, FloatConvert};
use rustc_middle::mir::CastKind;
use rustc_middle::mir::interpret::{InterpResult, PointerArithmetic, Scalar};
use rustc_middle::ty::adjustment::PointerCoercion;
use rustc_middle::ty::layout::{IntegerExt, TyAndLayout};
use rustc_middle::ty::{self, FloatTy, Ty};
use rustc_middle::{bug, span_bug};
use tracing::trace;

use super::util::ensure_monomorphic_enough;
use super::{
    FnVal, ImmTy, Immediate, InterpCx, Machine, OpTy, PlaceTy, err_inval, interp_ok, throw_ub,
    throw_ub_custom,
};
use crate::fluent_generated as fluent;

impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
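    /// Performs the cast indicated by `cast_kind` on `src`, writing the result into `dest`.
    /// `cast_ty` is the target type as spelled in MIR; its layout is only recomputed when it
    /// differs from the type of `dest`.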
    pub fn cast(
        &mut self,
        src: &OpTy<'tcx, M::Provenance>,
        cast_kind: CastKind,
        cast_ty: Ty<'tcx>,
        dest: &PlaceTy<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx> {
        // `cast_ty` is often the same as `dest.layout.ty`; only compute a fresh layout otherwise.
        let cast_layout =
            if cast_ty == dest.layout.ty { dest.layout } else { self.layout_of(cast_ty)? };
        match cast_kind {
            CastKind::PointerCoercion(PointerCoercion::Unsize, _) => {
                self.unsize_into(src, cast_layout, dest)?;
            }

            CastKind::PointerExposeProvenance => {
                let src = self.read_immediate(src)?;
                let res = self.pointer_expose_provenance_cast(&src, cast_layout)?;
                self.write_immediate(*res, dest)?;
            }

            CastKind::PointerWithExposedProvenance => {
                let src = self.read_immediate(src)?;
                let res = self.pointer_with_exposed_provenance_cast(&src, cast_layout)?;
                self.write_immediate(*res, dest)?;
            }

            CastKind::IntToInt | CastKind::IntToFloat => {
                let src = self.read_immediate(src)?;
                let res = self.int_to_int_or_float(&src, cast_layout)?;
                self.write_immediate(*res, dest)?;
            }

            CastKind::FloatToFloat | CastKind::FloatToInt => {
                let src = self.read_immediate(src)?;
                let res = self.float_to_float_or_int(&src, cast_layout)?;
                self.write_immediate(*res, dest)?;
            }

            CastKind::FnPtrToPtr | CastKind::PtrToPtr => {
                let src = self.read_immediate(src)?;
                let res = self.ptr_to_ptr(&src, cast_layout)?;
                self.write_immediate(*res, dest)?;
            }

            CastKind::PointerCoercion(
                PointerCoercion::MutToConstPointer | PointerCoercion::ArrayToPointer,
                _,
            ) => {
                bug!("{cast_kind:?} casts are for borrowck only, not runtime MIR");
            }

            CastKind::PointerCoercion(PointerCoercion::ReifyFnPointer, _) => {
                // All reifications must be monomorphic, bail out otherwise.
                ensure_monomorphic_enough(*self.tcx, src.layout.ty)?;

                match *src.layout.ty.kind() {
                    ty::FnDef(def_id, args) => {
                        let instance = ty::Instance::resolve_for_fn_ptr(
                            *self.tcx,
                            self.typing_env,
                            def_id,
                            args,
                        )
                        .ok_or_else(|| err_inval!(TooGeneric))?;

                        let fn_ptr = self.fn_ptr(FnVal::Instance(instance));
                        self.write_pointer(fn_ptr, dest)?;
                    }
                    _ => span_bug!(self.cur_span(), "reify fn pointer on {}", src.layout.ty),
                }
            }

            CastKind::PointerCoercion(PointerCoercion::UnsafeFnPointer, _) => {
                let src = self.read_immediate(src)?;
                match cast_ty.kind() {
                    ty::FnPtr(..) => {
                        // No change to the value, only to its type.
                        self.write_immediate(*src, dest)?;
                    }
                    _ => span_bug!(self.cur_span(), "fn to unsafe fn cast on {}", cast_ty),
                }
            }

            CastKind::PointerCoercion(PointerCoercion::ClosureFnPointer(_), _) => {
                // All reifications must be monomorphic, bail out otherwise.
                ensure_monomorphic_enough(*self.tcx, src.layout.ty)?;

                match *src.layout.ty.kind() {
                    ty::Closure(def_id, args) => {
                        let instance = ty::Instance::resolve_closure(
                            *self.tcx,
                            def_id,
                            args,
                            ty::ClosureKind::FnOnce,
                        );
                        let fn_ptr = self.fn_ptr(FnVal::Instance(instance));
                        self.write_pointer(fn_ptr, dest)?;
                    }
                    _ => span_bug!(self.cur_span(), "closure fn pointer on {}", src.layout.ty),
                }
            }

            CastKind::Transmute => {
                assert!(src.layout.is_sized());
                assert!(dest.layout.is_sized());
                assert_eq!(cast_ty, dest.layout.ty); // `cast_ty` carries no extra information here
                if src.layout.size != dest.layout.size {
                    // Transmuting between types of different sizes is UB.
                    throw_ub_custom!(
                        fluent::const_eval_invalid_transmute,
                        src_bytes = src.layout.size.bytes(),
                        dest_bytes = dest.layout.size.bytes(),
                        src = src.layout.ty,
                        dest = dest.layout.ty,
                    );
                }

                self.copy_op_allow_transmute(src, dest)?;
            }
        }
        interp_ok(())
    }

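    /// Handles 'IntToInt' and 'IntToFloat' casts.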
    pub fn int_to_int_or_float(
        &self,
        src: &ImmTy<'tcx, M::Provenance>,
        cast_to: TyAndLayout<'tcx>,
    ) -> InterpResult<'tcx, ImmTy<'tcx, M::Provenance>> {
        assert!(src.layout.ty.is_integral() || src.layout.ty.is_char() || src.layout.ty.is_bool());
        assert!(cast_to.ty.is_floating_point() || cast_to.ty.is_integral() || cast_to.ty.is_char());

        interp_ok(ImmTy::from_scalar(
            self.cast_from_int_like(src.to_scalar(), src.layout, cast_to.ty)?,
            cast_to,
        ))
    }

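    /// Handles 'FloatToFloat' and 'FloatToInt' casts.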
    pub fn float_to_float_or_int(
        &self,
        src: &ImmTy<'tcx, M::Provenance>,
        cast_to: TyAndLayout<'tcx>,
    ) -> InterpResult<'tcx, ImmTy<'tcx, M::Provenance>> {
        let ty::Float(fty) = src.layout.ty.kind() else {
            bug!("FloatToFloat/FloatToInt cast: source type {} is not a float type", src.layout.ty)
        };
        let val = match fty {
            FloatTy::F16 => self.cast_from_float(src.to_scalar().to_f16()?, cast_to.ty),
            FloatTy::F32 => self.cast_from_float(src.to_scalar().to_f32()?, cast_to.ty),
            FloatTy::F64 => self.cast_from_float(src.to_scalar().to_f64()?, cast_to.ty),
            FloatTy::F128 => self.cast_from_float(src.to_scalar().to_f128()?, cast_to.ty),
        };
        interp_ok(ImmTy::from_scalar(val, cast_to))
    }

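    /// Handles 'FnPtrToPtr' and 'PtrToPtr' casts.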
    pub fn ptr_to_ptr(
        &self,
        src: &ImmTy<'tcx, M::Provenance>,
        cast_to: TyAndLayout<'tcx>,
    ) -> InterpResult<'tcx, ImmTy<'tcx, M::Provenance>> {
        assert!(src.layout.ty.is_any_ptr());
        assert!(cast_to.ty.is_raw_ptr());
        if cast_to.size == src.layout.size {
            // Thin-to-thin or wide-to-wide cast: only the pointee type changes, the value is kept.
            return interp_ok(ImmTy::from_immediate(**src, cast_to));
        } else {
            // Casting the metadata away from a wide pointer.
            assert_eq!(src.layout.size, 2 * self.pointer_size());
            assert_eq!(cast_to.size, self.pointer_size());
            assert!(src.layout.ty.is_raw_ptr());
            return match **src {
                Immediate::ScalarPair(data, _) => interp_ok(ImmTy::from_scalar(data, cast_to)),
                Immediate::Scalar(..) => span_bug!(
                    self.cur_span(),
                    "{:?} input to a fat-to-thin cast ({} -> {})",
                    *src,
                    src.layout.ty,
                    cast_to.ty
                ),
                Immediate::Uninit => throw_ub!(InvalidUninitBytes(None)),
            };
        }
    }

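    /// Handles 'PointerExposeProvenance' casts: exposes the pointer's provenance (if it has any)
    /// to the machine and returns the pointer's address as an integer.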
    pub fn pointer_expose_provenance_cast(
        &mut self,
        src: &ImmTy<'tcx, M::Provenance>,
        cast_to: TyAndLayout<'tcx>,
    ) -> InterpResult<'tcx, ImmTy<'tcx, M::Provenance>> {
        assert_matches!(src.layout.ty.kind(), ty::RawPtr(_, _) | ty::FnPtr(..));
        assert!(cast_to.ty.is_integral());

        let scalar = src.to_scalar();
        let ptr = scalar.to_pointer(self)?;
        match ptr.into_pointer_or_addr() {
            Ok(ptr) => M::expose_provenance(self, ptr.provenance)?,
            Err(_) => {} // Do nothing, exposing a pointer without provenance is a NOP.
        };
        interp_ok(ImmTy::from_scalar(
            self.cast_from_int_like(scalar, src.layout, cast_to.ty)?,
            cast_to,
        ))
    }

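    /// Handles 'PointerWithExposedProvenance' casts: turns an integer into a pointer, letting the
    /// machine decide which (if any) previously exposed provenance to attach.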
    pub fn pointer_with_exposed_provenance_cast(
        &self,
        src: &ImmTy<'tcx, M::Provenance>,
        cast_to: TyAndLayout<'tcx>,
    ) -> InterpResult<'tcx, ImmTy<'tcx, M::Provenance>> {
        assert!(src.layout.ty.is_integral());
        assert_matches!(cast_to.ty.kind(), ty::RawPtr(_, _));

        // First cast the integer to a `usize`-sized address.
        let scalar = src.to_scalar();
        let addr = self.cast_from_int_like(scalar, src.layout, self.tcx.types.usize)?;
        let addr = addr.to_target_usize(self)?;

        // Then turn the address into a pointer.
        let ptr = M::ptr_from_addr_cast(self, addr)?;
        interp_ok(ImmTy::from_scalar(Scalar::from_maybe_pointer(ptr, self), cast_to))
    }

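    /// Low-level cast helper function. This works directly on scalars and can take 'int-like'
    /// input (basically everything with a scalar layout) to int/float/char types.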
    fn cast_from_int_like(
        &self,
        scalar: Scalar<M::Provenance>, // input value (there is no ScalarPair here)
        src_layout: TyAndLayout<'tcx>,
        cast_ty: Ty<'tcx>,
    ) -> InterpResult<'tcx, Scalar<M::Provenance>> {
        // Remember whether the source is signed, so the int-to-float case below can sign-extend.
        let signed = src_layout.backend_repr.is_signed();

        let v = match src_layout.ty.kind() {
            ty::Uint(_) | ty::RawPtr(..) | ty::FnPtr(..) => scalar.to_uint(src_layout.size)?,
            ty::Int(_) => scalar.to_int(src_layout.size)? as u128, // sign is handled via `signed` below
            ty::Bool => scalar.to_bool()?.into(),
            ty::Char => scalar.to_char()?.into(),
            _ => span_bug!(self.cur_span(), "invalid int-like cast from {}", src_layout.ty),
        };

        interp_ok(match *cast_ty.kind() {
            // int -> int
            ty::Int(_) | ty::Uint(_) => {
                let size = match *cast_ty.kind() {
                    ty::Int(t) => Integer::from_int_ty(self, t).size(),
                    ty::Uint(t) => Integer::from_uint_ty(self, t).size(),
                    _ => bug!(),
                };
                let v = size.truncate(v);
                Scalar::from_uint(v, size)
            }

            // signed int -> float
            ty::Float(fty) if signed => {
                let v = v as i128;
                match fty {
                    FloatTy::F16 => Scalar::from_f16(Half::from_i128(v).value),
                    FloatTy::F32 => Scalar::from_f32(Single::from_i128(v).value),
                    FloatTy::F64 => Scalar::from_f64(Double::from_i128(v).value),
                    FloatTy::F128 => Scalar::from_f128(Quad::from_i128(v).value),
                }
            }
            // unsigned int -> float
            ty::Float(fty) => match fty {
                FloatTy::F16 => Scalar::from_f16(Half::from_u128(v).value),
                FloatTy::F32 => Scalar::from_f32(Single::from_u128(v).value),
                FloatTy::F64 => Scalar::from_f64(Double::from_u128(v).value),
                FloatTy::F128 => Scalar::from_f128(Quad::from_u128(v).value),
            },

            // u8 -> char
            ty::Char => Scalar::from_u32(u8::try_from(v).unwrap().into()),

            // Casts to bool are not permitted by rustc, no need to handle them here.
            _ => span_bug!(self.cur_span(), "invalid int to {} cast", cast_ty),
        })
    }

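    /// Low-level cast helper function. Converts the apfloat value `f` into int or float types.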
    fn cast_from_float<F>(&self, f: F, dest_ty: Ty<'tcx>) -> Scalar<M::Provenance>
    where
        F: Float
            + Into<Scalar<M::Provenance>>
            + FloatConvert<Half>
            + FloatConvert<Single>
            + FloatConvert<Double>
            + FloatConvert<Quad>,
    {
        match *dest_ty.kind() {
            // float -> uint
            ty::Uint(t) => {
                let size = Integer::from_uint_ty(self, t).size();
                // `to_u128` is a saturating cast, which is what we want here.
                let v = f.to_u128(size.bits_usize()).value;
                // This should already fit the bit width.
                Scalar::from_uint(v, size)
            }
            // float -> int
            ty::Int(t) => {
                let size = Integer::from_int_ty(self, t).size();
                // `to_i128` is a saturating cast, which is what we want here.
                let v = f.to_i128(size.bits_usize()).value;
                Scalar::from_int(v, size)
            }
            // float -> float
            ty::Float(fty) => match fty {
                FloatTy::F16 => {
                    Scalar::from_f16(self.adjust_nan(f.convert(&mut false).value, &[f]))
                }
                FloatTy::F32 => {
                    Scalar::from_f32(self.adjust_nan(f.convert(&mut false).value, &[f]))
                }
                FloatTy::F64 => {
                    Scalar::from_f64(self.adjust_nan(f.convert(&mut false).value, &[f]))
                }
                FloatTy::F128 => {
                    Scalar::from_f128(self.adjust_nan(f.convert(&mut false).value, &[f]))
                }
            },
            _ => span_bug!(self.cur_span(), "invalid float to {} cast", dest_ty),
        }
    }

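    /// `src` is a *pointer to* a `source_ty`, and in `dest` we should store a pointer to the same
    /// data at type `cast_ty`.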
    fn unsize_into_ptr(
        &mut self,
        src: &OpTy<'tcx, M::Provenance>,
        dest: &PlaceTy<'tcx, M::Provenance>,
        // The pointee types
        source_ty: Ty<'tcx>,
        cast_ty: Ty<'tcx>,
    ) -> InterpResult<'tcx> {
        let (src_pointee_ty, dest_pointee_ty) =
            self.tcx.struct_lockstep_tails_for_codegen(source_ty, cast_ty, self.typing_env);

        match (src_pointee_ty.kind(), dest_pointee_ty.kind()) {
            (&ty::Array(_, length), &ty::Slice(_)) => {
                // `[T; N]` -> `[T]`: attach the array length as the slice metadata.
                let ptr = self.read_pointer(src)?;
                let val = Immediate::new_slice(
                    ptr,
                    length
                        .try_to_target_usize(*self.tcx)
                        .expect("expected monomorphic const in const eval"),
                    self,
                );
                self.write_immediate(val, dest)
            }
            (ty::Dynamic(data_a, _, ty::Dyn), ty::Dynamic(data_b, _, ty::Dyn)) => {
                let val = self.read_immediate(src)?;
                // A no-op dyn-to-dyn cast: just forward the value, vtable included.
                if data_a == data_b {
                    return self.write_immediate(*val, dest);
                }
                // Take apart the old pointer, and find the dynamic type of the actual data.
                let (old_data, old_vptr) = val.to_scalar_pair();
                let old_data = old_data.to_pointer(self)?;
                let old_vptr = old_vptr.to_pointer(self)?;
                let ty = self.get_ptr_vtable_ty(old_vptr, Some(data_a))?;

                // Sanity-check that the supertrait vtable slot (if any) indeed refers to the
                // destination trait.
                let vptr_entry_idx =
                    self.tcx.supertrait_vtable_slot((src_pointee_ty, dest_pointee_ty));
                let vtable_entries = self.vtable_entries(data_a.principal(), ty);
                if let Some(entry_idx) = vptr_entry_idx {
                    let Some(&ty::VtblEntry::TraitVPtr(upcast_trait_ref)) =
                        vtable_entries.get(entry_idx)
                    else {
                        span_bug!(
                            self.cur_span(),
                            "invalid vtable entry index in {} -> {} upcast",
                            src_pointee_ty,
                            dest_pointee_ty
                        );
                    };
                    let erased_trait_ref =
                        ty::ExistentialTraitRef::erase_self_ty(*self.tcx, upcast_trait_ref);
                    assert_eq!(
                        data_b.principal().map(|b| {
                            self.tcx.normalize_erasing_late_bound_regions(self.typing_env, b)
                        }),
                        Some(erased_trait_ref),
                    );
                } else {
                    // There is no dedicated upcast slot, so the destination vtable must be a
                    // prefix of the source vtable; double-check that.
                    let vtable_entries_b = self.vtable_entries(data_b.principal(), ty);
                    assert!(&vtable_entries[..vtable_entries_b.len()] == vtable_entries_b);
                };

                // Construct the new pointer with the vtable for the destination trait.
                let new_vptr = self.get_vtable_ptr(ty, data_b)?;
                self.write_immediate(Immediate::new_dyn_trait(old_data, new_vptr, self), dest)
            }
            (_, &ty::Dynamic(data, _, ty::Dyn)) => {
                // Initial cast from sized to `dyn Trait`: attach the vtable as metadata.
                let vtable = self.get_vtable_ptr(src_pointee_ty, data)?;
                let ptr = self.read_pointer(src)?;
                let val = Immediate::new_dyn_trait(ptr, vtable, &*self.tcx);
                self.write_immediate(val, dest)
            }
            _ => {
                // Do not ICE if we are not monomorphic enough.
                ensure_monomorphic_enough(*self.tcx, src.layout.ty)?;
                ensure_monomorphic_enough(*self.tcx, cast_ty)?;

                span_bug!(
                    self.cur_span(),
                    "invalid pointer unsizing {} -> {}",
                    src.layout.ty,
                    cast_ty
                )
            }
        }
    }

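    /// Unsizes `src` into `dest`: either directly as a pointer coercion, or field-by-field for
    /// ADTs (like `Arc<T>` -> `Arc<dyn Trait>`), where exactly one field gets unsized.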
    pub fn unsize_into(
        &mut self,
        src: &OpTy<'tcx, M::Provenance>,
        cast_ty: TyAndLayout<'tcx>,
        dest: &PlaceTy<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx> {
        trace!("Unsizing {:?} of type {} into {}", *src, src.layout.ty, cast_ty.ty);
        match (src.layout.ty.kind(), cast_ty.ty.kind()) {
            (&ty::Ref(_, s, _), &ty::Ref(_, c, _) | &ty::RawPtr(c, _))
            | (&ty::RawPtr(s, _), &ty::RawPtr(c, _)) => self.unsize_into_ptr(src, dest, s, c),
            (&ty::Adt(def_a, _), &ty::Adt(def_b, _)) => {
                assert_eq!(def_a, def_b); // implies same number of fields

                // Unsizing a generic struct with a pointer field, like `Arc<T>` -> `Arc<Trait>`:
                // iterate over the fields, copy all of them except the one being unsized, which is
                // handled recursively.
                let mut found_cast_field = false;
                for i in 0..src.layout.fields.count() {
                    let cast_ty_field = cast_ty.field(self, i);
                    let i = FieldIdx::from_usize(i);
                    let src_field = self.project_field(src, i)?;
                    let dst_field = self.project_field(dest, i)?;
                    if src_field.layout.is_1zst() && cast_ty_field.is_1zst() {
                        // Skip 1-ZST fields.
                    } else if src_field.layout.ty == cast_ty_field.ty {
                        self.copy_op(&src_field, &dst_field)?;
                    } else {
                        if found_cast_field {
                            span_bug!(self.cur_span(), "unsize_into: more than one field to cast");
                        }
                        found_cast_field = true;
                        self.unsize_into(&src_field, cast_ty_field, &dst_field)?;
                    }
                }
                interp_ok(())
            }
            _ => {
                // Do not ICE if we are not monomorphic enough.
                ensure_monomorphic_enough(*self.tcx, src.layout.ty)?;
                ensure_monomorphic_enough(*self.tcx, cast_ty.ty)?;

                span_bug!(
                    self.cur_span(),
                    "unsize_into: invalid conversion: {:?} -> {:?}",
                    src.layout,
                    dest.layout
                )
            }
        }
    }
}