rustc_const_eval/interpret/call.rs

//! Manages calling a concrete function (with known MIR body) with argument passing,
//! and returning the return value to the caller.

use std::assert_matches::assert_matches;
use std::borrow::Cow;

use either::{Left, Right};
use rustc_abi::{self as abi, ExternAbi, FieldIdx, Integer, VariantIdx};
use rustc_hir::def_id::DefId;
use rustc_middle::ty::layout::{FnAbiOf, IntegerExt, TyAndLayout};
use rustc_middle::ty::{self, AdtDef, Instance, Ty, VariantDef};
use rustc_middle::{bug, mir, span_bug};
use rustc_span::sym;
use rustc_target::callconv::{ArgAbi, FnAbi, PassMode};
use tracing::{info, instrument, trace};

use super::{
    CtfeProvenance, FnVal, ImmTy, InterpCx, InterpResult, MPlaceTy, Machine, OpTy, PlaceTy,
    Projectable, Provenance, ReturnAction, Scalar, StackPopCleanup, StackPopInfo, interp_ok,
    throw_ub, throw_ub_custom, throw_unsup_format,
};
use crate::fluent_generated as fluent;

/// An argument passed to a function.
#[derive(Clone, Debug)]
pub enum FnArg<'tcx, Prov: Provenance = CtfeProvenance> {
    /// Pass a copy of the given operand.
    Copy(OpTy<'tcx, Prov>),
    /// Allow for the argument to be passed in-place: destroy the value originally stored at that place and
    /// make the place inaccessible for the duration of the function call.
    InPlace(MPlaceTy<'tcx, Prov>),
}

impl<'tcx, Prov: Provenance> FnArg<'tcx, Prov> {
    pub fn layout(&self) -> &TyAndLayout<'tcx> {
        match self {
            FnArg::Copy(op) => &op.layout,
            FnArg::InPlace(mplace) => &mplace.layout,
        }
    }
}

impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
    /// Make a copy of the given fn_arg. Any `InPlace` is degenerated to a copy; no protection of the
    /// original memory occurs.
    pub fn copy_fn_arg(&self, arg: &FnArg<'tcx, M::Provenance>) -> OpTy<'tcx, M::Provenance> {
        match arg {
            FnArg::Copy(op) => op.clone(),
            FnArg::InPlace(mplace) => mplace.clone().into(),
        }
    }

    /// Make a copy of the given fn_args. Any `InPlace` is degenerated to a copy; no protection of the
    /// original memory occurs.
    pub fn copy_fn_args(
        &self,
        args: &[FnArg<'tcx, M::Provenance>],
    ) -> Vec<OpTy<'tcx, M::Provenance>> {
        args.iter().map(|fn_arg| self.copy_fn_arg(fn_arg)).collect()
    }

    /// Helper function for argument untupling.
    pub(super) fn fn_arg_field(
        &self,
        arg: &FnArg<'tcx, M::Provenance>,
        field: FieldIdx,
    ) -> InterpResult<'tcx, FnArg<'tcx, M::Provenance>> {
        interp_ok(match arg {
            FnArg::Copy(op) => FnArg::Copy(self.project_field(op, field)?),
            FnArg::InPlace(mplace) => FnArg::InPlace(self.project_field(mplace, field)?),
        })
    }

    /// Find the wrapped inner type of a transparent wrapper.
    /// Must not be called on 1-ZSTs (as they don't have a uniquely defined "wrapped field").
    ///
    /// We work with `TyAndLayout` here since that makes it much easier to iterate over all fields.
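    /// For example, a `#[repr(transparent)]` newtype like `core::num::Wrapping<u32>` unfolds
    /// to `u32` (provided `may_unfold` permits it).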
    fn unfold_transparent(
        &self,
        layout: TyAndLayout<'tcx>,
        may_unfold: impl Fn(AdtDef<'tcx>) -> bool,
    ) -> TyAndLayout<'tcx> {
        match layout.ty.kind() {
            ty::Adt(adt_def, _) if adt_def.repr().transparent() && may_unfold(*adt_def) => {
                assert!(!adt_def.is_enum());
                // Find the non-1-ZST field, and recurse.
                let (_, field) = layout.non_1zst_field(self).unwrap();
                self.unfold_transparent(field, may_unfold)
            }
            // Not a transparent type, no further unfolding.
            _ => layout,
        }
    }

    /// Unwrap types that are guaranteed the null-pointer optimization (NPO).
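    /// For example, `Option<&T>` unfolds to `&T`, and `Option<NonNull<T>>` unfolds to the raw
    /// pointer wrapped by `NonNull` (which carries `#[rustc_nonnull_optimization_guaranteed]`).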
    fn unfold_npo(&self, layout: TyAndLayout<'tcx>) -> InterpResult<'tcx, TyAndLayout<'tcx>> {
        // Check if this is an option-like type wrapping some type.
        let ty::Adt(def, args) = layout.ty.kind() else {
            // Not an ADT, so definitely no NPO.
            return interp_ok(layout);
        };
        if def.variants().len() != 2 {
            // Not a 2-variant enum, so no NPO.
            return interp_ok(layout);
        }
        assert!(def.is_enum());

        let all_fields_1zst = |variant: &VariantDef| -> InterpResult<'tcx, _> {
            for field in &variant.fields {
                let ty = field.ty(*self.tcx, args);
                let layout = self.layout_of(ty)?;
                if !layout.is_1zst() {
                    return interp_ok(false);
                }
            }
            interp_ok(true)
        };

        // If one variant consists entirely of 1-ZSTs, then the other variant
        // is the only "relevant" one for this check.
        let var0 = VariantIdx::from_u32(0);
        let var1 = VariantIdx::from_u32(1);
        let relevant_variant = if all_fields_1zst(def.variant(var0))? {
            def.variant(var1)
        } else if all_fields_1zst(def.variant(var1))? {
            def.variant(var0)
        } else {
            // No variant is all-1-ZST, so no NPO.
            return interp_ok(layout);
        };
        // The "relevant" variant must have exactly one field, and its type is the "inner" type.
        if relevant_variant.fields.len() != 1 {
            return interp_ok(layout);
        }
        let inner = relevant_variant.fields[FieldIdx::from_u32(0)].ty(*self.tcx, args);
        let inner = self.layout_of(inner)?;

        // Check if the inner type is one of the NPO-guaranteed ones.
        // For that we first unpeel transparent *structs* (but not unions).
        let is_npo = |def: AdtDef<'tcx>| {
            self.tcx.has_attr(def.did(), sym::rustc_nonnull_optimization_guaranteed)
        };
        let inner = self.unfold_transparent(inner, /* may_unfold */ |def| {
            // Stop at NPO types so that we don't miss that attribute in the check below!
            def.is_struct() && !is_npo(def)
        });
        interp_ok(match inner.ty.kind() {
            ty::Ref(..) | ty::FnPtr(..) => {
                // Option<&T> behaves like &T, and same for fn()
                inner
            }
            ty::Adt(def, _) if is_npo(*def) => {
                // Once we found a `nonnull_optimization_guaranteed` type, further strip off
                // newtype structs from it to find the underlying ABI type.
                self.unfold_transparent(inner, /* may_unfold */ |def| def.is_struct())
            }
            _ => {
                // Everything else we do not unfold.
                layout
            }
        })
    }

    /// Check if these two layouts look like they are fn-ABI-compatible.
    /// (We also compare the `PassMode`, so this doesn't have to check everything. But it turns out
    /// that only checking the `PassMode` is insufficient.)
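    ///
    /// For example, `&T` and `Option<&T>` count as compatible here (via the NPO unfolding above),
    /// and so do `usize` and `u64` on a 64-bit target (via the integer check below).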
    fn layout_compat(
        &self,
        caller: TyAndLayout<'tcx>,
        callee: TyAndLayout<'tcx>,
    ) -> InterpResult<'tcx, bool> {
        // Fast path: equal types are definitely compatible.
        if caller.ty == callee.ty {
            return interp_ok(true);
        }
        // 1-ZSTs are compatible with all 1-ZSTs (and with nothing else).
        if caller.is_1zst() || callee.is_1zst() {
            return interp_ok(caller.is_1zst() && callee.is_1zst());
        }
        // Unfold newtypes and NPO optimizations.
        let unfold = |layout: TyAndLayout<'tcx>| {
            self.unfold_npo(self.unfold_transparent(layout, /* may_unfold */ |_def| true))
        };
        let caller = unfold(caller)?;
        let callee = unfold(callee)?;
        // Now see if these inner types are compatible.

        // Compatible pointer types. For thin pointers, we have to accept even non-`repr(transparent)`
        // things as compatible due to `DispatchFromDyn`. For instance, `Rc<i32>` and `*mut i32`
        // must be compatible. So we just accept everything with Pointer ABI as compatible,
        // even if this will accept some code that is not stably guaranteed to work.
        // This also handles function pointers.
        let thin_pointer = |layout: TyAndLayout<'tcx>| match layout.backend_repr {
            abi::BackendRepr::Scalar(s) => match s.primitive() {
                abi::Primitive::Pointer(addr_space) => Some(addr_space),
                _ => None,
            },
            _ => None,
        };
        if let (Some(caller), Some(callee)) = (thin_pointer(caller), thin_pointer(callee)) {
            return interp_ok(caller == callee);
        }
        // For wide pointers we have to get the pointee type.
        let pointee_ty = |ty: Ty<'tcx>| -> InterpResult<'tcx, Option<Ty<'tcx>>> {
            // We cannot use `builtin_deref` here since we need to reject `Box<T, MyAlloc>`.
            interp_ok(Some(match ty.kind() {
                ty::Ref(_, ty, _) => *ty,
                ty::RawPtr(ty, _) => *ty,
                // We only accept `Box` with the default allocator.
                _ if ty.is_box_global(*self.tcx) => ty.expect_boxed_ty(),
                _ => return interp_ok(None),
            }))
        };
        if let (Some(caller), Some(callee)) = (pointee_ty(caller.ty)?, pointee_ty(callee.ty)?) {
            // This is okay if they have the same metadata type.
            let meta_ty = |ty: Ty<'tcx>| {
                // Even if `ty` is normalized, the search for the unsized tail will project
                // to fields, which can yield non-normalized types. So we need to provide a
                // normalization function.
                let normalize = |ty| self.tcx.normalize_erasing_regions(self.typing_env, ty);
                ty.ptr_metadata_ty(*self.tcx, normalize)
            };
            return interp_ok(meta_ty(caller) == meta_ty(callee));
        }

        // Compatible integer types (in particular, usize vs ptr-sized-u32/u64).
        // `char` counts as `u32`.
        let int_ty = |ty: Ty<'tcx>| {
            Some(match ty.kind() {
                ty::Int(ity) => (Integer::from_int_ty(&self.tcx, *ity), /* signed */ true),
                ty::Uint(uty) => (Integer::from_uint_ty(&self.tcx, *uty), /* signed */ false),
                ty::Char => (Integer::I32, /* signed */ false),
                _ => return None,
            })
        };
        if let (Some(caller), Some(callee)) = (int_ty(caller.ty), int_ty(callee.ty)) {
            // This is okay if they are the same integer type.
            return interp_ok(caller == callee);
        }

        // Fall back to exact equality.
        interp_ok(caller == callee)
    }

    /// Returns a `bool` saying whether the two arguments are ABI-compatible.
    pub fn check_argument_compat(
        &self,
        caller_abi: &ArgAbi<'tcx, Ty<'tcx>>,
        callee_abi: &ArgAbi<'tcx, Ty<'tcx>>,
    ) -> InterpResult<'tcx, bool> {
        // We do not want to accept things as ABI-compatible that just "happen to be" compatible on the current target,
        // so we implement a type-based check that reflects the guaranteed rules for ABI compatibility.
        if self.layout_compat(caller_abi.layout, callee_abi.layout)? {
            // Ensure that our checks imply actual ABI compatibility for this concrete call.
            // (This can fail e.g. if `#[rustc_nonnull_optimization_guaranteed]` is used incorrectly.)
            assert!(caller_abi.eq_abi(callee_abi));
            interp_ok(true)
        } else {
            trace!(
                "check_argument_compat: incompatible ABIs:\ncaller: {:?}\ncallee: {:?}",
                caller_abi, callee_abi
            );
            interp_ok(false)
        }
    }

    /// Initialize a single callee argument, checking the types for compatibility.
    fn pass_argument<'x, 'y>(
        &mut self,
        caller_args: &mut impl Iterator<
            Item = (&'x FnArg<'tcx, M::Provenance>, &'y ArgAbi<'tcx, Ty<'tcx>>),
        >,
        callee_abi: &ArgAbi<'tcx, Ty<'tcx>>,
        callee_arg: &mir::Place<'tcx>,
        callee_ty: Ty<'tcx>,
        already_live: bool,
    ) -> InterpResult<'tcx>
    where
        'tcx: 'x,
        'tcx: 'y,
    {
        assert_eq!(callee_ty, callee_abi.layout.ty);
        if matches!(callee_abi.mode, PassMode::Ignore) {
            // This one is skipped. Still must be made live though!
            if !already_live {
                self.storage_live(callee_arg.as_local().unwrap())?;
            }
            return interp_ok(());
        }
        // Find next caller arg.
        let Some((caller_arg, caller_abi)) = caller_args.next() else {
            throw_ub_custom!(fluent::const_eval_not_enough_caller_args);
        };
        assert_eq!(caller_arg.layout().layout, caller_abi.layout.layout);
        // Sadly we cannot assert that `caller_arg.layout().ty` and `caller_abi.layout.ty` are
        // equal; in closures the types sometimes differ. We just hope that `caller_abi` is the
        // right type to print to the user.

        // Check compatibility
        if !self.check_argument_compat(caller_abi, callee_abi)? {
            throw_ub!(AbiMismatchArgument {
                caller_ty: caller_abi.layout.ty,
                callee_ty: callee_abi.layout.ty
            });
        }
        // We work with a copy of the argument for now; if this is in-place argument passing, we
        // will later protect the source it comes from. This means the callee cannot observe if we
        // did in-place or by-copy argument passing, except for pointer equality tests.
        let caller_arg_copy = self.copy_fn_arg(caller_arg);
        if !already_live {
            let local = callee_arg.as_local().unwrap();
            let meta = caller_arg_copy.meta();
            // `check_argument_compat` ensures that if metadata is needed, both have the same type,
            // so we know they will use the metadata the same way.
            assert!(!meta.has_meta() || caller_arg_copy.layout.ty == callee_ty);

            self.storage_live_dyn(local, meta)?;
        }
        // Now we can finally actually evaluate the callee place.
        let callee_arg = self.eval_place(*callee_arg)?;
        // We allow some transmutes here.
        // FIXME: Depending on the PassMode, this should reset some padding to uninitialized. (This
        // is true for all `copy_op`, but there are a lot of special cases for argument passing
        // specifically.)
        self.copy_op_allow_transmute(&caller_arg_copy, &callee_arg)?;
        // If this was an in-place pass, protect the place it comes from for the duration of the call.
        if let FnArg::InPlace(mplace) = caller_arg {
            M::protect_in_place_function_argument(self, mplace)?;
        }
        interp_ok(())
    }

    /// The main entry point for creating a new stack frame: performs ABI checks and initializes
    /// arguments.
    #[instrument(skip(self), level = "trace")]
    pub fn init_stack_frame(
        &mut self,
        instance: Instance<'tcx>,
        body: &'tcx mir::Body<'tcx>,
        caller_fn_abi: &FnAbi<'tcx, Ty<'tcx>>,
        args: &[FnArg<'tcx, M::Provenance>],
        with_caller_location: bool,
        destination: &PlaceTy<'tcx, M::Provenance>,
        mut stack_pop: StackPopCleanup,
    ) -> InterpResult<'tcx> {
        // Compute callee information.
        // FIXME: for variadic support, do we have to somehow determine callee's extra_args?
        let callee_fn_abi = self.fn_abi_of_instance(instance, ty::List::empty())?;

        if callee_fn_abi.c_variadic || caller_fn_abi.c_variadic {
            throw_unsup_format!("calling a c-variadic function is not supported");
        }

        if caller_fn_abi.conv != callee_fn_abi.conv {
            throw_ub_custom!(
                fluent::const_eval_incompatible_calling_conventions,
                callee_conv = format!("{}", callee_fn_abi.conv),
                caller_conv = format!("{}", caller_fn_abi.conv),
            )
        }

        // Check that all target features required by the callee (i.e., from
        // the attribute `#[target_feature(enable = ...)]`) are enabled at
        // compile time.
        M::check_fn_target_features(self, instance)?;

        if !callee_fn_abi.can_unwind {
            // The callee cannot unwind, so force the `Unreachable` unwind handling.
            match &mut stack_pop {
                StackPopCleanup::Root { .. } => {}
                StackPopCleanup::Goto { unwind, .. } => {
                    *unwind = mir::UnwindAction::Unreachable;
                }
            }
        }

        self.push_stack_frame_raw(instance, body, destination, stack_pop)?;

        // If an error is raised here, pop the frame again to get an accurate backtrace.
        // To this end, we wrap it all in a `try` block.
        let res: InterpResult<'tcx> = try {
            trace!(
                "caller ABI: {:#?}, args: {:#?}",
                caller_fn_abi,
                args.iter()
                    .map(|arg| (
                        arg.layout().ty,
                        match arg {
                            FnArg::Copy(op) => format!("copy({op:?})"),
                            FnArg::InPlace(mplace) => format!("in-place({mplace:?})"),
                        }
                    ))
                    .collect::<Vec<_>>()
            );
            trace!(
                "spread_arg: {:?}, locals: {:#?}",
                body.spread_arg,
                body.args_iter()
                    .map(|local| (
                        local,
                        self.layout_of_local(self.frame(), local, None).unwrap().ty,
                    ))
                    .collect::<Vec<_>>()
            );

            // In principle, we have two iterators: where the arguments come from, and where
            // they go to.

            // The "where they come from" part is easy; we expect the caller to do any special handling
            // that might be required here (e.g. for untupling).
            // If `with_caller_location` is set we pretend there is an extra argument (that
            // we will not pass; our `caller_location` intrinsic implementation walks the stack instead).
            assert_eq!(
                args.len() + if with_caller_location { 1 } else { 0 },
                caller_fn_abi.args.len(),
                "mismatch between caller ABI and caller arguments",
            );
            let mut caller_args = args
                .iter()
                .zip(caller_fn_abi.args.iter())
                .filter(|arg_and_abi| !matches!(arg_and_abi.1.mode, PassMode::Ignore));

            // Now we have to spread them out across the callee's locals,
            // taking into account the `spread_arg`. If we could write
            // this as a single iterator (that handles `spread_arg`), then
            // `pass_argument` would be the loop body. It takes care to
            // not advance `caller_iter` for ignored arguments.
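            // (E.g. for an `extern "rust-call"` closure body, `spread_arg` is the tuple local
            // whose fields are filled from the individually passed caller arguments.)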
            let mut callee_args_abis = callee_fn_abi.args.iter();
            for local in body.args_iter() {
                // Construct the destination place for this argument. At this point all
                // locals are still dead, so we cannot construct a `PlaceTy`.
                let dest = mir::Place::from(local);
                // `layout_of_local` does more than just the instantiation we need to get the
                // type, but the result gets cached so this avoids calling the instantiation
                // query *again* the next time this local is accessed.
                let ty = self.layout_of_local(self.frame(), local, None)?.ty;
                if Some(local) == body.spread_arg {
                    // Make the local live once, then fill in the value field by field.
                    self.storage_live(local)?;
                    // Must be a tuple
                    let ty::Tuple(fields) = ty.kind() else {
                        span_bug!(self.cur_span(), "non-tuple type for `spread_arg`: {ty}")
                    };
                    for (i, field_ty) in fields.iter().enumerate() {
                        let dest = dest.project_deeper(
                            &[mir::ProjectionElem::Field(FieldIdx::from_usize(i), field_ty)],
                            *self.tcx,
                        );
                        let callee_abi = callee_args_abis.next().unwrap();
                        self.pass_argument(
                            &mut caller_args,
                            callee_abi,
                            &dest,
                            field_ty,
                            /* already_live */ true,
                        )?;
                    }
                } else {
                    // Normal argument. Cannot mark it as live yet; it might be unsized!
                    let callee_abi = callee_args_abis.next().unwrap();
                    self.pass_argument(
                        &mut caller_args,
                        callee_abi,
                        &dest,
                        ty,
                        /* already_live */ false,
                    )?;
                }
            }
            // If the callee needs a caller location, pretend we consume one more argument from the ABI.
            if instance.def.requires_caller_location(*self.tcx) {
                callee_args_abis.next().unwrap();
            }
            // Now we should have no more caller args or callee arg ABIs.
            assert!(
                callee_args_abis.next().is_none(),
                "mismatch between callee ABI and callee body arguments"
            );
            if caller_args.next().is_some() {
                throw_ub_custom!(fluent::const_eval_too_many_caller_args);
            }
            // Don't forget to check the return type!
            if !self.check_argument_compat(&caller_fn_abi.ret, &callee_fn_abi.ret)? {
                throw_ub!(AbiMismatchReturn {
                    caller_ty: caller_fn_abi.ret.layout.ty,
                    callee_ty: callee_fn_abi.ret.layout.ty
                });
            }

            // Protect return place for in-place return value passing.
            // We only need to protect anything if this is actually an in-memory place.
            if let Left(mplace) = destination.as_mplace_or_local() {
                M::protect_in_place_function_argument(self, &mplace)?;
            }

            // Don't forget to mark "initially live" locals as live.
            self.storage_live_for_always_live_locals()?;
        };
        res.inspect_err_kind(|_| {
            // Don't show the incomplete stack frame in the error stacktrace.
            self.stack_mut().pop();
        })
    }

    /// Initiate a call to this function -- pushing the stack frame and initializing the arguments.
    ///
    /// `caller_fn_abi` is used to determine if all the arguments are passed the proper way.
    /// However, we also need `caller_abi` to determine if we need to do untupling of arguments.
    ///
    /// `with_caller_location` indicates whether the caller passed a caller location. Miri
    /// implements caller locations without argument passing, but to match `FnAbi` we need to know
    /// when those arguments are present.
    pub(super) fn init_fn_call(
        &mut self,
        fn_val: FnVal<'tcx, M::ExtraFnVal>,
        (caller_abi, caller_fn_abi): (ExternAbi, &FnAbi<'tcx, Ty<'tcx>>),
        args: &[FnArg<'tcx, M::Provenance>],
        with_caller_location: bool,
        destination: &PlaceTy<'tcx, M::Provenance>,
        target: Option<mir::BasicBlock>,
        unwind: mir::UnwindAction,
    ) -> InterpResult<'tcx> {
        trace!("init_fn_call: {:#?}", fn_val);

        let instance = match fn_val {
            FnVal::Instance(instance) => instance,
            FnVal::Other(extra) => {
                return M::call_extra_fn(
                    self,
                    extra,
                    caller_fn_abi,
                    args,
                    destination,
                    target,
                    unwind,
                );
            }
        };

        match instance.def {
            ty::InstanceKind::Intrinsic(def_id) => {
                assert!(self.tcx.intrinsic(def_id).is_some());
                // FIXME: Should `InPlace` arguments be reset to uninit?
                if let Some(fallback) = M::call_intrinsic(
                    self,
                    instance,
                    &self.copy_fn_args(args),
                    destination,
                    target,
                    unwind,
                )? {
                    assert!(!self.tcx.intrinsic(fallback.def_id()).unwrap().must_be_overridden);
                    assert_matches!(fallback.def, ty::InstanceKind::Item(_));
                    return self.init_fn_call(
                        FnVal::Instance(fallback),
                        (caller_abi, caller_fn_abi),
                        args,
                        with_caller_location,
                        destination,
                        target,
                        unwind,
                    );
                } else {
                    interp_ok(())
                }
            }
            ty::InstanceKind::VTableShim(..)
            | ty::InstanceKind::ReifyShim(..)
            | ty::InstanceKind::ClosureOnceShim { .. }
            | ty::InstanceKind::ConstructCoroutineInClosureShim { .. }
            | ty::InstanceKind::FnPtrShim(..)
            | ty::InstanceKind::DropGlue(..)
            | ty::InstanceKind::CloneShim(..)
            | ty::InstanceKind::FnPtrAddrShim(..)
            | ty::InstanceKind::ThreadLocalShim(..)
            | ty::InstanceKind::AsyncDropGlueCtorShim(..)
            | ty::InstanceKind::AsyncDropGlue(..)
            | ty::InstanceKind::FutureDropPollShim(..)
            | ty::InstanceKind::Item(_) => {
                // We need MIR for this fn.
                // Note that this can be an intrinsic, if we are executing its fallback body.
                let Some((body, instance)) = M::find_mir_or_eval_fn(
                    self,
                    instance,
                    caller_fn_abi,
                    args,
                    destination,
                    target,
                    unwind,
                )?
                else {
                    return interp_ok(());
                };

                // Special handling for the closure ABI: untuple the last argument.
                let args: Cow<'_, [FnArg<'tcx, M::Provenance>]> =
                    if caller_abi == ExternAbi::RustCall && !args.is_empty() {
                        // Untuple
                        let (untuple_arg, args) = args.split_last().unwrap();
                        trace!("init_fn_call: Will pass last argument by untupling");
                        Cow::from(
                            args.iter()
                                .map(|a| interp_ok(a.clone()))
                                .chain((0..untuple_arg.layout().fields.count()).map(|i| {
                                    self.fn_arg_field(untuple_arg, FieldIdx::from_usize(i))
                                }))
                                .collect::<InterpResult<'_, Vec<_>>>()?,
                        )
                    } else {
                        // Plain arg passing
                        Cow::from(args)
                    };

                self.init_stack_frame(
                    instance,
                    body,
                    caller_fn_abi,
                    &args,
                    with_caller_location,
                    destination,
                    StackPopCleanup::Goto { ret: target, unwind },
                )
            }
            // `InstanceKind::Virtual` does not have callable MIR. Calls to `Virtual` instances must be
            // codegen'd / interpreted as virtual calls through the vtable.
            ty::InstanceKind::Virtual(def_id, idx) => {
                let mut args = args.to_vec();
                // We have to implement all "dyn-compatible receivers". So we have to go search for a
                // pointer or `dyn Trait` type, but it could be wrapped in newtypes. So recursively
                // unwrap those newtypes until we are there.
                // An `InPlace` does nothing here; we keep the original receiver intact. We can't
                // really pass the argument in-place anyway, and we are constructing a new
                // `Immediate` receiver.
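                // (E.g. an `Rc<dyn Trait>` receiver gets projected through its fields until we
                // reach the underlying raw pointer.)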
                let mut receiver = self.copy_fn_arg(&args[0]);
                let receiver_place = loop {
                    match receiver.layout.ty.kind() {
                        ty::Ref(..) | ty::RawPtr(..) => {
                            // We do *not* use `deref_pointer` here: we don't want to conceptually
                            // create a place that must be dereferenceable, since the receiver might
                            // be a raw pointer and (for `*const dyn Trait`) we don't need to
                            // actually access memory to resolve this method.
                            // Also see <https://github.com/rust-lang/miri/issues/2786>.
                            let val = self.read_immediate(&receiver)?;
                            break self.ref_to_mplace(&val)?;
                        }
                        ty::Dynamic(.., ty::Dyn) => break receiver.assert_mem_place(), // no immediate unsized values
                        ty::Dynamic(.., ty::DynStar) => {
                            // Not clear how to handle this, so far we assume the receiver is always a pointer.
                            span_bug!(
                                self.cur_span(),
                                "by-value calls on a `dyn*`... are those a thing?"
                            );
                        }
                        _ => {
                            // Not there yet, search for the only non-ZST field.
                            // (The rules for `DispatchFromDyn` ensure there's exactly one such field.)
                            let (idx, _) = receiver.layout.non_1zst_field(self).expect(
                                "not exactly one non-1-ZST field in a `DispatchFromDyn` type",
                            );
                            receiver = self.project_field(&receiver, idx)?;
                        }
                    }
                };

                // Obtain the underlying trait we are working on, and the adjusted receiver argument.
                let (trait_, dyn_ty, adjusted_recv) = if let ty::Dynamic(data, _, ty::DynStar) =
                    receiver_place.layout.ty.kind()
                {
                    let recv = self.unpack_dyn_star(&receiver_place, data)?;

                    (data.principal(), recv.layout.ty, recv.ptr())
                } else {
                    // Doesn't have to be a `dyn Trait`, but the unsized tail must be `dyn Trait`.
                    // (For that reason we also cannot use `unpack_dyn_trait`.)
                    let receiver_tail =
                        self.tcx.struct_tail_for_codegen(receiver_place.layout.ty, self.typing_env);
                    let ty::Dynamic(receiver_trait, _, ty::Dyn) = receiver_tail.kind() else {
                        span_bug!(
                            self.cur_span(),
                            "dynamic call on non-`dyn` type {}",
                            receiver_tail
                        )
                    };
                    assert!(receiver_place.layout.is_unsized());

                    // Get the required information from the vtable.
                    let vptr = receiver_place.meta().unwrap_meta().to_pointer(self)?;
                    let dyn_ty = self.get_ptr_vtable_ty(vptr, Some(receiver_trait))?;

                    // It might be surprising that we use a pointer as the receiver even if this
                    // is a by-val case; this works because by-val passing of an unsized `dyn
                    // Trait` to a function is actually desugared to a pointer.
                    (receiver_trait.principal(), dyn_ty, receiver_place.ptr())
                };

                // Now determine the actual method to call. Usually we use the easy way of just
                // looking up the method at index `idx`.
                let vtable_entries = self.vtable_entries(trait_, dyn_ty);
                let Some(ty::VtblEntry::Method(fn_inst)) = vtable_entries.get(idx).copied() else {
                    // FIXME(fee1-dead) these could be variants of the UB info enum instead of this
                    throw_ub_custom!(fluent::const_eval_dyn_call_not_a_method);
                };
                trace!("Virtual call dispatches to {fn_inst:#?}");
                // We can also do the lookup based on `def_id` and `dyn_ty`, and check that that
                // produces the same result.
                self.assert_virtual_instance_matches_concrete(dyn_ty, def_id, instance, fn_inst);

                // Adjust receiver argument. Layout can be any (thin) ptr.
                let receiver_ty = Ty::new_mut_ptr(self.tcx.tcx, dyn_ty);
                args[0] = FnArg::Copy(
                    ImmTy::from_immediate(
                        Scalar::from_maybe_pointer(adjusted_recv, self).into(),
                        self.layout_of(receiver_ty)?,
                    )
                    .into(),
                );
                trace!("Patched receiver operand to {:#?}", args[0]);
                // Need to also adjust the type in the ABI. Strangely, the layout there is actually
                // already fine! Just the type is bogus. This is due to what `force_thin_self_ptr`
                // does in `fn_abi_new_uncached`; supposedly, codegen relies on having the bogus
                // type, so we just patch this up locally.
                let mut caller_fn_abi = caller_fn_abi.clone();
                caller_fn_abi.args[0].layout.ty = receiver_ty;

                // recurse with concrete function
                self.init_fn_call(
                    FnVal::Instance(fn_inst),
                    (caller_abi, &caller_fn_abi),
                    &args,
                    with_caller_location,
                    destination,
                    target,
                    unwind,
                )
            }
        }
    }

    fn assert_virtual_instance_matches_concrete(
        &self,
        dyn_ty: Ty<'tcx>,
        def_id: DefId,
        virtual_instance: ty::Instance<'tcx>,
        concrete_instance: ty::Instance<'tcx>,
    ) {
        let tcx = *self.tcx;

        let trait_def_id = tcx.trait_of_item(def_id).unwrap();
        let virtual_trait_ref = ty::TraitRef::from_method(tcx, trait_def_id, virtual_instance.args);
        let existential_trait_ref = ty::ExistentialTraitRef::erase_self_ty(tcx, virtual_trait_ref);
        let concrete_trait_ref = existential_trait_ref.with_self_ty(tcx, dyn_ty);

        let concrete_method = Instance::expect_resolve_for_vtable(
            tcx,
            self.typing_env,
            def_id,
            virtual_instance.args.rebase_onto(tcx, trait_def_id, concrete_trait_ref.args),
            self.cur_span(),
        );
        assert_eq!(concrete_instance, concrete_method);
    }

    /// Initiate a tail call to this function -- popping the current stack frame, pushing the new
    /// stack frame and initializing the arguments.
    pub(super) fn init_fn_tail_call(
        &mut self,
        fn_val: FnVal<'tcx, M::ExtraFnVal>,
        (caller_abi, caller_fn_abi): (ExternAbi, &FnAbi<'tcx, Ty<'tcx>>),
        args: &[FnArg<'tcx, M::Provenance>],
        with_caller_location: bool,
    ) -> InterpResult<'tcx> {
        trace!("init_fn_tail_call: {:#?}", fn_val);

        // This is the "canonical" implementation of tail calls:
        // a pop of the current stack frame, followed by a normal call
        // which pushes a new stack frame, with the return address from
        // the popped stack frame.
        //
        // Note that we are using `pop_stack_frame_raw` and not `return_from_current_stack_frame`,
        // as the latter "executes" the goto to the return block, but we don't want that;
        // only the tail-called function should return to the current return block.
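        // (Tail calls arise from the unstable `become` expression, gated behind the
        // `explicit_tail_calls` feature.)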
        let StackPopInfo { return_action, return_to_block, return_place } = self
            .pop_stack_frame_raw(false, |_this, _return_place| {
                // This function's return value is just discarded; the tail-callee will fill in the return place instead.
                interp_ok(())
            })?;

        assert_eq!(return_action, ReturnAction::Normal);

        // Take the "stack pop cleanup" info, and use that to initiate the next call.
        let StackPopCleanup::Goto { ret, unwind } = return_to_block else {
            bug!("can't tailcall as root");
        };

        // FIXME(explicit_tail_calls):
        //   we should check if both caller&callee can/n't unwind,
        //   see <https://github.com/rust-lang/rust/pull/113128#issuecomment-1614979803>

        self.init_fn_call(
            fn_val,
            (caller_abi, caller_fn_abi),
            args,
            with_caller_location,
            &return_place,
            ret,
            unwind,
        )
    }

    pub(super) fn init_drop_in_place_call(
        &mut self,
        place: &PlaceTy<'tcx, M::Provenance>,
        instance: ty::Instance<'tcx>,
        target: mir::BasicBlock,
        unwind: mir::UnwindAction,
    ) -> InterpResult<'tcx> {
        trace!("init_drop_in_place_call: {:?},\n  instance={:?}", place, instance);
        // We take the address of the object. This may well be unaligned, which is fine
        // for us here. However, unaligned accesses will probably make the actual drop
        // implementation fail -- a problem shared by rustc.
        let place = self.force_allocation(place)?;

        // We behave a bit differently from codegen here.
        // Codegen creates an `InstanceKind::Virtual` with index 0 (the slot of the drop method) and
        // then dispatches that to the normal call machinery. However, our call machinery currently
        // only supports calling `VtblEntry::Method`; it would choke on a `MetadataDropInPlace`. So
        // instead we do the virtual call stuff ourselves. It's easier here than in `eval_fn_call`
        // since we can just get a place of the underlying type and use `mplace_to_ref`.
        let place = match place.layout.ty.kind() {
            ty::Dynamic(data, _, ty::Dyn) => {
                // Dropping a trait object. Need to find actual drop fn.
                self.unpack_dyn_trait(&place, data)?
            }
            ty::Dynamic(data, _, ty::DynStar) => {
                // Dropping a `dyn*`. Need to find actual drop fn.
                self.unpack_dyn_star(&place, data)?
            }
            _ => {
                debug_assert_eq!(
                    instance,
                    ty::Instance::resolve_drop_in_place(*self.tcx, place.layout.ty)
                );
                place
            }
        };
        let instance = ty::Instance::resolve_drop_in_place(*self.tcx, place.layout.ty);
        let fn_abi = self.fn_abi_of_instance(instance, ty::List::empty())?;

        let arg = self.mplace_to_ref(&place)?;
        let ret = MPlaceTy::fake_alloc_zst(self.layout_of(self.tcx.types.unit)?);

        self.init_fn_call(
            FnVal::Instance(instance),
            (ExternAbi::Rust, fn_abi),
            &[FnArg::Copy(arg.into())],
            false,
            &ret.into(),
            Some(target),
            unwind,
        )
    }

    /// Pops the current frame from the stack, copies the return value to the caller, deallocates
    /// the memory for allocated locals, and jumps to an appropriate place.
    ///
    /// If `unwinding` is `false`, then we are performing a normal return
    /// from a function. In this case, we jump back into the frame of the caller,
    /// and continue execution as normal.
    ///
    /// If `unwinding` is `true`, then we are in the middle of a panic,
    /// and need to unwind this frame. In this case, we jump to the
    /// `cleanup` block for the function, which is responsible for running
    /// `Drop` impls for any locals that have been initialized at this point.
    /// The cleanup block ends with a special `Resume` terminator, which will
    /// cause us to continue unwinding.
    #[instrument(skip(self), level = "trace")]
    pub(super) fn return_from_current_stack_frame(
        &mut self,
        unwinding: bool,
    ) -> InterpResult<'tcx> {
        info!(
            "popping stack frame ({})",
            if unwinding { "during unwinding" } else { "returning from function" }
        );

        // Check `unwinding`.
        assert_eq!(
            unwinding,
            match self.frame().loc {
                Left(loc) => self.body().basic_blocks[loc.block].is_cleanup,
                Right(_) => true,
            }
        );
        if unwinding && self.frame_idx() == 0 {
            throw_ub_custom!(fluent::const_eval_unwind_past_top);
        }

        // Get out the return value. Must happen *before* the frame is popped as we have to get the
        // local's value out.
        let return_op =
            self.local_to_op(mir::RETURN_PLACE, None).expect("return place should always be live");
        // Do the actual pop + copy.
        let stack_pop_info = self.pop_stack_frame_raw(unwinding, |this, return_place| {
            this.copy_op_allow_transmute(&return_op, return_place)?;
            trace!("return value: {:?}", this.dump_place(return_place));
            interp_ok(())
        })?;

        match stack_pop_info.return_action {
            ReturnAction::Normal => {}
            ReturnAction::NoJump => {
                // The hook already did everything.
                return interp_ok(());
            }
            ReturnAction::NoCleanup => {
                // If we are not doing cleanup, also skip everything else.
                assert!(self.stack().is_empty(), "only the topmost frame should ever be leaked");
                assert!(!unwinding, "tried to skip cleanup during unwinding");
                // Don't jump anywhere.
                return interp_ok(());
            }
        }

        // Normal return, figure out where to jump.
        if unwinding {
            // Follow the unwind edge.
            match stack_pop_info.return_to_block {
                StackPopCleanup::Goto { unwind, .. } => {
                    // This must be the very last thing that happens, since it can in fact push a new stack frame.
                    self.unwind_to_block(unwind)
                }
                StackPopCleanup::Root { .. } => {
                    panic!("encountered StackPopCleanup::Root when unwinding!")
                }
            }
        } else {
            // Follow the normal return edge.
            match stack_pop_info.return_to_block {
                StackPopCleanup::Goto { ret, .. } => self.return_to_block(ret),
                StackPopCleanup::Root { .. } => {
                    assert!(
                        self.stack().is_empty(),
                        "only the bottommost frame can have StackPopCleanup::Root"
                    );
                    interp_ok(())
                }
            }
        }
    }
}