rustc_const_eval/interpret/
machine.rs

1//! This module contains everything needed to instantiate an interpreter.
2//! This separation exists to ensure that no fancy miri features like
3//! interpreting common C functions leak into CTFE.
4
5use std::borrow::{Borrow, Cow};
6use std::fmt::Debug;
7use std::hash::Hash;
8
9use rustc_abi::{Align, Size};
10use rustc_apfloat::{Float, FloatConvert};
11use rustc_middle::query::TyCtxtAt;
12use rustc_middle::ty::Ty;
13use rustc_middle::ty::layout::TyAndLayout;
14use rustc_middle::{mir, ty};
15use rustc_span::Span;
16use rustc_span::def_id::DefId;
17use rustc_target::callconv::FnAbi;
18
19use super::{
20    AllocBytes, AllocId, AllocKind, AllocRange, Allocation, CTFE_ALLOC_SALT, ConstAllocation,
21    CtfeProvenance, EnteredTraceSpan, FnArg, Frame, ImmTy, InterpCx, InterpResult, MPlaceTy,
22    MemoryKind, Misalignment, OpTy, PlaceTy, Pointer, Provenance, RangeSet, interp_ok, throw_unsup,
23};
24
/// Data returned by [`Machine::after_stack_pop`], and consumed by
/// [`InterpCx::return_from_current_stack_frame`] to determine what actions should be done when
/// returning from a stack frame.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum ReturnAction {
    /// No special handling is needed: we either return normally or unwind,
    /// as dictated by the terminator of the function being left.
    Normal,

    /// The return/unwind address must *not* be jumped to; the callback has already
    /// taken care of everything.
    NoJump,

    /// Returned by [`InterpCx::pop_stack_frame_raw`] when no cleanup should be done.
    NoCleanup,
}
43
/// Whether this kind of memory is allowed to leak
pub trait MayLeak: Copy {
    /// Returns whether memory of this kind is allowed to leak.
    fn may_leak(self) -> bool;
}
48
/// The interface that memory uses to manage its allocations.
pub trait AllocMap<K: Hash + Eq, V> {
    /// Tests whether the map contains the given key.
    /// Takes `&mut` on purpose: a mutable borrow is all that is needed, and it lets
    /// some implementations be more efficient (e.g. via `RefCell::get_mut`).
    fn contains_key<Q: ?Sized + Hash + Eq>(&mut self, k: &Q) -> bool
    where
        K: Borrow<Q>;

    /// Like [`AllocMap::contains_key`], but usable with only a shared reference.
    /// Callers should prefer `contains_key` when they can, since this variant may be
    /// slightly slower (e.g. when the data lives inside a `RefCell`).
    fn contains_key_ref<Q: ?Sized + Hash + Eq>(&self, k: &Q) -> bool
    where
        K: Borrow<Q>;

    /// Inserts a new entry into the map.
    fn insert(&mut self, k: K, v: V) -> Option<V>;

    /// Removes an entry from the map.
    fn remove<Q: ?Sized + Hash + Eq>(&mut self, k: &Q) -> Option<V>
    where
        K: Borrow<Q>;

    /// Collects data derived from the keys and values in the map.
    fn filter_map_collect<T>(&self, f: impl FnMut(&K, &V) -> Option<T>) -> Vec<T>;

    /// Returns a reference to the entry at `k`. If there is no such entry, `vacant`
    /// is invoked; its error is forwarded, or its result is added to the map and a
    /// reference to *that* is returned.
    fn get_or<E>(&self, k: K, vacant: impl FnOnce() -> Result<V, E>) -> Result<&V, E>;

    /// Returns a mutable reference to the entry at `k`. If there is no such entry,
    /// `vacant` is invoked; its error is forwarded, or its result is added to the map
    /// and a reference to *that* is returned.
    fn get_mut_or<E>(&mut self, k: K, vacant: impl FnOnce() -> Result<V, E>) -> Result<&mut V, E>;

    /// Read-only lookup.
    fn get(&self, k: K) -> Option<&V> {
        match self.get_or(k, || Err(())) {
            Ok(v) => Some(v),
            Err(()) => None,
        }
    }

    /// Mutable lookup.
    fn get_mut(&mut self, k: K) -> Option<&mut V> {
        match self.get_mut_or(k, || Err(())) {
            Ok(v) => Some(v),
            Err(()) => None,
        }
    }
}
97
/// Methods of this trait signify a point where CTFE evaluation would fail
/// and some use case dependent behaviour can instead be applied.
pub trait Machine<'tcx>: Sized {
    /// Additional memory kinds a machine wishes to distinguish from the builtin ones
    type MemoryKind: Debug + std::fmt::Display + MayLeak + Eq + 'static;

    /// Pointers are "tagged" with provenance information; typically the `AllocId` they belong to.
    type Provenance: Provenance + Eq + Hash + 'static;

    /// When getting the AllocId of a pointer, some extra data is also obtained from the provenance
    /// that is passed to memory access hooks so they can do things with it.
    type ProvenanceExtra: Copy + 'static;

    /// Machines can define extra (non-instance) things that represent values of function pointers.
    /// For example, Miri uses this to return a function pointer from `dlsym`
    /// that can later be called to execute the right thing.
    type ExtraFnVal: Debug + Copy;

    /// Extra data stored in every call frame.
    type FrameExtra;

    /// Extra data stored in every allocation.
    type AllocExtra: Debug + Clone + 'tcx;

    /// Type for the bytes of the allocation.
    type Bytes: AllocBytes + 'static;

    /// Memory's allocation map
    type MemoryMap: AllocMap<
            AllocId,
            (
                MemoryKind<Self::MemoryKind>,
                Allocation<Self::Provenance, Self::AllocExtra, Self::Bytes>,
            ),
        > + Default
        + Clone;

    /// The memory kind to use for copied global memory (held in `tcx`) --
    /// or None if such memory should not be mutated and thus any such attempt will cause
    /// a `ModifiedStatic` error to be raised.
    /// Statics are copied under two circumstances: When they are mutated, and when
    /// `adjust_allocation` (see below) returns an owned allocation
    /// that is added to the memory so that the work is not done twice.
    const GLOBAL_KIND: Option<Self::MemoryKind>;

    /// Should the machine panic on allocation failures?
    const PANIC_ON_ALLOC_FAIL: bool;

    /// Determines whether `eval_mir_constant` can never fail because all required consts have
    /// already been checked before.
    const ALL_CONSTS_ARE_PRECHECKED: bool = true;

    /// Whether memory accesses should be alignment-checked.
    fn enforce_alignment(ecx: &InterpCx<'tcx, Self>) -> bool;

    /// Gives the machine a chance to detect more misalignment than the built-in checks would catch.
    #[inline(always)]
    fn alignment_check(
        _ecx: &InterpCx<'tcx, Self>,
        _alloc_id: AllocId,
        _alloc_align: Align,
        _alloc_kind: AllocKind,
        _offset: Size,
        _align: Align,
    ) -> Option<Misalignment> {
        // By default, no extra misalignment is detected.
        None
    }

    /// Whether to enforce the validity invariant for a specific layout.
    fn enforce_validity(ecx: &InterpCx<'tcx, Self>, layout: TyAndLayout<'tcx>) -> bool;
    /// Whether to enforce the validity invariant *recursively*.
    fn enforce_validity_recursively(
        _ecx: &InterpCx<'tcx, Self>,
        _layout: TyAndLayout<'tcx>,
    ) -> bool {
        false
    }

    /// Whether Assert(OverflowNeg) and Assert(Overflow) MIR terminators should actually
    /// check for overflow.
    fn ignore_optional_overflow_checks(_ecx: &InterpCx<'tcx, Self>) -> bool;

    /// Entry point for obtaining the MIR of anything that should get evaluated.
    /// So not just functions and shims, but also const/static initializers, anonymous
    /// constants, ...
    fn load_mir(
        ecx: &InterpCx<'tcx, Self>,
        instance: ty::InstanceKind<'tcx>,
    ) -> &'tcx mir::Body<'tcx> {
        ecx.tcx.instance_mir(instance)
    }

    /// Entry point to all function calls.
    ///
    /// Returns either the mir to use for the call, or `None` if execution should
    /// just proceed (which usually means this hook did all the work that the
    /// called function should usually have done). In the latter case, it is
    /// this hook's responsibility to advance the instruction pointer!
    /// (This is to support functions like `__rust_maybe_catch_panic` that neither find a MIR
    /// nor just jump to `ret`, but instead push their own stack frame.)
    /// Passing `dest` and `ret` in the same `Option` proved very annoying when only one of them
    /// was used.
    fn find_mir_or_eval_fn(
        ecx: &mut InterpCx<'tcx, Self>,
        instance: ty::Instance<'tcx>,
        abi: &FnAbi<'tcx, Ty<'tcx>>,
        args: &[FnArg<'tcx, Self::Provenance>],
        destination: &PlaceTy<'tcx, Self::Provenance>,
        target: Option<mir::BasicBlock>,
        unwind: mir::UnwindAction,
    ) -> InterpResult<'tcx, Option<(&'tcx mir::Body<'tcx>, ty::Instance<'tcx>)>>;

    /// Execute `fn_val`. It is the hook's responsibility to advance the instruction
    /// pointer as appropriate.
    fn call_extra_fn(
        ecx: &mut InterpCx<'tcx, Self>,
        fn_val: Self::ExtraFnVal,
        abi: &FnAbi<'tcx, Ty<'tcx>>,
        args: &[FnArg<'tcx, Self::Provenance>],
        destination: &PlaceTy<'tcx, Self::Provenance>,
        target: Option<mir::BasicBlock>,
        unwind: mir::UnwindAction,
    ) -> InterpResult<'tcx>;

    /// Directly process an intrinsic without pushing a stack frame. It is the hook's
    /// responsibility to advance the instruction pointer as appropriate.
    ///
    /// Returns `None` if the intrinsic was fully handled.
    /// Otherwise, returns an `Instance` of the function that implements the intrinsic.
    fn call_intrinsic(
        ecx: &mut InterpCx<'tcx, Self>,
        instance: ty::Instance<'tcx>,
        args: &[OpTy<'tcx, Self::Provenance>],
        destination: &PlaceTy<'tcx, Self::Provenance>,
        target: Option<mir::BasicBlock>,
        unwind: mir::UnwindAction,
    ) -> InterpResult<'tcx, Option<ty::Instance<'tcx>>>;

    /// Check whether the given function may be executed on the current machine, in terms of the
    /// target features it requires.
    fn check_fn_target_features(
        _ecx: &InterpCx<'tcx, Self>,
        _instance: ty::Instance<'tcx>,
    ) -> InterpResult<'tcx>;

    /// Called to evaluate `Assert` MIR terminators that trigger a panic.
    fn assert_panic(
        ecx: &mut InterpCx<'tcx, Self>,
        msg: &mir::AssertMessage<'tcx>,
        unwind: mir::UnwindAction,
    ) -> InterpResult<'tcx>;

    /// Called to trigger a non-unwinding panic.
    fn panic_nounwind(_ecx: &mut InterpCx<'tcx, Self>, msg: &str) -> InterpResult<'tcx>;

    /// Called when unwinding reached a state where execution should be terminated.
    fn unwind_terminate(
        ecx: &mut InterpCx<'tcx, Self>,
        reason: mir::UnwindTerminateReason,
    ) -> InterpResult<'tcx>;

    /// Called for all binary operations where the LHS has pointer type.
    ///
    /// Returns a (value, overflowed) pair if the operation succeeded
    fn binary_ptr_op(
        ecx: &InterpCx<'tcx, Self>,
        bin_op: mir::BinOp,
        left: &ImmTy<'tcx, Self::Provenance>,
        right: &ImmTy<'tcx, Self::Provenance>,
    ) -> InterpResult<'tcx, ImmTy<'tcx, Self::Provenance>>;

    /// Generate the NaN returned by a float operation, given the list of inputs.
    /// (This is all inputs, not just NaN inputs!)
    fn generate_nan<F1: Float + FloatConvert<F2>, F2: Float>(
        _ecx: &InterpCx<'tcx, Self>,
        _inputs: &[F1],
    ) -> F2 {
        // By default we always return the preferred NaN.
        F2::NAN
    }

    /// Apply non-determinism to float operations that do not return a precise result.
    fn apply_float_nondet(
        _ecx: &mut InterpCx<'tcx, Self>,
        val: ImmTy<'tcx, Self::Provenance>,
    ) -> InterpResult<'tcx, ImmTy<'tcx, Self::Provenance>> {
        // By default, the result is passed through unchanged (fully deterministic).
        interp_ok(val)
    }

    /// Determines the result of `min`/`max` on floats when the arguments are equal.
    fn equal_float_min_max<F: Float>(_ecx: &InterpCx<'tcx, Self>, a: F, _b: F) -> F {
        // By default, we pick the left argument.
        a
    }

    /// Called before a basic block terminator is executed.
    #[inline]
    fn before_terminator(_ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx> {
        interp_ok(())
    }

    /// Determines the result of a `NullaryOp::UbChecks` invocation.
    fn ub_checks(_ecx: &InterpCx<'tcx, Self>) -> InterpResult<'tcx, bool>;

    /// Determines the result of a `NullaryOp::ContractChecks` invocation.
    fn contract_checks(_ecx: &InterpCx<'tcx, Self>) -> InterpResult<'tcx, bool>;

    /// Called when the interpreter encounters a `StatementKind::ConstEvalCounter` instruction.
    /// You can use this to detect long or endlessly running programs.
    #[inline]
    fn increment_const_eval_counter(_ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx> {
        interp_ok(())
    }

    /// Called before a global allocation is accessed.
    /// `def_id` is `Some` if this is the "lazy" allocation of a static.
    #[inline]
    fn before_access_global(
        _tcx: TyCtxtAt<'tcx>,
        _machine: &Self,
        _alloc_id: AllocId,
        _allocation: ConstAllocation<'tcx>,
        _static_def_id: Option<DefId>,
        _is_write: bool,
    ) -> InterpResult<'tcx> {
        interp_ok(())
    }

    /// Return the `AllocId` for the given thread-local static in the current thread.
    fn thread_local_static_pointer(
        _ecx: &mut InterpCx<'tcx, Self>,
        def_id: DefId,
    ) -> InterpResult<'tcx, Pointer<Self::Provenance>> {
        // By default, thread-local statics are not supported.
        throw_unsup!(ThreadLocalStatic(def_id))
    }

    /// Return the `AllocId` for the given `extern static`.
    fn extern_static_pointer(
        ecx: &InterpCx<'tcx, Self>,
        def_id: DefId,
    ) -> InterpResult<'tcx, Pointer<Self::Provenance>>;

    /// "Int-to-pointer cast"
    fn ptr_from_addr_cast(
        ecx: &InterpCx<'tcx, Self>,
        addr: u64,
    ) -> InterpResult<'tcx, Pointer<Option<Self::Provenance>>>;

    /// Marks a pointer as exposed, allowing its provenance
    /// to be recovered. "Pointer-to-int cast"
    fn expose_provenance(
        ecx: &InterpCx<'tcx, Self>,
        provenance: Self::Provenance,
    ) -> InterpResult<'tcx>;

    /// Convert a pointer with provenance into an allocation-offset pair and extra provenance info.
    /// `size` says how many bytes of memory are expected at that pointer. The *sign* of `size` can
    /// be used to disambiguate situations where a wildcard pointer sits right in between two
    /// allocations.
    ///
    /// If `ptr.provenance.get_alloc_id()` is `Some(p)`, the returned `AllocId` must be `p`.
    /// The resulting `AllocId` will just be used for that one step and then forgotten again
    /// (i.e., we'll never turn the data returned here back into a `Pointer` that might be
    /// stored in machine state).
    ///
    /// When this fails, that means the pointer does not point to a live allocation.
    fn ptr_get_alloc(
        ecx: &InterpCx<'tcx, Self>,
        ptr: Pointer<Self::Provenance>,
        size: i64,
    ) -> Option<(AllocId, Size, Self::ProvenanceExtra)>;

    /// Return a "root" pointer for the given allocation: the one that is used for direct
    /// accesses to this static/const/fn allocation, or the one returned from the heap allocator.
    ///
    /// Not called on `extern` or thread-local statics (those use the methods above).
    ///
    /// `kind` is the kind of the allocation the pointer points to; it can be `None` when
    /// it's a global and `GLOBAL_KIND` is `None`.
    fn adjust_alloc_root_pointer(
        ecx: &InterpCx<'tcx, Self>,
        ptr: Pointer,
        kind: Option<MemoryKind<Self::MemoryKind>>,
    ) -> InterpResult<'tcx, Pointer<Self::Provenance>>;

    /// Called to adjust global allocations to the Provenance and AllocExtra of this machine.
    ///
    /// If `alloc` contains pointers, then they are all pointing to globals.
    ///
    /// This should avoid copying if no work has to be done! If this returns an owned
    /// allocation (because a copy had to be done to adjust things), machine memory will
    /// cache the result. (This relies on `AllocMap::get_or` being able to add the
    /// owned allocation to the map even when the map is shared.)
    fn adjust_global_allocation<'b>(
        ecx: &InterpCx<'tcx, Self>,
        id: AllocId,
        alloc: &'b Allocation,
    ) -> InterpResult<'tcx, Cow<'b, Allocation<Self::Provenance, Self::AllocExtra, Self::Bytes>>>;

    /// Initialize the extra state of an allocation local to this machine.
    ///
    /// This is guaranteed to be called exactly once on all allocations local to this machine.
    /// It will not be called automatically for global allocations; `adjust_global_allocation`
    /// has to do that itself if that is desired.
    fn init_local_allocation(
        ecx: &InterpCx<'tcx, Self>,
        id: AllocId,
        kind: MemoryKind<Self::MemoryKind>,
        size: Size,
        align: Align,
    ) -> InterpResult<'tcx, Self::AllocExtra>;

    /// Hook for performing extra checks on a memory read access.
    /// `ptr` will always be a pointer with the provenance in `prov` pointing to the beginning of
    /// `range`.
    ///
    /// This will *not* be called during validation!
    ///
    /// Takes read-only access to the allocation so we can keep all the memory read
    /// operations take `&self`. Use a `RefCell` in `AllocExtra` if you
    /// need to mutate.
    ///
    /// This is not invoked for ZST accesses, as no read actually happens.
    #[inline(always)]
    fn before_memory_read(
        _tcx: TyCtxtAt<'tcx>,
        _machine: &Self,
        _alloc_extra: &Self::AllocExtra,
        _ptr: Pointer<Option<Self::Provenance>>,
        _prov: (AllocId, Self::ProvenanceExtra),
        _range: AllocRange,
    ) -> InterpResult<'tcx> {
        interp_ok(())
    }

    /// Hook for performing extra checks on any memory read access,
    /// that involves an allocation, even ZST reads.
    ///
    /// This will *not* be called during validation!
    ///
    /// Used to prevent statics from self-initializing by reading from their own memory
    /// as it is being initialized.
    fn before_alloc_access(
        _tcx: TyCtxtAt<'tcx>,
        _machine: &Self,
        _alloc_id: AllocId,
    ) -> InterpResult<'tcx> {
        interp_ok(())
    }

    /// Hook for performing extra checks on a memory write access.
    /// This is not invoked for ZST accesses, as no write actually happens.
    /// `ptr` will always be a pointer with the provenance in `prov` pointing to the beginning of
    /// `range`.
    #[inline(always)]
    fn before_memory_write(
        _tcx: TyCtxtAt<'tcx>,
        _machine: &mut Self,
        _alloc_extra: &mut Self::AllocExtra,
        _ptr: Pointer<Option<Self::Provenance>>,
        _prov: (AllocId, Self::ProvenanceExtra),
        _range: AllocRange,
    ) -> InterpResult<'tcx> {
        interp_ok(())
    }

    /// Hook for performing extra operations on a memory deallocation.
    /// `ptr` will always be a pointer with the provenance in `prov` pointing to the beginning of
    /// the allocation.
    #[inline(always)]
    fn before_memory_deallocation(
        _tcx: TyCtxtAt<'tcx>,
        _machine: &mut Self,
        _alloc_extra: &mut Self::AllocExtra,
        _ptr: Pointer<Option<Self::Provenance>>,
        _prov: (AllocId, Self::ProvenanceExtra),
        _size: Size,
        _align: Align,
        _kind: MemoryKind<Self::MemoryKind>,
    ) -> InterpResult<'tcx> {
        interp_ok(())
    }

    /// Executes a retagging operation for a single pointer.
    /// Returns the possibly adjusted pointer.
    #[inline]
    fn retag_ptr_value(
        _ecx: &mut InterpCx<'tcx, Self>,
        _kind: mir::RetagKind,
        val: &ImmTy<'tcx, Self::Provenance>,
    ) -> InterpResult<'tcx, ImmTy<'tcx, Self::Provenance>> {
        // By default, retagging is a no-op: the value is returned unchanged.
        interp_ok(val.clone())
    }

    /// Executes a retagging operation on a compound value.
    /// Replaces all pointers stored in the given place.
    #[inline]
    fn retag_place_contents(
        _ecx: &mut InterpCx<'tcx, Self>,
        _kind: mir::RetagKind,
        _place: &PlaceTy<'tcx, Self::Provenance>,
    ) -> InterpResult<'tcx> {
        interp_ok(())
    }

    /// Called on places used for in-place function argument and return value handling.
    ///
    /// These places need to be protected to make sure the program cannot tell whether the
    /// argument/return value was actually copied or passed in-place.
    fn protect_in_place_function_argument(
        ecx: &mut InterpCx<'tcx, Self>,
        mplace: &MPlaceTy<'tcx, Self::Provenance>,
    ) -> InterpResult<'tcx> {
        // Without an aliasing model, all we can do is put `Uninit` into the place.
        // Conveniently this also ensures that the place actually points to suitable memory.
        ecx.write_uninit(mplace)
    }

    /// Called immediately before a new stack frame gets pushed.
    fn init_frame(
        ecx: &mut InterpCx<'tcx, Self>,
        frame: Frame<'tcx, Self::Provenance>,
    ) -> InterpResult<'tcx, Frame<'tcx, Self::Provenance, Self::FrameExtra>>;

    /// Borrow the current thread's stack.
    fn stack<'a>(
        ecx: &'a InterpCx<'tcx, Self>,
    ) -> &'a [Frame<'tcx, Self::Provenance, Self::FrameExtra>];

    /// Mutably borrow the current thread's stack.
    fn stack_mut<'a>(
        ecx: &'a mut InterpCx<'tcx, Self>,
    ) -> &'a mut Vec<Frame<'tcx, Self::Provenance, Self::FrameExtra>>;

    /// Called immediately after a stack frame got pushed and its locals got initialized.
    fn after_stack_push(_ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx> {
        interp_ok(())
    }

    /// Called just before the frame is removed from the stack (followed by return value copy and
    /// local cleanup).
    fn before_stack_pop(_ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx> {
        interp_ok(())
    }

    /// Called immediately after a stack frame got popped, but before jumping back to the caller.
    /// The `locals` have already been destroyed!
    #[inline(always)]
    fn after_stack_pop(
        _ecx: &mut InterpCx<'tcx, Self>,
        _frame: Frame<'tcx, Self::Provenance, Self::FrameExtra>,
        unwinding: bool,
    ) -> InterpResult<'tcx, ReturnAction> {
        // By default, we do not support unwinding from panics
        assert!(!unwinding);
        interp_ok(ReturnAction::Normal)
    }

    /// Called immediately after an "immediate" local variable is read in a given frame
    /// (i.e., this is called for reads that do not end up accessing addressable memory).
    #[inline(always)]
    fn after_local_read(
        _ecx: &InterpCx<'tcx, Self>,
        _frame: &Frame<'tcx, Self::Provenance, Self::FrameExtra>,
        _local: mir::Local,
    ) -> InterpResult<'tcx> {
        interp_ok(())
    }

    /// Called immediately after an "immediate" local variable is assigned a new value
    /// (i.e., this is called for writes that do not end up in memory).
    /// `storage_live` indicates whether this is the initial write upon `StorageLive`.
    #[inline(always)]
    fn after_local_write(
        _ecx: &mut InterpCx<'tcx, Self>,
        _local: mir::Local,
        _storage_live: bool,
    ) -> InterpResult<'tcx> {
        interp_ok(())
    }

    /// Called immediately after actual memory was allocated for a local
    /// but before the local's stack frame is updated to point to that memory.
    #[inline(always)]
    fn after_local_moved_to_memory(
        _ecx: &mut InterpCx<'tcx, Self>,
        _local: mir::Local,
        _mplace: &MPlaceTy<'tcx, Self::Provenance>,
    ) -> InterpResult<'tcx> {
        interp_ok(())
    }

    /// Evaluate the given constant. The `eval` function will do all the required evaluation,
    /// but this hook has the chance to do some pre/postprocessing.
    #[inline(always)]
    fn eval_mir_constant<F>(
        ecx: &InterpCx<'tcx, Self>,
        val: mir::Const<'tcx>,
        span: Span,
        layout: Option<TyAndLayout<'tcx>>,
        eval: F,
    ) -> InterpResult<'tcx, OpTy<'tcx, Self::Provenance>>
    where
        F: Fn(
            &InterpCx<'tcx, Self>,
            mir::Const<'tcx>,
            Span,
            Option<TyAndLayout<'tcx>>,
        ) -> InterpResult<'tcx, OpTy<'tcx, Self::Provenance>>,
    {
        // Default: no pre/postprocessing, just evaluate.
        eval(ecx, val, span, layout)
    }

    /// Returns the salt to be used for a deduplicated global allocation.
    /// If the allocation is for a function, the instance is provided as well
    /// (this lets Miri ensure unique addresses for some functions).
    fn get_global_alloc_salt(
        ecx: &InterpCx<'tcx, Self>,
        instance: Option<ty::Instance<'tcx>>,
    ) -> usize;

    /// Returns the (possibly cached) union data range for `ty`, computing it via
    /// `compute_range` when no cached value is available.
    fn cached_union_data_range<'e>(
        _ecx: &'e mut InterpCx<'tcx, Self>,
        _ty: Ty<'tcx>,
        compute_range: impl FnOnce() -> RangeSet,
    ) -> Cow<'e, RangeSet> {
        // Default to no caching.
        Cow::Owned(compute_range())
    }

    /// Compute the value passed to the constructors of the `AllocBytes` type for
    /// abstract machine allocations.
    fn get_default_alloc_params(&self) -> <Self::Bytes as AllocBytes>::AllocParams;

    /// Allows enabling/disabling tracing calls from within `rustc_const_eval` at compile time, by
    /// delegating the entering of [tracing::Span]s to implementors of the [Machine] trait. The
    /// default implementation corresponds to tracing being disabled, meaning the tracing calls will
    /// supposedly be optimized out completely. To enable tracing, override this trait method and
    /// return `span.entered()`. Also see [crate::enter_trace_span].
    #[must_use]
    #[inline(always)]
    fn enter_trace_span(_span: impl FnOnce() -> tracing::Span) -> impl EnteredTraceSpan {
        // `()` implements `EnteredTraceSpan` as the "tracing disabled" no-op.
        ()
    }
}
643
/// A lot of the flexibility above is just needed for `Miri`, but all "compile-time" machines
/// (CTFE and ConstProp) use the same instance. Here, we share that code.
pub macro compile_time_machine(<$tcx: lifetime>) {
    type Provenance = CtfeProvenance;
    type ProvenanceExtra = bool; // the "immutable" flag

    // The never type: compile-time machines define no extra function-pointer values,
    // so `call_extra_fn` below can rely on this being uninhabited.
    type ExtraFnVal = !;

    type MemoryKind = $crate::const_eval::MemoryKind;
    type MemoryMap =
        rustc_data_structures::fx::FxIndexMap<AllocId, (MemoryKind<Self::MemoryKind>, Allocation)>;
    const GLOBAL_KIND: Option<Self::MemoryKind> = None; // no copying of globals from `tcx` to machine memory

    // No per-allocation or per-frame extra state is needed at compile time.
    type AllocExtra = ();
    type FrameExtra = ();
    type Bytes = Box<[u8]>;

    #[inline(always)]
    fn ignore_optional_overflow_checks(_ecx: &InterpCx<$tcx, Self>) -> bool {
        false
    }

    #[inline(always)]
    fn unwind_terminate(
        _ecx: &mut InterpCx<$tcx, Self>,
        _reason: mir::UnwindTerminateReason,
    ) -> InterpResult<$tcx> {
        unreachable!("unwinding cannot happen during compile-time evaluation")
    }

    #[inline(always)]
    fn check_fn_target_features(
        _ecx: &InterpCx<$tcx, Self>,
        _instance: ty::Instance<$tcx>,
    ) -> InterpResult<$tcx> {
        // For now we don't do any checking here. We can't use `tcx.sess` because that can differ
        // between crates, and we need to ensure that const-eval always behaves the same.
        interp_ok(())
    }

    #[inline(always)]
    fn call_extra_fn(
        _ecx: &mut InterpCx<$tcx, Self>,
        fn_val: !,
        _abi: &FnAbi<$tcx, Ty<$tcx>>,
        _args: &[FnArg<$tcx>],
        _destination: &PlaceTy<$tcx, Self::Provenance>,
        _target: Option<mir::BasicBlock>,
        _unwind: mir::UnwindAction,
    ) -> InterpResult<$tcx> {
        // `fn_val` has type `!`, so this can never actually be reached.
        match fn_val {}
    }

    #[inline(always)]
    fn ub_checks(_ecx: &InterpCx<$tcx, Self>) -> InterpResult<$tcx, bool> {
        // We can't look at `tcx.sess` here as that can differ across crates, which can lead to
        // unsound differences in evaluating the same constant at different instantiation sites.
        interp_ok(true)
    }

    #[inline(always)]
    fn contract_checks(_ecx: &InterpCx<$tcx, Self>) -> InterpResult<$tcx, bool> {
        // We can't look at `tcx.sess` here as that can differ across crates, which can lead to
        // unsound differences in evaluating the same constant at different instantiation sites.
        interp_ok(true)
    }

    #[inline(always)]
    fn adjust_global_allocation<'b>(
        _ecx: &InterpCx<$tcx, Self>,
        _id: AllocId,
        alloc: &'b Allocation,
    ) -> InterpResult<$tcx, Cow<'b, Allocation<Self::Provenance>>> {
        // Overwrite default implementation: no need to adjust anything.
        interp_ok(Cow::Borrowed(alloc))
    }

    fn init_local_allocation(
        _ecx: &InterpCx<$tcx, Self>,
        _id: AllocId,
        _kind: MemoryKind<Self::MemoryKind>,
        _size: Size,
        _align: Align,
    ) -> InterpResult<$tcx, Self::AllocExtra> {
        // `AllocExtra` is `()` here, so there is nothing to initialize.
        interp_ok(())
    }

    fn extern_static_pointer(
        ecx: &InterpCx<$tcx, Self>,
        def_id: DefId,
    ) -> InterpResult<$tcx, Pointer> {
        // Use the `AllocId` associated with the `DefId`. Any actual *access* will fail.
        interp_ok(Pointer::new(ecx.tcx.reserve_and_set_static_alloc(def_id).into(), Size::ZERO))
    }

    #[inline(always)]
    fn adjust_alloc_root_pointer(
        _ecx: &InterpCx<$tcx, Self>,
        ptr: Pointer<CtfeProvenance>,
        _kind: Option<MemoryKind<Self::MemoryKind>>,
    ) -> InterpResult<$tcx, Pointer<CtfeProvenance>> {
        // No adjustment needed at compile time; pass the pointer through unchanged.
        interp_ok(ptr)
    }

    #[inline(always)]
    fn ptr_from_addr_cast(
        _ecx: &InterpCx<$tcx, Self>,
        addr: u64,
    ) -> InterpResult<$tcx, Pointer<Option<CtfeProvenance>>> {
        // Allow these casts, but make the pointer not dereferenceable.
        // (I.e., they behave like transmutation.)
        // This is correct because no pointers can ever be exposed in compile-time evaluation.
        interp_ok(Pointer::without_provenance(addr))
    }

    #[inline(always)]
    fn ptr_get_alloc(
        _ecx: &InterpCx<$tcx, Self>,
        ptr: Pointer<CtfeProvenance>,
        _size: i64,
    ) -> Option<(AllocId, Size, Self::ProvenanceExtra)> {
        // CTFE pointers always carry provenance, so this lookup cannot fail.
        let (prov, offset) = ptr.prov_and_relative_offset();
        Some((prov.alloc_id(), offset, prov.immutable()))
    }

    #[inline(always)]
    fn get_global_alloc_salt(
        _ecx: &InterpCx<$tcx, Self>,
        _instance: Option<ty::Instance<$tcx>>,
    ) -> usize {
        // A single fixed salt: CTFE deduplicates all equal global allocations.
        CTFE_ALLOC_SALT
    }
}