// rustc_const_eval/interpret/machine.rs
1//! This module contains everything needed to instantiate an interpreter.
2//! This separation exists to ensure that no fancy miri features like
3//! interpreting common C functions leak into CTFE.
4
5use std::borrow::{Borrow, Cow};
6use std::fmt::Debug;
7use std::hash::Hash;
8
9use rustc_abi::{Align, Size};
10use rustc_apfloat::{Float, FloatConvert};
11use rustc_middle::query::TyCtxtAt;
12use rustc_middle::ty::Ty;
13use rustc_middle::ty::layout::TyAndLayout;
14use rustc_middle::{mir, ty};
15use rustc_span::Span;
16use rustc_span::def_id::DefId;
17use rustc_target::callconv::FnAbi;
18
19use super::{
20 AllocBytes, AllocId, AllocKind, AllocRange, Allocation, CTFE_ALLOC_SALT, ConstAllocation,
21 CtfeProvenance, FnArg, Frame, ImmTy, InterpCx, InterpResult, MPlaceTy, MemoryKind,
22 Misalignment, OpTy, PlaceTy, Pointer, Provenance, RangeSet, interp_ok, throw_unsup,
23};
24
/// Data returned by [`Machine::after_stack_pop`], and consumed by
/// [`InterpCx::return_from_current_stack_frame`] to determine what actions should be done when
/// returning from a stack frame.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum ReturnAction {
    /// No special handling is needed: we either return normally or unwind,
    /// as dictated by the terminator of the function we are leaving.
    Normal,

    /// Do *not* jump to the return/unwind address; the callback has already
    /// taken care of everything.
    NoJump,

    /// Returned by [`InterpCx::pop_stack_frame_raw`] when no cleanup should be done.
    NoCleanup,
}
43
/// Whether this kind of memory is allowed to leak
pub trait MayLeak: Copy {
    /// Returns whether an allocation of this memory kind is allowed to leak.
    fn may_leak(self) -> bool;
}
48
/// The functionality needed by memory to manage its allocations
pub trait AllocMap<K: Hash + Eq, V> {
    /// Tests if the map contains the given key.
    /// Deliberately takes `&mut` because that is sufficient, and some implementations
    /// can be more efficient in that case (e.g. by using `RefCell::get_mut`).
    fn contains_key<Q: ?Sized + Hash + Eq>(&mut self, k: &Q) -> bool
    where
        K: Borrow<Q>;

    /// Shared-reference variant of [`AllocMap::contains_key`]. Callers should prefer
    /// `contains_key` whenever they can call it, as it may be more efficient; this
    /// method exists for callers that only hold a shared reference (which might make
    /// it slightly slower, e.g. if the data is stored inside a `RefCell`).
    fn contains_key_ref<Q: ?Sized + Hash + Eq>(&self, k: &Q) -> bool
    where
        K: Borrow<Q>;

    /// Inserts a new entry into the map.
    fn insert(&mut self, k: K, v: V) -> Option<V>;

    /// Removes an entry from the map.
    fn remove<Q: ?Sized + Hash + Eq>(&mut self, k: &Q) -> Option<V>
    where
        K: Borrow<Q>;

    /// Returns data based on the keys and values in the map.
    fn filter_map_collect<T>(&self, f: impl FnMut(&K, &V) -> Option<T>) -> Vec<T>;

    /// Returns a reference to entry `k`. If no such entry exists, call
    /// `vacant` and either forward its error, or add its result to the map
    /// and return a reference to *that*.
    fn get_or<E>(&self, k: K, vacant: impl FnOnce() -> Result<V, E>) -> Result<&V, E>;

    /// Returns a mutable reference to entry `k`. If no such entry exists, call
    /// `vacant` and either forward its error, or add its result to the map
    /// and return a reference to *that*.
    fn get_mut_or<E>(&mut self, k: K, vacant: impl FnOnce() -> Result<V, E>) -> Result<&mut V, E>;

    /// Read-only lookup; yields `None` for absent keys.
    fn get(&self, k: K) -> Option<&V> {
        // Piggy-back on `get_or`: the vacant closure simply signals "not found".
        let looked_up: Result<&V, ()> = self.get_or(k, || Err(()));
        looked_up.ok()
    }

    /// Mutable lookup; yields `None` for absent keys.
    fn get_mut(&mut self, k: K) -> Option<&mut V> {
        match self.get_mut_or(k, || Err(())) {
            Ok(value) => Some(value),
            Err(()) => None,
        }
    }
}
97
/// Methods of this trait signify a point where CTFE evaluation would fail
/// and some use-case-dependent behaviour can instead be applied.
pub trait Machine<'tcx>: Sized {
    /// Additional memory kinds a machine wishes to distinguish from the builtin ones
    type MemoryKind: Debug + std::fmt::Display + MayLeak + Eq + 'static;

    /// Pointers are "tagged" with provenance information; typically the `AllocId` they belong to.
    type Provenance: Provenance + Eq + Hash + 'static;

    /// When getting the AllocId of a pointer, some extra data is also obtained from the provenance
    /// that is passed to memory access hooks so they can do things with it.
    type ProvenanceExtra: Copy + 'static;

    /// Machines can define extra (non-instance) things that represent values of function pointers.
    /// For example, Miri uses this to return a function pointer from `dlsym`
    /// that can later be called to execute the right thing.
    type ExtraFnVal: Debug + Copy;

    /// Extra data stored in every call frame.
    type FrameExtra;

    /// Extra data stored in every allocation.
    type AllocExtra: Debug + Clone + 'tcx;

    /// Type for the bytes of the allocation.
    type Bytes: AllocBytes + 'static;

    /// Memory's allocation map
    type MemoryMap: AllocMap<
            AllocId,
            (
                MemoryKind<Self::MemoryKind>,
                Allocation<Self::Provenance, Self::AllocExtra, Self::Bytes>,
            ),
        > + Default
        + Clone;

    /// The memory kind to use for copied global memory (held in `tcx`) --
    /// or None if such memory should not be mutated and thus any such attempt will cause
    /// a `ModifiedStatic` error to be raised.
    /// Statics are copied under two circumstances: When they are mutated, and when
    /// `adjust_global_allocation` (see below) returns an owned allocation
    /// that is added to the memory so that the work is not done twice.
    const GLOBAL_KIND: Option<Self::MemoryKind>;

    /// Should the machine panic on allocation failures?
    const PANIC_ON_ALLOC_FAIL: bool;

    /// Determines whether `eval_mir_constant` can never fail because all required consts have
    /// already been checked before.
    const ALL_CONSTS_ARE_PRECHECKED: bool = true;

    /// Determines whether rustc_const_eval functions that make use of the [Machine] should make
    /// tracing calls (to the `tracing` library). By default this is `false`, meaning the tracing
    /// calls will supposedly be optimized out. This flag is set to `true` inside Miri, to allow
    /// tracing the interpretation steps, among other things.
    const TRACING_ENABLED: bool = false;

    /// Whether memory accesses should be alignment-checked.
    fn enforce_alignment(ecx: &InterpCx<'tcx, Self>) -> bool;

    /// Gives the machine a chance to detect more misalignment than the built-in checks would catch.
    /// The default implementation reports no extra misalignment.
    #[inline(always)]
    fn alignment_check(
        _ecx: &InterpCx<'tcx, Self>,
        _alloc_id: AllocId,
        _alloc_align: Align,
        _alloc_kind: AllocKind,
        _offset: Size,
        _align: Align,
    ) -> Option<Misalignment> {
        None
    }

    /// Whether to enforce the validity invariant for a specific layout.
    fn enforce_validity(ecx: &InterpCx<'tcx, Self>, layout: TyAndLayout<'tcx>) -> bool;
    /// Whether to enforce the validity invariant *recursively*.
    fn enforce_validity_recursively(
        _ecx: &InterpCx<'tcx, Self>,
        _layout: TyAndLayout<'tcx>,
    ) -> bool {
        false
    }

    /// Whether Assert(OverflowNeg) and Assert(Overflow) MIR terminators should actually
    /// check for overflow.
    fn ignore_optional_overflow_checks(_ecx: &InterpCx<'tcx, Self>) -> bool;

    /// Entry point for obtaining the MIR of anything that should get evaluated.
    /// So not just functions and shims, but also const/static initializers, anonymous
    /// constants, ...
    fn load_mir(
        ecx: &InterpCx<'tcx, Self>,
        instance: ty::InstanceKind<'tcx>,
    ) -> InterpResult<'tcx, &'tcx mir::Body<'tcx>> {
        interp_ok(ecx.tcx.instance_mir(instance))
    }

    /// Entry point to all function calls.
    ///
    /// Returns either the mir to use for the call, or `None` if execution should
    /// just proceed (which usually means this hook did all the work that the
    /// called function should usually have done). In the latter case, it is
    /// this hook's responsibility to advance the instruction pointer!
    /// (This is to support functions like `__rust_maybe_catch_panic` that neither find a MIR
    /// nor just jump to `ret`, but instead push their own stack frame.)
    /// Passing `dest` and `ret` in the same `Option` proved very annoying when only one of them
    /// was used.
    fn find_mir_or_eval_fn(
        ecx: &mut InterpCx<'tcx, Self>,
        instance: ty::Instance<'tcx>,
        abi: &FnAbi<'tcx, Ty<'tcx>>,
        args: &[FnArg<'tcx, Self::Provenance>],
        destination: &PlaceTy<'tcx, Self::Provenance>,
        target: Option<mir::BasicBlock>,
        unwind: mir::UnwindAction,
    ) -> InterpResult<'tcx, Option<(&'tcx mir::Body<'tcx>, ty::Instance<'tcx>)>>;

    /// Execute `fn_val`. It is the hook's responsibility to advance the instruction
    /// pointer as appropriate.
    fn call_extra_fn(
        ecx: &mut InterpCx<'tcx, Self>,
        fn_val: Self::ExtraFnVal,
        abi: &FnAbi<'tcx, Ty<'tcx>>,
        args: &[FnArg<'tcx, Self::Provenance>],
        destination: &PlaceTy<'tcx, Self::Provenance>,
        target: Option<mir::BasicBlock>,
        unwind: mir::UnwindAction,
    ) -> InterpResult<'tcx>;

    /// Directly process an intrinsic without pushing a stack frame. It is the hook's
    /// responsibility to advance the instruction pointer as appropriate.
    ///
    /// Returns `None` if the intrinsic was fully handled.
    /// Otherwise, returns an `Instance` of the function that implements the intrinsic.
    fn call_intrinsic(
        ecx: &mut InterpCx<'tcx, Self>,
        instance: ty::Instance<'tcx>,
        args: &[OpTy<'tcx, Self::Provenance>],
        destination: &PlaceTy<'tcx, Self::Provenance>,
        target: Option<mir::BasicBlock>,
        unwind: mir::UnwindAction,
    ) -> InterpResult<'tcx, Option<ty::Instance<'tcx>>>;

    /// Check whether the given function may be executed on the current machine, in terms of the
    /// target features it requires.
    fn check_fn_target_features(
        _ecx: &InterpCx<'tcx, Self>,
        _instance: ty::Instance<'tcx>,
    ) -> InterpResult<'tcx>;

    /// Called to evaluate `Assert` MIR terminators that trigger a panic.
    fn assert_panic(
        ecx: &mut InterpCx<'tcx, Self>,
        msg: &mir::AssertMessage<'tcx>,
        unwind: mir::UnwindAction,
    ) -> InterpResult<'tcx>;

    /// Called to trigger a non-unwinding panic.
    fn panic_nounwind(_ecx: &mut InterpCx<'tcx, Self>, msg: &str) -> InterpResult<'tcx>;

    /// Called when unwinding reached a state where execution should be terminated.
    fn unwind_terminate(
        ecx: &mut InterpCx<'tcx, Self>,
        reason: mir::UnwindTerminateReason,
    ) -> InterpResult<'tcx>;

    /// Called for all binary operations where the LHS has pointer type.
    ///
    /// Returns a (value, overflowed) pair if the operation succeeded
    fn binary_ptr_op(
        ecx: &InterpCx<'tcx, Self>,
        bin_op: mir::BinOp,
        left: &ImmTy<'tcx, Self::Provenance>,
        right: &ImmTy<'tcx, Self::Provenance>,
    ) -> InterpResult<'tcx, ImmTy<'tcx, Self::Provenance>>;

    /// Generate the NaN returned by a float operation, given the list of inputs.
    /// (This is all inputs, not just NaN inputs!)
    fn generate_nan<F1: Float + FloatConvert<F2>, F2: Float>(
        _ecx: &InterpCx<'tcx, Self>,
        _inputs: &[F1],
    ) -> F2 {
        // By default we always return the preferred NaN.
        F2::NAN
    }

    /// Apply non-determinism to float operations that do not return a precise result.
    /// The default is deterministic: the value is passed through unchanged.
    fn apply_float_nondet(
        _ecx: &mut InterpCx<'tcx, Self>,
        val: ImmTy<'tcx, Self::Provenance>,
    ) -> InterpResult<'tcx, ImmTy<'tcx, Self::Provenance>> {
        interp_ok(val)
    }

    /// Determines the result of `min`/`max` on floats when the arguments are equal.
    fn equal_float_min_max<F: Float>(_ecx: &InterpCx<'tcx, Self>, a: F, _b: F) -> F {
        // By default, we pick the left argument.
        a
    }

    /// Called before a basic block terminator is executed.
    #[inline]
    fn before_terminator(_ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx> {
        interp_ok(())
    }

    /// Determines the result of a `NullaryOp::UbChecks` invocation.
    fn ub_checks(_ecx: &InterpCx<'tcx, Self>) -> InterpResult<'tcx, bool>;

    /// Determines the result of a `NullaryOp::ContractChecks` invocation.
    fn contract_checks(_ecx: &InterpCx<'tcx, Self>) -> InterpResult<'tcx, bool>;

    /// Called when the interpreter encounters a `StatementKind::ConstEvalCounter` instruction.
    /// You can use this to detect long or endlessly running programs.
    #[inline]
    fn increment_const_eval_counter(_ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx> {
        interp_ok(())
    }

    /// Called before a global allocation is accessed.
    /// `def_id` is `Some` if this is the "lazy" allocation of a static.
    #[inline]
    fn before_access_global(
        _tcx: TyCtxtAt<'tcx>,
        _machine: &Self,
        _alloc_id: AllocId,
        _allocation: ConstAllocation<'tcx>,
        _static_def_id: Option<DefId>,
        _is_write: bool,
    ) -> InterpResult<'tcx> {
        interp_ok(())
    }

    /// Return the `AllocId` for the given thread-local static in the current thread.
    /// By default this is unsupported (thread-locals only make sense in Miri-like machines).
    fn thread_local_static_pointer(
        _ecx: &mut InterpCx<'tcx, Self>,
        def_id: DefId,
    ) -> InterpResult<'tcx, Pointer<Self::Provenance>> {
        throw_unsup!(ThreadLocalStatic(def_id))
    }

    /// Return the `AllocId` for the given `extern static`.
    fn extern_static_pointer(
        ecx: &InterpCx<'tcx, Self>,
        def_id: DefId,
    ) -> InterpResult<'tcx, Pointer<Self::Provenance>>;

    /// "Int-to-pointer cast"
    fn ptr_from_addr_cast(
        ecx: &InterpCx<'tcx, Self>,
        addr: u64,
    ) -> InterpResult<'tcx, Pointer<Option<Self::Provenance>>>;

    /// Marks a pointer as exposed, allowing its provenance
    /// to be recovered. "Pointer-to-int cast"
    fn expose_provenance(
        ecx: &InterpCx<'tcx, Self>,
        provenance: Self::Provenance,
    ) -> InterpResult<'tcx>;

    /// Convert a pointer with provenance into an allocation-offset pair and extra provenance info.
    /// `size` says how many bytes of memory are expected at that pointer. The *sign* of `size` can
    /// be used to disambiguate situations where a wildcard pointer sits right in between two
    /// allocations.
    ///
    /// If `ptr.provenance.get_alloc_id()` is `Some(p)`, the returned `AllocId` must be `p`.
    /// The resulting `AllocId` will just be used for that one step and then forgotten again
    /// (i.e., we'll never turn the data returned here back into a `Pointer` that might be
    /// stored in machine state).
    ///
    /// When this fails, that means the pointer does not point to a live allocation.
    fn ptr_get_alloc(
        ecx: &InterpCx<'tcx, Self>,
        ptr: Pointer<Self::Provenance>,
        size: i64,
    ) -> Option<(AllocId, Size, Self::ProvenanceExtra)>;

    /// Return a "root" pointer for the given allocation: the one that is used for direct
    /// accesses to this static/const/fn allocation, or the one returned from the heap allocator.
    ///
    /// Not called on `extern` or thread-local statics (those use the methods above).
    ///
    /// `kind` is the kind of the allocation the pointer points to; it can be `None` when
    /// it's a global and `GLOBAL_KIND` is `None`.
    fn adjust_alloc_root_pointer(
        ecx: &InterpCx<'tcx, Self>,
        ptr: Pointer,
        kind: Option<MemoryKind<Self::MemoryKind>>,
    ) -> InterpResult<'tcx, Pointer<Self::Provenance>>;

    /// Called to adjust global allocations to the Provenance and AllocExtra of this machine.
    ///
    /// If `alloc` contains pointers, then they are all pointing to globals.
    ///
    /// This should avoid copying if no work has to be done! If this returns an owned
    /// allocation (because a copy had to be done to adjust things), machine memory will
    /// cache the result. (This relies on `AllocMap::get_or` being able to add the
    /// owned allocation to the map even when the map is shared.)
    fn adjust_global_allocation<'b>(
        ecx: &InterpCx<'tcx, Self>,
        id: AllocId,
        alloc: &'b Allocation,
    ) -> InterpResult<'tcx, Cow<'b, Allocation<Self::Provenance, Self::AllocExtra, Self::Bytes>>>;

    /// Initialize the extra state of an allocation local to this machine.
    ///
    /// This is guaranteed to be called exactly once on all allocations local to this machine.
    /// It will not be called automatically for global allocations; `adjust_global_allocation`
    /// has to do that itself if that is desired.
    fn init_local_allocation(
        ecx: &InterpCx<'tcx, Self>,
        id: AllocId,
        kind: MemoryKind<Self::MemoryKind>,
        size: Size,
        align: Align,
    ) -> InterpResult<'tcx, Self::AllocExtra>;

    /// Hook for performing extra checks on a memory read access.
    /// `ptr` will always be a pointer with the provenance in `prov` pointing to the beginning of
    /// `range`.
    ///
    /// This will *not* be called during validation!
    ///
    /// Takes read-only access to the allocation so we can keep all the memory read
    /// operations take `&self`. Use a `RefCell` in `AllocExtra` if you
    /// need to mutate.
    ///
    /// This is not invoked for ZST accesses, as no read actually happens.
    #[inline(always)]
    fn before_memory_read(
        _tcx: TyCtxtAt<'tcx>,
        _machine: &Self,
        _alloc_extra: &Self::AllocExtra,
        _ptr: Pointer<Option<Self::Provenance>>,
        _prov: (AllocId, Self::ProvenanceExtra),
        _range: AllocRange,
    ) -> InterpResult<'tcx> {
        interp_ok(())
    }

    /// Hook for performing extra checks on any memory read access,
    /// that involves an allocation, even ZST reads.
    ///
    /// This will *not* be called during validation!
    ///
    /// Used to prevent statics from self-initializing by reading from their own memory
    /// as it is being initialized.
    fn before_alloc_access(
        _tcx: TyCtxtAt<'tcx>,
        _machine: &Self,
        _alloc_id: AllocId,
    ) -> InterpResult<'tcx> {
        interp_ok(())
    }

    /// Hook for performing extra checks on a memory write access.
    /// This is not invoked for ZST accesses, as no write actually happens.
    /// `ptr` will always be a pointer with the provenance in `prov` pointing to the beginning of
    /// `range`.
    #[inline(always)]
    fn before_memory_write(
        _tcx: TyCtxtAt<'tcx>,
        _machine: &mut Self,
        _alloc_extra: &mut Self::AllocExtra,
        _ptr: Pointer<Option<Self::Provenance>>,
        _prov: (AllocId, Self::ProvenanceExtra),
        _range: AllocRange,
    ) -> InterpResult<'tcx> {
        interp_ok(())
    }

    /// Hook for performing extra operations on a memory deallocation.
    /// `ptr` will always be a pointer with the provenance in `prov` pointing to the beginning of
    /// the allocation.
    #[inline(always)]
    fn before_memory_deallocation(
        _tcx: TyCtxtAt<'tcx>,
        _machine: &mut Self,
        _alloc_extra: &mut Self::AllocExtra,
        _ptr: Pointer<Option<Self::Provenance>>,
        _prov: (AllocId, Self::ProvenanceExtra),
        _size: Size,
        _align: Align,
        _kind: MemoryKind<Self::MemoryKind>,
    ) -> InterpResult<'tcx> {
        interp_ok(())
    }

    /// Executes a retagging operation for a single pointer.
    /// Returns the possibly adjusted pointer.
    /// By default this is a no-op that returns the value unchanged.
    #[inline]
    fn retag_ptr_value(
        _ecx: &mut InterpCx<'tcx, Self>,
        _kind: mir::RetagKind,
        val: &ImmTy<'tcx, Self::Provenance>,
    ) -> InterpResult<'tcx, ImmTy<'tcx, Self::Provenance>> {
        interp_ok(val.clone())
    }

    /// Executes a retagging operation on a compound value.
    /// Replaces all pointers stored in the given place.
    #[inline]
    fn retag_place_contents(
        _ecx: &mut InterpCx<'tcx, Self>,
        _kind: mir::RetagKind,
        _place: &PlaceTy<'tcx, Self::Provenance>,
    ) -> InterpResult<'tcx> {
        interp_ok(())
    }

    /// Called on places used for in-place function argument and return value handling.
    ///
    /// These places need to be protected to make sure the program cannot tell whether the
    /// argument/return value was actually copied or passed in-place.
    fn protect_in_place_function_argument(
        ecx: &mut InterpCx<'tcx, Self>,
        mplace: &MPlaceTy<'tcx, Self::Provenance>,
    ) -> InterpResult<'tcx> {
        // Without an aliasing model, all we can do is put `Uninit` into the place.
        // Conveniently this also ensures that the place actually points to suitable memory.
        ecx.write_uninit(mplace)
    }

    /// Called immediately before a new stack frame gets pushed.
    fn init_frame(
        ecx: &mut InterpCx<'tcx, Self>,
        frame: Frame<'tcx, Self::Provenance>,
    ) -> InterpResult<'tcx, Frame<'tcx, Self::Provenance, Self::FrameExtra>>;

    /// Borrow the current thread's stack.
    fn stack<'a>(
        ecx: &'a InterpCx<'tcx, Self>,
    ) -> &'a [Frame<'tcx, Self::Provenance, Self::FrameExtra>];

    /// Mutably borrow the current thread's stack.
    fn stack_mut<'a>(
        ecx: &'a mut InterpCx<'tcx, Self>,
    ) -> &'a mut Vec<Frame<'tcx, Self::Provenance, Self::FrameExtra>>;

    /// Called immediately after a stack frame got pushed and its locals got initialized.
    fn after_stack_push(_ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx> {
        interp_ok(())
    }

    /// Called just before the frame is removed from the stack (followed by return value copy and
    /// local cleanup).
    fn before_stack_pop(_ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx> {
        interp_ok(())
    }

    /// Called immediately after a stack frame got popped, but before jumping back to the caller.
    /// The `locals` have already been destroyed!
    #[inline(always)]
    fn after_stack_pop(
        _ecx: &mut InterpCx<'tcx, Self>,
        _frame: Frame<'tcx, Self::Provenance, Self::FrameExtra>,
        unwinding: bool,
    ) -> InterpResult<'tcx, ReturnAction> {
        // By default, we do not support unwinding from panics
        assert!(!unwinding);
        interp_ok(ReturnAction::Normal)
    }

    /// Called immediately after an "immediate" local variable is read in a given frame
    /// (i.e., this is called for reads that do not end up accessing addressable memory).
    #[inline(always)]
    fn after_local_read(
        _ecx: &InterpCx<'tcx, Self>,
        _frame: &Frame<'tcx, Self::Provenance, Self::FrameExtra>,
        _local: mir::Local,
    ) -> InterpResult<'tcx> {
        interp_ok(())
    }

    /// Called immediately after an "immediate" local variable is assigned a new value
    /// (i.e., this is called for writes that do not end up in memory).
    /// `storage_live` indicates whether this is the initial write upon `StorageLive`.
    #[inline(always)]
    fn after_local_write(
        _ecx: &mut InterpCx<'tcx, Self>,
        _local: mir::Local,
        _storage_live: bool,
    ) -> InterpResult<'tcx> {
        interp_ok(())
    }

    /// Called immediately after actual memory was allocated for a local
    /// but before the local's stack frame is updated to point to that memory.
    #[inline(always)]
    fn after_local_moved_to_memory(
        _ecx: &mut InterpCx<'tcx, Self>,
        _local: mir::Local,
        _mplace: &MPlaceTy<'tcx, Self::Provenance>,
    ) -> InterpResult<'tcx> {
        interp_ok(())
    }

    /// Evaluate the given constant. The `eval` function will do all the required evaluation,
    /// but this hook has the chance to do some pre/postprocessing.
    #[inline(always)]
    fn eval_mir_constant<F>(
        ecx: &InterpCx<'tcx, Self>,
        val: mir::Const<'tcx>,
        span: Span,
        layout: Option<TyAndLayout<'tcx>>,
        eval: F,
    ) -> InterpResult<'tcx, OpTy<'tcx, Self::Provenance>>
    where
        F: Fn(
            &InterpCx<'tcx, Self>,
            mir::Const<'tcx>,
            Span,
            Option<TyAndLayout<'tcx>>,
        ) -> InterpResult<'tcx, OpTy<'tcx, Self::Provenance>>,
    {
        eval(ecx, val, span, layout)
    }

    /// Returns the salt to be used for a deduplicated global allocation.
    /// If the allocation is for a function, the instance is provided as well
    /// (this lets Miri ensure unique addresses for some functions).
    fn get_global_alloc_salt(
        ecx: &InterpCx<'tcx, Self>,
        instance: Option<ty::Instance<'tcx>>,
    ) -> usize;

    /// Return the (possibly cached) union data range for `ty`, computing it via
    /// `compute_range` when needed.
    fn cached_union_data_range<'e>(
        _ecx: &'e mut InterpCx<'tcx, Self>,
        _ty: Ty<'tcx>,
        compute_range: impl FnOnce() -> RangeSet,
    ) -> Cow<'e, RangeSet> {
        // Default to no caching.
        Cow::Owned(compute_range())
    }

    /// Compute the value passed to the constructors of the `AllocBytes` type for
    /// abstract machine allocations.
    fn get_default_alloc_params(&self) -> <Self::Bytes as AllocBytes>::AllocParams;
}
638
/// A lot of the flexibility above is just needed for `Miri`, but all "compile-time" machines
/// (CTFE and ConstProp) use the same instance. Here, we share that code.
pub macro compile_time_machine(<$tcx: lifetime>) {
    type Provenance = CtfeProvenance;
    type ProvenanceExtra = bool; // the "immutable" flag

    // Compile-time machines have no machine-specific function values,
    // so this associated type is uninhabited.
    type ExtraFnVal = !;

    type MemoryMap =
        rustc_data_structures::fx::FxIndexMap<AllocId, (MemoryKind<Self::MemoryKind>, Allocation)>;
    const GLOBAL_KIND: Option<Self::MemoryKind> = None; // no copying of globals from `tcx` to machine memory

    // No extra per-allocation or per-frame state is needed at compile time.
    type AllocExtra = ();
    type FrameExtra = ();
    type Bytes = Box<[u8]>;

    #[inline(always)]
    fn ignore_optional_overflow_checks(_ecx: &InterpCx<$tcx, Self>) -> bool {
        false
    }

    #[inline(always)]
    fn unwind_terminate(
        _ecx: &mut InterpCx<$tcx, Self>,
        _reason: mir::UnwindTerminateReason,
    ) -> InterpResult<$tcx> {
        unreachable!("unwinding cannot happen during compile-time evaluation")
    }

    #[inline(always)]
    fn check_fn_target_features(
        _ecx: &InterpCx<$tcx, Self>,
        _instance: ty::Instance<$tcx>,
    ) -> InterpResult<$tcx> {
        // For now we don't do any checking here. We can't use `tcx.sess` because that can differ
        // between crates, and we need to ensure that const-eval always behaves the same.
        interp_ok(())
    }

    #[inline(always)]
    fn call_extra_fn(
        _ecx: &mut InterpCx<$tcx, Self>,
        fn_val: !,
        _abi: &FnAbi<$tcx, Ty<$tcx>>,
        _args: &[FnArg<$tcx>],
        _destination: &PlaceTy<$tcx, Self::Provenance>,
        _target: Option<mir::BasicBlock>,
        _unwind: mir::UnwindAction,
    ) -> InterpResult<$tcx> {
        // `ExtraFnVal = !` is uninhabited, so this can never actually be called.
        match fn_val {}
    }

    #[inline(always)]
    fn ub_checks(_ecx: &InterpCx<$tcx, Self>) -> InterpResult<$tcx, bool> {
        // We can't look at `tcx.sess` here as that can differ across crates, which can lead to
        // unsound differences in evaluating the same constant at different instantiation sites.
        interp_ok(true)
    }

    #[inline(always)]
    fn contract_checks(_ecx: &InterpCx<$tcx, Self>) -> InterpResult<$tcx, bool> {
        // We can't look at `tcx.sess` here as that can differ across crates, which can lead to
        // unsound differences in evaluating the same constant at different instantiation sites.
        interp_ok(true)
    }

    #[inline(always)]
    fn adjust_global_allocation<'b>(
        _ecx: &InterpCx<$tcx, Self>,
        _id: AllocId,
        alloc: &'b Allocation,
    ) -> InterpResult<$tcx, Cow<'b, Allocation<Self::Provenance>>> {
        // Overwrite default implementation: no need to adjust anything.
        interp_ok(Cow::Borrowed(alloc))
    }

    fn init_local_allocation(
        _ecx: &InterpCx<$tcx, Self>,
        _id: AllocId,
        _kind: MemoryKind<Self::MemoryKind>,
        _size: Size,
        _align: Align,
    ) -> InterpResult<$tcx, Self::AllocExtra> {
        // `AllocExtra = ()`: nothing to initialize.
        interp_ok(())
    }

    fn extern_static_pointer(
        ecx: &InterpCx<$tcx, Self>,
        def_id: DefId,
    ) -> InterpResult<$tcx, Pointer> {
        // Use the `AllocId` associated with the `DefId`. Any actual *access* will fail.
        interp_ok(Pointer::new(ecx.tcx.reserve_and_set_static_alloc(def_id).into(), Size::ZERO))
    }

    #[inline(always)]
    fn adjust_alloc_root_pointer(
        _ecx: &InterpCx<$tcx, Self>,
        ptr: Pointer<CtfeProvenance>,
        _kind: Option<MemoryKind<Self::MemoryKind>>,
    ) -> InterpResult<$tcx, Pointer<CtfeProvenance>> {
        // No per-machine pointer adjustment is needed at compile time.
        interp_ok(ptr)
    }

    #[inline(always)]
    fn ptr_from_addr_cast(
        _ecx: &InterpCx<$tcx, Self>,
        addr: u64,
    ) -> InterpResult<$tcx, Pointer<Option<CtfeProvenance>>> {
        // Allow these casts, but make the pointer not dereferenceable.
        // (I.e., they behave like transmutation.)
        // This is correct because no pointers can ever be exposed in compile-time evaluation.
        interp_ok(Pointer::from_addr_invalid(addr))
    }

    #[inline(always)]
    fn ptr_get_alloc(
        _ecx: &InterpCx<$tcx, Self>,
        ptr: Pointer<CtfeProvenance>,
        _size: i64,
    ) -> Option<(AllocId, Size, Self::ProvenanceExtra)> {
        // We know `offset` is relative to the allocation, so we can use `into_parts`.
        let (prov, offset) = ptr.into_parts();
        Some((prov.alloc_id(), offset, prov.immutable()))
    }

    #[inline(always)]
    fn get_global_alloc_salt(
        _ecx: &InterpCx<$tcx, Self>,
        _instance: Option<ty::Instance<$tcx>>,
    ) -> usize {
        // All CTFE machines share one salt, so equal globals deduplicate to one allocation.
        CTFE_ALLOC_SALT
    }
}