use std::any::Any;
use std::borrow::Cow;
use std::cell::{Cell, RefCell};
use std::collections::hash_map::Entry;
use std::path::Path;
use std::rc::Rc;
use std::{fmt, process};

use rand::rngs::StdRng;
use rand::{Rng, SeedableRng};
use rustc_abi::{Align, ExternAbi, Size};
use rustc_apfloat::{Float, FloatConvert};
use rustc_attr_data_structures::InlineAttr;
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
#[allow(unused)]
use rustc_data_structures::static_assert_size;
use rustc_middle::mir;
use rustc_middle::query::TyCtxtAt;
use rustc_middle::ty::layout::{
    HasTyCtxt, HasTypingEnv, LayoutCx, LayoutError, LayoutOf, TyAndLayout,
};
use rustc_middle::ty::{self, Instance, Ty, TyCtxt};
use rustc_session::config::InliningThreshold;
use rustc_span::def_id::{CrateNum, DefId};
use rustc_span::{Span, SpanData, Symbol};
use rustc_target::callconv::FnAbi;

use crate::alloc_addresses::EvalContextExt;
use crate::concurrency::cpu_affinity::{self, CpuAffinityMask};
use crate::concurrency::data_race::{self, NaReadType, NaWriteType};
use crate::concurrency::{AllocDataRaceHandler, GenmcCtx, GlobalDataRaceHandler, weak_memory};
use crate::*;

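/// The first real-time signal number Miri reports; `signal(7)` requires this to lie
/// between 32 and 64, with 34 or 35 being typical values.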
pub const SIGRTMIN: i32 = 34;

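/// The last real-time signal number Miri reports, chosen so that
/// `SIGRTMAX - SIGRTMIN >= 8` (the minimum POSIX requires).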
pub const SIGRTMAX: i32 = 42;

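/// Anonymous (constant) allocations are spread over this many base addresses; see the
/// `salt` values produced in `init_frame` and `get_global_alloc_salt` below.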
const ADDRS_PER_ANON_GLOBAL: usize = 32;

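/// Extra data stored with each stack frame.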
pub struct FrameExtra<'tcx> {
    /// Borrow-tracker (Stacked/Tree Borrows) state for this frame, if enabled.
    pub borrow_tracker: Option<borrow_tracker::FrameState>,

    /// If this frame is the target of `catch_unwind`, this holds the data needed to
    /// deliver a caught panic.
    pub catch_unwind: Option<CatchUnwindData<'tcx>>,

    /// If `measureme` profiling is enabled, the timing handle for this frame.
    pub timing: Option<measureme::DetachedTiming>,

    /// Whether this frame belongs to user-relevant (local-crate) code, for diagnostics.
    pub is_user_relevant: bool,

    /// Per-frame salt in `0..ADDRS_PER_ANON_GLOBAL`, used to key the cache of evaluated
    /// MIR constants so that different frames can see different addresses.
    salt: usize,

    /// Data race detection state for this frame, if enabled.
    pub data_race: Option<data_race::FrameState>,
}

impl<'tcx> std::fmt::Debug for FrameExtra<'tcx> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let FrameExtra {
            borrow_tracker,
            catch_unwind,
            timing: _,
            is_user_relevant,
            salt,
            data_race,
        } = self;
        f.debug_struct("FrameData")
            .field("borrow_tracker", borrow_tracker)
            .field("catch_unwind", catch_unwind)
            .field("is_user_relevant", is_user_relevant)
            .field("salt", salt)
            .field("data_race", data_race)
            .finish()
    }
}

impl VisitProvenance for FrameExtra<'_> {
    fn visit_provenance(&self, visit: &mut VisitWith<'_>) {
        let FrameExtra {
            catch_unwind,
            borrow_tracker,
            timing: _,
            is_user_relevant: _,
            salt: _,
            data_race: _,
        } = self;

        catch_unwind.visit_provenance(visit);
        borrow_tracker.visit_provenance(visit);
    }
}

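/// Extra memory kinds Miri distinguishes, on top of the interpreter's built-in ones.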
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum MiriMemoryKind {
    /// `__rust_alloc` memory (the Rust heap).
    Rust,
    /// `miri_alloc` memory (a bare-metal heap usable without an OS).
    Miri,
    /// `malloc` memory (the C heap).
    C,
    /// Windows `HeapAlloc` memory.
    WinHeap,
    /// Windows "local" memory (`LocalAlloc`).
    WinLocal,
    /// Memory for args, errno, env vars, and other machine-managed parts of the environment.
    Machine,
    /// Memory allocated by the language runtime on behalf of the program; unlike `Machine`
    /// memory, it is expected to be freed and hence checked for leaks.
    Runtime,
    /// Globals copied from `tcx` (statics and consts).
    Global,
    /// Memory for extern statics, kept separate from `Global` so they can be identified.
    ExternStatic,
    /// Memory for thread-local statics, kept separate from `Global` so they can be identified.
    Tls,
    /// Memory returned by the `mmap` shim.
    Mmap,
}

impl From<MiriMemoryKind> for MemoryKind {
    #[inline(always)]
    fn from(kind: MiriMemoryKind) -> MemoryKind {
        MemoryKind::Machine(kind)
    }
}

impl MayLeak for MiriMemoryKind {
    #[inline(always)]
    fn may_leak(self) -> bool {
        use self::MiriMemoryKind::*;
        match self {
            Rust | Miri | C | WinHeap | WinLocal | Runtime => false,
            Machine | Global | ExternStatic | Tls | Mmap => true,
        }
    }
}

impl MiriMemoryKind {
    /// Whether diagnostics should include the span where an allocation of this kind was created.
    fn should_save_allocation_span(self) -> bool {
        use self::MiriMemoryKind::*;
        match self {
            // Heap allocations have a clear creation site in the interpreted program.
            Rust | Miri | C | WinHeap | WinLocal | Mmap => true,
            // Everything else would produce potentially confusing spans, so skip it.
            Machine | Global | ExternStatic | Tls | Runtime => false,
        }
    }
}

impl fmt::Display for MiriMemoryKind {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        use self::MiriMemoryKind::*;
        match self {
            Rust => write!(f, "Rust heap"),
            Miri => write!(f, "Miri bare-metal heap"),
            C => write!(f, "C heap"),
            WinHeap => write!(f, "Windows heap"),
            WinLocal => write!(f, "Windows local memory"),
            Machine => write!(f, "machine-managed memory"),
            Runtime => write!(f, "language runtime memory"),
            Global => write!(f, "global (static or const)"),
            ExternStatic => write!(f, "extern static"),
            Tls => write!(f, "thread-local static"),
            Mmap => write!(f, "mmap"),
        }
    }
}

pub type MemoryKind = interpret::MemoryKind<MiriMemoryKind>;

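/// Pointer provenance as tracked by Miri: either a concrete allocation plus borrow tag,
/// or "wildcard" provenance for pointers whose provenance was erased via integer casts.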
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub enum Provenance {
    /// For pointers with concrete provenance: we know exactly which allocation they point to
    /// and what their borrow tag is.
    Concrete {
        alloc_id: AllocId,
        /// Borrow tag used by the borrow tracker (Stacked/Tree Borrows).
        tag: BorTag,
    },
    /// Wildcard provenance is attached to pointers created by int-to-ptr casts: instead of
    /// committing to a single allocation up front, such pointers may later be used to access
    /// any allocation whose provenance has been exposed.
    Wildcard,
}

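/// The per-pointer part of `Provenance` that accompanies an `AllocId` in the machine hooks.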
#[derive(Copy, Clone, PartialEq)]
pub enum ProvenanceExtra {
    Concrete(BorTag),
    Wildcard,
}

#[cfg(target_pointer_width = "64")]
static_assert_size!(StrictPointer, 24);
#[cfg(target_pointer_width = "64")]
static_assert_size!(Scalar, 32);

impl fmt::Debug for Provenance {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Provenance::Concrete { alloc_id, tag } => {
                // Forward `alternate` flag to `alloc_id` printing.
                if f.alternate() {
                    write!(f, "[{alloc_id:#?}]")?;
                } else {
                    write!(f, "[{alloc_id:?}]")?;
                }
                // Print the borrow tag.
                write!(f, "{tag:?}")?;
            }
            Provenance::Wildcard => {
                write!(f, "[wildcard]")?;
            }
        }
        Ok(())
    }
}

impl interpret::Provenance for Provenance {
    /// We use absolute addresses in the `offset` of our pointers.
    const OFFSET_IS_ADDR: bool = true;

    /// We support `Wildcard` provenance.
    const WILDCARD: Option<Self> = Some(Provenance::Wildcard);

    fn get_alloc_id(self) -> Option<AllocId> {
        match self {
            Provenance::Concrete { alloc_id, .. } => Some(alloc_id),
            Provenance::Wildcard => None,
        }
    }

    fn fmt(ptr: &interpret::Pointer<Self>, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let (prov, addr) = ptr.into_raw_parts(); // the offset *is* the absolute address
        write!(f, "{:#x}", addr.bytes())?;
        if f.alternate() {
            write!(f, "{prov:#?}")?;
        } else {
            write!(f, "{prov:?}")?;
        }
        Ok(())
    }

    fn join(left: Option<Self>, right: Option<Self>) -> Option<Self> {
        match (left, right) {
            // If both sides have the same concrete provenance, that is the result.
            (
                Some(Provenance::Concrete { alloc_id: left_alloc, tag: left_tag }),
                Some(Provenance::Concrete { alloc_id: right_alloc, tag: right_tag }),
            ) if left_alloc == right_alloc && left_tag == right_tag => left,
            // If one side is a wildcard, the other provenance is the best we can do.
            (Some(Provenance::Wildcard), o) | (o, Some(Provenance::Wildcard)) => o,
            // Otherwise, we have to give up.
            _ => None,
        }
    }
}

impl fmt::Debug for ProvenanceExtra {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            ProvenanceExtra::Concrete(pid) => write!(f, "{pid:?}"),
            ProvenanceExtra::Wildcard => write!(f, "<wildcard>"),
        }
    }
}

impl ProvenanceExtra {
    pub fn and_then<T>(self, f: impl FnOnce(BorTag) -> Option<T>) -> Option<T> {
        match self {
            ProvenanceExtra::Concrete(pid) => f(pid),
            ProvenanceExtra::Wildcard => None,
        }
    }
}

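/// Extra per-allocation data that Miri tracks.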
#[derive(Debug)]
pub struct AllocExtra<'tcx> {
    /// Borrow-tracker state for this allocation, if enabled.
    pub borrow_tracker: Option<borrow_tracker::AllocState>,
    /// Data race detection state for this allocation (vector clocks, GenMC, or disabled).
    pub data_race: AllocDataRaceHandler,
    /// A backtrace to where this allocation was allocated, used for leak reports.
    pub backtrace: Option<Vec<FrameInfo<'tcx>>>,
    /// Synchronization primitives attach extra data to particular offsets inside the
    /// allocation; storing it here ensures it is removed when the allocation is freed.
    pub sync: FxHashMap<Size, Box<dyn Any>>,
}

impl<'tcx> Clone for AllocExtra<'tcx> {
    // `Allocation` requires `Clone`, but Miri never actually clones its allocations.
    fn clone(&self) -> Self {
        panic!("our allocations should never be cloned");
    }
}

impl VisitProvenance for AllocExtra<'_> {
    fn visit_provenance(&self, visit: &mut VisitWith<'_>) {
        let AllocExtra { borrow_tracker, data_race, backtrace: _, sync: _ } = self;

        borrow_tracker.visit_provenance(visit);
        data_race.visit_provenance(visit);
    }
}

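/// Precomputed layouts of primitive types, to avoid recomputing them all the time.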
pub struct PrimitiveLayouts<'tcx> {
    pub unit: TyAndLayout<'tcx>,
    pub i8: TyAndLayout<'tcx>,
    pub i16: TyAndLayout<'tcx>,
    pub i32: TyAndLayout<'tcx>,
    pub i64: TyAndLayout<'tcx>,
    pub i128: TyAndLayout<'tcx>,
    pub isize: TyAndLayout<'tcx>,
    pub u8: TyAndLayout<'tcx>,
    pub u16: TyAndLayout<'tcx>,
    pub u32: TyAndLayout<'tcx>,
    pub u64: TyAndLayout<'tcx>,
    pub u128: TyAndLayout<'tcx>,
    pub usize: TyAndLayout<'tcx>,
    pub bool: TyAndLayout<'tcx>,
    pub mut_raw_ptr: TyAndLayout<'tcx>,   // `*mut ()`
    pub const_raw_ptr: TyAndLayout<'tcx>, // `*const ()`
}

impl<'tcx> PrimitiveLayouts<'tcx> {
    fn new(layout_cx: LayoutCx<'tcx>) -> Result<Self, &'tcx LayoutError<'tcx>> {
        let tcx = layout_cx.tcx();
        let mut_raw_ptr = Ty::new_mut_ptr(tcx, tcx.types.unit);
        let const_raw_ptr = Ty::new_imm_ptr(tcx, tcx.types.unit);
        Ok(Self {
            unit: layout_cx.layout_of(tcx.types.unit)?,
            i8: layout_cx.layout_of(tcx.types.i8)?,
            i16: layout_cx.layout_of(tcx.types.i16)?,
            i32: layout_cx.layout_of(tcx.types.i32)?,
            i64: layout_cx.layout_of(tcx.types.i64)?,
            i128: layout_cx.layout_of(tcx.types.i128)?,
            isize: layout_cx.layout_of(tcx.types.isize)?,
            u8: layout_cx.layout_of(tcx.types.u8)?,
            u16: layout_cx.layout_of(tcx.types.u16)?,
            u32: layout_cx.layout_of(tcx.types.u32)?,
            u64: layout_cx.layout_of(tcx.types.u64)?,
            u128: layout_cx.layout_of(tcx.types.u128)?,
            usize: layout_cx.layout_of(tcx.types.usize)?,
            bool: layout_cx.layout_of(tcx.types.bool)?,
            mut_raw_ptr: layout_cx.layout_of(mut_raw_ptr)?,
            const_raw_ptr: layout_cx.layout_of(const_raw_ptr)?,
        })
    }

    pub fn uint(&self, size: Size) -> Option<TyAndLayout<'tcx>> {
        match size.bits() {
            8 => Some(self.u8),
            16 => Some(self.u16),
            32 => Some(self.u32),
            64 => Some(self.u64),
            128 => Some(self.u128),
            _ => None,
        }
    }

    pub fn int(&self, size: Size) -> Option<TyAndLayout<'tcx>> {
        match size.bits() {
            8 => Some(self.i8),
            16 => Some(self.i16),
            32 => Some(self.i32),
            64 => Some(self.i64),
            128 => Some(self.i128),
            _ => None,
        }
    }
}

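/// The machine itself.
///
/// If you add a field here that stores machine values (such as pointers), remember to
/// update the `VisitProvenance` implementation below.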
pub struct MiriMachine<'tcx> {
    /// The global compiler context.
    pub tcx: TyCtxt<'tcx>,

    /// Global data for borrow tracking (Stacked or Tree Borrows), if enabled.
    pub borrow_tracker: Option<borrow_tracker::GlobalState>,

    /// Global data for data race detection: vector clocks, GenMC, or disabled.
    pub data_race: GlobalDataRaceHandler,

    /// Global state for the ptr-int-cast machinery (base address assignment).
    pub alloc_addresses: alloc_addresses::GlobalState,

    /// Environment variables.
    pub(crate) env_vars: EnvVars<'tcx>,

    /// Return place of the main function.
    pub(crate) main_fn_ret_place: Option<MPlaceTy<'tcx>>,

    /// Program arguments (`None` until they are initialized after the context exists).
    pub(crate) argc: Option<Pointer>,
    pub(crate) argv: Option<Pointer>,
    pub(crate) cmd_line: Option<Pointer>,

    /// TLS state.
    pub(crate) tls: TlsData<'tcx>,

    /// What to do when an operation requires communicating with the host
    /// (e.g. env vars, random numbers, the current time).
    pub(crate) isolated_op: IsolatedOp,

    /// Whether (and how deeply) to enforce the validity invariant.
    pub(crate) validation: ValidationMode,

    /// The table of file descriptors.
    pub(crate) fds: shims::FdTable,
    /// The table of directory descriptors.
    pub(crate) dirs: shims::DirTable,

    /// The list of all `epoll` interests.
    pub(crate) epoll_interests: shims::EpollInterestTable,

    /// The clock used to emulate the monotonic clock.
    pub(crate) monotonic_clock: MonotonicClock,

    /// The set of threads.
    pub(crate) threads: ThreadManager<'tcx>,

    /// Stores which thread is eligible to run on which CPUs. This does not affect
    /// scheduling; it is only tracked so the `sched_{get,set}affinity` shims give
    /// consistent answers.
    pub(crate) thread_cpu_affinity: FxHashMap<ThreadId, CpuAffinityMask>,

    /// Precomputed `TyAndLayout`s for primitive types that are commonly used inside Miri.
    pub(crate) layouts: PrimitiveLayouts<'tcx>,

    /// Allocations that are considered roots of static memory (may leak).
    pub(crate) static_roots: Vec<AllocId>,

    /// The `measureme` profiler used to record timing information about
    /// the emulated program.
    profiler: Option<measureme::Profiler>,
    /// Used with `profiler` to cache the `StringId`s for event names.
    string_cache: FxHashMap<String, measureme::StringId>,

    /// Cache of `Instance` exported under the given `Symbol` name.
    /// `None` means no `Instance` is exported under that name.
    pub(crate) exported_symbols_cache: FxHashMap<Symbol, Option<Instance<'tcx>>>,

    /// Equivalent setting as `RUST_BACKTRACE` on encountering an error.
    pub(crate) backtrace_style: BacktraceStyle,

    /// Crates which are considered local for the purposes of error reporting.
    pub(crate) local_crates: Vec<CrateNum>,

    /// Maps extern static names to their pointer.
    extern_statics: FxHashMap<Symbol, StrictPointer>,

    /// The random number generator used for resolving non-determinism.
    /// Needs interior mutability since it is also used from `&self` contexts.
    pub(crate) rng: RefCell<StdRng>,

    /// The allocator backing Miri's allocation bytes when native libraries are in use.
    #[cfg(target_os = "linux")]
    pub(crate) allocator: Option<Rc<RefCell<crate::alloc::isolated_alloc::IsolatedAlloc>>>,

    /// The allocation IDs to report when they are being allocated
    /// (helps for debugging memory leaks and use-after-free bugs).
    tracked_alloc_ids: FxHashSet<AllocId>,
    /// For the tracked alloc ids, also report read/write accesses.
    track_alloc_accesses: bool,

    /// Controls whether alignment of memory accesses is being checked.
    pub(crate) check_alignment: AlignmentCheck,

    /// Failure rate of `compare_exchange_weak`, between 0.0 and 1.0.
    pub(crate) cmpxchg_weak_failure_rate: f64,

    /// The probability of the active thread being preempted at the end of each basic block.
    pub(crate) preemption_rate: f64,

    /// If `Some`, we will report the current stack every N basic blocks.
    pub(crate) report_progress: Option<u32>,
    /// The total number of basic blocks that have been executed.
    pub(crate) basic_block_count: u64,

    /// Handles of the optional shared object files for native functions.
    #[cfg(unix)]
    pub native_lib: Vec<(libloading::Library, std::path::PathBuf)>,
    #[cfg(not(unix))]
    pub native_lib: Vec<!>,

    /// Run a garbage collector for borrow tags every N basic blocks.
    pub(crate) gc_interval: u32,
    /// The number of blocks that passed since the last borrow-tag GC pass.
    pub(crate) since_gc: u32,

    /// The number of CPUs to be reported by Miri.
    pub(crate) num_cpus: u32,

    /// Determines Miri's page size and the associated stack values.
    pub(crate) page_size: u64,
    pub(crate) stack_addr: u64,
    pub(crate) stack_size: u64,

    /// Whether to collect a backtrace when each allocation is created, in case it leaks.
    pub(crate) collect_leak_backtraces: bool,

    /// The spans we will use to report where an allocation was created and deallocated in
    /// diagnostics.
    pub(crate) allocation_spans: RefCell<FxHashMap<AllocId, (Span, Option<Span>)>>,

    /// Maps MIR consts to their evaluated result. The const is combined with a "salt" (`usize`)
    /// that is fixed per stack frame, so the same const is consistent within one frame while
    /// different frames may see different addresses.
    const_cache: RefCell<FxHashMap<(mir::Const<'tcx>, usize), OpTy<'tcx>>>,

    /// For each allocation, an offset inside that allocation that was deemed aligned even for
    /// symbolic alignment checks, together with the alignment promised at that offset.
    pub(crate) symbolic_alignment: RefCell<FxHashMap<AllocId, (Size, Align)>>,

    /// A cache of "data range" computations for unions (i.e., the offsets of non-padding bytes).
    union_data_ranges: FxHashMap<Ty<'tcx>, RangeSet>,

    /// Caches whether the sanity checks for the various pthread primitives have already run.
    pub(crate) pthread_mutex_sanity: Cell<bool>,
    pub(crate) pthread_rwlock_sanity: Cell<bool>,
    pub(crate) pthread_condvar_sanity: Cell<bool>,

    /// Remembers whether we already warned about an extern type with Stacked Borrows.
    pub(crate) sb_extern_type_warned: Cell<bool>,
    /// Remembers whether we already warned about sharing memory with a native call.
    #[cfg(unix)]
    pub(crate) native_call_mem_warned: Cell<bool>,
    /// Remembers which shims have already shown the warning about erroring in isolation.
    pub(crate) reject_in_isolation_warned: RefCell<FxHashSet<String>>,
    /// Remembers which int-to-ptr casts we have already warned about.
    pub(crate) int2ptr_warned: RefCell<FxHashSet<Span>>,

    /// Cache for `mangle_internal_symbol`.
    pub(crate) mangle_internal_symbol_cache: FxHashMap<&'static str, String>,

    /// Always prefer the intrinsic fallback body over Miri's own implementation of the intrinsic.
    pub force_intrinsic_fallback: bool,

    /// Whether floating-point operations are allowed to behave non-deterministically.
    pub float_nondet: bool,
}

impl<'tcx> MiriMachine<'tcx> {
    pub(crate) fn new(
        config: &MiriConfig,
        layout_cx: LayoutCx<'tcx>,
        genmc_ctx: Option<Rc<GenmcCtx>>,
    ) -> Self {
        let tcx = layout_cx.tcx();
        let local_crates = helpers::get_local_crates(tcx);
        let layouts =
            PrimitiveLayouts::new(layout_cx).expect("Couldn't get layouts of primitive types");
        let profiler = config.measureme_out.as_ref().map(|out| {
            let crate_name =
                tcx.sess.opts.crate_name.clone().unwrap_or_else(|| "unknown-crate".to_string());
            let pid = process::id();
            let filename = format!("{crate_name}-{pid:07}");
            let path = Path::new(out).join(filename);
            measureme::Profiler::new(path).expect("Couldn't create `measureme` profiler")
        });
        let rng = StdRng::seed_from_u64(config.seed.unwrap_or(0));
        let borrow_tracker = config.borrow_tracker.map(|bt| bt.instantiate_global_state(config));
        let data_race = if config.genmc_mode {
            GlobalDataRaceHandler::Genmc(genmc_ctx.unwrap())
        } else if config.data_race_detector {
            GlobalDataRaceHandler::Vclocks(Box::new(data_race::GlobalState::new(config)))
        } else {
            GlobalDataRaceHandler::None
        };
        let page_size = if let Some(page_size) = config.page_size {
            page_size
        } else {
            let target = &tcx.sess.target;
            match target.arch.as_ref() {
                "wasm32" | "wasm64" => 64 * 1024,
                "aarch64" => {
                    if target.options.vendor.as_ref() == "apple" {
                        16 * 1024
                    } else {
                        4 * 1024
                    }
                }
                _ => 4 * 1024,
            }
        };
        let stack_addr = if tcx.pointer_size().bits() < 32 { page_size } else { page_size * 32 };
        let stack_size =
            if tcx.pointer_size().bits() < 32 { page_size * 4 } else { page_size * 16 };
        assert!(
            usize::try_from(config.num_cpus).unwrap() <= cpu_affinity::MAX_CPUS,
            "miri only supports up to {} CPUs, but {} were configured",
            cpu_affinity::MAX_CPUS,
            config.num_cpus
        );
        let threads = ThreadManager::new(config);
        let mut thread_cpu_affinity = FxHashMap::default();
        // Set up the affinity of the main thread on targets where the affinity shims are supported.
        if matches!(&*tcx.sess.target.os, "linux" | "freebsd" | "android") {
            thread_cpu_affinity
                .insert(threads.active_thread(), CpuAffinityMask::new(&layout_cx, config.num_cpus));
        }
        MiriMachine {
            tcx,
            borrow_tracker,
            data_race,
            alloc_addresses: RefCell::new(alloc_addresses::GlobalStateInner::new(
                config, stack_addr,
            )),
            env_vars: EnvVars::default(),
            main_fn_ret_place: None,
            argc: None,
            argv: None,
            cmd_line: None,
            tls: TlsData::default(),
            isolated_op: config.isolated_op,
            validation: config.validation,
            fds: shims::FdTable::init(config.mute_stdout_stderr),
            epoll_interests: shims::EpollInterestTable::new(),
            dirs: Default::default(),
            layouts,
            threads,
            thread_cpu_affinity,
            static_roots: Vec::new(),
            profiler,
            string_cache: Default::default(),
            exported_symbols_cache: FxHashMap::default(),
            backtrace_style: config.backtrace_style,
            local_crates,
            extern_statics: FxHashMap::default(),
            rng: RefCell::new(rng),
            #[cfg(target_os = "linux")]
            allocator: if !config.native_lib.is_empty() {
                Some(Rc::new(RefCell::new(crate::alloc::isolated_alloc::IsolatedAlloc::new())))
            } else {
                None
            },
            tracked_alloc_ids: config.tracked_alloc_ids.clone(),
            track_alloc_accesses: config.track_alloc_accesses,
            check_alignment: config.check_alignment,
            cmpxchg_weak_failure_rate: config.cmpxchg_weak_failure_rate,
            preemption_rate: config.preemption_rate,
            report_progress: config.report_progress,
            basic_block_count: 0,
            monotonic_clock: MonotonicClock::new(config.isolated_op == IsolatedOp::Allow),
            #[cfg(unix)]
            native_lib: config.native_lib.iter().map(|lib_file_path| {
                let host_triple = rustc_session::config::host_tuple();
                let target_triple = tcx.sess.opts.target_triple.tuple();
                // Check if host target == the session target.
                if host_triple != target_triple {
                    panic!(
                        "calling native C functions in linked .so file requires host and target to be the same: \
                        host={host_triple}, target={target_triple}",
                    );
                }
                // Loading the library is `unsafe` because it can run arbitrary initializer code;
                // an incorrect library can break Miri itself.
                (
                    unsafe {
                        libloading::Library::new(lib_file_path)
                            .expect("failed to read specified extern shared object file")
                    },
                    lib_file_path.clone(),
                )
            }).collect(),
            #[cfg(not(unix))]
            native_lib: config.native_lib.iter().map(|_| {
                panic!("calling functions from native libraries via FFI is only supported on Unix")
            }).collect(),
            gc_interval: config.gc_interval,
            since_gc: 0,
            num_cpus: config.num_cpus,
            page_size,
            stack_addr,
            stack_size,
            collect_leak_backtraces: config.collect_leak_backtraces,
            allocation_spans: RefCell::new(FxHashMap::default()),
            const_cache: RefCell::new(FxHashMap::default()),
            symbolic_alignment: RefCell::new(FxHashMap::default()),
            union_data_ranges: FxHashMap::default(),
            pthread_mutex_sanity: Cell::new(false),
            pthread_rwlock_sanity: Cell::new(false),
            pthread_condvar_sanity: Cell::new(false),
            sb_extern_type_warned: Cell::new(false),
            #[cfg(unix)]
            native_call_mem_warned: Cell::new(false),
            reject_in_isolation_warned: Default::default(),
            int2ptr_warned: Default::default(),
            mangle_internal_symbol_cache: Default::default(),
            force_intrinsic_fallback: config.force_intrinsic_fallback,
            float_nondet: config.float_nondet,
        }
    }

    pub(crate) fn late_init(
        ecx: &mut MiriInterpCx<'tcx>,
        config: &MiriConfig,
        on_main_stack_empty: StackEmptyCallback<'tcx>,
    ) -> InterpResult<'tcx> {
        EnvVars::init(ecx, config)?;
        MiriMachine::init_extern_statics(ecx)?;
        ThreadManager::init(ecx, on_main_stack_empty);
        interp_ok(())
    }

    pub(crate) fn add_extern_static(ecx: &mut MiriInterpCx<'tcx>, name: &str, ptr: Pointer) {
        // This got just allocated, so there definitely is a pointer here.
        let ptr = ptr.into_pointer_or_addr().unwrap();
        ecx.machine.extern_statics.try_insert(Symbol::intern(name), ptr).unwrap();
    }

    pub(crate) fn communicate(&self) -> bool {
        self.isolated_op == IsolatedOp::Allow
    }

    /// Check whether the stack frame that this `FrameInfo` refers to is part of a local crate.
    pub(crate) fn is_local(&self, frame: &FrameInfo<'_>) -> bool {
        let def_id = frame.instance.def_id();
        def_id.is_local() || self.local_crates.contains(&def_id.krate)
    }

    /// Called when the interpreter terminates abnormally; makes sure the profiler (if any)
    /// flushes its data, since otherwise the recorded profile would be unreadable.
    pub(crate) fn handle_abnormal_termination(&mut self) {
        drop(self.profiler.take());
    }

    pub(crate) fn page_align(&self) -> Align {
        Align::from_bytes(self.page_size).unwrap()
    }

    pub(crate) fn allocated_span(&self, alloc_id: AllocId) -> Option<SpanData> {
        self.allocation_spans
            .borrow()
            .get(&alloc_id)
            .map(|(allocated, _deallocated)| allocated.data())
    }

    pub(crate) fn deallocated_span(&self, alloc_id: AllocId) -> Option<SpanData> {
        self.allocation_spans
            .borrow()
            .get(&alloc_id)
            .and_then(|(_allocated, deallocated)| *deallocated)
            .map(Span::data)
    }

    fn init_allocation(
        ecx: &MiriInterpCx<'tcx>,
        id: AllocId,
        kind: MemoryKind,
        size: Size,
        align: Align,
    ) -> InterpResult<'tcx, AllocExtra<'tcx>> {
        if ecx.machine.tracked_alloc_ids.contains(&id) {
            ecx.emit_diagnostic(NonHaltingDiagnostic::CreatedAlloc(id, size, align, kind));
        }

        let borrow_tracker = ecx
            .machine
            .borrow_tracker
            .as_ref()
            .map(|bt| bt.borrow_mut().new_allocation(id, size, kind, &ecx.machine));

        let data_race = match &ecx.machine.data_race {
            GlobalDataRaceHandler::None => AllocDataRaceHandler::None,
            GlobalDataRaceHandler::Vclocks(data_race) =>
                AllocDataRaceHandler::Vclocks(
                    data_race::AllocState::new_allocation(
                        data_race,
                        &ecx.machine.threads,
                        size,
                        kind,
                        ecx.machine.current_span(),
                    ),
                    data_race.weak_memory.then(weak_memory::AllocState::new_allocation),
                ),
            GlobalDataRaceHandler::Genmc(_genmc_ctx) => {
                // GenMC tracks allocations itself, so no per-allocation state is needed here.
                AllocDataRaceHandler::Genmc
            }
        };

        // If an allocation is leaked, we want to report a backtrace to indicate where it was
        // allocated. We don't need to record one for allocations that are allowed to leak.
        let backtrace = if kind.may_leak() || !ecx.machine.collect_leak_backtraces {
            None
        } else {
            Some(ecx.generate_stacktrace())
        };

        if matches!(kind, MemoryKind::Machine(kind) if kind.should_save_allocation_span()) {
            ecx.machine
                .allocation_spans
                .borrow_mut()
                .insert(id, (ecx.machine.current_span(), None));
        }

        interp_ok(AllocExtra { borrow_tracker, data_race, backtrace, sync: FxHashMap::default() })
    }
}

impl VisitProvenance for MiriMachine<'_> {
    fn visit_provenance(&self, visit: &mut VisitWith<'_>) {
        #[rustfmt::skip]
        let MiriMachine {
            threads,
            thread_cpu_affinity: _,
            tls,
            env_vars,
            main_fn_ret_place,
            argc,
            argv,
            cmd_line,
            extern_statics,
            dirs,
            borrow_tracker,
            data_race,
            alloc_addresses,
            fds,
            epoll_interests: _,
            tcx: _,
            isolated_op: _,
            validation: _,
            monotonic_clock: _,
            layouts: _,
            static_roots: _,
            profiler: _,
            string_cache: _,
            exported_symbols_cache: _,
            backtrace_style: _,
            local_crates: _,
            rng: _,
            #[cfg(target_os = "linux")]
            allocator: _,
            tracked_alloc_ids: _,
            track_alloc_accesses: _,
            check_alignment: _,
            cmpxchg_weak_failure_rate: _,
            preemption_rate: _,
            report_progress: _,
            basic_block_count: _,
            native_lib: _,
            gc_interval: _,
            since_gc: _,
            num_cpus: _,
            page_size: _,
            stack_addr: _,
            stack_size: _,
            collect_leak_backtraces: _,
            allocation_spans: _,
            const_cache: _,
            symbolic_alignment: _,
            union_data_ranges: _,
            pthread_mutex_sanity: _,
            pthread_rwlock_sanity: _,
            pthread_condvar_sanity: _,
            sb_extern_type_warned: _,
            #[cfg(unix)]
            native_call_mem_warned: _,
            reject_in_isolation_warned: _,
            int2ptr_warned: _,
            mangle_internal_symbol_cache: _,
            force_intrinsic_fallback: _,
            float_nondet: _,
        } = self;

        threads.visit_provenance(visit);
        tls.visit_provenance(visit);
        env_vars.visit_provenance(visit);
        dirs.visit_provenance(visit);
        fds.visit_provenance(visit);
        data_race.visit_provenance(visit);
        borrow_tracker.visit_provenance(visit);
        alloc_addresses.visit_provenance(visit);
        main_fn_ret_place.visit_provenance(visit);
        argc.visit_provenance(visit);
        argv.visit_provenance(visit);
        cmd_line.visit_provenance(visit);
        for ptr in extern_statics.values() {
            ptr.visit_provenance(visit);
        }
    }
}

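/// The interpreter context type used throughout Miri: an `InterpCx` instantiated with
/// Miri's machine.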
pub type MiriInterpCx<'tcx> = InterpCx<'tcx, MiriMachine<'tcx>>;

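/// Convenience trait so that code generic over wrapper types can get at the `MiriInterpCx`.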
pub trait MiriInterpCxExt<'tcx> {
    fn eval_context_ref<'a>(&'a self) -> &'a MiriInterpCx<'tcx>;
    fn eval_context_mut<'a>(&'a mut self) -> &'a mut MiriInterpCx<'tcx>;
}
impl<'tcx> MiriInterpCxExt<'tcx> for MiriInterpCx<'tcx> {
    #[inline(always)]
    fn eval_context_ref(&self) -> &MiriInterpCx<'tcx> {
        self
    }
    #[inline(always)]
    fn eval_context_mut(&mut self) -> &mut MiriInterpCx<'tcx> {
        self
    }
}

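/// Machine hook implementations.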
impl<'tcx> Machine<'tcx> for MiriMachine<'tcx> {
    type MemoryKind = MiriMemoryKind;
    type ExtraFnVal = DynSym;

    type FrameExtra = FrameExtra<'tcx>;
    type AllocExtra = AllocExtra<'tcx>;

    type Provenance = Provenance;
    type ProvenanceExtra = ProvenanceExtra;
    type Bytes = MiriAllocBytes;

    type MemoryMap =
        MonoHashMap<AllocId, (MemoryKind, Allocation<Provenance, Self::AllocExtra, Self::Bytes>)>;

    const GLOBAL_KIND: Option<MiriMemoryKind> = Some(MiriMemoryKind::Global);

    const PANIC_ON_ALLOC_FAIL: bool = false;

    const TRACING_ENABLED: bool = cfg!(feature = "tracing");

    #[inline(always)]
    fn enforce_alignment(ecx: &MiriInterpCx<'tcx>) -> bool {
        ecx.machine.check_alignment != AlignmentCheck::None
    }

    #[inline(always)]
    fn alignment_check(
        ecx: &MiriInterpCx<'tcx>,
        alloc_id: AllocId,
        alloc_align: Align,
        alloc_kind: AllocKind,
        offset: Size,
        align: Align,
    ) -> Option<Misalignment> {
        if ecx.machine.check_alignment != AlignmentCheck::Symbolic {
            // Just use the built-in check.
            return None;
        }
        if alloc_kind != AllocKind::LiveData {
            // There is no symbolic alignment information for these.
            return None;
        }
        // Let's see which alignment we have been promised for this allocation.
        let (promised_offset, promised_align) = ecx
            .machine
            .symbolic_alignment
            .borrow()
            .get(&alloc_id)
            .copied()
            .unwrap_or((Size::ZERO, alloc_align));
        if promised_align < align {
            // Definitely not enough.
            Some(Misalignment { has: promised_align, required: align })
        } else {
            // What's the offset between us and the promised alignment?
            let distance = offset.bytes().wrapping_sub(promised_offset.bytes());
            // That distance must itself be a multiple of the required alignment.
            if distance.is_multiple_of(align.bytes()) {
                // All fine!
                None
            } else {
                // The biggest power of two through which `distance` is divisible.
                let distance_pow2 = 1 << distance.trailing_zeros();
                Some(Misalignment {
                    has: Align::from_bytes(distance_pow2).unwrap(),
                    required: align,
                })
            }
        }
    }

    #[inline(always)]
    fn enforce_validity(ecx: &MiriInterpCx<'tcx>, _layout: TyAndLayout<'tcx>) -> bool {
        ecx.machine.validation != ValidationMode::No
    }
    #[inline(always)]
    fn enforce_validity_recursively(
        ecx: &InterpCx<'tcx, Self>,
        _layout: TyAndLayout<'tcx>,
    ) -> bool {
        ecx.machine.validation == ValidationMode::Deep
    }

    #[inline(always)]
    fn ignore_optional_overflow_checks(ecx: &MiriInterpCx<'tcx>) -> bool {
        !ecx.tcx.sess.overflow_checks()
    }

    fn check_fn_target_features(
        ecx: &MiriInterpCx<'tcx>,
        instance: ty::Instance<'tcx>,
    ) -> InterpResult<'tcx> {
        let attrs = ecx.tcx.codegen_fn_attrs(instance.def_id());
        if attrs
            .target_features
            .iter()
            .any(|feature| !ecx.tcx.sess.target_features.contains(&feature.name))
        {
            let unavailable = attrs
                .target_features
                .iter()
                .filter(|&feature| {
                    !feature.implied && !ecx.tcx.sess.target_features.contains(&feature.name)
                })
                .fold(String::new(), |mut s, feature| {
                    if !s.is_empty() {
                        s.push_str(", ");
                    }
                    s.push_str(feature.name.as_str());
                    s
                });
            let msg = format!(
                "calling a function that requires unavailable target features: {unavailable}"
            );
            // On WASM this is not UB (the module would simply fail to validate),
            // so report an abort instead.
            if ecx.tcx.sess.target.is_like_wasm {
                throw_machine_stop!(TerminationInfo::Abort(msg));
            } else {
                throw_ub_format!("{msg}");
            }
        }
        interp_ok(())
    }

    #[inline(always)]
    fn find_mir_or_eval_fn(
        ecx: &mut MiriInterpCx<'tcx>,
        instance: ty::Instance<'tcx>,
        abi: &FnAbi<'tcx, Ty<'tcx>>,
        args: &[FnArg<'tcx, Provenance>],
        dest: &PlaceTy<'tcx>,
        ret: Option<mir::BasicBlock>,
        unwind: mir::UnwindAction,
    ) -> InterpResult<'tcx, Option<(&'tcx mir::Body<'tcx>, ty::Instance<'tcx>)>> {
        // Foreign items have no MIR body; try to emulate them via the shims.
        if ecx.tcx.is_foreign_item(instance.def_id()) {
            let args = ecx.copy_fn_args(args);
            let link_name = Symbol::intern(ecx.tcx.symbol_name(instance).name);
            return ecx.emulate_foreign_item(link_name, abi, &args, dest, ret, unwind);
        }

        // Otherwise, load the MIR.
        interp_ok(Some((ecx.load_mir(instance.def, None)?, instance)))
    }

    #[inline(always)]
    fn call_extra_fn(
        ecx: &mut MiriInterpCx<'tcx>,
        fn_val: DynSym,
        abi: &FnAbi<'tcx, Ty<'tcx>>,
        args: &[FnArg<'tcx, Provenance>],
        dest: &PlaceTy<'tcx>,
        ret: Option<mir::BasicBlock>,
        unwind: mir::UnwindAction,
    ) -> InterpResult<'tcx> {
        let args = ecx.copy_fn_args(args);
        ecx.emulate_dyn_sym(fn_val, abi, &args, dest, ret, unwind)
    }

    #[inline(always)]
    fn call_intrinsic(
        ecx: &mut MiriInterpCx<'tcx>,
        instance: ty::Instance<'tcx>,
        args: &[OpTy<'tcx>],
        dest: &PlaceTy<'tcx>,
        ret: Option<mir::BasicBlock>,
        unwind: mir::UnwindAction,
    ) -> InterpResult<'tcx, Option<ty::Instance<'tcx>>> {
        ecx.call_intrinsic(instance, args, dest, ret, unwind)
    }

    #[inline(always)]
    fn assert_panic(
        ecx: &mut MiriInterpCx<'tcx>,
        msg: &mir::AssertMessage<'tcx>,
        unwind: mir::UnwindAction,
    ) -> InterpResult<'tcx> {
        ecx.assert_panic(msg, unwind)
    }

    fn panic_nounwind(ecx: &mut InterpCx<'tcx, Self>, msg: &str) -> InterpResult<'tcx> {
        ecx.start_panic_nounwind(msg)
    }

    fn unwind_terminate(
        ecx: &mut InterpCx<'tcx, Self>,
        reason: mir::UnwindTerminateReason,
    ) -> InterpResult<'tcx> {
        // Call the lang item that corresponds to this termination reason.
        let panic = ecx.tcx.lang_items().get(reason.lang_item()).unwrap();
        let panic = ty::Instance::mono(ecx.tcx.tcx, panic);
        ecx.call_function(
            panic,
            ExternAbi::Rust,
            &[],
            None,
            ReturnContinuation::Goto { ret: None, unwind: mir::UnwindAction::Unreachable },
        )?;
        interp_ok(())
    }

    #[inline(always)]
    fn binary_ptr_op(
        ecx: &MiriInterpCx<'tcx>,
        bin_op: mir::BinOp,
        left: &ImmTy<'tcx>,
        right: &ImmTy<'tcx>,
    ) -> InterpResult<'tcx, ImmTy<'tcx>> {
        ecx.binary_ptr_op(bin_op, left, right)
    }

    #[inline(always)]
    fn generate_nan<F1: Float + FloatConvert<F2>, F2: Float>(
        ecx: &InterpCx<'tcx, Self>,
        inputs: &[F1],
    ) -> F2 {
        ecx.generate_nan(inputs)
    }

    #[inline(always)]
    fn apply_float_nondet(
        ecx: &mut InterpCx<'tcx, Self>,
        val: ImmTy<'tcx>,
    ) -> InterpResult<'tcx, ImmTy<'tcx>> {
        crate::math::apply_random_float_error_to_imm(ecx, val, 2)
    }

    #[inline(always)]
    fn equal_float_min_max<F: Float>(ecx: &MiriInterpCx<'tcx>, a: F, b: F) -> F {
        ecx.equal_float_min_max(a, b)
    }

    #[inline(always)]
    fn ub_checks(ecx: &InterpCx<'tcx, Self>) -> InterpResult<'tcx, bool> {
        interp_ok(ecx.tcx.sess.ub_checks())
    }

    #[inline(always)]
    fn contract_checks(ecx: &InterpCx<'tcx, Self>) -> InterpResult<'tcx, bool> {
        interp_ok(ecx.tcx.sess.contract_checks())
    }

    #[inline(always)]
    fn thread_local_static_pointer(
        ecx: &mut MiriInterpCx<'tcx>,
        def_id: DefId,
    ) -> InterpResult<'tcx, StrictPointer> {
        ecx.get_or_create_thread_local_alloc(def_id)
    }

    fn extern_static_pointer(
        ecx: &MiriInterpCx<'tcx>,
        def_id: DefId,
    ) -> InterpResult<'tcx, StrictPointer> {
        let link_name = Symbol::intern(ecx.tcx.symbol_name(Instance::mono(*ecx.tcx, def_id)).name);
        if let Some(&ptr) = ecx.machine.extern_statics.get(&link_name) {
            // Extern statics are only ever registered with concrete provenance.
            let Provenance::Concrete { alloc_id, .. } = ptr.provenance else {
                panic!("extern_statics cannot contain wildcards")
            };
            let info = ecx.get_alloc_info(alloc_id);
            let def_ty = ecx.tcx.type_of(def_id).instantiate_identity();
            let extern_decl_layout =
                ecx.tcx.layout_of(ecx.typing_env().as_query_input(def_ty)).unwrap();
            if extern_decl_layout.size != info.size || extern_decl_layout.align.abi != info.align {
                throw_unsup_format!(
                    "extern static `{link_name}` has been declared as `{krate}::{name}` \
                    with a size of {decl_size} bytes and alignment of {decl_align} bytes, \
                    but Miri emulates it via an extern static shim \
                    with a size of {shim_size} bytes and alignment of {shim_align} bytes",
                    name = ecx.tcx.def_path_str(def_id),
                    krate = ecx.tcx.crate_name(def_id.krate),
                    decl_size = extern_decl_layout.size.bytes(),
                    decl_align = extern_decl_layout.align.abi.bytes(),
                    shim_size = info.size.bytes(),
                    shim_align = info.align.bytes(),
                )
            }
            interp_ok(ptr)
        } else {
            throw_unsup_format!("extern static `{link_name}` is not supported by Miri",)
        }
    }

    fn init_local_allocation(
        ecx: &MiriInterpCx<'tcx>,
        id: AllocId,
        kind: MemoryKind,
        size: Size,
        align: Align,
    ) -> InterpResult<'tcx, Self::AllocExtra> {
        assert!(kind != MiriMemoryKind::Global.into());
        MiriMachine::init_allocation(ecx, id, kind, size, align)
    }

    fn adjust_alloc_root_pointer(
        ecx: &MiriInterpCx<'tcx>,
        ptr: interpret::Pointer<CtfeProvenance>,
        kind: Option<MemoryKind>,
    ) -> InterpResult<'tcx, interpret::Pointer<Provenance>> {
        let kind = kind.expect("we set our GLOBAL_KIND so this cannot be None");
        let alloc_id = ptr.provenance.alloc_id();
        if cfg!(debug_assertions) {
            // The machine promises to never call us on thread-local or extern statics.
            match ecx.tcx.try_get_global_alloc(alloc_id) {
                Some(GlobalAlloc::Static(def_id)) if ecx.tcx.is_thread_local_static(def_id) => {
                    panic!("adjust_alloc_root_pointer called on thread-local static")
                }
                Some(GlobalAlloc::Static(def_id)) if ecx.tcx.is_foreign_item(def_id) => {
                    panic!("adjust_alloc_root_pointer called on extern static")
                }
                _ => {}
            }
        }
        let tag = if let Some(borrow_tracker) = &ecx.machine.borrow_tracker {
            borrow_tracker.borrow_mut().root_ptr_tag(alloc_id, &ecx.machine)
        } else {
            // The tag is unused when borrow tracking is disabled.
            BorTag::default()
        };
        ecx.adjust_alloc_root_pointer(ptr, tag, kind)
    }

    /// Called on `usize as ptr` casts.
    #[inline(always)]
    fn ptr_from_addr_cast(ecx: &MiriInterpCx<'tcx>, addr: u64) -> InterpResult<'tcx, Pointer> {
        ecx.ptr_from_addr_cast(addr)
    }

    /// Called on `ptr as usize` casts.
    #[inline(always)]
    fn expose_provenance(
        ecx: &InterpCx<'tcx, Self>,
        provenance: Self::Provenance,
    ) -> InterpResult<'tcx> {
        ecx.expose_provenance(provenance)
    }

    /// Convert a pointer with provenance into an allocation-offset pair,
    /// plus the extra provenance info.
    fn ptr_get_alloc(
        ecx: &MiriInterpCx<'tcx>,
        ptr: StrictPointer,
        size: i64,
    ) -> Option<(AllocId, Size, Self::ProvenanceExtra)> {
        let rel = ecx.ptr_get_alloc(ptr, size);

        rel.map(|(alloc_id, size)| {
            let tag = match ptr.provenance {
                Provenance::Concrete { tag, .. } => ProvenanceExtra::Concrete(tag),
                Provenance::Wildcard => ProvenanceExtra::Wildcard,
            };
            (alloc_id, size, tag)
        })
    }

    /// Called to adjust global allocations to this machine's `Provenance` and `AllocExtra`.
    fn adjust_global_allocation<'b>(
        ecx: &InterpCx<'tcx, Self>,
        id: AllocId,
        alloc: &'b Allocation,
    ) -> InterpResult<'tcx, Cow<'b, Allocation<Self::Provenance, Self::AllocExtra, Self::Bytes>>>
    {
        let alloc = alloc.adjust_from_tcx(
            &ecx.tcx,
            |bytes, align| ecx.get_global_alloc_bytes(id, bytes, align),
            |ptr| ecx.global_root_pointer(ptr),
        )?;
        let kind = MiriMemoryKind::Global.into();
        let extra = MiriMachine::init_allocation(ecx, id, kind, alloc.size(), alloc.align)?;
        interp_ok(Cow::Owned(alloc.with_extra(extra)))
    }

    #[inline(always)]
    fn before_memory_read(
        _tcx: TyCtxtAt<'tcx>,
        machine: &Self,
        alloc_extra: &AllocExtra<'tcx>,
        ptr: Pointer,
        (alloc_id, prov_extra): (AllocId, Self::ProvenanceExtra),
        range: AllocRange,
    ) -> InterpResult<'tcx> {
        if machine.track_alloc_accesses && machine.tracked_alloc_ids.contains(&alloc_id) {
            machine
                .emit_diagnostic(NonHaltingDiagnostic::AccessedAlloc(alloc_id, AccessKind::Read));
        }
        // The order is deliberate: data race errors are reported before borrow tracker errors.
        match &machine.data_race {
            GlobalDataRaceHandler::None => {}
            GlobalDataRaceHandler::Genmc(genmc_ctx) =>
                genmc_ctx.memory_load(machine, ptr.addr(), range.size)?,
            GlobalDataRaceHandler::Vclocks(_data_race) => {
                let AllocDataRaceHandler::Vclocks(data_race, weak_memory) = &alloc_extra.data_race
                else {
                    unreachable!();
                };
                data_race.read(alloc_id, range, NaReadType::Read, None, machine)?;
                if let Some(weak_memory) = weak_memory {
                    weak_memory
                        .memory_accessed(range, machine.data_race.as_vclocks_ref().unwrap());
                }
            }
        }
        if let Some(borrow_tracker) = &alloc_extra.borrow_tracker {
            borrow_tracker.before_memory_read(alloc_id, prov_extra, range, machine)?;
        }
        interp_ok(())
    }

    #[inline(always)]
    fn before_memory_write(
        _tcx: TyCtxtAt<'tcx>,
        machine: &mut Self,
        alloc_extra: &mut AllocExtra<'tcx>,
        ptr: Pointer,
        (alloc_id, prov_extra): (AllocId, Self::ProvenanceExtra),
        range: AllocRange,
    ) -> InterpResult<'tcx> {
        if machine.track_alloc_accesses && machine.tracked_alloc_ids.contains(&alloc_id) {
            machine
                .emit_diagnostic(NonHaltingDiagnostic::AccessedAlloc(alloc_id, AccessKind::Write));
        }
        match &machine.data_race {
            GlobalDataRaceHandler::None => {}
            GlobalDataRaceHandler::Genmc(genmc_ctx) => {
                genmc_ctx.memory_store(machine, ptr.addr(), range.size)?;
            }
            GlobalDataRaceHandler::Vclocks(_global_state) => {
                let AllocDataRaceHandler::Vclocks(data_race, weak_memory) =
                    &mut alloc_extra.data_race
                else {
                    unreachable!()
                };
                data_race.write(alloc_id, range, NaWriteType::Write, None, machine)?;
                if let Some(weak_memory) = weak_memory {
                    weak_memory
                        .memory_accessed(range, machine.data_race.as_vclocks_ref().unwrap());
                }
            }
        }
        if let Some(borrow_tracker) = &mut alloc_extra.borrow_tracker {
            borrow_tracker.before_memory_write(alloc_id, prov_extra, range, machine)?;
        }
        interp_ok(())
    }

    #[inline(always)]
    fn before_memory_deallocation(
        _tcx: TyCtxtAt<'tcx>,
        machine: &mut Self,
        alloc_extra: &mut AllocExtra<'tcx>,
        ptr: Pointer,
        (alloc_id, prove_extra): (AllocId, Self::ProvenanceExtra),
        size: Size,
        align: Align,
        kind: MemoryKind,
    ) -> InterpResult<'tcx> {
        if machine.tracked_alloc_ids.contains(&alloc_id) {
            machine.emit_diagnostic(NonHaltingDiagnostic::FreedAlloc(alloc_id));
        }
        match &machine.data_race {
            GlobalDataRaceHandler::None => {}
            GlobalDataRaceHandler::Genmc(genmc_ctx) =>
                genmc_ctx.handle_dealloc(machine, ptr.addr(), size, align, kind)?,
            GlobalDataRaceHandler::Vclocks(_global_state) => {
                let data_race = alloc_extra.data_race.as_vclocks_mut().unwrap();
                data_race.write(
                    alloc_id,
                    alloc_range(Size::ZERO, size),
                    NaWriteType::Deallocate,
                    None,
                    machine,
                )?;
            }
        }
        if let Some(borrow_tracker) = &mut alloc_extra.borrow_tracker {
            borrow_tracker.before_memory_deallocation(alloc_id, prove_extra, size, machine)?;
        }
        if let Some((_, deallocated_at)) = machine.allocation_spans.borrow_mut().get_mut(&alloc_id)
        {
            *deallocated_at = Some(machine.current_span());
        }
        machine.free_alloc_id(alloc_id, size, align, kind);
        interp_ok(())
    }

    #[inline(always)]
    fn retag_ptr_value(
        ecx: &mut InterpCx<'tcx, Self>,
        kind: mir::RetagKind,
        val: &ImmTy<'tcx>,
    ) -> InterpResult<'tcx, ImmTy<'tcx>> {
        if ecx.machine.borrow_tracker.is_some() {
            ecx.retag_ptr_value(kind, val)
        } else {
            interp_ok(val.clone())
        }
    }

    #[inline(always)]
    fn retag_place_contents(
        ecx: &mut InterpCx<'tcx, Self>,
        kind: mir::RetagKind,
        place: &PlaceTy<'tcx>,
    ) -> InterpResult<'tcx> {
        if ecx.machine.borrow_tracker.is_some() {
            ecx.retag_place_contents(kind, place)?;
        }
        interp_ok(())
    }

    fn protect_in_place_function_argument(
        ecx: &mut InterpCx<'tcx, Self>,
        place: &MPlaceTy<'tcx>,
    ) -> InterpResult<'tcx> {
        // If we have a borrow tracker, we also have it set up protection so that all reads *and
        // writes* during this call are insta-UB.
        let protected_place = if ecx.machine.borrow_tracker.is_some() {
            ecx.protect_place(place)?
        } else {
            // No borrow tracker.
            place.clone()
        };
        // We do need to write `uninit` so that even after the call ends, the former contents of
        // this place cannot be observed any more.
        ecx.write_uninit(&protected_place)?;
        interp_ok(())
    }

    #[inline(always)]
    fn init_frame(
        ecx: &mut InterpCx<'tcx, Self>,
        frame: Frame<'tcx, Provenance>,
    ) -> InterpResult<'tcx, Frame<'tcx, Provenance, FrameExtra<'tcx>>> {
        // Start recording our event before doing anything else.
        let timing = if let Some(profiler) = ecx.machine.profiler.as_ref() {
            let fn_name = frame.instance().to_string();
            let entry = ecx.machine.string_cache.entry(fn_name.clone());
            let name = entry.or_insert_with(|| profiler.alloc_string(&*fn_name));

            Some(profiler.start_recording_interval_event_detached(
                *name,
                measureme::EventId::from_label(*name),
                ecx.active_thread().to_u32(),
            ))
        } else {
            None
        };

        let borrow_tracker = ecx.machine.borrow_tracker.as_ref();

        let extra = FrameExtra {
            borrow_tracker: borrow_tracker.map(|bt| bt.borrow_mut().new_frame()),
            catch_unwind: None,
            timing,
            is_user_relevant: ecx.machine.is_user_relevant(&frame),
            salt: ecx.machine.rng.borrow_mut().random_range(0..ADDRS_PER_ANON_GLOBAL),
            data_race: ecx
                .machine
                .data_race
                .as_vclocks_ref()
                .map(|_| data_race::FrameState::default()),
        };

        interp_ok(frame.with_extra(extra))
    }

    fn stack<'a>(
        ecx: &'a InterpCx<'tcx, Self>,
    ) -> &'a [Frame<'tcx, Self::Provenance, Self::FrameExtra>] {
        ecx.active_thread_stack()
    }

    fn stack_mut<'a>(
        ecx: &'a mut InterpCx<'tcx, Self>,
    ) -> &'a mut Vec<Frame<'tcx, Self::Provenance, Self::FrameExtra>> {
        ecx.active_thread_stack_mut()
    }

    fn before_terminator(ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx> {
        ecx.machine.basic_block_count += 1u64; // a u64 only ever incremented by 1 will not overflow
        ecx.machine.since_gc += 1;
        // Possibly report our progress.
        if let Some(report_progress) = ecx.machine.report_progress {
            if ecx.machine.basic_block_count.is_multiple_of(u64::from(report_progress)) {
                ecx.emit_diagnostic(NonHaltingDiagnostic::ProgressReport {
                    block_count: ecx.machine.basic_block_count,
                });
            }
        }

        // Periodically run the provenance GC to clean up dead borrow tags and other machine state.
        if ecx.machine.gc_interval > 0 && ecx.machine.since_gc >= ecx.machine.gc_interval {
            ecx.machine.since_gc = 0;
            ecx.run_provenance_gc();
        }

        // These are our preemption points.
        ecx.maybe_preempt_active_thread();

        // Make sure some time passes.
        ecx.machine.monotonic_clock.tick();

        interp_ok(())
    }

    #[inline(always)]
    fn after_stack_push(ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx> {
        if ecx.frame().extra.is_user_relevant {
            // We just pushed a user-relevant frame, so it is now the topmost user-relevant frame.
            let stack_len = ecx.active_thread_stack().len();
            ecx.active_thread_mut().set_top_user_relevant_frame(stack_len - 1);
        }
        interp_ok(())
    }

    fn before_stack_pop(ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx> {
        let frame = ecx.frame();
        // We want this *before* the return value copy, because the return place itself is
        // protected until we do `on_stack_pop` here, and we need to un-protect it to copy
        // the return value.
        if ecx.machine.borrow_tracker.is_some() {
            ecx.on_stack_pop(frame)?;
        }
        if frame.extra.is_user_relevant {
            // The topmost user-relevant frame may have changed, so recompute it.
            ecx.active_thread_mut().recompute_top_user_relevant_frame(1);
        }
        info!("Leaving {}", ecx.frame().instance());
        interp_ok(())
    }

    #[inline(always)]
    fn after_stack_pop(
        ecx: &mut InterpCx<'tcx, Self>,
        frame: Frame<'tcx, Provenance, FrameExtra<'tcx>>,
        unwinding: bool,
    ) -> InterpResult<'tcx, ReturnAction> {
        let res = {
            // Move `frame` into a sub-scope so we control when it will be dropped.
            let mut frame = frame;
            let timing = frame.extra.timing.take();
            let res = ecx.handle_stack_pop_unwind(frame.extra, unwinding);
            if let Some(profiler) = ecx.machine.profiler.as_ref() {
                profiler.finish_recording_interval_event(timing.unwrap());
            }
            res
        };
        if !ecx.active_thread_stack().is_empty() {
            info!("Continuing in {}", ecx.frame().instance());
        }
        res
    }

    fn after_local_read(
        ecx: &InterpCx<'tcx, Self>,
        frame: &Frame<'tcx, Provenance, FrameExtra<'tcx>>,
        local: mir::Local,
    ) -> InterpResult<'tcx> {
        if let Some(data_race) = &frame.extra.data_race {
            data_race.local_read(local, &ecx.machine);
        }
        interp_ok(())
    }

    fn after_local_write(
        ecx: &mut InterpCx<'tcx, Self>,
        local: mir::Local,
        storage_live: bool,
    ) -> InterpResult<'tcx> {
        if let Some(data_race) = &ecx.frame().extra.data_race {
            data_race.local_write(local, storage_live, &ecx.machine);
        }
        interp_ok(())
    }

    fn after_local_moved_to_memory(
        ecx: &mut InterpCx<'tcx, Self>,
        local: mir::Local,
        mplace: &MPlaceTy<'tcx>,
    ) -> InterpResult<'tcx> {
        let Some(Provenance::Concrete { alloc_id, .. }) = mplace.ptr().provenance else {
            panic!("after_local_allocated should only be called on fresh allocations");
        };
        // Record the span where this was allocated: the declaration of the local.
        let local_decl = &ecx.frame().body().local_decls[local];
        let span = local_decl.source_info.span;
        ecx.machine.allocation_spans.borrow_mut().insert(alloc_id, (span, None));
        // The data race system has to fix the clocks used for this write.
        let (alloc_info, machine) = ecx.get_alloc_extra_mut(alloc_id)?;
        if let Some(data_race) =
            &machine.threads.active_thread_stack().last().unwrap().extra.data_race
        {
            data_race.local_moved_to_memory(
                local,
                alloc_info.data_race.as_vclocks_mut().unwrap(),
                machine,
            );
        }
        interp_ok(())
    }

    fn eval_mir_constant<F>(
        ecx: &InterpCx<'tcx, Self>,
        val: mir::Const<'tcx>,
        span: Span,
        layout: Option<TyAndLayout<'tcx>>,
        eval: F,
    ) -> InterpResult<'tcx, OpTy<'tcx>>
    where
        F: Fn(
            &InterpCx<'tcx, Self>,
            mir::Const<'tcx>,
            Span,
            Option<TyAndLayout<'tcx>>,
        ) -> InterpResult<'tcx, OpTy<'tcx>>,
    {
        let frame = ecx.active_thread_stack().last().unwrap();
        let mut cache = ecx.machine.const_cache.borrow_mut();
        match cache.entry((val, frame.extra.salt)) {
            Entry::Vacant(ve) => {
                let op = eval(ecx, val, span, layout)?;
                ve.insert(op.clone());
                interp_ok(op)
            }
            Entry::Occupied(oe) => interp_ok(oe.get().clone()),
        }
    }

    fn get_global_alloc_salt(
        ecx: &InterpCx<'tcx, Self>,
        instance: Option<ty::Instance<'tcx>>,
    ) -> usize {
        let unique = if let Some(instance) = instance {
            // A function gets a unique address only if it is neither generic (ignoring lifetimes)
            // nor eligible for inlining, since in those cases codegen may emit duplicate copies
            // of the function anyway.
            let is_generic = instance
                .args
                .into_iter()
                .any(|arg| !matches!(arg.kind(), ty::GenericArgKind::Lifetime(_)));
            let can_be_inlined = matches!(
                ecx.tcx.sess.opts.unstable_opts.cross_crate_inline_threshold,
                InliningThreshold::Always
            ) || !matches!(
                ecx.tcx.codegen_fn_attrs(instance.def_id()).inline,
                InlineAttr::Never
            );
            !is_generic && !can_be_inlined
        } else {
            // Non-functions are never unique.
            false
        };
        // Unique allocations always use the same salt; all others get a random one.
        if unique {
            CTFE_ALLOC_SALT
        } else {
            ecx.machine.rng.borrow_mut().random_range(0..ADDRS_PER_ANON_GLOBAL)
        }
    }

    fn cached_union_data_range<'e>(
        ecx: &'e mut InterpCx<'tcx, Self>,
        ty: Ty<'tcx>,
        compute_range: impl FnOnce() -> RangeSet,
    ) -> Cow<'e, RangeSet> {
        Cow::Borrowed(ecx.machine.union_data_ranges.entry(ty).or_insert_with(compute_range))
    }

    fn get_default_alloc_params(&self) -> <Self::Bytes as AllocBytes>::AllocParams {
        use crate::alloc::MiriAllocParams;

        #[cfg(target_os = "linux")]
        match &self.allocator {
            Some(alloc) => MiriAllocParams::Isolated(alloc.clone()),
            None => MiriAllocParams::Global,
        }
        #[cfg(not(target_os = "linux"))]
        MiriAllocParams::Global
    }
}

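/// A callback that the machine invokes at some later point (e.g. when a blocked thread
/// gets unblocked); it consumes itself when called.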
pub trait MachineCallback<'tcx, T>: VisitProvenance {
    fn call(
        self: Box<Self>,
        ecx: &mut InterpCx<'tcx, MiriMachine<'tcx>>,
        arg: T,
    ) -> InterpResult<'tcx>;
}

pub type DynMachineCallback<'tcx, T> = Box<dyn MachineCallback<'tcx, T> + 'tcx>;

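/// Helper macro to build a `DynMachineCallback` without spelling out the callback struct
/// and its `VisitProvenance`/`MachineCallback` impls by hand.
///
/// A minimal usage sketch; the captured field, the argument type, and the body are
/// illustrative only, not taken from a real call site:
///
/// ```rust,ignore
/// callback!(
///     @capture<'tcx> {
///         result_place: MPlaceTy<'tcx>,
///     }
///     |this, unblock: UnblockKind| {
///         // `this` is the `MiriInterpCx`, `result_place` is the captured field.
///         this.write_scalar(Scalar::from_u32(0), &result_place)
///     }
/// );
/// ```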
#[macro_export]
macro_rules! callback {
    (@capture<$tcx:lifetime $(,)? $($lft:lifetime),*>
        { $($name:ident: $type:ty),* $(,)? }
        |$this:ident, $arg:ident: $arg_ty:ty| $body:expr $(,)?) => {{
        struct Callback<$tcx, $($lft),*> {
            $($name: $type,)*
            _phantom: std::marker::PhantomData<&$tcx ()>,
        }

        impl<$tcx, $($lft),*> VisitProvenance for Callback<$tcx, $($lft),*> {
            fn visit_provenance(&self, _visit: &mut VisitWith<'_>) {
                $(
                    self.$name.visit_provenance(_visit);
                )*
            }
        }

        impl<$tcx, $($lft),*> MachineCallback<$tcx, $arg_ty> for Callback<$tcx, $($lft),*> {
            fn call(
                self: Box<Self>,
                $this: &mut MiriInterpCx<$tcx>,
                $arg: $arg_ty
            ) -> InterpResult<$tcx> {
                #[allow(unused_variables)]
                let Callback { $($name,)* _phantom } = *self;
                $body
            }
        }

        Box::new(Callback {
            $($name,)*
            _phantom: std::marker::PhantomData
        })
    }};
}