1use std::any::Any;
5use std::borrow::Cow;
6use std::cell::{Cell, RefCell};
7use std::path::Path;
8use std::rc::Rc;
9use std::{fmt, process};
10
11use rand::rngs::StdRng;
12use rand::{Rng, SeedableRng};
13use rustc_abi::{Align, ExternAbi, Size};
14use rustc_apfloat::{Float, FloatConvert};
15use rustc_ast::expand::allocator::{self, SpecialAllocatorMethod};
16use rustc_data_structures::either::Either;
17use rustc_data_structures::fx::{FxHashMap, FxHashSet};
18#[allow(unused)]
19use rustc_data_structures::static_assert_size;
20use rustc_hir::attrs::InlineAttr;
21use rustc_log::tracing;
22use rustc_middle::middle::codegen_fn_attrs::TargetFeatureKind;
23use rustc_middle::mir;
24use rustc_middle::query::TyCtxtAt;
25use rustc_middle::ty::layout::{
26 HasTyCtxt, HasTypingEnv, LayoutCx, LayoutError, LayoutOf, TyAndLayout,
27};
28use rustc_middle::ty::{self, Instance, Ty, TyCtxt};
29use rustc_session::config::InliningThreshold;
30use rustc_span::def_id::{CrateNum, DefId};
31use rustc_span::{Span, SpanData, Symbol};
32use rustc_symbol_mangling::mangle_internal_symbol;
33use rustc_target::callconv::FnAbi;
34use rustc_target::spec::Arch;
35
36use crate::alloc_addresses::EvalContextExt;
37use crate::concurrency::cpu_affinity::{self, CpuAffinityMask};
38use crate::concurrency::data_race::{self, NaReadType, NaWriteType};
39use crate::concurrency::{
40 AllocDataRaceHandler, GenmcCtx, GenmcEvalContextExt as _, GlobalDataRaceHandler, weak_memory,
41};
42use crate::*;
43
/// Lowest real-time signal number we emulate.
/// NOTE(review): these values are Miri's own choice of an emulated RT-signal range,
/// not taken from any particular libc — confirm against the shims that use them.
pub const SIGRTMIN: i32 = 34;

/// Highest real-time signal number we emulate.
pub const SIGRTMAX: i32 = 42;

/// How many addresses to reserve for each anonymous global allocation.
/// NOTE(review): not used in this part of the file; presumably consumed by the
/// address-assignment logic in `alloc_addresses` — confirm.
const ADDRS_PER_ANON_GLOBAL: usize = 32;
58
/// Which alignment checks to perform on memory accesses.
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum AlignmentCheck {
    /// Do not check alignment.
    None,
    /// Check alignment "symbolically", based on the promised alignment of the allocation
    /// (see `Machine::alignment_check` below).
    Symbolic,
    /// Check alignment on the actual physical integer address.
    Int,
}

/// What to do when an operation that requires host communication is rejected.
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum RejectOpWith {
    /// Abort the machine when the isolated op is attempted.
    Abort,

    /// Reject the op with an error, but print no warning about it.
    NoWarning,

    /// Reject the op with an error and print a warning, including a backtrace.
    Warning,

    /// Reject the op with an error and print a warning without a backtrace.
    WarningWithoutBacktrace,
}

/// Whether operations that require communication with the host are allowed.
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum IsolatedOp {
    /// Reject the op; how the rejection is surfaced is given by `RejectOpWith`.
    Reject(RejectOpWith),

    /// Allow the op (i.e., isolation is disabled).
    Allow,
}

/// How verbose a diagnostic backtrace should be.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum BacktraceStyle {
    /// A pruned backtrace.
    Short,
    /// The full backtrace.
    Full,
    /// No backtrace at all.
    Off,
}

/// Whether and how deeply to enforce the validity invariant
/// (see `Machine::enforce_validity{,_recursively}` below).
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum ValidationMode {
    /// No validation.
    No,
    /// Validate values, but not recursively through references.
    Shallow,
    /// Validate values recursively, also behind references.
    Deep,
}

/// How to introduce rounding errors into float operations that permit them.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum FloatRoundingErrorMode {
    /// Apply a random error (see `apply_float_nondet` below).
    Random,
    /// Do not apply any error.
    /// NOTE(review): semantics inferred from the name — confirm at the use site.
    None,
    /// Always apply the maximum permitted error.
    /// NOTE(review): semantics inferred from the name — confirm at the use site.
    Max,
}
127
/// Extra data Miri stores with each stack frame.
pub struct FrameExtra<'tcx> {
    /// Borrow tracker (Stacked/Tree Borrows) state for this frame, if enabled.
    pub borrow_tracker: Option<borrow_tracker::FrameState>,

    /// Data needed for unwinding into this frame via `catch_unwind`, if any
    /// (see `CatchUnwindData`).
    pub catch_unwind: Option<CatchUnwindData<'tcx>>,

    /// `measureme` timing span for this frame, if profiling is enabled.
    pub timing: Option<measureme::DetachedTiming>,

    /// NOTE(review): a `u8` rather than a `bool`; presumably encodes how
    /// "user-relevant" this frame is for diagnostics — confirm the semantics
    /// where this is set.
    pub user_relevance: u8,

    /// Data race detector state for this frame, if enabled.
    pub data_race: Option<data_race::FrameState>,
}
151
152impl<'tcx> std::fmt::Debug for FrameExtra<'tcx> {
153 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
154 let FrameExtra { borrow_tracker, catch_unwind, timing: _, user_relevance, data_race } =
156 self;
157 f.debug_struct("FrameData")
158 .field("borrow_tracker", borrow_tracker)
159 .field("catch_unwind", catch_unwind)
160 .field("user_relevance", user_relevance)
161 .field("data_race", data_race)
162 .finish()
163 }
164}
165
impl VisitProvenance for FrameExtra<'_> {
    fn visit_provenance(&self, visit: &mut VisitWith<'_>) {
        // Exhaustive destructuring so adding a field forces a decision here;
        // fields bound as `_` are known not to contain provenance.
        let FrameExtra { catch_unwind, borrow_tracker, timing: _, user_relevance: _, data_race: _ } =
            self;

        catch_unwind.visit_provenance(visit);
        borrow_tracker.visit_provenance(visit);
    }
}
175
/// Extra memory kinds, on top of the interpreter-defined ones.
/// (The `Display` impl below gives each kind its human-readable name.)
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum MiriMemoryKind {
    /// The Rust heap.
    Rust,
    /// The "Miri bare-metal heap".
    Miri,
    /// The C heap.
    C,
    /// The Windows heap.
    WinHeap,
    /// Windows "local" memory.
    WinLocal,
    /// Memory the machine manages itself.
    Machine,
    /// Memory of the language runtime.
    Runtime,
    /// Globals (statics and consts).
    Global,
    /// Memory backing an extern static shim.
    ExternStatic,
    /// Memory backing a thread-local static.
    Tls,
    /// Memory obtained via `mmap`.
    Mmap,
}
207
208impl From<MiriMemoryKind> for MemoryKind {
209 #[inline(always)]
210 fn from(kind: MiriMemoryKind) -> MemoryKind {
211 MemoryKind::Machine(kind)
212 }
213}
214
215impl MayLeak for MiriMemoryKind {
216 #[inline(always)]
217 fn may_leak(self) -> bool {
218 use self::MiriMemoryKind::*;
219 match self {
220 Rust | Miri | C | WinHeap | WinLocal | Runtime => false,
221 Machine | Global | ExternStatic | Tls | Mmap => true,
222 }
223 }
224}
225
226impl MiriMemoryKind {
227 fn should_save_allocation_span(self) -> bool {
229 use self::MiriMemoryKind::*;
230 match self {
231 Rust | Miri | C | WinHeap | WinLocal | Mmap => true,
233 Machine | Global | ExternStatic | Tls | Runtime => false,
235 }
236 }
237}
238
239impl fmt::Display for MiriMemoryKind {
240 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
241 use self::MiriMemoryKind::*;
242 match self {
243 Rust => write!(f, "Rust heap"),
244 Miri => write!(f, "Miri bare-metal heap"),
245 C => write!(f, "C heap"),
246 WinHeap => write!(f, "Windows heap"),
247 WinLocal => write!(f, "Windows local memory"),
248 Machine => write!(f, "machine-managed memory"),
249 Runtime => write!(f, "language runtime memory"),
250 Global => write!(f, "global (static or const)"),
251 ExternStatic => write!(f, "extern static"),
252 Tls => write!(f, "thread-local static"),
253 Mmap => write!(f, "mmap"),
254 }
255 }
256}
257
/// The memory kind used by this machine.
pub type MemoryKind = interpret::MemoryKind<MiriMemoryKind>;

/// Pointer provenance.
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub enum Provenance {
    /// Concrete provenance: we know exactly which allocation the pointer belongs
    /// to, and which borrow tag it carries.
    Concrete {
        alloc_id: AllocId,
        /// Borrow tracker tag.
        tag: BorTag,
    },
    /// "Wildcard" provenance, for pointers whose exact provenance is unknown
    /// (merged/erased; see `join` below, and `WILDCARD` in the trait impl).
    Wildcard,
}

/// The "extra" side of provenance once the allocation ID has been separated out:
/// just the borrow tag, or wildcard.
#[derive(Copy, Clone, PartialEq)]
pub enum ProvenanceExtra {
    Concrete(BorTag),
    Wildcard,
}

// Make sure these types stay reasonably small; they are copied a lot.
#[cfg(target_pointer_width = "64")]
static_assert_size!(StrictPointer, 24);
#[cfg(target_pointer_width = "64")]
static_assert_size!(Scalar, 32);
307
308impl fmt::Debug for Provenance {
309 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
310 match self {
311 Provenance::Concrete { alloc_id, tag } => {
312 if f.alternate() {
314 write!(f, "[{alloc_id:#?}]")?;
315 } else {
316 write!(f, "[{alloc_id:?}]")?;
317 }
318 write!(f, "{tag:?}")?;
320 }
321 Provenance::Wildcard => {
322 write!(f, "[wildcard]")?;
323 }
324 }
325 Ok(())
326 }
327}
328
329impl interpret::Provenance for Provenance {
330 const OFFSET_IS_ADDR: bool = true;
332
333 const WILDCARD: Option<Self> = Some(Provenance::Wildcard);
335
336 fn get_alloc_id(self) -> Option<AllocId> {
337 match self {
338 Provenance::Concrete { alloc_id, .. } => Some(alloc_id),
339 Provenance::Wildcard => None,
340 }
341 }
342
343 fn fmt(ptr: &interpret::Pointer<Self>, f: &mut fmt::Formatter<'_>) -> fmt::Result {
344 let (prov, addr) = ptr.into_raw_parts(); write!(f, "{:#x}", addr.bytes())?;
346 if f.alternate() {
347 write!(f, "{prov:#?}")?;
348 } else {
349 write!(f, "{prov:?}")?;
350 }
351 Ok(())
352 }
353
354 fn join(left: Self, right: Self) -> Option<Self> {
355 match (left, right) {
356 (
358 Provenance::Concrete { alloc_id: left_alloc, tag: left_tag },
359 Provenance::Concrete { alloc_id: right_alloc, tag: right_tag },
360 ) if left_alloc == right_alloc && left_tag == right_tag => Some(left),
361 (Provenance::Wildcard, o) | (o, Provenance::Wildcard) => Some(o),
364 _ => None,
366 }
367 }
368}
369
370impl fmt::Debug for ProvenanceExtra {
371 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
372 match self {
373 ProvenanceExtra::Concrete(pid) => write!(f, "{pid:?}"),
374 ProvenanceExtra::Wildcard => write!(f, "<wildcard>"),
375 }
376 }
377}
378
379impl ProvenanceExtra {
380 pub fn and_then<T>(self, f: impl FnOnce(BorTag) -> Option<T>) -> Option<T> {
381 match self {
382 ProvenanceExtra::Concrete(pid) => f(pid),
383 ProvenanceExtra::Wildcard => None,
384 }
385 }
386}
387
/// Extra per-allocation data Miri maintains.
#[derive(Debug)]
pub struct AllocExtra<'tcx> {
    /// Borrow tracker state for this allocation, if enabled.
    pub borrow_tracker: Option<borrow_tracker::AllocState>,
    /// Data race detection state for this allocation (none / vector clocks / GenMC).
    pub data_race: AllocDataRaceHandler,
    /// Backtrace to where this allocation was created. Only populated for kinds
    /// that would be reported as leaks, and only if `collect_leak_backtraces` is
    /// enabled (see `init_allocation` below).
    pub backtrace: Option<Vec<FrameInfo<'tcx>>>,
    /// Extra data attached to particular offsets inside this allocation.
    /// NOTE(review): the value type is opaque (`Box<dyn Any>`); presumably this
    /// stores synchronization-primitive state keyed by offset — confirm at the
    /// use sites in the sync shims.
    pub sync: FxHashMap<Size, Box<dyn Any>>,
}

impl<'tcx> Clone for AllocExtra<'tcx> {
    // NOTE(review): presumably `Clone` is only required by a trait bound;
    // actually cloning one of our allocations would be a bug, hence the panic.
    fn clone(&self) -> Self {
        panic!("our allocations should never be cloned");
    }
}

impl VisitProvenance for AllocExtra<'_> {
    fn visit_provenance(&self, visit: &mut VisitWith<'_>) {
        // Exhaustive destructuring; fields bound as `_` contain no provenance.
        let AllocExtra { borrow_tracker, data_race, backtrace: _, sync: _ } = self;

        borrow_tracker.visit_provenance(visit);
        data_race.visit_provenance(visit);
    }
}
425
/// Precomputed layouts of primitive types that Miri queries frequently.
pub struct PrimitiveLayouts<'tcx> {
    pub unit: TyAndLayout<'tcx>,
    pub i8: TyAndLayout<'tcx>,
    pub i16: TyAndLayout<'tcx>,
    pub i32: TyAndLayout<'tcx>,
    pub i64: TyAndLayout<'tcx>,
    pub i128: TyAndLayout<'tcx>,
    pub isize: TyAndLayout<'tcx>,
    pub u8: TyAndLayout<'tcx>,
    pub u16: TyAndLayout<'tcx>,
    pub u32: TyAndLayout<'tcx>,
    pub u64: TyAndLayout<'tcx>,
    pub u128: TyAndLayout<'tcx>,
    pub usize: TyAndLayout<'tcx>,
    pub bool: TyAndLayout<'tcx>,
    // Layouts of `*mut ()` and `*const ()` respectively.
    pub mut_raw_ptr: TyAndLayout<'tcx>,
    pub const_raw_ptr: TyAndLayout<'tcx>,
}

impl<'tcx> PrimitiveLayouts<'tcx> {
    /// Compute all primitive layouts once, up front.
    fn new(layout_cx: LayoutCx<'tcx>) -> Result<Self, &'tcx LayoutError<'tcx>> {
        let tcx = layout_cx.tcx();
        let mut_raw_ptr = Ty::new_mut_ptr(tcx, tcx.types.unit);
        let const_raw_ptr = Ty::new_imm_ptr(tcx, tcx.types.unit);
        Ok(Self {
            unit: layout_cx.layout_of(tcx.types.unit)?,
            i8: layout_cx.layout_of(tcx.types.i8)?,
            i16: layout_cx.layout_of(tcx.types.i16)?,
            i32: layout_cx.layout_of(tcx.types.i32)?,
            i64: layout_cx.layout_of(tcx.types.i64)?,
            i128: layout_cx.layout_of(tcx.types.i128)?,
            isize: layout_cx.layout_of(tcx.types.isize)?,
            u8: layout_cx.layout_of(tcx.types.u8)?,
            u16: layout_cx.layout_of(tcx.types.u16)?,
            u32: layout_cx.layout_of(tcx.types.u32)?,
            u64: layout_cx.layout_of(tcx.types.u64)?,
            u128: layout_cx.layout_of(tcx.types.u128)?,
            usize: layout_cx.layout_of(tcx.types.usize)?,
            bool: layout_cx.layout_of(tcx.types.bool)?,
            mut_raw_ptr: layout_cx.layout_of(mut_raw_ptr)?,
            const_raw_ptr: layout_cx.layout_of(const_raw_ptr)?,
        })
    }

    /// The layout of the unsigned integer type with the given size, if any.
    pub fn uint(&self, size: Size) -> Option<TyAndLayout<'tcx>> {
        match size.bits() {
            8 => Some(self.u8),
            16 => Some(self.u16),
            32 => Some(self.u32),
            64 => Some(self.u64),
            128 => Some(self.u128),
            _ => None,
        }
    }

    /// The layout of the signed integer type with the given size, if any.
    pub fn int(&self, size: Size) -> Option<TyAndLayout<'tcx>> {
        match size.bits() {
            8 => Some(self.i8),
            16 => Some(self.i16),
            32 => Some(self.i32),
            64 => Some(self.i64),
            128 => Some(self.i128),
            _ => None,
        }
    }
}
493
/// The machine state maintained across the whole execution.
///
/// If you add a field here that can store machine values, remember to also
/// update the `VisitProvenance` impl below!
pub struct MiriMachine<'tcx> {
    /// The global compiler context.
    pub tcx: TyCtxt<'tcx>,

    /// Global state of the borrow tracker, if enabled.
    pub borrow_tracker: Option<borrow_tracker::GlobalState>,

    /// Global data race handler: disabled, vector-clock based, or GenMC.
    pub data_race: GlobalDataRaceHandler,

    /// Global state for assigning base addresses to allocations.
    pub alloc_addresses: alloc_addresses::GlobalState,

    /// The emulated environment variables.
    pub(crate) env_vars: EnvVars<'tcx>,

    /// Return place of the `main` function.
    pub(crate) main_fn_ret_place: Option<MPlaceTy<'tcx>>,

    /// Program arguments; `None` until they are set up.
    /// NOTE(review): `cmd_line` is presumably the Windows-style command line — confirm.
    pub(crate) argc: Option<Pointer>,
    pub(crate) argv: Option<Pointer>,
    pub(crate) cmd_line: Option<Pointer>,

    /// TLS state.
    pub(crate) tls: TlsData<'tcx>,

    /// What to do when an op requires communicating with the host.
    pub(crate) isolated_op: IsolatedOp,

    /// Whether (and how deeply) to enforce the validity invariant.
    pub(crate) validation: ValidationMode,

    /// Table of open file descriptors.
    pub(crate) fds: shims::FdTable,
    /// Table of open directory handles.
    pub(crate) dirs: shims::DirTable,

    /// Table of epoll interests.
    pub(crate) epoll_interests: shims::EpollInterestTable,

    /// The emulated monotonic clock.
    pub(crate) monotonic_clock: MonotonicClock,

    /// The thread manager.
    pub(crate) threads: ThreadManager<'tcx>,

    /// Per-thread CPU affinity mask; only populated on certain targets (see `new`).
    /// NOTE(review): presumably only consulted by the affinity shims — confirm.
    pub(crate) thread_cpu_affinity: FxHashMap<ThreadId, CpuAffinityMask>,

    /// Precomputed layouts of commonly used primitive types.
    pub(crate) layouts: PrimitiveLayouts<'tcx>,

    /// Allocations treated as roots for leak-checking purposes.
    /// NOTE(review): inferred from the name — confirm at the use site.
    pub(crate) static_roots: Vec<AllocId>,

    /// `measureme` profiler, if `measureme_out` was configured (see `new`).
    profiler: Option<measureme::Profiler>,
    /// Interned `measureme` string IDs, keyed by the original string.
    string_cache: FxHashMap<String, measureme::StringId>,

    /// Cache of resolved exported symbols; `None` means the lookup failed.
    pub(crate) exported_symbols_cache: FxHashMap<Symbol, Option<Instance<'tcx>>>,

    /// How verbose diagnostic backtraces should be.
    pub(crate) backtrace_style: BacktraceStyle,

    /// Crates considered "user-relevant" for diagnostics (from `MIRI_LOCAL_CRATES`
    /// and the config; see `get_user_relevant_crates`).
    pub(crate) user_relevant_crates: Vec<CrateNum>,

    /// Maps each extern static's link name to its shim pointer
    /// (see `add_extern_static`).
    extern_statics: FxHashMap<Symbol, StrictPointer>,

    /// RNG for all of Miri's non-determinism; seeded from `config.seed`
    /// (default 0, see `new`), hence reproducible.
    pub(crate) rng: RefCell<StdRng>,

    /// Isolated allocator; only `Some` when native libraries are loaded (see `new`).
    pub(crate) allocator: Option<Rc<RefCell<crate::alloc::isolated_alloc::IsolatedAlloc>>>,

    /// Allocation IDs whose lifecycle events should be reported (see `init_allocation`).
    pub(crate) tracked_alloc_ids: FxHashSet<AllocId>,
    /// Whether to also report accesses to the tracked allocations.
    track_alloc_accesses: bool,

    /// Which alignment checks to perform.
    pub(crate) check_alignment: AlignmentCheck,

    /// Probability that a weak compare-exchange spuriously fails.
    pub(crate) cmpxchg_weak_failure_rate: f64,

    /// Probability of preempting the active thread.
    /// NOTE(review): sampling point (per basic block?) not visible here — confirm.
    pub(crate) preemption_rate: f64,

    /// If `Some(n)`, report progress every `n` basic blocks.
    pub(crate) report_progress: Option<u32>,
    /// Running count of executed basic blocks.
    /// NOTE(review): reset semantics not visible here — confirm.
    pub(crate) basic_block_count: u64,

    /// Handles (and paths) of loaded native libraries; only in native-lib builds.
    #[cfg(all(unix, feature = "native-lib"))]
    pub native_lib: Vec<(libloading::Library, std::path::PathBuf)>,
    #[cfg(not(all(unix, feature = "native-lib")))]
    pub native_lib: Vec<!>,

    /// Provenance-GC interval, measured in basic blocks.
    /// NOTE(review): inferred from the names — confirm at the GC trigger site.
    pub(crate) gc_interval: u32,
    /// Basic blocks executed since the last GC run.
    pub(crate) since_gc: u32,

    /// Number of CPUs to report to the program.
    pub(crate) num_cpus: u32,

    /// Emulated page size, and address/size of the emulated main stack as
    /// reported to the program (all computed in `new`).
    pub(crate) page_size: u64,
    pub(crate) stack_addr: u64,
    pub(crate) stack_size: u64,

    /// Whether to record a backtrace at each allocation, for leak reports.
    pub(crate) collect_leak_backtraces: bool,

    /// Per-allocation (allocation span, deallocation span) for diagnostics.
    pub(crate) allocation_spans: RefCell<FxHashMap<AllocId, (Span, Option<Span>)>>,

    /// Per-allocation promised (offset, alignment), used by symbolic alignment
    /// checking (see `alignment_check` below).
    pub(crate) symbolic_alignment: RefCell<FxHashMap<AllocId, (Size, Align)>>,

    /// Cache of per-type data ranges for unions.
    /// NOTE(review): inferred from the name — confirm at the use site.
    union_data_ranges: FxHashMap<Ty<'tcx>, RangeSet>,

    /// One-shot flags for pthread-related sanity diagnostics; `Cell` so they
    /// can be flipped from `&self` contexts.
    /// NOTE(review): exact meaning inferred from the names — confirm in the pthread shims.
    pub(crate) pthread_mutex_sanity: Cell<bool>,
    pub(crate) pthread_rwlock_sanity: Cell<bool>,
    pub(crate) pthread_condvar_sanity: Cell<bool>,

    /// Maps each allocator-shim symbol (e.g. `__rust_alloc`) to either the
    /// mangled name of its default implementation or a "special" method
    /// (see `allocator_shim_symbols` below).
    pub(crate) allocator_shim_symbols: FxHashMap<Symbol, Either<Symbol, SpecialAllocatorMethod>>,
    /// Cache for `mangle_internal_symbol` results.
    pub(crate) mangle_internal_symbol_cache: FxHashMap<&'static str, String>,

    /// Always prefer an intrinsic's fallback body over Miri's native implementation.
    pub force_intrinsic_fallback: bool,

    /// Whether float operations may behave non-deterministically, and how
    /// rounding errors are introduced when they do.
    pub float_nondet: bool,
    pub float_rounding_error: FloatRoundingErrorMode,

    /// Whether fd reads/writes may process fewer bytes than requested.
    /// NOTE(review): inferred from the name — confirm in the fd shims.
    pub short_fd_operations: bool,
}
671
672impl<'tcx> MiriMachine<'tcx> {
    /// Create a fresh machine from the given configuration.
    ///
    /// `genmc_ctx` must be `Some` exactly when `config.genmc_config` is set
    /// (it is `unwrap`ped below in that case).
    pub(crate) fn new(
        config: &MiriConfig,
        layout_cx: LayoutCx<'tcx>,
        genmc_ctx: Option<Rc<GenmcCtx>>,
    ) -> Self {
        let tcx = layout_cx.tcx();
        let user_relevant_crates = Self::get_user_relevant_crates(tcx, config);
        let layouts =
            PrimitiveLayouts::new(layout_cx).expect("Couldn't get layouts of primitive types");
        let profiler = config.measureme_out.as_ref().map(|out| {
            let crate_name =
                tcx.sess.opts.crate_name.clone().unwrap_or_else(|| "unknown-crate".to_string());
            let pid = process::id();
            // Zero-pad the PID so profile filenames sort consistently.
            let filename = format!("{crate_name}-{pid:07}");
            let path = Path::new(out).join(filename);
            measureme::Profiler::new(path).expect("Couldn't create `measureme` profiler")
        });
        // Default seed 0 keeps unseeded runs deterministic.
        let rng = StdRng::seed_from_u64(config.seed.unwrap_or(0));
        let borrow_tracker = config.borrow_tracker.map(|bt| bt.instantiate_global_state(config));
        let data_race = if config.genmc_config.is_some() {
            // GenMC mode subsumes data race detection; the context must be provided.
            GlobalDataRaceHandler::Genmc(genmc_ctx.unwrap())
        } else if config.data_race_detector {
            GlobalDataRaceHandler::Vclocks(Box::new(data_race::GlobalState::new(config)))
        } else {
            GlobalDataRaceHandler::None
        };
        // Use the configured page size, or a per-target default.
        let page_size = if let Some(page_size) = config.page_size {
            page_size
        } else {
            let target = &tcx.sess.target;
            match target.arch {
                Arch::Wasm32 | Arch::Wasm64 => 64 * 1024,
                Arch::AArch64 => {
                    if target.options.vendor.as_ref() == "apple" {
                        16 * 1024
                    } else {
                        4 * 1024
                    }
                }
                _ => 4 * 1024,
            }
        };
        // Place the emulated stack lower (and make it smaller) on 16-bit-pointer targets.
        let stack_addr = if tcx.pointer_size().bits() < 32 { page_size } else { page_size * 32 };
        let stack_size =
            if tcx.pointer_size().bits() < 32 { page_size * 4 } else { page_size * 16 };
        assert!(
            usize::try_from(config.num_cpus).unwrap() <= cpu_affinity::MAX_CPUS,
            "miri only supports up to {} CPUs, but {} were configured",
            cpu_affinity::MAX_CPUS,
            config.num_cpus
        );
        let threads = ThreadManager::new(config);
        let mut thread_cpu_affinity = FxHashMap::default();
        // Only targets with affinity shims get an initial mask for the main thread.
        if matches!(&*tcx.sess.target.os, "linux" | "freebsd" | "android") {
            thread_cpu_affinity
                .insert(threads.active_thread(), CpuAffinityMask::new(&layout_cx, config.num_cpus));
        }
        let alloc_addresses =
            RefCell::new(alloc_addresses::GlobalStateInner::new(config, stack_addr, tcx));
        MiriMachine {
            tcx,
            borrow_tracker,
            data_race,
            alloc_addresses,
            env_vars: EnvVars::default(),
            main_fn_ret_place: None,
            argc: None,
            argv: None,
            cmd_line: None,
            tls: TlsData::default(),
            isolated_op: config.isolated_op,
            validation: config.validation,
            fds: shims::FdTable::init(config.mute_stdout_stderr),
            epoll_interests: shims::EpollInterestTable::new(),
            dirs: Default::default(),
            layouts,
            threads,
            thread_cpu_affinity,
            static_roots: Vec::new(),
            profiler,
            string_cache: Default::default(),
            exported_symbols_cache: FxHashMap::default(),
            backtrace_style: config.backtrace_style,
            user_relevant_crates,
            extern_statics: FxHashMap::default(),
            rng: RefCell::new(rng),
            // The isolated allocator is only needed when native libraries are involved.
            allocator: (!config.native_lib.is_empty())
                .then(|| Rc::new(RefCell::new(crate::alloc::isolated_alloc::IsolatedAlloc::new()))),
            tracked_alloc_ids: config.tracked_alloc_ids.clone(),
            track_alloc_accesses: config.track_alloc_accesses,
            check_alignment: config.check_alignment,
            cmpxchg_weak_failure_rate: config.cmpxchg_weak_failure_rate,
            preemption_rate: config.preemption_rate,
            report_progress: config.report_progress,
            basic_block_count: 0,
            monotonic_clock: MonotonicClock::new(config.isolated_op == IsolatedOp::Allow),
            #[cfg(all(unix, feature = "native-lib"))]
            native_lib: config.native_lib.iter().map(|lib_file_path| {
                let host_triple = rustc_session::config::host_tuple();
                let target_triple = tcx.sess.opts.target_triple.tuple();
                // Cross-target execution of native code is not possible.
                if host_triple != target_triple {
                    panic!(
                        "calling native C functions in linked .so file requires host and target to be the same: \
                        host={host_triple}, target={target_triple}",
                    );
                }
                (
                    unsafe {
                        libloading::Library::new(lib_file_path)
                            .expect("failed to read specified extern shared object file")
                    },
                    lib_file_path.clone(),
                )
            }).collect(),
            #[cfg(not(all(unix, feature = "native-lib")))]
            native_lib: config.native_lib.iter().map(|_| {
                panic!("calling functions from native libraries via FFI is not supported in this build of Miri")
            }).collect(),
            gc_interval: config.gc_interval,
            since_gc: 0,
            num_cpus: config.num_cpus,
            page_size,
            stack_addr,
            stack_size,
            collect_leak_backtraces: config.collect_leak_backtraces,
            allocation_spans: RefCell::new(FxHashMap::default()),
            symbolic_alignment: RefCell::new(FxHashMap::default()),
            union_data_ranges: FxHashMap::default(),
            pthread_mutex_sanity: Cell::new(false),
            pthread_rwlock_sanity: Cell::new(false),
            pthread_condvar_sanity: Cell::new(false),
            allocator_shim_symbols: Self::allocator_shim_symbols(tcx),
            mangle_internal_symbol_cache: Default::default(),
            force_intrinsic_fallback: config.force_intrinsic_fallback,
            float_nondet: config.float_nondet,
            float_rounding_error: config.float_rounding_error,
            short_fd_operations: config.short_fd_operations,
        }
    }

    /// Compute the map of allocator-shim symbols: each shim's mangled name maps
    /// either to the mangled default implementation or to a "special" method.
    fn allocator_shim_symbols(
        tcx: TyCtxt<'tcx>,
    ) -> FxHashMap<Symbol, Either<Symbol, SpecialAllocatorMethod>> {
        use rustc_codegen_ssa::base::allocator_shim_contents;

        // No allocator shim means nothing to resolve.
        let Some(kind) = tcx.allocator_kind(()) else {
            return Default::default();
        };
        let methods = allocator_shim_contents(tcx, kind);
        let mut symbols = FxHashMap::default();
        for method in methods {
            let from_name = Symbol::intern(&mangle_internal_symbol(
                tcx,
                &allocator::global_fn_name(method.name),
            ));
            let to = match method.special {
                Some(special) => Either::Right(special),
                None =>
                    Either::Left(Symbol::intern(&mangle_internal_symbol(
                        tcx,
                        &allocator::default_fn_name(method.name),
                    ))),
            };
            // Each method name must be unique; a duplicate would be a bug.
            symbols.try_insert(from_name, to).unwrap();
        }
        symbols
    }
862
863 fn get_user_relevant_crates(tcx: TyCtxt<'_>, config: &MiriConfig) -> Vec<CrateNum> {
866 let local_crate_names = std::env::var("MIRI_LOCAL_CRATES")
869 .map(|crates| crates.split(',').map(|krate| krate.to_string()).collect::<Vec<_>>())
870 .unwrap_or_default();
871 let mut local_crates = Vec::new();
872 for &crate_num in tcx.crates(()) {
873 let name = tcx.crate_name(crate_num);
874 let name = name.as_str();
875 if local_crate_names
876 .iter()
877 .chain(&config.user_relevant_crates)
878 .any(|local_name| local_name == name)
879 {
880 local_crates.push(crate_num);
881 }
882 }
883 local_crates
884 }
885
    /// Late initialization steps that need a fully constructed interpreter
    /// context: environment variables, extern static shims, and the thread manager.
    pub(crate) fn late_init(
        ecx: &mut MiriInterpCx<'tcx>,
        config: &MiriConfig,
        on_main_stack_empty: StackEmptyCallback<'tcx>,
    ) -> InterpResult<'tcx> {
        EnvVars::init(ecx, config)?;
        MiriMachine::init_extern_statics(ecx)?;
        ThreadManager::init(ecx, on_main_stack_empty);
        interp_ok(())
    }

    /// Register `ptr` as the shim backing the extern static with the given link name.
    pub(crate) fn add_extern_static(ecx: &mut MiriInterpCx<'tcx>, name: &str, ptr: Pointer) {
        // The pointer must carry provenance (not be a raw address), or this fails.
        let ptr = ptr.into_pointer_or_addr().unwrap();
        // `try_insert` + `unwrap`: registering the same name twice is a bug.
        ecx.machine.extern_statics.try_insert(Symbol::intern(name), ptr).unwrap();
    }

    /// Whether the machine may communicate with the host (isolation disabled).
    pub(crate) fn communicate(&self) -> bool {
        self.isolated_op == IsolatedOp::Allow
    }

    /// Whether this function counts as "local" for diagnostics: defined in the
    /// crate being interpreted, or in one of the user-relevant crates.
    pub(crate) fn is_local(&self, instance: ty::Instance<'tcx>) -> bool {
        let def_id = instance.def_id();
        def_id.is_local() || self.user_relevant_crates.contains(&def_id.krate)
    }

    /// Called when the interpreter terminates abnormally.
    pub(crate) fn handle_abnormal_termination(&mut self) {
        // Drop the profiler so its data gets written out.
        // NOTE(review): relies on measureme flushing on drop — confirm.
        drop(self.profiler.take());
    }

    /// The alignment corresponding to the emulated page size.
    pub(crate) fn page_align(&self) -> Align {
        // NOTE(review): assumes `page_size` is a power of two; true for the
        // defaults computed in `new`, but a user-configured page size is
        // trusted here — confirm it is validated upstream.
        Align::from_bytes(self.page_size).unwrap()
    }
925
926 pub(crate) fn allocated_span(&self, alloc_id: AllocId) -> Option<SpanData> {
927 self.allocation_spans
928 .borrow()
929 .get(&alloc_id)
930 .map(|(allocated, _deallocated)| allocated.data())
931 }
932
933 pub(crate) fn deallocated_span(&self, alloc_id: AllocId) -> Option<SpanData> {
934 self.allocation_spans
935 .borrow()
936 .get(&alloc_id)
937 .and_then(|(_allocated, deallocated)| *deallocated)
938 .map(Span::data)
939 }
940
    /// Build the `AllocExtra` for a newly created allocation, wiring up the
    /// borrow tracker, data race detection, leak backtraces, and span tracking.
    fn init_allocation(
        ecx: &MiriInterpCx<'tcx>,
        id: AllocId,
        kind: MemoryKind,
        size: Size,
        align: Align,
    ) -> InterpResult<'tcx, AllocExtra<'tcx>> {
        // Report tracked allocations as they come into existence.
        if ecx.machine.tracked_alloc_ids.contains(&id) {
            ecx.emit_diagnostic(NonHaltingDiagnostic::TrackingAlloc(id, size, align));
        }

        let borrow_tracker = ecx
            .machine
            .borrow_tracker
            .as_ref()
            .map(|bt| bt.borrow_mut().new_allocation(id, size, kind, &ecx.machine));

        let data_race = match &ecx.machine.data_race {
            GlobalDataRaceHandler::None => AllocDataRaceHandler::None,
            GlobalDataRaceHandler::Vclocks(data_race) =>
                AllocDataRaceHandler::Vclocks(
                    data_race::AllocState::new_allocation(
                        data_race,
                        &ecx.machine.threads,
                        size,
                        kind,
                        ecx.machine.current_user_relevant_span(),
                    ),
                    // Weak-memory tracking is only set up if it is enabled.
                    data_race.weak_memory.then(weak_memory::AllocState::new_allocation),
                ),
            GlobalDataRaceHandler::Genmc(_genmc_ctx) => {
                AllocDataRaceHandler::Genmc
            }
        };

        // Only collect a backtrace for allocations that could be reported as
        // leaks, and only if the user asked for leak backtraces.
        let backtrace = if kind.may_leak() || !ecx.machine.collect_leak_backtraces {
            None
        } else {
            Some(ecx.generate_stacktrace())
        };

        // Remember where user-visible kinds of memory were allocated, for diagnostics.
        if matches!(kind, MemoryKind::Machine(kind) if kind.should_save_allocation_span()) {
            ecx.machine
                .allocation_spans
                .borrow_mut()
                .insert(id, (ecx.machine.current_user_relevant_span(), None));
        }

        interp_ok(AllocExtra { borrow_tracker, data_race, backtrace, sync: FxHashMap::default() })
    }
996}
997
impl VisitProvenance for MiriMachine<'_> {
    fn visit_provenance(&self, visit: &mut VisitWith<'_>) {
        // Exhaustive destructuring: whoever adds a field to `MiriMachine` must
        // decide here whether it can contain provenance. Fields bound as `_`
        // are known not to.
        #[rustfmt::skip]
        let MiriMachine {
            threads,
            thread_cpu_affinity: _,
            tls,
            env_vars,
            main_fn_ret_place,
            argc,
            argv,
            cmd_line,
            extern_statics,
            dirs,
            borrow_tracker,
            data_race,
            alloc_addresses,
            fds,
            epoll_interests: _,
            tcx: _,
            isolated_op: _,
            validation: _,
            monotonic_clock: _,
            layouts: _,
            static_roots: _,
            profiler: _,
            string_cache: _,
            exported_symbols_cache: _,
            backtrace_style: _,
            user_relevant_crates: _,
            rng: _,
            allocator: _,
            tracked_alloc_ids: _,
            track_alloc_accesses: _,
            check_alignment: _,
            cmpxchg_weak_failure_rate: _,
            preemption_rate: _,
            report_progress: _,
            basic_block_count: _,
            native_lib: _,
            gc_interval: _,
            since_gc: _,
            num_cpus: _,
            page_size: _,
            stack_addr: _,
            stack_size: _,
            collect_leak_backtraces: _,
            allocation_spans: _,
            symbolic_alignment: _,
            union_data_ranges: _,
            pthread_mutex_sanity: _,
            pthread_rwlock_sanity: _,
            pthread_condvar_sanity: _,
            allocator_shim_symbols: _,
            mangle_internal_symbol_cache: _,
            force_intrinsic_fallback: _,
            float_nondet: _,
            float_rounding_error: _,
            short_fd_operations: _,
        } = self;

        threads.visit_provenance(visit);
        tls.visit_provenance(visit);
        env_vars.visit_provenance(visit);
        dirs.visit_provenance(visit);
        fds.visit_provenance(visit);
        data_race.visit_provenance(visit);
        borrow_tracker.visit_provenance(visit);
        alloc_addresses.visit_provenance(visit);
        main_fn_ret_place.visit_provenance(visit);
        argc.visit_provenance(visit);
        argv.visit_provenance(visit);
        cmd_line.visit_provenance(visit);
        for ptr in extern_statics.values() {
            ptr.visit_provenance(visit);
        }
    }
}
1076
/// The interpreter context specialized to Miri's machine.
pub type MiriInterpCx<'tcx> = InterpCx<'tcx, MiriMachine<'tcx>>;

/// Convenience trait so extension traits can uniformly obtain the interpreter
/// context from whatever `self` type they are implemented on.
pub trait MiriInterpCxExt<'tcx> {
    fn eval_context_ref<'a>(&'a self) -> &'a MiriInterpCx<'tcx>;
    fn eval_context_mut<'a>(&'a mut self) -> &'a mut MiriInterpCx<'tcx>;
}
impl<'tcx> MiriInterpCxExt<'tcx> for MiriInterpCx<'tcx> {
    // Trivial identity impls: the context *is* the interpreter context.
    #[inline(always)]
    fn eval_context_ref(&self) -> &MiriInterpCx<'tcx> {
        self
    }
    #[inline(always)]
    fn eval_context_mut(&mut self) -> &mut MiriInterpCx<'tcx> {
        self
    }
}
1095
/// Machine hook implementations for the interpreter.
impl<'tcx> Machine<'tcx> for MiriMachine<'tcx> {
    /// Miri-specific memory kinds; see `MiriMemoryKind` above.
    type MemoryKind = MiriMemoryKind;
    /// "Extra" function values, dispatched by symbol in `call_extra_fn` below.
    type ExtraFnVal = DynSym;

    type FrameExtra = FrameExtra<'tcx>;
    type AllocExtra = AllocExtra<'tcx>;

    type Provenance = Provenance;
    type ProvenanceExtra = ProvenanceExtra;
    /// Allocation bytes backed by Miri's own allocation type.
    type Bytes = MiriAllocBytes;

    /// The memory map; `MonoHashMap` provides the interior mutability the
    /// interpreter needs for this.
    type MemoryMap =
        MonoHashMap<AllocId, (MemoryKind, Allocation<Provenance, Self::AllocExtra, Self::Bytes>)>;

    /// Globals live in memory of kind `Global`.
    const GLOBAL_KIND: Option<MiriMemoryKind> = Some(MiriMemoryKind::Global);

    /// Report allocation failures as interpreter errors rather than aborting.
    const PANIC_ON_ALLOC_FAIL: bool = false;
1114
    /// Whether memory accesses should be alignment-checked at all.
    #[inline(always)]
    fn enforce_alignment(ecx: &MiriInterpCx<'tcx>) -> bool {
        ecx.machine.check_alignment != AlignmentCheck::None
    }

    /// Custom alignment check used in "symbolic" mode: instead of the integer
    /// address, use the allocation's promised (offset, alignment) pair.
    #[inline(always)]
    fn alignment_check(
        ecx: &MiriInterpCx<'tcx>,
        alloc_id: AllocId,
        alloc_align: Align,
        alloc_kind: AllocKind,
        offset: Size,
        align: Align,
    ) -> Option<Misalignment> {
        // This hook only takes over in symbolic mode; otherwise the default
        // (integer-address) check applies.
        if ecx.machine.check_alignment != AlignmentCheck::Symbolic {
            return None;
        }
        if alloc_kind != AllocKind::LiveData {
            return None;
        }
        // Default promise: offset 0 is aligned to the allocation's alignment.
        let (promised_offset, promised_align) = ecx
            .machine
            .symbolic_alignment
            .borrow()
            .get(&alloc_id)
            .copied()
            .unwrap_or((Size::ZERO, alloc_align));
        if promised_align < align {
            // The allocation cannot satisfy the required alignment anywhere.
            Some(Misalignment { has: promised_align, required: align })
        } else {
            // Check whether this access lands on a suitably aligned distance
            // from the promised-aligned offset.
            let distance = offset.bytes().wrapping_sub(promised_offset.bytes());
            if distance.is_multiple_of(align.bytes()) {
                None
            } else {
                // `distance` is nonzero here (zero would be a multiple), so
                // `trailing_zeros` gives the largest power of two dividing it.
                let distance_pow2 = 1 << distance.trailing_zeros();
                Some(Misalignment {
                    has: Align::from_bytes(distance_pow2).unwrap(),
                    required: align,
                })
            }
        }
    }

    /// Whether to validate values at all.
    #[inline(always)]
    fn enforce_validity(ecx: &MiriInterpCx<'tcx>, _layout: TyAndLayout<'tcx>) -> bool {
        ecx.machine.validation != ValidationMode::No
    }
    /// Whether validation should recurse through references.
    #[inline(always)]
    fn enforce_validity_recursively(
        ecx: &InterpCx<'tcx, Self>,
        _layout: TyAndLayout<'tcx>,
    ) -> bool {
        ecx.machine.validation == ValidationMode::Deep
    }

    /// Skip optional (debug-assertion style) overflow checks when the session
    /// has overflow checks disabled.
    #[inline(always)]
    fn ignore_optional_overflow_checks(ecx: &MiriInterpCx<'tcx>) -> bool {
        !ecx.tcx.sess.overflow_checks()
    }

    /// Reject calling functions that require target features the current
    /// target does not have, mirroring what would be UB (or an abort, on wasm)
    /// at runtime.
    fn check_fn_target_features(
        ecx: &MiriInterpCx<'tcx>,
        instance: ty::Instance<'tcx>,
    ) -> InterpResult<'tcx> {
        let attrs = ecx.tcx.codegen_instance_attrs(instance.def);
        if attrs
            .target_features
            .iter()
            .any(|feature| !ecx.tcx.sess.target_features.contains(&feature.name))
        {
            // Build a comma-separated list of the missing, non-implied features.
            let unavailable = attrs
                .target_features
                .iter()
                .filter(|&feature| {
                    feature.kind != TargetFeatureKind::Implied
                        && !ecx.tcx.sess.target_features.contains(&feature.name)
                })
                .fold(String::new(), |mut s, feature| {
                    if !s.is_empty() {
                        s.push_str(", ");
                    }
                    s.push_str(feature.name.as_str());
                    s
                });
            let msg = format!(
                "calling a function that requires unavailable target features: {unavailable}"
            );
            // On wasm this traps (abort) rather than being UB.
            if ecx.tcx.sess.target.is_like_wasm {
                throw_machine_stop!(TerminationInfo::Abort(msg));
            } else {
                throw_ub_format!("{msg}");
            }
        }
        interp_ok(())
    }
1220
    /// Resolve a function call: foreign items go to the shims, GenMC may
    /// intercept some calls, everything else is interpreted from MIR.
    #[inline(always)]
    fn find_mir_or_eval_fn(
        ecx: &mut MiriInterpCx<'tcx>,
        instance: ty::Instance<'tcx>,
        abi: &FnAbi<'tcx, Ty<'tcx>>,
        args: &[FnArg<'tcx>],
        dest: &PlaceTy<'tcx>,
        ret: Option<mir::BasicBlock>,
        unwind: mir::UnwindAction,
    ) -> InterpResult<'tcx, Option<(&'tcx mir::Body<'tcx>, ty::Instance<'tcx>)>> {
        // Foreign items (no MIR body) are emulated by Miri's shims.
        if ecx.tcx.is_foreign_item(instance.def_id()) {
            let _trace = enter_trace_span!("emulate_foreign_item");
            // The shims receive the arguments by value.
            // NOTE(review): presumably copying avoids in-place argument
            // aliasing issues — confirm against `copy_fn_args` docs.
            let args = ecx.copy_fn_args(args);
            let link_name = Symbol::intern(ecx.tcx.symbol_name(instance).name);
            return ecx.emulate_foreign_item(link_name, abi, &args, dest, ret, unwind);
        }

        // In GenMC mode, some functions are handled by GenMC itself.
        if ecx.machine.data_race.as_genmc_ref().is_some()
            && ecx.genmc_intercept_function(instance, args, dest)?
        {
            ecx.return_to_block(ret)?;
            return interp_ok(None);
        }

        // Otherwise, load the MIR and let the interpreter run it.
        let _trace = enter_trace_span!("load_mir");
        interp_ok(Some((ecx.load_mir(instance.def, None)?, instance)))
    }

    /// Call one of the "extra" function values (dynamic symbols).
    #[inline(always)]
    fn call_extra_fn(
        ecx: &mut MiriInterpCx<'tcx>,
        fn_val: DynSym,
        abi: &FnAbi<'tcx, Ty<'tcx>>,
        args: &[FnArg<'tcx>],
        dest: &PlaceTy<'tcx>,
        ret: Option<mir::BasicBlock>,
        unwind: mir::UnwindAction,
    ) -> InterpResult<'tcx> {
        // As above, the shims receive the arguments by value.
        let args = ecx.copy_fn_args(args);
        ecx.emulate_dyn_sym(fn_val, abi, &args, dest, ret, unwind)
    }

    /// Delegate intrinsic calls to Miri's intrinsic machinery.
    #[inline(always)]
    fn call_intrinsic(
        ecx: &mut MiriInterpCx<'tcx>,
        instance: ty::Instance<'tcx>,
        args: &[OpTy<'tcx>],
        dest: &PlaceTy<'tcx>,
        ret: Option<mir::BasicBlock>,
        unwind: mir::UnwindAction,
    ) -> InterpResult<'tcx, Option<ty::Instance<'tcx>>> {
        ecx.call_intrinsic(instance, args, dest, ret, unwind)
    }

    /// Delegate MIR `Assert` terminator failures to Miri's panic machinery.
    #[inline(always)]
    fn assert_panic(
        ecx: &mut MiriInterpCx<'tcx>,
        msg: &mir::AssertMessage<'tcx>,
        unwind: mir::UnwindAction,
    ) -> InterpResult<'tcx> {
        ecx.assert_panic(msg, unwind)
    }

    /// Start a non-unwinding panic with the given message.
    fn panic_nounwind(ecx: &mut InterpCx<'tcx, Self>, msg: &str) -> InterpResult<'tcx> {
        ecx.start_panic_nounwind(msg)
    }
1295
1296 fn unwind_terminate(
1297 ecx: &mut InterpCx<'tcx, Self>,
1298 reason: mir::UnwindTerminateReason,
1299 ) -> InterpResult<'tcx> {
1300 let panic = ecx.tcx.lang_items().get(reason.lang_item()).unwrap();
1302 let panic = ty::Instance::mono(ecx.tcx.tcx, panic);
1303 ecx.call_function(
1304 panic,
1305 ExternAbi::Rust,
1306 &[],
1307 None,
1308 ReturnContinuation::Goto { ret: None, unwind: mir::UnwindAction::Unreachable },
1309 )?;
1310 interp_ok(())
1311 }
1312
1313 #[inline(always)]
1314 fn binary_ptr_op(
1315 ecx: &MiriInterpCx<'tcx>,
1316 bin_op: mir::BinOp,
1317 left: &ImmTy<'tcx>,
1318 right: &ImmTy<'tcx>,
1319 ) -> InterpResult<'tcx, ImmTy<'tcx>> {
1320 ecx.binary_ptr_op(bin_op, left, right)
1321 }
1322
1323 #[inline(always)]
1324 fn generate_nan<F1: Float + FloatConvert<F2>, F2: Float>(
1325 ecx: &InterpCx<'tcx, Self>,
1326 inputs: &[F1],
1327 ) -> F2 {
1328 ecx.generate_nan(inputs)
1329 }
1330
    /// Applies a random floating-point error to `val` to model non-determinism
    /// of float operations. The `4` parameter controls the error magnitude —
    /// see `math::apply_random_float_error_to_imm` for its exact meaning.
    #[inline(always)]
    fn apply_float_nondet(
        ecx: &mut InterpCx<'tcx, Self>,
        val: ImmTy<'tcx>,
    ) -> InterpResult<'tcx, ImmTy<'tcx>> {
        crate::math::apply_random_float_error_to_imm(ecx, val, 4)
    }
1338
    /// Decides which operand to return from float min/max when the two
    /// operands compare equal; delegated to Miri.
    #[inline(always)]
    fn equal_float_min_max<F: Float>(ecx: &MiriInterpCx<'tcx>, a: F, b: F) -> F {
        ecx.equal_float_min_max(a, b)
    }
1343
    /// Whether a multiply-add may be fused (fma-style), decided by a coin flip
    /// on the machine RNG. Note the short-circuit: the RNG is only consulted
    /// when float non-determinism is enabled, keeping RNG streams stable otherwise.
    #[inline(always)]
    fn float_fuse_mul_add(ecx: &InterpCx<'tcx, Self>) -> bool {
        ecx.machine.float_nondet && ecx.machine.rng.borrow_mut().random()
    }
1348
    /// Whether `ub_checks` (library UB assertions) are enabled, taken from the
    /// session settings.
    #[inline(always)]
    fn ub_checks(ecx: &InterpCx<'tcx, Self>) -> InterpResult<'tcx, bool> {
        interp_ok(ecx.tcx.sess.ub_checks())
    }
1353
    /// Whether contract checks are enabled, taken from the session settings.
    #[inline(always)]
    fn contract_checks(ecx: &InterpCx<'tcx, Self>) -> InterpResult<'tcx, bool> {
        interp_ok(ecx.tcx.sess.contract_checks())
    }
1358
    /// Returns a pointer to the per-thread instance of a thread-local static,
    /// creating the backing allocation on first access for this thread.
    #[inline(always)]
    fn thread_local_static_pointer(
        ecx: &mut MiriInterpCx<'tcx>,
        def_id: DefId,
    ) -> InterpResult<'tcx, StrictPointer> {
        ecx.get_or_create_thread_local_alloc(def_id)
    }
1366
1367 fn extern_static_pointer(
1368 ecx: &MiriInterpCx<'tcx>,
1369 def_id: DefId,
1370 ) -> InterpResult<'tcx, StrictPointer> {
1371 let link_name = Symbol::intern(ecx.tcx.symbol_name(Instance::mono(*ecx.tcx, def_id)).name);
1372 if let Some(&ptr) = ecx.machine.extern_statics.get(&link_name) {
1373 let Provenance::Concrete { alloc_id, .. } = ptr.provenance else {
1377 panic!("extern_statics cannot contain wildcards")
1378 };
1379 let info = ecx.get_alloc_info(alloc_id);
1380 let def_ty = ecx.tcx.type_of(def_id).instantiate_identity();
1381 let extern_decl_layout =
1382 ecx.tcx.layout_of(ecx.typing_env().as_query_input(def_ty)).unwrap();
1383 if extern_decl_layout.size != info.size || extern_decl_layout.align.abi != info.align {
1384 throw_unsup_format!(
1385 "extern static `{link_name}` has been declared as `{krate}::{name}` \
1386 with a size of {decl_size} bytes and alignment of {decl_align} bytes, \
1387 but Miri emulates it via an extern static shim \
1388 with a size of {shim_size} bytes and alignment of {shim_align} bytes",
1389 name = ecx.tcx.def_path_str(def_id),
1390 krate = ecx.tcx.crate_name(def_id.krate),
1391 decl_size = extern_decl_layout.size.bytes(),
1392 decl_align = extern_decl_layout.align.bytes(),
1393 shim_size = info.size.bytes(),
1394 shim_align = info.align.bytes(),
1395 )
1396 }
1397 interp_ok(ptr)
1398 } else {
1399 throw_unsup_format!("extern static `{link_name}` is not supported by Miri",)
1400 }
1401 }
1402
    /// Creates the Miri-specific extra state for a freshly created local
    /// (non-global) allocation. Global allocations must go through
    /// `adjust_global_allocation` instead — hence the assert.
    fn init_local_allocation(
        ecx: &MiriInterpCx<'tcx>,
        id: AllocId,
        kind: MemoryKind,
        size: Size,
        align: Align,
    ) -> InterpResult<'tcx, Self::AllocExtra> {
        assert!(kind != MiriMemoryKind::Global.into());
        MiriMachine::init_allocation(ecx, id, kind, size, align)
    }
1413
1414 fn adjust_alloc_root_pointer(
1415 ecx: &MiriInterpCx<'tcx>,
1416 ptr: interpret::Pointer<CtfeProvenance>,
1417 kind: Option<MemoryKind>,
1418 ) -> InterpResult<'tcx, interpret::Pointer<Provenance>> {
1419 let kind = kind.expect("we set our GLOBAL_KIND so this cannot be None");
1420 let alloc_id = ptr.provenance.alloc_id();
1421 if cfg!(debug_assertions) {
1422 match ecx.tcx.try_get_global_alloc(alloc_id) {
1424 Some(GlobalAlloc::Static(def_id)) if ecx.tcx.is_thread_local_static(def_id) => {
1425 panic!("adjust_alloc_root_pointer called on thread-local static")
1426 }
1427 Some(GlobalAlloc::Static(def_id)) if ecx.tcx.is_foreign_item(def_id) => {
1428 panic!("adjust_alloc_root_pointer called on extern static")
1429 }
1430 _ => {}
1431 }
1432 }
1433 let tag = if let Some(borrow_tracker) = &ecx.machine.borrow_tracker {
1435 borrow_tracker.borrow_mut().root_ptr_tag(alloc_id, &ecx.machine)
1436 } else {
1437 BorTag::default()
1439 };
1440 ecx.adjust_alloc_root_pointer(ptr, tag, kind)
1441 }
1442
    /// Handles an integer-to-pointer cast; delegated to Miri's address logic.
    #[inline(always)]
    fn ptr_from_addr_cast(ecx: &MiriInterpCx<'tcx>, addr: u64) -> InterpResult<'tcx, Pointer> {
        ecx.ptr_from_addr_cast(addr)
    }
1448
    /// Marks the given provenance as exposed (pointer-to-integer cast);
    /// delegated to Miri's provenance-exposure logic.
    #[inline(always)]
    fn expose_provenance(
        ecx: &InterpCx<'tcx, Self>,
        provenance: Self::Provenance,
    ) -> InterpResult<'tcx> {
        ecx.expose_provenance(provenance)
    }
1459
1460 fn ptr_get_alloc(
1472 ecx: &MiriInterpCx<'tcx>,
1473 ptr: StrictPointer,
1474 size: i64,
1475 ) -> Option<(AllocId, Size, Self::ProvenanceExtra)> {
1476 let rel = ecx.ptr_get_alloc(ptr, size);
1477
1478 rel.map(|(alloc_id, size)| {
1479 let tag = match ptr.provenance {
1480 Provenance::Concrete { tag, .. } => ProvenanceExtra::Concrete(tag),
1481 Provenance::Wildcard => ProvenanceExtra::Wildcard,
1482 };
1483 (alloc_id, size, tag)
1484 })
1485 }
1486
    /// Converts a tcx-level (global) allocation into Miri's representation:
    /// re-homes the bytes, adjusts every embedded pointer to a Miri pointer,
    /// and attaches freshly initialized `AllocExtra` state.
    fn adjust_global_allocation<'b>(
        ecx: &InterpCx<'tcx, Self>,
        id: AllocId,
        alloc: &'b Allocation,
    ) -> InterpResult<'tcx, Cow<'b, Allocation<Self::Provenance, Self::AllocExtra, Self::Bytes>>>
    {
        let alloc = alloc.adjust_from_tcx(
            &ecx.tcx,
            |bytes, align| ecx.get_global_alloc_bytes(id, bytes, align),
            |ptr| ecx.global_root_pointer(ptr),
        )?;
        let kind = MiriMemoryKind::Global.into();
        let extra = MiriMachine::init_allocation(ecx, id, kind, alloc.size(), alloc.align)?;
        interp_ok(Cow::Owned(alloc.with_extra(extra)))
    }
1510
    /// Hook run before every non-atomic memory read: emits a diagnostic for
    /// tracked allocations, feeds the access to the active data-race backend
    /// (GenMC or vector clocks), then notifies the borrow tracker — in that order.
    #[inline(always)]
    fn before_memory_read(
        _tcx: TyCtxtAt<'tcx>,
        machine: &Self,
        alloc_extra: &AllocExtra<'tcx>,
        ptr: Pointer,
        (alloc_id, prov_extra): (AllocId, Self::ProvenanceExtra),
        range: AllocRange,
    ) -> InterpResult<'tcx> {
        // Report accesses to allocations the user asked to track.
        if machine.track_alloc_accesses && machine.tracked_alloc_ids.contains(&alloc_id) {
            machine.emit_diagnostic(NonHaltingDiagnostic::AccessedAlloc(
                alloc_id,
                range,
                AccessKind::Read,
            ));
        }
        match &machine.data_race {
            GlobalDataRaceHandler::None => {}
            GlobalDataRaceHandler::Genmc(genmc_ctx) =>
                genmc_ctx.memory_load(machine, ptr.addr(), range.size)?,
            GlobalDataRaceHandler::Vclocks(_data_race) => {
                let _trace = enter_trace_span!(data_race::before_memory_read);
                // Global handler being `Vclocks` implies the per-allocation
                // handler is too, so this pattern cannot fail.
                let AllocDataRaceHandler::Vclocks(data_race, _weak_memory) = &alloc_extra.data_race
                else {
                    unreachable!();
                };
                data_race.read_non_atomic(alloc_id, range, NaReadType::Read, None, machine)?;
            }
        }
        if let Some(borrow_tracker) = &alloc_extra.borrow_tracker {
            borrow_tracker.before_memory_read(alloc_id, prov_extra, range, machine)?;
        }
        interp_ok(())
    }
1546
    /// Hook run before every non-atomic memory write: emits a diagnostic for
    /// tracked allocations, feeds the access to the active data-race backend,
    /// updates the weak-memory store buffers if present, then notifies the
    /// borrow tracker — in that order.
    #[inline(always)]
    fn before_memory_write(
        _tcx: TyCtxtAt<'tcx>,
        machine: &mut Self,
        alloc_extra: &mut AllocExtra<'tcx>,
        ptr: Pointer,
        (alloc_id, prov_extra): (AllocId, Self::ProvenanceExtra),
        range: AllocRange,
    ) -> InterpResult<'tcx> {
        // Report accesses to allocations the user asked to track.
        if machine.track_alloc_accesses && machine.tracked_alloc_ids.contains(&alloc_id) {
            machine.emit_diagnostic(NonHaltingDiagnostic::AccessedAlloc(
                alloc_id,
                range,
                AccessKind::Write,
            ));
        }
        match &machine.data_race {
            GlobalDataRaceHandler::None => {}
            GlobalDataRaceHandler::Genmc(genmc_ctx) =>
                genmc_ctx.memory_store(machine, ptr.addr(), range.size)?,
            GlobalDataRaceHandler::Vclocks(_global_state) => {
                let _trace = enter_trace_span!(data_race::before_memory_write);
                // Global handler being `Vclocks` implies the per-allocation
                // handler is too, so this pattern cannot fail.
                let AllocDataRaceHandler::Vclocks(data_race, weak_memory) =
                    &mut alloc_extra.data_race
                else {
                    unreachable!()
                };
                data_race.write_non_atomic(alloc_id, range, NaWriteType::Write, None, machine)?;
                // A non-atomic write invalidates the weak-memory store buffers
                // for this range.
                if let Some(weak_memory) = weak_memory {
                    weak_memory
                        .non_atomic_write(range, machine.data_race.as_vclocks_ref().unwrap());
                }
            }
        }
        if let Some(borrow_tracker) = &mut alloc_extra.borrow_tracker {
            borrow_tracker.before_memory_write(alloc_id, prov_extra, range, machine)?;
        }
        interp_ok(())
    }
1586
    /// Hook run before an allocation is deallocated: emits a diagnostic for
    /// tracked allocations, informs the data-race backend (treating the
    /// deallocation as a write of the whole allocation for vector clocks),
    /// notifies the borrow tracker, records the deallocation span for
    /// diagnostics, and finally releases the allocation's address range.
    #[inline(always)]
    fn before_memory_deallocation(
        _tcx: TyCtxtAt<'tcx>,
        machine: &mut Self,
        alloc_extra: &mut AllocExtra<'tcx>,
        ptr: Pointer,
        (alloc_id, prove_extra): (AllocId, Self::ProvenanceExtra),
        size: Size,
        align: Align,
        kind: MemoryKind,
    ) -> InterpResult<'tcx> {
        if machine.tracked_alloc_ids.contains(&alloc_id) {
            machine.emit_diagnostic(NonHaltingDiagnostic::FreedAlloc(alloc_id));
        }
        match &machine.data_race {
            GlobalDataRaceHandler::None => {}
            GlobalDataRaceHandler::Genmc(genmc_ctx) =>
                genmc_ctx.handle_dealloc(machine, alloc_id, ptr.addr(), kind)?,
            GlobalDataRaceHandler::Vclocks(_global_state) => {
                let _trace = enter_trace_span!(data_race::before_memory_deallocation);
                let data_race = alloc_extra.data_race.as_vclocks_mut().unwrap();
                // Model the deallocation as a write covering the entire allocation.
                data_race.write_non_atomic(
                    alloc_id,
                    alloc_range(Size::ZERO, size),
                    NaWriteType::Deallocate,
                    None,
                    machine,
                )?;
            }
        }
        if let Some(borrow_tracker) = &mut alloc_extra.borrow_tracker {
            borrow_tracker.before_memory_deallocation(alloc_id, prove_extra, size, machine)?;
        }
        // Remember where the allocation was freed, for use-after-free diagnostics.
        if let Some((_, deallocated_at)) = machine.allocation_spans.borrow_mut().get_mut(&alloc_id)
        {
            *deallocated_at = Some(machine.current_user_relevant_span());
        }
        machine.free_alloc_id(alloc_id, size, align, kind);
        interp_ok(())
    }
1627
1628 #[inline(always)]
1629 fn retag_ptr_value(
1630 ecx: &mut InterpCx<'tcx, Self>,
1631 kind: mir::RetagKind,
1632 val: &ImmTy<'tcx>,
1633 ) -> InterpResult<'tcx, ImmTy<'tcx>> {
1634 if ecx.machine.borrow_tracker.is_some() {
1635 ecx.retag_ptr_value(kind, val)
1636 } else {
1637 interp_ok(val.clone())
1638 }
1639 }
1640
    /// Retags all pointers stored inside `place` when borrow tracking is
    /// enabled; otherwise does nothing.
    #[inline(always)]
    fn retag_place_contents(
        ecx: &mut InterpCx<'tcx, Self>,
        kind: mir::RetagKind,
        place: &PlaceTy<'tcx>,
    ) -> InterpResult<'tcx> {
        if ecx.machine.borrow_tracker.is_some() {
            ecx.retag_place_contents(kind, place)?;
        }
        interp_ok(())
    }
1652
    /// Protects an in-place function argument for the duration of the call
    /// (when borrow tracking is enabled) and de-initializes it, so the callee
    /// cannot observe the caller's old value through this place.
    fn protect_in_place_function_argument(
        ecx: &mut InterpCx<'tcx, Self>,
        place: &MPlaceTy<'tcx>,
    ) -> InterpResult<'tcx> {
        let protected_place = if ecx.machine.borrow_tracker.is_some() {
            ecx.protect_place(place)?
        } else {
            // No borrow tracking: nothing to protect, use the place as-is.
            place.clone()
        };
        ecx.write_uninit(&protected_place)?;
        interp_ok(())
    }
1673
    /// Attaches Miri's per-frame extra state to a freshly pushed stack frame:
    /// profiler timing (if measureme profiling is active), a borrow-tracker
    /// frame, user-relevance info, and data-race frame state.
    #[inline(always)]
    fn init_frame(
        ecx: &mut InterpCx<'tcx, Self>,
        frame: Frame<'tcx, Provenance>,
    ) -> InterpResult<'tcx, Frame<'tcx, Provenance, FrameExtra<'tcx>>> {
        // Start a detached profiling interval for this function, interning
        // its name once in the string cache so repeated calls are cheap.
        let timing = if let Some(profiler) = ecx.machine.profiler.as_ref() {
            let fn_name = frame.instance().to_string();
            let entry = ecx.machine.string_cache.entry(fn_name.clone());
            let name = entry.or_insert_with(|| profiler.alloc_string(&*fn_name));

            Some(profiler.start_recording_interval_event_detached(
                *name,
                measureme::EventId::from_label(*name),
                ecx.active_thread().to_u32(),
            ))
        } else {
            None
        };

        let borrow_tracker = ecx.machine.borrow_tracker.as_ref();

        let extra = FrameExtra {
            borrow_tracker: borrow_tracker.map(|bt| bt.borrow_mut().new_frame()),
            catch_unwind: None,
            timing,
            user_relevance: ecx.machine.user_relevance(&frame),
            // Per-frame data-race state is only needed for the vector-clock backend.
            data_race: ecx
                .machine
                .data_race
                .as_vclocks_ref()
                .map(|_| data_race::FrameState::default()),
        };

        interp_ok(frame.with_extra(extra))
    }
1710
    /// Returns the call stack of the currently active thread.
    fn stack<'a>(
        ecx: &'a InterpCx<'tcx, Self>,
    ) -> &'a [Frame<'tcx, Self::Provenance, Self::FrameExtra>] {
        ecx.active_thread_stack()
    }
1716
    /// Returns the call stack of the currently active thread, mutably.
    fn stack_mut<'a>(
        ecx: &'a mut InterpCx<'tcx, Self>,
    ) -> &'a mut Vec<Frame<'tcx, Self::Provenance, Self::FrameExtra>> {
        ecx.active_thread_stack_mut()
    }
1722
    /// Hook run before each MIR terminator: maintains the basic-block
    /// counters, emits periodic progress reports, periodically runs the
    /// provenance GC, gives the scheduler a chance to preempt the current
    /// thread, and advances the monotonic clock.
    fn before_terminator(ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx> {
        // Two counters: total basic blocks (for progress reports) and blocks
        // since the last provenance GC run.
        ecx.machine.basic_block_count += 1u64; ecx.machine.since_gc += 1;
        // Emit a progress report every `report_progress` basic blocks, if requested.
        if let Some(report_progress) = ecx.machine.report_progress {
            if ecx.machine.basic_block_count.is_multiple_of(u64::from(report_progress)) {
                ecx.emit_diagnostic(NonHaltingDiagnostic::ProgressReport {
                    block_count: ecx.machine.basic_block_count,
                });
            }
        }

        // Run the provenance GC on the configured cadence (0 disables it).
        if ecx.machine.gc_interval > 0 && ecx.machine.since_gc >= ecx.machine.gc_interval {
            ecx.machine.since_gc = 0;
            ecx.run_provenance_gc();
        }

        ecx.maybe_preempt_active_thread();

        ecx.machine.monotonic_clock.tick();

        interp_ok(())
    }
1753
    /// Hook run after a frame has been pushed: if the new frame is at least as
    /// user-relevant as the current top user-relevant frame, it becomes the
    /// new top user-relevant frame (used for diagnostics attribution).
    #[inline(always)]
    fn after_stack_push(ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx> {
        if ecx.frame().extra.user_relevance >= ecx.active_thread_ref().current_user_relevance() {
            // The new frame is the last entry of the stack.
            let stack_len = ecx.active_thread_stack().len();
            ecx.active_thread_mut().set_top_user_relevant_frame(stack_len - 1);
        }
        interp_ok(())
    }
1764
    /// Hook run just before a frame is popped: lets the borrow tracker clean
    /// up frame-local state, and recomputes the top user-relevant frame if the
    /// frame being popped currently holds that role.
    fn before_stack_pop(ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx> {
        let frame = ecx.frame();
        if ecx.machine.borrow_tracker.is_some() {
            ecx.on_stack_pop(frame)?;
        }
        if ecx
            .active_thread_ref()
            .top_user_relevant_frame()
            .expect("there should always be a most relevant frame for a non-empty stack")
            == ecx.frame_idx()
        {
            // The `1` presumably tells the search to skip the frame that is
            // about to be popped — TODO confirm against `recompute_top_user_relevant_frame`.
            ecx.active_thread_mut().recompute_top_user_relevant_frame(1);
        }
        info!("Leaving {}", ecx.frame().instance());
        interp_ok(())
    }
1791
    /// Hook run after a frame has been popped: handles `catch_unwind`
    /// continuation logic and closes the frame's profiling interval. The
    /// timing event must be taken out before the frame's extra state is
    /// consumed by `handle_stack_pop_unwind`.
    #[inline(always)]
    fn after_stack_pop(
        ecx: &mut InterpCx<'tcx, Self>,
        frame: Frame<'tcx, Provenance, FrameExtra<'tcx>>,
        unwinding: bool,
    ) -> InterpResult<'tcx, ReturnAction> {
        let res = {
            let mut frame = frame;
            let timing = frame.extra.timing.take();
            let res = ecx.handle_stack_pop_unwind(frame.extra, unwinding);
            if let Some(profiler) = ecx.machine.profiler.as_ref() {
                profiler.finish_recording_interval_event(timing.unwrap());
            }
            res
        };
        if !ecx.active_thread_stack().is_empty() {
            info!("Continuing in {}", ecx.frame().instance());
        }
        res
    }
1815
    /// Hook run after reading a local that still lives on the "virtual" frame
    /// (not yet in memory); forwards the read to the per-frame data-race state
    /// when the vector-clock backend is active.
    fn after_local_read(
        ecx: &InterpCx<'tcx, Self>,
        frame: &Frame<'tcx, Provenance, FrameExtra<'tcx>>,
        local: mir::Local,
    ) -> InterpResult<'tcx> {
        if let Some(data_race) = &frame.extra.data_race {
            let _trace = enter_trace_span!(data_race::after_local_read);
            data_race.local_read(local, &ecx.machine);
        }
        interp_ok(())
    }
1827
    /// Hook run after writing a local that still lives on the frame; forwards
    /// the write (and whether it was a `StorageLive`) to the per-frame
    /// data-race state when the vector-clock backend is active.
    fn after_local_write(
        ecx: &mut InterpCx<'tcx, Self>,
        local: mir::Local,
        storage_live: bool,
    ) -> InterpResult<'tcx> {
        if let Some(data_race) = &ecx.frame().extra.data_race {
            let _trace = enter_trace_span!(data_race::after_local_write);
            data_race.local_write(local, storage_live, &ecx.machine);
        }
        interp_ok(())
    }
1839
    /// Hook run when a local is moved from the frame into a real allocation:
    /// records the local's declaration span for allocation diagnostics and
    /// migrates its per-frame data-race state into the allocation's state.
    fn after_local_moved_to_memory(
        ecx: &mut InterpCx<'tcx, Self>,
        local: mir::Local,
        mplace: &MPlaceTy<'tcx>,
    ) -> InterpResult<'tcx> {
        // The allocation was just created by us, so it must have concrete provenance.
        let Some(Provenance::Concrete { alloc_id, .. }) = mplace.ptr().provenance else {
            panic!("after_local_allocated should only be called on fresh allocations");
        };
        // Attribute the allocation to the local's declaration site.
        let local_decl = &ecx.frame().body().local_decls[local];
        let span = local_decl.source_info.span;
        ecx.machine.allocation_spans.borrow_mut().insert(alloc_id, (span, None));
        // Split the borrow so we can access both the allocation extra state
        // and the machine at the same time.
        let (alloc_info, machine) = ecx.get_alloc_extra_mut(alloc_id)?;
        if let Some(data_race) =
            &machine.threads.active_thread_stack().last().unwrap().extra.data_race
        {
            let _trace = enter_trace_span!(data_race::after_local_moved_to_memory);
            data_race.local_moved_to_memory(
                local,
                alloc_info.data_race.as_vclocks_mut().unwrap(),
                machine,
            );
        }
        interp_ok(())
    }
1866
1867 fn get_global_alloc_salt(
1868 ecx: &InterpCx<'tcx, Self>,
1869 instance: Option<ty::Instance<'tcx>>,
1870 ) -> usize {
1871 let unique = if let Some(instance) = instance {
1872 let is_generic = instance
1885 .args
1886 .into_iter()
1887 .any(|arg| !matches!(arg.kind(), ty::GenericArgKind::Lifetime(_)));
1888 let can_be_inlined = matches!(
1889 ecx.tcx.sess.opts.unstable_opts.cross_crate_inline_threshold,
1890 InliningThreshold::Always
1891 ) || !matches!(
1892 ecx.tcx.codegen_instance_attrs(instance.def).inline,
1893 InlineAttr::Never
1894 );
1895 !is_generic && !can_be_inlined
1896 } else {
1897 false
1899 };
1900 if unique {
1902 CTFE_ALLOC_SALT
1903 } else {
1904 ecx.machine.rng.borrow_mut().random_range(0..ADDRS_PER_ANON_GLOBAL)
1905 }
1906 }
1907
    /// Returns the cached data range for a union type, computing and caching
    /// it on first request for this `ty`.
    fn cached_union_data_range<'e>(
        ecx: &'e mut InterpCx<'tcx, Self>,
        ty: Ty<'tcx>,
        compute_range: impl FnOnce() -> RangeSet,
    ) -> Cow<'e, RangeSet> {
        Cow::Borrowed(ecx.machine.union_data_ranges.entry(ty).or_insert_with(compute_range))
    }
1915
1916 fn get_default_alloc_params(&self) -> <Self::Bytes as AllocBytes>::AllocParams {
1917 use crate::alloc::MiriAllocParams;
1918
1919 match &self.allocator {
1920 Some(alloc) => MiriAllocParams::Isolated(alloc.clone()),
1921 None => MiriAllocParams::Global,
1922 }
1923 }
1924
    /// Enters a tracing span, but only when the "tracing" feature is enabled.
    /// Otherwise the closure is dropped unevaluated and `()` is returned
    /// (which presumably implements `EnteredTraceSpan` as a no-op — see that trait).
    fn enter_trace_span(span: impl FnOnce() -> tracing::Span) -> impl EnteredTraceSpan {
        #[cfg(feature = "tracing")]
        {
            span().entered()
        }
        #[cfg(not(feature = "tracing"))]
        #[expect(clippy::unused_unit)]
        {
            let _ = span; ()
        }
    }
1937}
1938
/// A one-shot callback the machine can store and invoke later. The
/// `VisitProvenance` supertrait lets the callback report any pointers it
/// captured (presumably so the provenance GC keeps them alive — see
/// `VisitProvenance`).
pub trait MachineCallback<'tcx, T>: VisitProvenance {
    /// Consumes and runs the callback with the interpreter context and an
    /// argument of the callback-specific type `T`.
    fn call(
        self: Box<Self>,
        ecx: &mut InterpCx<'tcx, MiriMachine<'tcx>>,
        arg: T,
    ) -> InterpResult<'tcx>;
}
1948
/// Owned, type-erased form of [`MachineCallback`] as stored by the machine.
pub type DynMachineCallback<'tcx, T> = Box<dyn MachineCallback<'tcx, T> + 'tcx>;
1951
/// Builds a `DynMachineCallback` from a list of captured variables and a
/// closure-like body. The `@capture<'tcx, ...>` list declares the lifetimes
/// and the fields moved into the generated callback struct; the `|this, arg|`
/// part becomes the `MachineCallback::call` implementation.
#[macro_export]
macro_rules! callback {
    (@capture<$tcx:lifetime $(,)? $($lft:lifetime),*>
        { $($name:ident: $type:ty),* $(,)? }
        |$this:ident, $arg:ident: $arg_ty:ty| $body:expr $(,)?) => {{
        // Ad-hoc struct holding the captured state; `PhantomData` ties it to
        // the interpreter lifetime even when no field uses it.
        struct Callback<$tcx, $($lft),*> {
            $($name: $type,)*
            _phantom: std::marker::PhantomData<&$tcx ()>,
        }

        // Report the provenance of every captured field.
        impl<$tcx, $($lft),*> VisitProvenance for Callback<$tcx, $($lft),*> {
            fn visit_provenance(&self, _visit: &mut VisitWith<'_>) {
                $(
                    self.$name.visit_provenance(_visit);
                )*
            }
        }

        impl<$tcx, $($lft),*> MachineCallback<$tcx, $arg_ty> for Callback<$tcx, $($lft),*> {
            fn call(
                self: Box<Self>,
                $this: &mut MiriInterpCx<$tcx>,
                $arg: $arg_ty
            ) -> InterpResult<$tcx> {
                // Destructure the captures back into local bindings for `$body`.
                #[allow(unused_variables)]
                let Callback { $($name,)* _phantom } = *self;
                $body
            }
        }

        Box::new(Callback {
            $($name,)*
            _phantom: std::marker::PhantomData
        })
    }};
}