1use std::borrow::Cow;
5use std::cell::{Cell, RefCell};
6use std::collections::BTreeMap;
7use std::path::Path;
8use std::rc::Rc;
9use std::{fmt, process};
10
11use rand::rngs::StdRng;
12use rand::{Rng, SeedableRng};
13use rustc_abi::{Align, ExternAbi, Size};
14use rustc_apfloat::{Float, FloatConvert};
15use rustc_ast::expand::allocator::{self, SpecialAllocatorMethod};
16use rustc_data_structures::either::Either;
17use rustc_data_structures::fx::{FxHashMap, FxHashSet};
18#[allow(unused)]
19use rustc_data_structures::static_assert_size;
20use rustc_hir::attrs::InlineAttr;
21use rustc_log::tracing;
22use rustc_middle::middle::codegen_fn_attrs::TargetFeatureKind;
23use rustc_middle::mir;
24use rustc_middle::query::TyCtxtAt;
25use rustc_middle::ty::layout::{
26 HasTyCtxt, HasTypingEnv, LayoutCx, LayoutError, LayoutOf, TyAndLayout,
27};
28use rustc_middle::ty::{self, Instance, Ty, TyCtxt};
29use rustc_session::config::InliningThreshold;
30use rustc_span::def_id::{CrateNum, DefId};
31use rustc_span::{Span, SpanData, Symbol};
32use rustc_symbol_mangling::mangle_internal_symbol;
33use rustc_target::callconv::FnAbi;
34use rustc_target::spec::{Arch, Os};
35
36use crate::alloc_addresses::EvalContextExt;
37use crate::concurrency::cpu_affinity::{self, CpuAffinityMask};
38use crate::concurrency::data_race::{self, NaReadType, NaWriteType};
39use crate::concurrency::sync::SyncObj;
40use crate::concurrency::{
41 AllocDataRaceHandler, GenmcCtx, GenmcEvalContextExt as _, GlobalDataRaceHandler, weak_memory,
42};
43use crate::*;
44
/// The smallest real-time signal number we emulate. Glibc reserves the first few
/// real-time signals for internal use, so we start a bit above the kernel's 32.
pub const SIGRTMIN: i32 = 34;

/// The largest real-time signal number we emulate.
/// NOTE(review): POSIX only guarantees `SIGRTMAX - SIGRTMIN >= 8`; 42 satisfies
/// that, but confirm this matches what the targeted libcs report.
pub const SIGRTMAX: i32 = 42;

/// How many base addresses are used for each anonymous global, so that pointers
/// to "the same" global can still observably differ (used by `alloc_addresses`).
const ADDRS_PER_ANON_GLOBAL: usize = 32;
/// How pointer alignment is checked on memory accesses.
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum AlignmentCheck {
    /// Do not check alignment at all.
    None,
    /// Check alignment "symbolically", based on the allocation's alignment
    /// (and any promised alignment), not the actual base address.
    Symbolic,
    /// Check alignment on the actual physical integer address.
    Int,
}
69
/// What happens when an op that requires communicating with the host is
/// rejected because isolation is enabled.
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum RejectOpWith {
    /// Isolated op is rejected by aborting the machine.
    Abort,

    /// Isolated op is rejected by returning an error to the program,
    /// without printing any warning.
    NoWarning,

    /// Isolated op is rejected with an error, and a warning including a
    /// backtrace is printed.
    Warning,

    /// Isolated op is rejected with an error, and a warning without a
    /// backtrace is printed.
    WarningWithoutBacktrace,
}
86
/// Whether operations that require communicating with the host are allowed.
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum IsolatedOp {
    /// Reject the op entirely; the payload determines how the rejection is
    /// reported (abort, error + warning, ...).
    Reject(RejectOpWith),

    /// Execute the op, i.e. isolation is disabled.
    Allow,
}
98
/// How backtraces are rendered in diagnostics.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum BacktraceStyle {
    /// Prune the backtrace to only user-relevant frames.
    Short,
    /// Show the full backtrace.
    Full,
    /// Show no backtrace at all.
    Off,
}
108
/// How much validity checking Miri performs on values.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum ValidationMode {
    /// No validity checking.
    No,
    /// Check only the outermost level of a value (not behind references).
    Shallow,
    /// Recursively check values, including through references.
    Deep,
}
118
/// How the non-deterministic rounding error of certain float operations
/// is chosen (used via `apply_float_nondet`).
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum FloatRoundingErrorMode {
    /// Apply a randomly chosen error (the default).
    Random,
    /// Apply no error (results are as precise as the host provides).
    None,
    /// Always apply the maximum representable error — useful for flushing out
    /// code that relies on exact float results.
    Max,
}
128
/// Extra per-stack-frame data Miri keeps on top of the interpreter's frame.
pub struct FrameExtra<'tcx> {
    /// Borrow-tracker (Stacked/Tree Borrows) state for this frame, if enabled.
    pub borrow_tracker: Option<borrow_tracker::FrameState>,

    /// If this frame is the `try_fn` of a `catch_unwind`, the data needed to
    /// resume at the catch handler when unwinding reaches this frame.
    pub catch_unwind: Option<CatchUnwindData<'tcx>>,

    /// If `measureme` profiling is enabled, the timing handle for this frame;
    /// finished when the frame is popped.
    pub timing: Option<measureme::DetachedTiming>,

    /// How "user-relevant" this frame is, used for pruning backtraces.
    /// NOTE(review): the exact meaning of the numeric levels is defined where
    /// this is set — confirm there before relying on specific values.
    pub user_relevance: u8,

    /// Data-race detector state for this frame, if enabled.
    pub data_race: Option<data_race::FrameState>,
}
152
impl<'tcx> std::fmt::Debug for FrameExtra<'tcx> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Exhaustive destructuring ensures this impl is revisited when fields
        // are added. `timing` is deliberately omitted: it does not impl `Debug`.
        let FrameExtra { borrow_tracker, catch_unwind, timing: _, user_relevance, data_race } =
            self;
        f.debug_struct("FrameData")
            .field("borrow_tracker", borrow_tracker)
            .field("catch_unwind", catch_unwind)
            .field("user_relevance", user_relevance)
            .field("data_race", data_race)
            .finish()
    }
}
166
impl VisitProvenance for FrameExtra<'_> {
    fn visit_provenance(&self, visit: &mut VisitWith<'_>) {
        // Exhaustive destructuring so that new fields must be considered here.
        // Only `catch_unwind` and `borrow_tracker` can contain provenance.
        let FrameExtra { catch_unwind, borrow_tracker, timing: _, user_relevance: _, data_race: _ } =
            self;

        catch_unwind.visit_provenance(visit);
        borrow_tracker.visit_provenance(visit);
    }
}
176
/// Extra memory kinds, beyond the interpreter's built-in ones.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum MiriMemoryKind {
    /// `__rust_alloc` memory (the Rust heap).
    Rust,
    /// `miri_alloc` memory (the bare-metal heap shim).
    Miri,
    /// `malloc` memory.
    C,
    /// Windows `HeapAlloc` memory.
    WinHeap,
    /// Windows "local" memory (to be freed with `LocalFree`).
    WinLocal,
    /// Memory for args, env vars, and similar machine-managed state.
    /// This memory may leak.
    Machine,
    /// Memory allocated by the language runtime; leak-checked (see `may_leak`).
    Runtime,
    /// Globals copied from `tcx`. This memory may leak.
    Global,
    /// Memory for extern statics. This memory may leak.
    ExternStatic,
    /// Memory for thread-local statics. This memory may leak.
    Tls,
    /// Memory mapped directly by the program (`mmap`).
    Mmap,
}
208
impl From<MiriMemoryKind> for MemoryKind {
    /// Wraps a Miri-specific kind into the interpreter's `MemoryKind`.
    #[inline(always)]
    fn from(kind: MiriMemoryKind) -> MemoryKind {
        MemoryKind::Machine(kind)
    }
}
215
impl MayLeak for MiriMemoryKind {
    /// Whether leaking this kind of memory at program exit is acceptable
    /// (i.e. not reported by the leak checker).
    #[inline(always)]
    fn may_leak(self) -> bool {
        use self::MiriMemoryKind::*;
        // Exhaustive match: adding a variant forces a decision here.
        match self {
            Rust | Miri | C | WinHeap | WinLocal | Runtime => false,
            Machine | Global | ExternStatic | Tls | Mmap => true,
        }
    }
}
226
impl MiriMemoryKind {
    /// Whether we record the span where an allocation of this kind was created
    /// (used for better diagnostics on program-controlled allocations).
    fn should_save_allocation_span(self) -> bool {
        use self::MiriMemoryKind::*;
        // Exhaustive match: adding a variant forces a decision here.
        match self {
            Rust | Miri | C | WinHeap | WinLocal | Mmap => true,
            Machine | Global | ExternStatic | Tls | Runtime => false,
        }
    }
}
239
240impl fmt::Display for MiriMemoryKind {
241 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
242 use self::MiriMemoryKind::*;
243 match self {
244 Rust => write!(f, "Rust heap"),
245 Miri => write!(f, "Miri bare-metal heap"),
246 C => write!(f, "C heap"),
247 WinHeap => write!(f, "Windows heap"),
248 WinLocal => write!(f, "Windows local memory"),
249 Machine => write!(f, "machine-managed memory"),
250 Runtime => write!(f, "language runtime memory"),
251 Global => write!(f, "global (static or const)"),
252 ExternStatic => write!(f, "extern static"),
253 Tls => write!(f, "thread-local static"),
254 Mmap => write!(f, "mmap"),
255 }
256 }
257}
258
259pub type MemoryKind = interpret::MemoryKind<MiriMemoryKind>;
260
/// Pointer provenance.
// `Eq` and `Hash` are required by the `Machine` trait.
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub enum Provenance {
    /// Concrete provenance: we know exactly which allocation the pointer
    /// belongs to and which borrow tag it carries.
    Concrete {
        alloc_id: AllocId,
        /// Borrow-tracker tag.
        tag: BorTag,
    },
    /// Wildcard provenance, created by int-to-ptr casts: the pointer may be
    /// used to access any exposed allocation.
    Wildcard,
}
293
/// The "extra" provenance information a pointer carries beyond its `AllocId`
/// (which the interpreter tracks separately).
#[derive(Copy, Clone, PartialEq)]
pub enum ProvenanceExtra {
    /// The borrow tag of a pointer with concrete provenance.
    Concrete(BorTag),
    /// A pointer with wildcard provenance.
    Wildcard,
}
300
// These types are performance-sensitive parts of Miri's memory representation;
// the asserts keep accidental size growth from slipping in unnoticed.
#[cfg(target_pointer_width = "64")]
static_assert_size!(StrictPointer, 24);
#[cfg(target_pointer_width = "64")]
static_assert_size!(Scalar, 32);
308
309impl fmt::Debug for Provenance {
310 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
311 match self {
312 Provenance::Concrete { alloc_id, tag } => {
313 if f.alternate() {
315 write!(f, "[{alloc_id:#?}]")?;
316 } else {
317 write!(f, "[{alloc_id:?}]")?;
318 }
319 write!(f, "{tag:?}")?;
321 }
322 Provenance::Wildcard => {
323 write!(f, "[wildcard]")?;
324 }
325 }
326 Ok(())
327 }
328}
329
impl interpret::Provenance for Provenance {
    /// We use absolute addresses as pointer offsets.
    const OFFSET_IS_ADDR: bool = true;

    /// We support wildcard provenance (from int-to-ptr casts).
    const WILDCARD: Option<Self> = Some(Provenance::Wildcard);

    fn get_alloc_id(self) -> Option<AllocId> {
        match self {
            Provenance::Concrete { alloc_id, .. } => Some(alloc_id),
            Provenance::Wildcard => None,
        }
    }

    fn fmt(ptr: &interpret::Pointer<Self>, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let (prov, addr) = ptr.into_raw_parts();
        // Print the absolute address first, then the provenance
        // (forwarding the alternate flag).
        write!(f, "{:#x}", addr.bytes())?;
        if f.alternate() {
            write!(f, "{prov:#?}")?;
        } else {
            write!(f, "{prov:?}")?;
        }
        Ok(())
    }
}
355
356impl fmt::Debug for ProvenanceExtra {
357 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
358 match self {
359 ProvenanceExtra::Concrete(pid) => write!(f, "{pid:?}"),
360 ProvenanceExtra::Wildcard => write!(f, "<wildcard>"),
361 }
362 }
363}
364
365impl ProvenanceExtra {
366 pub fn and_then<T>(self, f: impl FnOnce(BorTag) -> Option<T>) -> Option<T> {
367 match self {
368 ProvenanceExtra::Concrete(pid) => f(pid),
369 ProvenanceExtra::Wildcard => None,
370 }
371 }
372}
373
/// Extra per-allocation data Miri keeps on top of the interpreter's allocation.
#[derive(Debug)]
pub struct AllocExtra<'tcx> {
    /// Borrow-tracker state for this allocation, if enabled.
    pub borrow_tracker: Option<borrow_tracker::AllocState>,
    /// Data-race detection state for this allocation.
    pub data_race: AllocDataRaceHandler,
    /// A backtrace to where this allocation was created, if leak backtraces
    /// were requested (see `collect_leak_backtraces`).
    pub backtrace: Option<Vec<FrameInfo<'tcx>>>,
    /// Synchronization objects (mutexes, condvars, ...) attached to particular
    /// offsets inside this allocation.
    pub sync_objs: BTreeMap<Size, Box<dyn SyncObj>>,
}
394
// A `Clone` impl is required by trait bounds elsewhere, but Miri allocations
// are never supposed to actually be cloned, so this just panics.
impl<'tcx> Clone for AllocExtra<'tcx> {
    fn clone(&self) -> Self {
        panic!("our allocations should never be cloned");
    }
}
402
impl VisitProvenance for AllocExtra<'_> {
    fn visit_provenance(&self, visit: &mut VisitWith<'_>) {
        // Exhaustive destructuring so that new fields must be considered here.
        // Only the borrow tracker and data-race state can contain provenance.
        let AllocExtra { borrow_tracker, data_race, backtrace: _, sync_objs: _ } = self;

        borrow_tracker.visit_provenance(visit);
        data_race.visit_provenance(visit);
    }
}
411
/// Precomputed layouts of primitive types, so we do not have to go through
/// the layout query every time they are needed.
pub struct PrimitiveLayouts<'tcx> {
    pub unit: TyAndLayout<'tcx>,
    pub i8: TyAndLayout<'tcx>,
    pub i16: TyAndLayout<'tcx>,
    pub i32: TyAndLayout<'tcx>,
    pub i64: TyAndLayout<'tcx>,
    pub i128: TyAndLayout<'tcx>,
    pub isize: TyAndLayout<'tcx>,
    pub u8: TyAndLayout<'tcx>,
    pub u16: TyAndLayout<'tcx>,
    pub u32: TyAndLayout<'tcx>,
    pub u64: TyAndLayout<'tcx>,
    pub u128: TyAndLayout<'tcx>,
    pub usize: TyAndLayout<'tcx>,
    pub bool: TyAndLayout<'tcx>,
    pub mut_raw_ptr: TyAndLayout<'tcx>,   // `*mut ()`
    pub const_raw_ptr: TyAndLayout<'tcx>, // `*const ()`
}
431
impl<'tcx> PrimitiveLayouts<'tcx> {
    /// Computes all the cached layouts up front. Fails only if the layout
    /// query fails, which should not happen for primitives.
    fn new(layout_cx: LayoutCx<'tcx>) -> Result<Self, &'tcx LayoutError<'tcx>> {
        let tcx = layout_cx.tcx();
        let mut_raw_ptr = Ty::new_mut_ptr(tcx, tcx.types.unit);
        let const_raw_ptr = Ty::new_imm_ptr(tcx, tcx.types.unit);
        Ok(Self {
            unit: layout_cx.layout_of(tcx.types.unit)?,
            i8: layout_cx.layout_of(tcx.types.i8)?,
            i16: layout_cx.layout_of(tcx.types.i16)?,
            i32: layout_cx.layout_of(tcx.types.i32)?,
            i64: layout_cx.layout_of(tcx.types.i64)?,
            i128: layout_cx.layout_of(tcx.types.i128)?,
            isize: layout_cx.layout_of(tcx.types.isize)?,
            u8: layout_cx.layout_of(tcx.types.u8)?,
            u16: layout_cx.layout_of(tcx.types.u16)?,
            u32: layout_cx.layout_of(tcx.types.u32)?,
            u64: layout_cx.layout_of(tcx.types.u64)?,
            u128: layout_cx.layout_of(tcx.types.u128)?,
            usize: layout_cx.layout_of(tcx.types.usize)?,
            bool: layout_cx.layout_of(tcx.types.bool)?,
            mut_raw_ptr: layout_cx.layout_of(mut_raw_ptr)?,
            const_raw_ptr: layout_cx.layout_of(const_raw_ptr)?,
        })
    }

    /// The cached layout for an unsigned integer of the given size, if that
    /// size corresponds to a supported integer width.
    pub fn uint(&self, size: Size) -> Option<TyAndLayout<'tcx>> {
        match size.bits() {
            8 => Some(self.u8),
            16 => Some(self.u16),
            32 => Some(self.u32),
            64 => Some(self.u64),
            128 => Some(self.u128),
            _ => None,
        }
    }

    /// The cached layout for a signed integer of the given size, if that
    /// size corresponds to a supported integer width.
    pub fn int(&self, size: Size) -> Option<TyAndLayout<'tcx>> {
        match size.bits() {
            8 => Some(self.i8),
            16 => Some(self.i16),
            32 => Some(self.i32),
            64 => Some(self.i64),
            128 => Some(self.i128),
            _ => None,
        }
    }
}
479
/// The machine itself.
///
/// If you add anything here that stores machine values, remember to update
/// `visit_provenance` below!
pub struct MiriMachine<'tcx> {
    /// The compiler's global context.
    pub tcx: TyCtxt<'tcx>,

    /// Global data for borrow tracking (Stacked/Tree Borrows), if enabled.
    pub borrow_tracker: Option<borrow_tracker::GlobalState>,

    /// Global state for data-race detection: disabled, vector-clock based, or
    /// delegated to GenMC.
    pub data_race: GlobalDataRaceHandler,

    /// Pointer/address management (base addresses, int-to-ptr casts, ...).
    pub alloc_addresses: alloc_addresses::GlobalState,

    /// Environment variables.
    pub(crate) env_vars: EnvVars<'tcx>,

    /// Return place of the main function.
    pub(crate) main_fn_ret_place: Option<MPlaceTy<'tcx>>,

    /// Program arguments (`Option` because they are only initialized later).
    pub(crate) argc: Option<Pointer>,
    pub(crate) argv: Option<Pointer>,
    /// On Windows, the command line as a single string.
    pub(crate) cmd_line: Option<Pointer>,

    /// TLS state.
    pub(crate) tls: TlsData<'tcx>,

    /// What should Miri do when an op requires communicating with the host,
    /// such as accessing host env vars, random number generation, and
    /// file system access.
    pub(crate) isolated_op: IsolatedOp,

    /// Whether and how to enforce the validity invariant.
    pub(crate) validation: ValidationMode,

    /// The table of file descriptors.
    pub(crate) fds: shims::FdTable,
    /// The table of directory descriptors.
    pub(crate) dirs: shims::DirTable,

    /// The list of all `EpollEventInterest` registered with `epoll_ctl`.
    pub(crate) epoll_interests: shims::EpollInterestTable,

    /// This machine's monotone clock.
    pub(crate) monotonic_clock: MonotonicClock,

    /// The set of threads.
    pub(crate) threads: ThreadManager<'tcx>,

    /// Stores which thread is eligible to run on which CPUs
    /// (affinity, as set via the relevant shims).
    pub(crate) thread_cpu_affinity: FxHashMap<ThreadId, CpuAffinityMask>,

    /// Precomputed `TyLayout`s for primitive types.
    pub(crate) layouts: PrimitiveLayouts<'tcx>,

    /// Allocations that are considered roots of static memory (may leak).
    pub(crate) static_roots: Vec<AllocId>,

    /// The `measureme` profiler, if profiling was requested.
    profiler: Option<measureme::Profiler>,
    /// Cache of `StringId`s for fast profiler string interning.
    string_cache: FxHashMap<String, measureme::StringId>,

    /// Cache of `Instance` exported under the given `Symbol` name.
    /// `None` means no `Instance` exported under the given name is found.
    pub(crate) exported_symbols_cache: FxHashMap<Symbol, Option<Instance<'tcx>>>,

    /// Whether to prune backtraces to user-relevant frames.
    pub(crate) backtrace_style: BacktraceStyle,

    /// Crates which are considered "user-relevant" for diagnostics
    /// (frame pruning and progress reports).
    pub(crate) user_relevant_crates: Vec<CrateNum>,

    /// Maps extern-static names to their pointer.
    extern_statics: FxHashMap<Symbol, StrictPointer>,

    /// The random number generator used for resolving non-determinism.
    /// Needs to be queried by ptr_to_int, hence put in a RefCell.
    pub(crate) rng: RefCell<StdRng>,

    /// The allocator used for Miri-controlled memory when native code needs
    /// access to it (only set when native libraries are loaded).
    pub(crate) allocator: Option<Rc<RefCell<crate::alloc::isolated_alloc::IsolatedAlloc>>>,

    /// The allocation IDs to report when they are being allocated
    /// (helps for debugging memory leaks and use after free bugs).
    pub(crate) tracked_alloc_ids: FxHashSet<AllocId>,
    /// For the tracked alloc ids, also report read/write accesses.
    track_alloc_accesses: bool,

    /// Controls which alignment checks to do.
    pub(crate) check_alignment: AlignmentCheck,

    /// Failure rate of `compare_exchange_weak`, between 0.0 and 1.0.
    pub(crate) cmpxchg_weak_failure_rate: f64,

    /// The probability of the active thread being preempted at the end of
    /// each basic block.
    pub(crate) preemption_rate: f64,

    /// If `Some`, report the current stack every N basic blocks.
    pub(crate) report_progress: Option<u32>,
    /// The number of basic blocks executed since the last progress report /
    /// preemption opportunity.
    pub(crate) basic_block_count: u64,

    /// The dynamically loaded native libraries (with their paths), when the
    /// `native-lib` feature is enabled on a Unix host.
    #[cfg(all(unix, feature = "native-lib"))]
    pub native_lib: Vec<(libloading::Library, std::path::PathBuf)>,
    /// In builds without native-lib support, this is always empty.
    #[cfg(not(all(unix, feature = "native-lib")))]
    pub native_lib: Vec<!>,

    /// Run a garbage collector for borrow tags every N basic blocks.
    pub(crate) gc_interval: u32,
    /// The number of blocks that passed since the last tag GC pass.
    pub(crate) since_gc: u32,

    /// The number of CPUs to be reported by Miri.
    pub(crate) num_cpus: u32,

    /// The page size, stack address, and stack size; can be set by config,
    /// otherwise derived from the target (see `MiriMachine::new`).
    pub(crate) page_size: u64,
    pub(crate) stack_addr: u64,
    pub(crate) stack_size: u64,

    /// Whether to collect a backtrace when each allocation is created, just
    /// in case it leaks.
    pub(crate) collect_leak_backtraces: bool,

    /// The spans we will use to report where an allocation was created and
    /// deallocated in diagnostics.
    pub(crate) allocation_spans: RefCell<FxHashMap<AllocId, (Span, Option<Span>)>>,

    /// For each allocation, an offset inside that allocation that was deemed
    /// aligned even for symbolic alignment checks (set via
    /// `miri_promise_symbolic_alignment`), together with the promised alignment.
    pub(crate) symbolic_alignment: RefCell<FxHashMap<AllocId, (Size, Align)>>,

    /// A cache of the computed data ranges of union types.
    union_data_ranges: FxHashMap<Ty<'tcx>, RangeSet>,

    /// One-shot flags for sanity checks in the pthread sync shims.
    /// NOTE(review): set once the corresponding check has been performed —
    /// see the pthread shims for the exact semantics.
    pub(crate) pthread_mutex_sanity: Cell<bool>,
    pub(crate) pthread_rwlock_sanity: Cell<bool>,
    pub(crate) pthread_condvar_sanity: Cell<bool>,

    /// Contents of the allocator shim: maps each allocator-method symbol to
    /// either a symbol to redirect to or a special method Miri implements.
    pub(crate) allocator_shim_symbols: FxHashMap<Symbol, Either<Symbol, SpecialAllocatorMethod>>,
    /// Cache for `mangle_internal_symbol`.
    pub(crate) mangle_internal_symbol_cache: FxHashMap<&'static str, String>,

    /// Always prefer the intrinsic fallback body over Miri's own
    /// implementation of the intrinsic.
    pub force_intrinsic_fallback: bool,

    /// Whether floating-point operations can behave non-deterministically.
    pub float_nondet: bool,
    /// How the rounding error of float operations is chosen.
    pub float_rounding_error: FloatRoundingErrorMode,

    /// Whether file reads/writes may return fewer bytes than requested.
    pub short_fd_operations: bool,
}
657
impl<'tcx> MiriMachine<'tcx> {
    /// Creates the machine from the given configuration. Note that the machine
    /// is not fully usable until `late_init` has run as well.
    pub(crate) fn new(
        config: &MiriConfig,
        layout_cx: LayoutCx<'tcx>,
        genmc_ctx: Option<Rc<GenmcCtx>>,
    ) -> Self {
        let tcx = layout_cx.tcx();
        let user_relevant_crates = Self::get_user_relevant_crates(tcx, config);
        let layouts =
            PrimitiveLayouts::new(layout_cx).expect("Couldn't get layouts of primitive types");
        let profiler = config.measureme_out.as_ref().map(|out| {
            let crate_name =
                tcx.sess.opts.crate_name.clone().unwrap_or_else(|| "unknown-crate".to_string());
            let pid = process::id();
            // We adopt the same naming scheme for the profiler output that rustc uses,
            // so that it does not collide with concurrent runs.
            let filename = format!("{crate_name}-{pid:07}");
            let path = Path::new(out).join(filename);
            measureme::Profiler::new(path).expect("Couldn't create `measureme` profiler")
        });
        // A fixed default seed keeps runs reproducible unless the user asks otherwise.
        let rng = StdRng::seed_from_u64(config.seed.unwrap_or(0));
        let borrow_tracker = config.borrow_tracker.map(|bt| bt.instantiate_global_state(config));
        let data_race = if config.genmc_config.is_some() {
            // `genmc_ctx` is always `Some` when GenMC mode is enabled.
            GlobalDataRaceHandler::Genmc(genmc_ctx.unwrap())
        } else if config.data_race_detector {
            GlobalDataRaceHandler::Vclocks(Box::new(data_race::GlobalState::new(config)))
        } else {
            GlobalDataRaceHandler::None
        };
        let page_size = if let Some(page_size) = config.page_size {
            page_size
        } else {
            // Derive a plausible page size from the target.
            let target = &tcx.sess.target;
            match target.arch {
                Arch::Wasm32 | Arch::Wasm64 => 64 * 1024, // wasm page size is 64KiB
                Arch::AArch64 => {
                    if target.is_like_darwin {
                        // No "definitive" source, but see:
                        // https://www.wwdcnotes.com/notes/wwdc20/10214/
                        // https://github.com/ziglang/zig/issues/11308 etc.
                        16 * 1024
                    } else {
                        4 * 1024
                    }
                }
                _ => 4 * 1024,
            }
        };
        // On 16bit targets, 32 pages is more than the entire address space!
        let stack_addr = if tcx.pointer_size().bits() < 32 { page_size } else { page_size * 32 };
        let stack_size =
            if tcx.pointer_size().bits() < 32 { page_size * 4 } else { page_size * 16 };
        assert!(
            usize::try_from(config.num_cpus).unwrap() <= cpu_affinity::MAX_CPUS,
            "miri only supports up to {} CPUs, but {} were configured",
            cpu_affinity::MAX_CPUS,
            config.num_cpus
        );
        let threads = ThreadManager::new(config);
        let mut thread_cpu_affinity = FxHashMap::default();
        // Default affinity mask for the main thread, on the OSes that have
        // affinity shims.
        if matches!(&tcx.sess.target.os, Os::Linux | Os::FreeBsd | Os::Android) {
            thread_cpu_affinity
                .insert(threads.active_thread(), CpuAffinityMask::new(&layout_cx, config.num_cpus));
        }
        let alloc_addresses =
            RefCell::new(alloc_addresses::GlobalStateInner::new(config, stack_addr, tcx));
        MiriMachine {
            tcx,
            borrow_tracker,
            data_race,
            alloc_addresses,
            env_vars: EnvVars::default(),
            main_fn_ret_place: None,
            argc: None,
            argv: None,
            cmd_line: None,
            tls: TlsData::default(),
            isolated_op: config.isolated_op,
            validation: config.validation,
            fds: shims::FdTable::init(config.mute_stdout_stderr),
            epoll_interests: shims::EpollInterestTable::new(),
            dirs: Default::default(),
            layouts,
            threads,
            thread_cpu_affinity,
            static_roots: Vec::new(),
            profiler,
            string_cache: Default::default(),
            exported_symbols_cache: FxHashMap::default(),
            backtrace_style: config.backtrace_style,
            user_relevant_crates,
            extern_statics: FxHashMap::default(),
            rng: RefCell::new(rng),
            // The isolated allocator is only needed when native libraries
            // might access Miri-managed memory.
            allocator: (!config.native_lib.is_empty())
                .then(|| Rc::new(RefCell::new(crate::alloc::isolated_alloc::IsolatedAlloc::new()))),
            tracked_alloc_ids: config.tracked_alloc_ids.clone(),
            track_alloc_accesses: config.track_alloc_accesses,
            check_alignment: config.check_alignment,
            cmpxchg_weak_failure_rate: config.cmpxchg_weak_failure_rate,
            preemption_rate: config.preemption_rate,
            report_progress: config.report_progress,
            basic_block_count: 0,
            monotonic_clock: MonotonicClock::new(config.isolated_op == IsolatedOp::Allow),
            #[cfg(all(unix, feature = "native-lib"))]
            native_lib: config.native_lib.iter().map(|lib_file_path| {
                // Dynamically loading a library into our own process only makes
                // sense when host and target agree.
                let host_triple = rustc_session::config::host_tuple();
                let target_triple = tcx.sess.opts.target_triple.tuple();
                if host_triple != target_triple {
                    panic!(
                        "calling native C functions in linked .so file requires host and target to be the same: \
                        host={host_triple}, target={target_triple}",
                    );
                }
                (
                    // SAFETY-ish caveat: loading a library runs its initializers;
                    // we trust the user-provided library here.
                    unsafe {
                        libloading::Library::new(lib_file_path)
                            .expect("failed to read specified extern shared object file")
                    },
                    lib_file_path.clone(),
                )
            }).collect(),
            #[cfg(not(all(unix, feature = "native-lib")))]
            native_lib: config.native_lib.iter().map(|_| {
                panic!("calling functions from native libraries via FFI is not supported in this build of Miri")
            }).collect(),
            gc_interval: config.gc_interval,
            since_gc: 0,
            num_cpus: config.num_cpus,
            page_size,
            stack_addr,
            stack_size,
            collect_leak_backtraces: config.collect_leak_backtraces,
            allocation_spans: RefCell::new(FxHashMap::default()),
            symbolic_alignment: RefCell::new(FxHashMap::default()),
            union_data_ranges: FxHashMap::default(),
            pthread_mutex_sanity: Cell::new(false),
            pthread_rwlock_sanity: Cell::new(false),
            pthread_condvar_sanity: Cell::new(false),
            allocator_shim_symbols: Self::allocator_shim_symbols(tcx),
            mangle_internal_symbol_cache: Default::default(),
            force_intrinsic_fallback: config.force_intrinsic_fallback,
            float_nondet: config.float_nondet,
            float_rounding_error: config.float_rounding_error,
            short_fd_operations: config.short_fd_operations,
        }
    }

    /// Computes the contents of the allocator shim: a map from each allocator
    /// method's mangled symbol to either the symbol it forwards to or the
    /// special behavior Miri implements for it.
    fn allocator_shim_symbols(
        tcx: TyCtxt<'tcx>,
    ) -> FxHashMap<Symbol, Either<Symbol, SpecialAllocatorMethod>> {
        use rustc_codegen_ssa::base::allocator_shim_contents;

        // If there is no allocator kind, no shim is needed.
        let Some(kind) = tcx.allocator_kind(()) else {
            return Default::default();
        };
        let methods = allocator_shim_contents(tcx, kind);
        let mut symbols = FxHashMap::default();
        for method in methods {
            let from_name = Symbol::intern(&mangle_internal_symbol(
                tcx,
                &allocator::global_fn_name(method.name),
            ));
            let to = match method.special {
                Some(special) => Either::Right(special),
                None =>
                    Either::Left(Symbol::intern(&mangle_internal_symbol(
                        tcx,
                        &allocator::default_fn_name(method.name),
                    ))),
            };
            // Each method name must be unique; a duplicate would be a bug.
            symbols.try_insert(from_name, to).unwrap();
        }
        symbols
    }

    /// Determines which crates count as "user-relevant" for diagnostics:
    /// those named in `MIRI_LOCAL_CRATES` (set by cargo-miri) plus any the
    /// user listed explicitly in the config.
    fn get_user_relevant_crates(tcx: TyCtxt<'_>, config: &MiriConfig) -> Vec<CrateNum> {
        // Convert the comma-separated env var into a list of names.
        let local_crate_names = std::env::var("MIRI_LOCAL_CRATES")
            .map(|crates| crates.split(',').map(|krate| krate.to_string()).collect::<Vec<_>>())
            .unwrap_or_default();
        let mut local_crates = Vec::new();
        for &crate_num in tcx.crates(()) {
            let name = tcx.crate_name(crate_num);
            let name = name.as_str();
            if local_crate_names
                .iter()
                .chain(&config.user_relevant_crates)
                .any(|local_name| local_name == name)
            {
                local_crates.push(crate_num);
            }
        }
        local_crates
    }

    /// Initialization that requires a full `InterpCx` and hence cannot happen
    /// in `new`: env vars, extern statics, and the main thread.
    pub(crate) fn late_init(
        ecx: &mut MiriInterpCx<'tcx>,
        config: &MiriConfig,
        on_main_stack_empty: StackEmptyCallback<'tcx>,
    ) -> InterpResult<'tcx> {
        EnvVars::init(ecx, config)?;
        MiriMachine::init_extern_statics(ecx)?;
        ThreadManager::init(ecx, on_main_stack_empty);
        interp_ok(())
    }

    /// Registers the pointer for the extern static with the given name.
    pub(crate) fn add_extern_static(ecx: &mut MiriInterpCx<'tcx>, name: &str, ptr: Pointer) {
        // This got just allocated, so there definitely is a pointer here.
        let ptr = ptr.into_pointer_or_addr().unwrap();
        ecx.machine.extern_statics.try_insert(Symbol::intern(name), ptr).unwrap();
    }

    /// Whether ops requiring communication with the host are allowed.
    pub(crate) fn communicate(&self) -> bool {
        self.isolated_op == IsolatedOp::Allow
    }

    /// Whether the function where this instance is defined is considered
    /// user-relevant for diagnostics.
    pub(crate) fn is_local(&self, instance: ty::Instance<'tcx>) -> bool {
        let def_id = instance.def_id();
        def_id.is_local() || self.user_relevant_crates.contains(&def_id.krate)
    }

    /// Called when the interpreter is going to shut down abnormally, such as
    /// due to a Ctrl-C. Dropping the profiler makes it flush its output.
    pub(crate) fn handle_abnormal_termination(&mut self) {
        drop(self.profiler.take());
    }

    /// The target's page size as an `Align`.
    pub(crate) fn page_align(&self) -> Align {
        Align::from_bytes(self.page_size).unwrap()
    }

    /// The span where the given allocation was created, if we recorded one.
    pub(crate) fn allocated_span(&self, alloc_id: AllocId) -> Option<SpanData> {
        self.allocation_spans
            .borrow()
            .get(&alloc_id)
            .map(|(allocated, _deallocated)| allocated.data())
    }

    /// The span where the given allocation was deallocated, if we recorded one.
    pub(crate) fn deallocated_span(&self, alloc_id: AllocId) -> Option<SpanData> {
        self.allocation_spans
            .borrow()
            .get(&alloc_id)
            .and_then(|(_allocated, deallocated)| *deallocated)
            .map(Span::data)
    }

    /// Creates the `AllocExtra` for a new allocation, setting up borrow-tracker
    /// and data-race state, an optional leak backtrace, and the allocation span.
    fn init_allocation(
        ecx: &MiriInterpCx<'tcx>,
        id: AllocId,
        kind: MemoryKind,
        size: Size,
        align: Align,
    ) -> InterpResult<'tcx, AllocExtra<'tcx>> {
        if ecx.machine.tracked_alloc_ids.contains(&id) {
            ecx.emit_diagnostic(NonHaltingDiagnostic::TrackingAlloc(id, size, align));
        }

        let borrow_tracker = ecx
            .machine
            .borrow_tracker
            .as_ref()
            .map(|bt| bt.borrow_mut().new_allocation(id, size, kind, &ecx.machine));

        let data_race = match &ecx.machine.data_race {
            GlobalDataRaceHandler::None => AllocDataRaceHandler::None,
            GlobalDataRaceHandler::Vclocks(data_race) =>
                AllocDataRaceHandler::Vclocks(
                    data_race::AllocState::new_allocation(
                        data_race,
                        &ecx.machine.threads,
                        size,
                        kind,
                        ecx.machine.current_user_relevant_span(),
                    ),
                    // Only track weak-memory state if that feature is enabled.
                    data_race.weak_memory.then(weak_memory::AllocState::new_allocation),
                ),
            GlobalDataRaceHandler::Genmc(_genmc_ctx) => {
                // GenMC tracks all this state itself.
                AllocDataRaceHandler::Genmc
            }
        };

        // If an allocation is leaked, we want to report a backtrace to indicate where it was
        // allocated. We don't need to record a backtrace for allocations which are allowed to
        // leak, or if backtrace collection was disabled.
        let backtrace = if kind.may_leak() || !ecx.machine.collect_leak_backtraces {
            None
        } else {
            Some(ecx.generate_stacktrace())
        };

        if matches!(kind, MemoryKind::Machine(kind) if kind.should_save_allocation_span()) {
            ecx.machine
                .allocation_spans
                .borrow_mut()
                .insert(id, (ecx.machine.current_user_relevant_span(), None));
        }

        interp_ok(AllocExtra {
            borrow_tracker,
            data_race,
            backtrace,
            sync_objs: BTreeMap::default(),
        })
    }
}
988
impl VisitProvenance for MiriMachine<'_> {
    fn visit_provenance(&self, visit: &mut VisitWith<'_>) {
        // Exhaustive destructuring: adding a field to `MiriMachine` forces a
        // decision here about whether it can contain provenance.
        #[rustfmt::skip]
        let MiriMachine {
            threads,
            thread_cpu_affinity: _,
            tls,
            env_vars,
            main_fn_ret_place,
            argc,
            argv,
            cmd_line,
            extern_statics,
            dirs,
            borrow_tracker,
            data_race,
            alloc_addresses,
            fds,
            epoll_interests:_,
            tcx: _,
            isolated_op: _,
            validation: _,
            monotonic_clock: _,
            layouts: _,
            static_roots: _,
            profiler: _,
            string_cache: _,
            exported_symbols_cache: _,
            backtrace_style: _,
            user_relevant_crates: _,
            rng: _,
            allocator: _,
            tracked_alloc_ids: _,
            track_alloc_accesses: _,
            check_alignment: _,
            cmpxchg_weak_failure_rate: _,
            preemption_rate: _,
            report_progress: _,
            basic_block_count: _,
            native_lib: _,
            gc_interval: _,
            since_gc: _,
            num_cpus: _,
            page_size: _,
            stack_addr: _,
            stack_size: _,
            collect_leak_backtraces: _,
            allocation_spans: _,
            symbolic_alignment: _,
            union_data_ranges: _,
            pthread_mutex_sanity: _,
            pthread_rwlock_sanity: _,
            pthread_condvar_sanity: _,
            allocator_shim_symbols: _,
            mangle_internal_symbol_cache: _,
            force_intrinsic_fallback: _,
            float_nondet: _,
            float_rounding_error: _,
            short_fd_operations: _,
        } = self;

        threads.visit_provenance(visit);
        tls.visit_provenance(visit);
        env_vars.visit_provenance(visit);
        dirs.visit_provenance(visit);
        fds.visit_provenance(visit);
        data_race.visit_provenance(visit);
        borrow_tracker.visit_provenance(visit);
        alloc_addresses.visit_provenance(visit);
        main_fn_ret_place.visit_provenance(visit);
        argc.visit_provenance(visit);
        argv.visit_provenance(visit);
        cmd_line.visit_provenance(visit);
        for ptr in extern_statics.values() {
            ptr.visit_provenance(visit);
        }
    }
}
1067
1068pub type MiriInterpCx<'tcx> = InterpCx<'tcx, MiriMachine<'tcx>>;
1070
/// A little trait that's useful to be inherited by extension traits, so that
/// they can conveniently access the `MiriInterpCx`.
pub trait MiriInterpCxExt<'tcx> {
    fn eval_context_ref<'a>(&'a self) -> &'a MiriInterpCx<'tcx>;
    fn eval_context_mut<'a>(&'a mut self) -> &'a mut MiriInterpCx<'tcx>;
}
impl<'tcx> MiriInterpCxExt<'tcx> for MiriInterpCx<'tcx> {
    #[inline(always)]
    fn eval_context_ref(&self) -> &MiriInterpCx<'tcx> {
        self
    }
    #[inline(always)]
    fn eval_context_mut(&mut self) -> &mut MiriInterpCx<'tcx> {
        self
    }
}
1086
/// Machine hook implementations: this is how Miri hooks into the rustc
/// const-eval interpreter.
impl<'tcx> Machine<'tcx> for MiriMachine<'tcx> {
    type MemoryKind = MiriMemoryKind;
    /// "Extra" function values are dynamic symbols (e.g. from `dlsym`).
    type ExtraFnVal = DynSym;

    type FrameExtra = FrameExtra<'tcx>;
    type AllocExtra = AllocExtra<'tcx>;

    type Provenance = Provenance;
    type ProvenanceExtra = ProvenanceExtra;
    type Bytes = MiriAllocBytes;

    type MemoryMap =
        MonoHashMap<AllocId, (MemoryKind, Allocation<Provenance, Self::AllocExtra, Self::Bytes>)>;

    /// Copied global memory is "machine" memory of the `Global` kind.
    const GLOBAL_KIND: Option<MiriMemoryKind> = Some(MiriMemoryKind::Global);

    /// Allocation failures are reported as errors, not via panic.
    const PANIC_ON_ALLOC_FAIL: bool = false;
1105
    /// Whether alignment should be checked at all (any mode other than `None`).
    #[inline(always)]
    fn enforce_alignment(ecx: &MiriInterpCx<'tcx>) -> bool {
        ecx.machine.check_alignment != AlignmentCheck::None
    }

    /// Machine hook for symbolic alignment checking: returns `Some` if the
    /// access at `offset` is misaligned under the symbolic rules.
    #[inline(always)]
    fn alignment_check(
        ecx: &MiriInterpCx<'tcx>,
        alloc_id: AllocId,
        alloc_align: Align,
        alloc_kind: AllocKind,
        offset: Size,
        align: Align,
    ) -> Option<Misalignment> {
        if ecx.machine.check_alignment != AlignmentCheck::Symbolic {
            // This hook is only used for symbolic checks; int-based checks are
            // handled by the interpreter itself.
            return None;
        }
        if alloc_kind != AllocKind::LiveData {
            // Can't have any extra symbolic alignment for functions and vtables.
            return None;
        }
        // If this allocation had a promised (symbolic) alignment, use that;
        // otherwise the allocation's natural alignment holds at offset 0.
        let (promised_offset, promised_align) = ecx
            .machine
            .symbolic_alignment
            .borrow()
            .get(&alloc_id)
            .copied()
            .unwrap_or((Size::ZERO, alloc_align));
        if promised_align < align {
            // The entire allocation cannot be sufficiently aligned.
            Some(Misalignment { has: promised_align, required: align })
        } else {
            // The allocation is aligned enough at `promised_offset`; check
            // whether this particular offset keeps the required alignment.
            let distance = offset.bytes().wrapping_sub(promised_offset.bytes());
            if distance.is_multiple_of(align.bytes()) {
                None
            } else {
                // The highest power of two dividing `distance` is the actual
                // alignment we can guarantee at this offset.
                let distance_pow2 = 1 << distance.trailing_zeros();
                Some(Misalignment {
                    has: Align::from_bytes(distance_pow2).unwrap(),
                    required: align,
                })
            }
        }
    }
1156
    /// Whether values should be validity-checked at all.
    #[inline(always)]
    fn enforce_validity(ecx: &MiriInterpCx<'tcx>, _layout: TyAndLayout<'tcx>) -> bool {
        ecx.machine.validation != ValidationMode::No
    }
    /// Whether validity checking should recurse through references.
    #[inline(always)]
    fn enforce_validity_recursively(
        ecx: &InterpCx<'tcx, Self>,
        _layout: TyAndLayout<'tcx>,
    ) -> bool {
        ecx.machine.validation == ValidationMode::Deep
    }

    /// Overflow checks marked "optional" in MIR follow the session's
    /// overflow-check setting.
    #[inline(always)]
    fn ignore_optional_overflow_checks(ecx: &MiriInterpCx<'tcx>) -> bool {
        !ecx.tcx.sess.overflow_checks()
    }
1173
    /// Rejects calls to functions that require target features the current
    /// session does not have enabled (which would be UB or, on wasm, a trap).
    fn check_fn_target_features(
        ecx: &MiriInterpCx<'tcx>,
        instance: ty::Instance<'tcx>,
    ) -> InterpResult<'tcx> {
        let attrs = ecx.tcx.codegen_instance_attrs(instance.def);
        if attrs
            .target_features
            .iter()
            .any(|feature| !ecx.tcx.sess.target_features.contains(&feature.name))
        {
            // Collect the missing features into a comma-separated list for the
            // error message (implied features are not reported separately).
            let unavailable = attrs
                .target_features
                .iter()
                .filter(|&feature| {
                    feature.kind != TargetFeatureKind::Implied
                        && !ecx.tcx.sess.target_features.contains(&feature.name)
                })
                .fold(String::new(), |mut s, feature| {
                    if !s.is_empty() {
                        s.push_str(", ");
                    }
                    s.push_str(feature.name.as_str());
                    s
                });
            let msg = format!(
                "calling a function that requires unavailable target features: {unavailable}"
            );
            // On wasm this is a trap (abort), elsewhere it is UB.
            if ecx.tcx.sess.target.is_like_wasm {
                throw_machine_stop!(TerminationInfo::Abort(msg));
            } else {
                throw_ub_format!("{msg}");
            }
        }
        interp_ok(())
    }
1211
    /// Resolves a function call: either emulate it (foreign items, GenMC
    /// interceptions) or return the MIR body to execute.
    #[inline(always)]
    fn find_mir_or_eval_fn(
        ecx: &mut MiriInterpCx<'tcx>,
        instance: ty::Instance<'tcx>,
        abi: &FnAbi<'tcx, Ty<'tcx>>,
        args: &[FnArg<'tcx>],
        dest: &PlaceTy<'tcx>,
        ret: Option<mir::BasicBlock>,
        unwind: mir::UnwindAction,
    ) -> InterpResult<'tcx, Option<(&'tcx mir::Body<'tcx>, ty::Instance<'tcx>)>> {
        // Foreign items have no MIR body; they are handled by the shims.
        if ecx.tcx.is_foreign_item(instance.def_id()) {
            let _trace = enter_trace_span!("emulate_foreign_item");
            // The shims work on by-value arguments, so copy them out.
            let args = ecx.copy_fn_args(args);
            let link_name = Symbol::intern(ecx.tcx.symbol_name(instance).name);
            return ecx.emulate_foreign_item(link_name, abi, &args, dest, ret, unwind);
        }

        // In GenMC mode, some functions (e.g. atomics) are intercepted
        // entirely instead of being executed.
        if ecx.machine.data_race.as_genmc_ref().is_some()
            && ecx.genmc_intercept_function(instance, args, dest)?
        {
            ecx.return_to_block(ret)?;
            return interp_ok(None);
        }

        // Otherwise, this is a regular function call: load its MIR.
        let _trace = enter_trace_span!("load_mir");
        interp_ok(Some((ecx.load_mir(instance.def, None)?, instance)))
    }
1247
    /// Calls an "extra" function value, i.e. a dynamic symbol obtained via
    /// something like `dlsym`.
    #[inline(always)]
    fn call_extra_fn(
        ecx: &mut MiriInterpCx<'tcx>,
        fn_val: DynSym,
        abi: &FnAbi<'tcx, Ty<'tcx>>,
        args: &[FnArg<'tcx>],
        dest: &PlaceTy<'tcx>,
        ret: Option<mir::BasicBlock>,
        unwind: mir::UnwindAction,
    ) -> InterpResult<'tcx> {
        // The shims work on by-value arguments, so copy them out.
        let args = ecx.copy_fn_args(args);
        ecx.emulate_dyn_sym(fn_val, abi, &args, dest, ret, unwind)
    }
1261
    /// Delegates intrinsic calls to Miri's intrinsic shims.
    #[inline(always)]
    fn call_intrinsic(
        ecx: &mut MiriInterpCx<'tcx>,
        instance: ty::Instance<'tcx>,
        args: &[OpTy<'tcx>],
        dest: &PlaceTy<'tcx>,
        ret: Option<mir::BasicBlock>,
        unwind: mir::UnwindAction,
    ) -> InterpResult<'tcx, Option<ty::Instance<'tcx>>> {
        ecx.call_intrinsic(instance, args, dest, ret, unwind)
    }

    /// Delegates MIR `Assert` terminator failures to Miri's panic machinery.
    #[inline(always)]
    fn assert_panic(
        ecx: &mut MiriInterpCx<'tcx>,
        msg: &mir::AssertMessage<'tcx>,
        unwind: mir::UnwindAction,
    ) -> InterpResult<'tcx> {
        ecx.assert_panic(msg, unwind)
    }

    /// Starts a non-unwinding panic with the given message.
    fn panic_nounwind(ecx: &mut InterpCx<'tcx, Self>, msg: &str) -> InterpResult<'tcx> {
        ecx.start_panic_nounwind(msg)
    }
1286
    /// Handles `UnwindTerminate`: calls the corresponding panic lang item,
    /// which aborts the program.
    fn unwind_terminate(
        ecx: &mut InterpCx<'tcx, Self>,
        reason: mir::UnwindTerminateReason,
    ) -> InterpResult<'tcx> {
        // The lang item for this termination reason must exist.
        let panic = ecx.tcx.lang_items().get(reason.lang_item()).unwrap();
        let panic = ty::Instance::mono(ecx.tcx.tcx, panic);
        ecx.call_function(
            panic,
            ExternAbi::Rust,
            &[],
            None,
            // This function never returns and never unwinds.
            ReturnContinuation::Goto { ret: None, unwind: mir::UnwindAction::Unreachable },
        )?;
        interp_ok(())
    }
1303
1304 #[inline(always)]
1305 fn binary_ptr_op(
1306 ecx: &MiriInterpCx<'tcx>,
1307 bin_op: mir::BinOp,
1308 left: &ImmTy<'tcx>,
1309 right: &ImmTy<'tcx>,
1310 ) -> InterpResult<'tcx, ImmTy<'tcx>> {
1311 ecx.binary_ptr_op(bin_op, left, right)
1312 }
1313
1314 #[inline(always)]
1315 fn generate_nan<F1: Float + FloatConvert<F2>, F2: Float>(
1316 ecx: &InterpCx<'tcx, Self>,
1317 inputs: &[F1],
1318 ) -> F2 {
1319 ecx.generate_nan(inputs)
1320 }
1321
1322 #[inline(always)]
1323 fn apply_float_nondet(
1324 ecx: &mut InterpCx<'tcx, Self>,
1325 val: ImmTy<'tcx>,
1326 ) -> InterpResult<'tcx, ImmTy<'tcx>> {
1327 crate::math::apply_random_float_error_to_imm(ecx, val, 4)
1328 }
1329
1330 #[inline(always)]
1331 fn equal_float_min_max<F: Float>(ecx: &MiriInterpCx<'tcx>, a: F, b: F) -> F {
1332 ecx.equal_float_min_max(a, b)
1333 }
1334
1335 #[inline(always)]
1336 fn float_fuse_mul_add(ecx: &InterpCx<'tcx, Self>) -> bool {
1337 ecx.machine.float_nondet && ecx.machine.rng.borrow_mut().random()
1338 }
1339
    #[inline(always)]
    fn runtime_checks(
        ecx: &InterpCx<'tcx, Self>,
        r: mir::RuntimeChecks,
    ) -> InterpResult<'tcx, bool> {
        // Whether this kind of runtime check is enabled is determined by the
        // session's settings.
        interp_ok(r.value(ecx.tcx.sess))
    }
1347
    #[inline(always)]
    fn thread_local_static_pointer(
        ecx: &mut MiriInterpCx<'tcx>,
        def_id: DefId,
    ) -> InterpResult<'tcx, StrictPointer> {
        // Each thread gets its own copy of a thread-local static; this creates
        // the per-thread allocation on first access.
        ecx.get_or_create_thread_local_alloc(def_id)
    }
1355
    // Resolve an `extern static` to the shim allocation Miri set up for it, if
    // any. Errors if Miri has no shim for this symbol, or if the shim's layout
    // does not match the declaration in the interpreted program.
    fn extern_static_pointer(
        ecx: &MiriInterpCx<'tcx>,
        def_id: DefId,
    ) -> InterpResult<'tcx, StrictPointer> {
        // Extern statics are looked up by link name (the mangled symbol name),
        // not by DefId, since the shims are registered by name.
        let link_name = Symbol::intern(ecx.tcx.symbol_name(Instance::mono(*ecx.tcx, def_id)).name);
        if let Some(&ptr) = ecx.machine.extern_statics.get(&link_name) {
            let Provenance::Concrete { alloc_id, .. } = ptr.provenance else {
                panic!("extern_statics cannot contain wildcards")
            };
            // Check that the declared type's layout agrees with the shim's
            // actual size and alignment, so reads/writes through either view
            // stay in bounds.
            let info = ecx.get_alloc_info(alloc_id);
            let def_ty = ecx.tcx.type_of(def_id).instantiate_identity();
            let extern_decl_layout =
                ecx.tcx.layout_of(ecx.typing_env().as_query_input(def_ty)).unwrap();
            if extern_decl_layout.size != info.size || extern_decl_layout.align.abi != info.align {
                throw_unsup_format!(
                    "extern static `{link_name}` has been declared as `{krate}::{name}` \
                    with a size of {decl_size} bytes and alignment of {decl_align} bytes, \
                    but Miri emulates it via an extern static shim \
                    with a size of {shim_size} bytes and alignment of {shim_align} bytes",
                    name = ecx.tcx.def_path_str(def_id),
                    krate = ecx.tcx.crate_name(def_id.krate),
                    decl_size = extern_decl_layout.size.bytes(),
                    decl_align = extern_decl_layout.align.bytes(),
                    shim_size = info.size.bytes(),
                    shim_align = info.align.bytes(),
                )
            }
            interp_ok(ptr)
        } else {
            throw_unsup_format!("extern static `{link_name}` is not supported by Miri",)
        }
    }
1391
    // Set up the machine-specific extra data for a freshly created
    // (non-global) allocation.
    fn init_local_allocation(
        ecx: &MiriInterpCx<'tcx>,
        id: AllocId,
        kind: MemoryKind,
        size: Size,
        align: Align,
    ) -> InterpResult<'tcx, Self::AllocExtra> {
        // Global allocations go through `adjust_global_allocation` instead.
        assert!(kind != MiriMemoryKind::Global.into());
        MiriMachine::init_allocation(ecx, id, kind, size, align)
    }
1402
    // Turn a CTFE-provenance pointer into a Miri pointer: attach a borrow-tracker
    // root tag and compute a concrete address.
    fn adjust_alloc_root_pointer(
        ecx: &MiriInterpCx<'tcx>,
        ptr: interpret::Pointer<CtfeProvenance>,
        kind: Option<MemoryKind>,
    ) -> InterpResult<'tcx, interpret::Pointer<Provenance>> {
        let kind = kind.expect("we set our GLOBAL_KIND so this cannot be None");
        let alloc_id = ptr.provenance.alloc_id();
        // In debug builds, ensure this is not called for statics that have their
        // own dedicated pointer-resolution hooks.
        if cfg!(debug_assertions) {
            match ecx.tcx.try_get_global_alloc(alloc_id) {
                Some(GlobalAlloc::Static(def_id)) if ecx.tcx.is_thread_local_static(def_id) => {
                    panic!("adjust_alloc_root_pointer called on thread-local static")
                }
                Some(GlobalAlloc::Static(def_id)) if ecx.tcx.is_foreign_item(def_id) => {
                    panic!("adjust_alloc_root_pointer called on extern static")
                }
                _ => {}
            }
        }
        // Ask the borrow tracker (if enabled) for the root tag of this
        // allocation; otherwise use a dummy tag.
        let tag = if let Some(borrow_tracker) = &ecx.machine.borrow_tracker {
            borrow_tracker.borrow_mut().root_ptr_tag(alloc_id, &ecx.machine)
        } else {
            BorTag::default()
        };
        ecx.adjust_alloc_root_pointer(ptr, tag, kind)
    }
1431
    #[inline(always)]
    // Integer-to-pointer cast: delegates to Miri's provenance model.
    fn ptr_from_addr_cast(ecx: &MiriInterpCx<'tcx>, addr: u64) -> InterpResult<'tcx, Pointer> {
        ecx.ptr_from_addr_cast(addr)
    }
1437
    #[inline(always)]
    // Mark this provenance as "exposed" so later int-to-ptr casts may pick it up.
    fn expose_provenance(
        ecx: &InterpCx<'tcx, Self>,
        provenance: Self::Provenance,
    ) -> InterpResult<'tcx> {
        ecx.expose_provenance(provenance)
    }
1448
1449 fn ptr_get_alloc(
1461 ecx: &MiriInterpCx<'tcx>,
1462 ptr: StrictPointer,
1463 size: i64,
1464 ) -> Option<(AllocId, Size, Self::ProvenanceExtra)> {
1465 let rel = ecx.ptr_get_alloc(ptr, size);
1466
1467 rel.map(|(alloc_id, size)| {
1468 let tag = match ptr.provenance {
1469 Provenance::Concrete { tag, .. } => ProvenanceExtra::Concrete(tag),
1470 Provenance::Wildcard => ProvenanceExtra::Wildcard,
1471 };
1472 (alloc_id, size, tag)
1473 })
1474 }
1475
    // Adjust a global allocation coming from the tcx for use in Miri: adjust
    // its bytes and pointers, and attach Miri's per-allocation extra data.
    fn adjust_global_allocation<'b>(
        ecx: &InterpCx<'tcx, Self>,
        id: AllocId,
        alloc: &'b Allocation,
    ) -> InterpResult<'tcx, Cow<'b, Allocation<Self::Provenance, Self::AllocExtra, Self::Bytes>>>
    {
        let alloc = alloc.adjust_from_tcx(
            &ecx.tcx,
            |bytes, align| ecx.get_global_alloc_bytes(id, bytes, align),
            // Every pointer embedded in the allocation gets a root pointer.
            |ptr| ecx.global_root_pointer(ptr),
        )?;
        let kind = MiriMemoryKind::Global.into();
        let extra = MiriMachine::init_allocation(ecx, id, kind, alloc.size(), alloc.align)?;
        interp_ok(Cow::Owned(alloc.with_extra(extra)))
    }
1499
    #[inline(always)]
    // Hook run before every non-atomic memory read: tracking diagnostics,
    // data-race detection, borrow tracking, and sync-object notification.
    fn before_memory_read(
        _tcx: TyCtxtAt<'tcx>,
        machine: &Self,
        alloc_extra: &AllocExtra<'tcx>,
        ptr: Pointer,
        (alloc_id, prov_extra): (AllocId, Self::ProvenanceExtra),
        range: AllocRange,
    ) -> InterpResult<'tcx> {
        // Emit a diagnostic if the user asked to track accesses to this allocation.
        if machine.track_alloc_accesses && machine.tracked_alloc_ids.contains(&alloc_id) {
            machine.emit_diagnostic(NonHaltingDiagnostic::AccessedAlloc(
                alloc_id,
                range,
                borrow_tracker::AccessKind::Read,
            ));
        }
        // Inform the active data-race detection backend about this read.
        match &machine.data_race {
            GlobalDataRaceHandler::None => {}
            GlobalDataRaceHandler::Genmc(genmc_ctx) =>
                genmc_ctx.memory_load(machine, ptr.addr(), range.size)?,
            GlobalDataRaceHandler::Vclocks(_data_race) => {
                let _trace = enter_trace_span!(data_race::before_memory_read);
                // The per-allocation handler must match the global one.
                let AllocDataRaceHandler::Vclocks(data_race, _weak_memory) = &alloc_extra.data_race
                else {
                    unreachable!();
                };
                data_race.read_non_atomic(alloc_id, range, NaReadType::Read, None, machine)?;
            }
        }
        // Let the borrow tracker (Stacked/Tree Borrows) validate this access.
        if let Some(borrow_tracker) = &alloc_extra.borrow_tracker {
            borrow_tracker.before_memory_read(alloc_id, prov_extra, range, machine)?;
        }
        // Notify synchronization objects that overlap the accessed range.
        for (_offset, obj) in alloc_extra.sync_objs.range(range.start..range.end()) {
            obj.on_access(concurrency::sync::AccessKind::Read)?;
        }

        interp_ok(())
    }
1540
    #[inline(always)]
    // Hook run before every non-atomic memory write: tracking diagnostics,
    // data-race detection, weak-memory bookkeeping, borrow tracking, and
    // sync-object invalidation.
    fn before_memory_write(
        _tcx: TyCtxtAt<'tcx>,
        machine: &mut Self,
        alloc_extra: &mut AllocExtra<'tcx>,
        ptr: Pointer,
        (alloc_id, prov_extra): (AllocId, Self::ProvenanceExtra),
        range: AllocRange,
    ) -> InterpResult<'tcx> {
        // Emit a diagnostic if the user asked to track accesses to this allocation.
        if machine.track_alloc_accesses && machine.tracked_alloc_ids.contains(&alloc_id) {
            machine.emit_diagnostic(NonHaltingDiagnostic::AccessedAlloc(
                alloc_id,
                range,
                borrow_tracker::AccessKind::Write,
            ));
        }
        // Inform the active data-race detection backend about this write.
        match &machine.data_race {
            GlobalDataRaceHandler::None => {}
            GlobalDataRaceHandler::Genmc(genmc_ctx) =>
                genmc_ctx.memory_store(machine, ptr.addr(), range.size)?,
            GlobalDataRaceHandler::Vclocks(_global_state) => {
                let _trace = enter_trace_span!(data_race::before_memory_write);
                // The per-allocation handler must match the global one.
                let AllocDataRaceHandler::Vclocks(data_race, weak_memory) =
                    &mut alloc_extra.data_race
                else {
                    unreachable!()
                };
                data_race.write_non_atomic(alloc_id, range, NaWriteType::Write, None, machine)?;
                // A non-atomic write also affects the weak-memory store buffers.
                if let Some(weak_memory) = weak_memory {
                    weak_memory
                        .non_atomic_write(range, machine.data_race.as_vclocks_ref().unwrap());
                }
            }
        }
        // Let the borrow tracker (Stacked/Tree Borrows) validate this access.
        if let Some(borrow_tracker) = &mut alloc_extra.borrow_tracker {
            borrow_tracker.before_memory_write(alloc_id, prov_extra, range, machine)?;
        }
        // Notify overlapping synchronization objects; some of them are removed
        // when their underlying memory is overwritten. Collect first to avoid
        // mutating the map while iterating over it.
        if !alloc_extra.sync_objs.is_empty() {
            let mut to_delete = vec![];
            for (offset, obj) in alloc_extra.sync_objs.range(range.start..range.end()) {
                obj.on_access(concurrency::sync::AccessKind::Write)?;
                if obj.delete_on_write() {
                    to_delete.push(*offset);
                }
            }
            for offset in to_delete {
                alloc_extra.sync_objs.remove(&offset);
            }
        }
        interp_ok(())
    }
1594
    #[inline(always)]
    // Hook run before an allocation is freed: diagnostics, data-race detection,
    // borrow tracking, sync-object notification, span bookkeeping, and releasing
    // the allocation's address.
    fn before_memory_deallocation(
        _tcx: TyCtxtAt<'tcx>,
        machine: &mut Self,
        alloc_extra: &mut AllocExtra<'tcx>,
        ptr: Pointer,
        (alloc_id, prove_extra): (AllocId, Self::ProvenanceExtra),
        size: Size,
        align: Align,
        kind: MemoryKind,
    ) -> InterpResult<'tcx> {
        if machine.tracked_alloc_ids.contains(&alloc_id) {
            machine.emit_diagnostic(NonHaltingDiagnostic::FreedAlloc(alloc_id));
        }
        // Deallocation counts as a write for data-race purposes.
        match &machine.data_race {
            GlobalDataRaceHandler::None => {}
            GlobalDataRaceHandler::Genmc(genmc_ctx) =>
                genmc_ctx.handle_dealloc(machine, alloc_id, ptr.addr(), kind)?,
            GlobalDataRaceHandler::Vclocks(_global_state) => {
                let _trace = enter_trace_span!(data_race::before_memory_deallocation);
                let data_race = alloc_extra.data_race.as_vclocks_mut().unwrap();
                data_race.write_non_atomic(
                    alloc_id,
                    alloc_range(Size::ZERO, size),
                    NaWriteType::Deallocate,
                    None,
                    machine,
                )?;
            }
        }
        if let Some(borrow_tracker) = &mut alloc_extra.borrow_tracker {
            borrow_tracker.before_memory_deallocation(alloc_id, prove_extra, size, machine)?;
        }
        // All sync objects in this allocation are being destroyed.
        for obj in alloc_extra.sync_objs.values() {
            obj.on_access(concurrency::sync::AccessKind::Dealloc)?;
        }

        // Remember where this allocation was freed, for better diagnostics on
        // later use-after-free.
        if let Some((_, deallocated_at)) = machine.allocation_spans.borrow_mut().get_mut(&alloc_id)
        {
            *deallocated_at = Some(machine.current_user_relevant_span());
        }
        machine.free_alloc_id(alloc_id, size, align, kind);
        interp_ok(())
    }
1640
1641 #[inline(always)]
1642 fn retag_ptr_value(
1643 ecx: &mut InterpCx<'tcx, Self>,
1644 kind: mir::RetagKind,
1645 val: &ImmTy<'tcx>,
1646 ) -> InterpResult<'tcx, ImmTy<'tcx>> {
1647 if ecx.machine.borrow_tracker.is_some() {
1648 ecx.retag_ptr_value(kind, val)
1649 } else {
1650 interp_ok(val.clone())
1651 }
1652 }
1653
1654 #[inline(always)]
1655 fn retag_place_contents(
1656 ecx: &mut InterpCx<'tcx, Self>,
1657 kind: mir::RetagKind,
1658 place: &PlaceTy<'tcx>,
1659 ) -> InterpResult<'tcx> {
1660 if ecx.machine.borrow_tracker.is_some() {
1661 ecx.retag_place_contents(kind, place)?;
1662 }
1663 interp_ok(())
1664 }
1665
    // Called for in-place function arguments/return places: protect the place
    // (if borrow tracking is on) and then de-initialize it.
    fn protect_in_place_function_argument(
        ecx: &mut InterpCx<'tcx, Self>,
        place: &MPlaceTy<'tcx>,
    ) -> InterpResult<'tcx> {
        // With a borrow tracker, protecting yields a place with a fresh
        // protected tag; without one, we keep using the place as-is.
        let protected_place = if ecx.machine.borrow_tracker.is_some() {
            ecx.protect_place(place)?
        } else {
            place.clone()
        };
        // De-initialize the place so the callee cannot observe the old value;
        // this also asserts the place is writable.
        ecx.write_uninit(&protected_place)?;
        interp_ok(())
    }
1686
    #[inline(always)]
    // Set up the machine-specific extra state for a freshly pushed stack frame.
    fn init_frame(
        ecx: &mut InterpCx<'tcx, Self>,
        frame: Frame<'tcx, Provenance>,
    ) -> InterpResult<'tcx, Frame<'tcx, Provenance, FrameExtra<'tcx>>> {
        // When profiling is enabled, start a measureme interval event for this
        // function; the string allocation is cached per function name.
        let timing = if let Some(profiler) = ecx.machine.profiler.as_ref() {
            let fn_name = frame.instance().to_string();
            let entry = ecx.machine.string_cache.entry(fn_name.clone());
            let name = entry.or_insert_with(|| profiler.alloc_string(&*fn_name));

            Some(profiler.start_recording_interval_event_detached(
                *name,
                measureme::EventId::from_label(*name),
                ecx.active_thread().to_u32(),
            ))
        } else {
            None
        };

        let borrow_tracker = ecx.machine.borrow_tracker.as_ref();

        let extra = FrameExtra {
            borrow_tracker: borrow_tracker.map(|bt| bt.borrow_mut().new_frame()),
            catch_unwind: None,
            timing,
            user_relevance: ecx.machine.user_relevance(&frame),
            // Per-frame data-race state is only needed for the vector-clock
            // based detector.
            data_race: ecx
                .machine
                .data_race
                .as_vclocks_ref()
                .map(|_| data_race::FrameState::default()),
        };

        interp_ok(frame.with_extra(extra))
    }
1723
    // The interpreter's call stack is the stack of the currently active thread.
    fn stack<'a>(
        ecx: &'a InterpCx<'tcx, Self>,
    ) -> &'a [Frame<'tcx, Self::Provenance, Self::FrameExtra>] {
        ecx.active_thread_stack()
    }
1729
    // Mutable access to the active thread's call stack.
    fn stack_mut<'a>(
        ecx: &'a mut InterpCx<'tcx, Self>,
    ) -> &'a mut Vec<Frame<'tcx, Self::Provenance, Self::FrameExtra>> {
        ecx.active_thread_stack_mut()
    }
1735
1736 fn before_terminator(ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx> {
1737 ecx.machine.basic_block_count += 1u64; ecx.machine.since_gc += 1;
1739 if let Some(report_progress) = ecx.machine.report_progress {
1741 if ecx.machine.basic_block_count.is_multiple_of(u64::from(report_progress)) {
1742 ecx.emit_diagnostic(NonHaltingDiagnostic::ProgressReport {
1743 block_count: ecx.machine.basic_block_count,
1744 });
1745 }
1746 }
1747
1748 if ecx.machine.gc_interval > 0 && ecx.machine.since_gc >= ecx.machine.gc_interval {
1753 ecx.machine.since_gc = 0;
1754 ecx.run_provenance_gc();
1755 }
1756
1757 ecx.maybe_preempt_active_thread();
1760
1761 ecx.machine.monotonic_clock.tick();
1763
1764 interp_ok(())
1765 }
1766
    #[inline(always)]
    fn after_stack_push(ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx> {
        // If the new frame is at least as user-relevant as the current most
        // relevant frame, it becomes the new top user-relevant frame (used for
        // diagnostics spans).
        if ecx.frame().extra.user_relevance >= ecx.active_thread_ref().current_user_relevance() {
            let stack_len = ecx.active_thread_stack().len();
            ecx.active_thread_mut().set_top_user_relevant_frame(stack_len - 1);
        }
        interp_ok(())
    }
1777
    // Hook run just before a frame is popped: borrow-tracker cleanup and
    // maintenance of the "top user-relevant frame" used for diagnostics.
    fn before_stack_pop(ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx> {
        let frame = ecx.frame();
        if ecx.machine.borrow_tracker.is_some() {
            ecx.on_stack_pop(frame)?;
        }
        // If the frame being popped is the top user-relevant frame, recompute
        // it, skipping the frame that is about to go away.
        if ecx
            .active_thread_ref()
            .top_user_relevant_frame()
            .expect("there should always be a most relevant frame for a non-empty stack")
            == ecx.frame_idx()
        {
            ecx.active_thread_mut().recompute_top_user_relevant_frame(1);
        }
        info!("Leaving {}", ecx.frame().instance());
        interp_ok(())
    }
1804
    #[inline(always)]
    // Hook run after a frame is popped: finishes the profiler interval for the
    // frame and handles unwinding (e.g. `catch_unwind` state).
    fn after_stack_pop(
        ecx: &mut InterpCx<'tcx, Self>,
        frame: Frame<'tcx, Provenance, FrameExtra<'tcx>>,
        unwinding: bool,
    ) -> InterpResult<'tcx, ReturnAction> {
        let res = {
            let mut frame = frame;
            // Take the timing handle out before `frame.extra` is consumed below.
            let timing = frame.extra.timing.take();
            let res = ecx.handle_stack_pop_unwind(frame.extra, unwinding);
            if let Some(profiler) = ecx.machine.profiler.as_ref() {
                profiler.finish_recording_interval_event(timing.unwrap());
            }
            res
        };
        if !ecx.active_thread_stack().is_empty() {
            info!("Continuing in {}", ecx.frame().instance());
        }
        res
    }
1828
    // Hook run after a local variable (not yet backed by memory) is read;
    // forwards the event to the per-frame data-race state, if any.
    fn after_local_read(
        ecx: &InterpCx<'tcx, Self>,
        frame: &Frame<'tcx, Provenance, FrameExtra<'tcx>>,
        local: mir::Local,
    ) -> InterpResult<'tcx> {
        if let Some(data_race) = &frame.extra.data_race {
            let _trace = enter_trace_span!(data_race::after_local_read);
            data_race.local_read(local, &ecx.machine);
        }
        interp_ok(())
    }
1840
    // Hook run after a local variable (not yet backed by memory) is written;
    // forwards the event to the per-frame data-race state, if any.
    fn after_local_write(
        ecx: &mut InterpCx<'tcx, Self>,
        local: mir::Local,
        storage_live: bool,
    ) -> InterpResult<'tcx> {
        if let Some(data_race) = &ecx.frame().extra.data_race {
            let _trace = enter_trace_span!(data_race::after_local_write);
            data_race.local_write(local, storage_live, &ecx.machine);
        }
        interp_ok(())
    }
1852
    // Hook run when a local is spilled into a fresh memory allocation: records
    // the allocation's source span and transfers the local's data-race state
    // into the new allocation.
    fn after_local_moved_to_memory(
        ecx: &mut InterpCx<'tcx, Self>,
        local: mir::Local,
        mplace: &MPlaceTy<'tcx>,
    ) -> InterpResult<'tcx> {
        let Some(Provenance::Concrete { alloc_id, .. }) = mplace.ptr().provenance else {
            panic!("after_local_allocated should only be called on fresh allocations");
        };
        // Record the local's declaration span as this allocation's span, for
        // diagnostics.
        let local_decl = &ecx.frame().body().local_decls[local];
        let span = local_decl.source_info.span;
        ecx.machine.allocation_spans.borrow_mut().insert(alloc_id, (span, None));
        // Move the per-local data-race clocks into the allocation's state.
        let (alloc_info, machine) = ecx.get_alloc_extra_mut(alloc_id)?;
        if let Some(data_race) =
            &machine.threads.active_thread_stack().last().unwrap().extra.data_race
        {
            let _trace = enter_trace_span!(data_race::after_local_moved_to_memory);
            data_race.local_moved_to_memory(
                local,
                alloc_info.data_race.as_vclocks_mut().unwrap(),
                machine,
            );
        }
        interp_ok(())
    }
1879
1880 fn get_global_alloc_salt(
1881 ecx: &InterpCx<'tcx, Self>,
1882 instance: Option<ty::Instance<'tcx>>,
1883 ) -> usize {
1884 let unique = if let Some(instance) = instance {
1885 let is_generic = instance
1898 .args
1899 .into_iter()
1900 .any(|arg| !matches!(arg.kind(), ty::GenericArgKind::Lifetime(_)));
1901 let can_be_inlined = matches!(
1902 ecx.tcx.sess.opts.unstable_opts.cross_crate_inline_threshold,
1903 InliningThreshold::Always
1904 ) || !matches!(
1905 ecx.tcx.codegen_instance_attrs(instance.def).inline,
1906 InlineAttr::Never
1907 );
1908 !is_generic && !can_be_inlined
1909 } else {
1910 false
1912 };
1913 if unique {
1915 CTFE_ALLOC_SALT
1916 } else {
1917 ecx.machine.rng.borrow_mut().random_range(0..ADDRS_PER_ANON_GLOBAL)
1918 }
1919 }
1920
    // Cache the computed data ranges of union types, keyed by type, so the
    // (potentially expensive) computation runs at most once per type.
    fn cached_union_data_range<'e>(
        ecx: &'e mut InterpCx<'tcx, Self>,
        ty: Ty<'tcx>,
        compute_range: impl FnOnce() -> RangeSet,
    ) -> Cow<'e, RangeSet> {
        Cow::Borrowed(ecx.machine.union_data_ranges.entry(ty).or_insert_with(compute_range))
    }
1928
1929 fn get_default_alloc_params(&self) -> <Self::Bytes as AllocBytes>::AllocParams {
1930 use crate::alloc::MiriAllocParams;
1931
1932 match &self.allocator {
1933 Some(alloc) => MiriAllocParams::Isolated(alloc.clone()),
1934 None => MiriAllocParams::Global,
1935 }
1936 }
1937
    // Enter a tracing span, if the "tracing" feature is enabled. The span is
    // constructed lazily via the closure so that the disabled build pays no
    // cost: there the closure is simply dropped without being called.
    fn enter_trace_span(span: impl FnOnce() -> tracing::Span) -> impl EnteredTraceSpan {
        #[cfg(feature = "tracing")]
        {
            span().entered()
        }
        #[cfg(not(feature = "tracing"))]
        #[expect(clippy::unused_unit)]
        {
            let _ = span; ()
        }
    }
1950}
1951
/// A callback to be invoked by the machine at a later point, e.g. when a
/// blocked thread gets woken up. It consumes itself when called, and must be
/// able to report any provenance it holds so the provenance GC can see it.
pub trait MachineCallback<'tcx, T>: VisitProvenance {
    /// Invoke the callback with the given argument.
    fn call(
        self: Box<Self>,
        ecx: &mut InterpCx<'tcx, MiriMachine<'tcx>>,
        arg: T,
    ) -> InterpResult<'tcx>;
}
1961
1962pub type DynMachineCallback<'tcx, T> = Box<dyn MachineCallback<'tcx, T> + 'tcx>;
1964
/// Construct a `DynMachineCallback` from a closure-like syntax.
///
/// Because `dyn` trait objects cannot capture an environment the way closures
/// do, the captured state must be listed explicitly; the macro generates a
/// struct holding those captures, implements `VisitProvenance` over them (so
/// the provenance GC sees captured pointers), and implements `MachineCallback`
/// with the given body.
#[macro_export]
macro_rules! callback {
    (@capture<$tcx:lifetime $(,)? $($lft:lifetime),*>
        { $($name:ident: $type:ty),* $(,)? }
        |$this:ident, $arg:ident: $arg_ty:ty| $body:expr $(,)?) => {{
        // The generated capture struct; `_phantom` ties the `$tcx` lifetime in
        // even when no capture mentions it.
        struct Callback<$tcx, $($lft),*> {
            $($name: $type,)*
            _phantom: std::marker::PhantomData<&$tcx ()>,
        }

        // Report the provenance of every captured value.
        impl<$tcx, $($lft),*> VisitProvenance for Callback<$tcx, $($lft),*> {
            fn visit_provenance(&self, _visit: &mut VisitWith<'_>) {
                $(
                    self.$name.visit_provenance(_visit);
                )*
            }
        }

        impl<$tcx, $($lft),*> MachineCallback<$tcx, $arg_ty> for Callback<$tcx, $($lft),*> {
            fn call(
                self: Box<Self>,
                $this: &mut MiriInterpCx<$tcx>,
                $arg: $arg_ty
            ) -> InterpResult<$tcx> {
                // Destructure the captures so `$body` can use them by name.
                #[allow(unused_variables)]
                let Callback { $($name,)* _phantom } = *self;
                $body
            }
        }

        Box::new(Callback {
            $($name,)*
            _phantom: std::marker::PhantomData
        })
    }};
}