use std::num::NonZero;
use std::sync::Mutex;
use std::time::Duration;
use std::{cmp, iter};

use rand::RngCore;
use rustc_abi::{Align, ExternAbi, FieldIdx, FieldsShape, Size, Variants};
use rustc_apfloat::Float;
use rustc_hash::FxHashSet;
use rustc_hir::Safety;
use rustc_hir::def::{DefKind, Namespace};
use rustc_hir::def_id::{CRATE_DEF_INDEX, CrateNum, DefId, LOCAL_CRATE};
use rustc_index::IndexVec;
use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags;
use rustc_middle::middle::dependency_format::Linkage;
use rustc_middle::middle::exported_symbols::ExportedSymbol;
use rustc_middle::ty::layout::{LayoutOf, MaybeResult, TyAndLayout};
use rustc_middle::ty::{self, IntTy, Ty, TyCtxt, UintTy};
use rustc_session::config::CrateType;
use rustc_span::{Span, Symbol};
use rustc_symbol_mangling::mangle_internal_symbol;
use rustc_target::spec::Os;

use crate::*;

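/// Looks up the `DefId` of the item at the given path, e.g. `["std", "thread", "sleep"]`.
/// The first segment is the crate name. If `namespace` is `None`, the path must refer to a
/// module; otherwise the last segment is resolved in that namespace.
/// Returns `None` if no matching item exists.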
fn try_resolve_did(tcx: TyCtxt<'_>, path: &[&str], namespace: Option<Namespace>) -> Option<DefId> {
    let _trace = enter_trace_span!("try_resolve_did", ?path);

    /// Yields all children of the given module item that have the given name.
    fn find_children<'tcx: 'a, 'a>(
        tcx: TyCtxt<'tcx>,
        item: DefId,
        name: &'a str,
    ) -> impl Iterator<Item = DefId> + 'a {
        let name = Symbol::intern(name);
        tcx.module_children(item)
            .iter()
            .filter(move |item| item.ident.name == name)
            .map(move |item| item.res.def_id())
    }

    // Split the path into the crate name, the intermediate modules, and (possibly) a final item.
    let (&crate_name, path) = path.split_first().expect("paths must have at least one segment");
    let (modules, item) = if let Some(namespace) = namespace {
        let (&item_name, modules) =
            path.split_last().expect("non-module paths must have at least 2 segments");
        (modules, Some((item_name, namespace)))
    } else {
        (path, None)
    };

    // There may be more than one crate with this name; try them all.
    'crates: for krate in
        tcx.crates(()).iter().filter(|&&krate| tcx.crate_name(krate).as_str() == crate_name)
    {
        let mut cur_item = DefId { krate: *krate, index: CRATE_DEF_INDEX };
        // Walk down the module path.
        for &segment in modules {
            let Some(next_item) = find_children(tcx, cur_item, segment)
                .find(|item| tcx.def_kind(item) == DefKind::Mod)
            else {
                continue 'crates;
            };
            cur_item = next_item;
        }
        // Look up the final item in this module, if one was requested.
        match item {
            Some((item_name, namespace)) => {
                let Some(item) = find_children(tcx, cur_item, item_name)
                    .find(|item| tcx.def_kind(item).ns() == Some(namespace))
                else {
                    continue 'crates;
                };
                return Some(item);
            }
            None => {
                // The path refers to the module itself.
                return Some(cur_item);
            }
        }
    }
    None
}

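/// Gets an `Instance` for the item at the given path in the given namespace, if it exists.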
pub fn try_resolve_path<'tcx>(
    tcx: TyCtxt<'tcx>,
    path: &[&str],
    namespace: Namespace,
) -> Option<ty::Instance<'tcx>> {
    let did = try_resolve_did(tcx, path, Some(namespace))?;
    Some(ty::Instance::mono(tcx, did))
}

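/// Gets an `Instance` for the item at the given path; panics if the path does not resolve.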
#[track_caller]
pub fn resolve_path<'tcx>(
    tcx: TyCtxt<'tcx>,
    path: &[&str],
    namespace: Namespace,
) -> ty::Instance<'tcx> {
    try_resolve_path(tcx, path, namespace)
        .unwrap_or_else(|| panic!("failed to find required Rust item: {path:?}"))
}

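/// Gets the layout of the type at the given path; panics if resolution or layout computation fails.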
#[track_caller]
pub fn path_ty_layout<'tcx>(cx: &impl LayoutOf<'tcx>, path: &[&str]) -> TyAndLayout<'tcx> {
    let ty = resolve_path(cx.tcx(), path, Namespace::TypeNS).ty(cx.tcx(), cx.typing_env());
    cx.layout_of(ty).to_result().ok().unwrap()
}

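/// Calls `f` for every symbol that ends up exported from the final executable: all local
/// definitions with an extern indicator or `#[used]` attribute, plus the exported non-generic
/// symbols of every crate linked into the executable.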
pub fn iter_exported_symbols<'tcx>(
    tcx: TyCtxt<'tcx>,
    mut f: impl FnMut(CrateNum, DefId) -> InterpResult<'tcx>,
) -> InterpResult<'tcx> {
    // The local crate: everything with an extern indicator or a `#[used]` attribute.
    let crate_items = tcx.hir_crate_items(());
    for def_id in crate_items.definitions() {
        let exported = tcx.def_kind(def_id).has_codegen_attrs() && {
            let codegen_attrs = tcx.codegen_fn_attrs(def_id);
            codegen_attrs.contains_extern_indicator()
                || codegen_attrs.flags.contains(CodegenFnAttrFlags::USED_COMPILER)
                || codegen_attrs.flags.contains(CodegenFnAttrFlags::USED_LINKER)
        };
        if exported {
            f(LOCAL_CRATE, def_id.into())?;
        }
    }

    // All other crates linked into the final executable: their exported non-generic symbols.
    let dependency_formats = tcx.dependency_formats(());
    let dependency_format = dependency_formats
        .get(&CrateType::Executable)
        .expect("interpreting a non-executable crate");
    for cnum in dependency_format
        .iter_enumerated()
        .filter_map(|(num, &linkage)| (linkage != Linkage::NotLinked).then_some(num))
    {
        if cnum == LOCAL_CRATE {
            // The local crate was already handled above.
            continue;
        }

        for &(symbol, _export_info) in tcx.exported_non_generic_symbols(cnum) {
            if let ExportedSymbol::NonGeneric(def_id) = symbol {
                f(cnum, def_id)?;
            }
        }
    }
    interp_ok(())
}

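/// Convert a softfloat (`rustc_apfloat`) value into the corresponding host float type.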
pub trait ToHost {
    type HostFloat;
    fn to_host(self) -> Self::HostFloat;
}

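/// Convert a host float value into the corresponding softfloat (`rustc_apfloat`) type.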
pub trait ToSoft {
    type SoftFloat;
    fn to_soft(self) -> Self::SoftFloat;
}

impl ToHost for rustc_apfloat::ieee::Double {
    type HostFloat = f64;

    fn to_host(self) -> Self::HostFloat {
        f64::from_bits(self.to_bits().try_into().unwrap())
    }
}

impl ToSoft for f64 {
    type SoftFloat = rustc_apfloat::ieee::Double;

    fn to_soft(self) -> Self::SoftFloat {
        Float::from_bits(self.to_bits().into())
    }
}

impl ToHost for rustc_apfloat::ieee::Single {
    type HostFloat = f32;

    fn to_host(self) -> Self::HostFloat {
        f32::from_bits(self.to_bits().try_into().unwrap())
    }
}

impl ToSoft for f32 {
    type SoftFloat = rustc_apfloat::ieee::Single;

    fn to_soft(self) -> Self::SoftFloat {
        Float::from_bits(self.to_bits().into())
    }
}

impl<'tcx> EvalContextExt<'tcx> for crate::MiriInterpCx<'tcx> {}
pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> {
    /// Checks whether the given crate/module path exists.
    fn have_module(&self, path: &[&str]) -> bool {
        try_resolve_did(*self.eval_context_ref().tcx, path, None).is_some()
    }

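    /// Evaluates the global at the given `path`; panics if it does not exist or cannot be
    /// evaluated.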
    fn eval_path(&self, path: &[&str]) -> MPlaceTy<'tcx> {
        let this = self.eval_context_ref();
        let instance = resolve_path(*this.tcx, path, Namespace::ValueNS);
        this.eval_global(instance).unwrap_or_else(|err| {
            panic!("failed to evaluate required Rust item: {path:?}\n{err:?}")
        })
    }

    fn eval_path_scalar(&self, path: &[&str]) -> Scalar {
        let this = self.eval_context_ref();
        let val = this.eval_path(path);
        this.read_scalar(&val)
            .unwrap_or_else(|err| panic!("failed to read required Rust item: {path:?}\n{err:?}"))
    }

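    /// Helper function to get a `libc` constant as a `Scalar`.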
    fn eval_libc(&self, name: &str) -> Scalar {
        if self.eval_context_ref().tcx.sess.target.os == Os::Windows {
            panic!(
                "`libc` crate is not reliably available on Windows targets; Miri should not use it there"
            );
        }
        self.eval_path_scalar(&["libc", name])
    }

    fn eval_libc_i32(&self, name: &str) -> i32 {
        self.eval_libc(name).to_i32().unwrap_or_else(|_err| {
            panic!("required libc item has unexpected type (not `i32`): {name}")
        })
    }

    fn eval_libc_u32(&self, name: &str) -> u32 {
        self.eval_libc(name).to_u32().unwrap_or_else(|_err| {
            panic!("required libc item has unexpected type (not `u32`): {name}")
        })
    }

    fn eval_libc_u64(&self, name: &str) -> u64 {
        self.eval_libc(name).to_u64().unwrap_or_else(|_err| {
            panic!("required libc item has unexpected type (not `u64`): {name}")
        })
    }

    /// Helper function to get a constant from the given `std::sys::pal::windows` module
    /// as a `Scalar`.
    fn eval_windows(&self, module: &str, name: &str) -> Scalar {
        self.eval_context_ref().eval_path_scalar(&["std", "sys", "pal", "windows", module, name])
    }

    fn eval_windows_u32(&self, module: &str, name: &str) -> u32 {
        self.eval_windows(module, name).to_u32().unwrap_or_else(|_err| {
            panic!("required Windows item has unexpected type (not `u32`): {module}::{name}")
        })
    }

    fn eval_windows_u64(&self, module: &str, name: &str) -> u64 {
        self.eval_windows(module, name).to_u64().unwrap_or_else(|_err| {
            panic!("required Windows item has unexpected type (not `u64`): {module}::{name}")
        })
    }

    /// Helper function to get the `TyAndLayout` of a `libc` type.
    fn libc_ty_layout(&self, name: &str) -> TyAndLayout<'tcx> {
        let this = self.eval_context_ref();
        if this.tcx.sess.target.os == Os::Windows {
            panic!(
                "`libc` crate is not reliably available on Windows targets; Miri should not use it there"
            );
        }
        path_ty_layout(this, &["libc", name])
    }

    /// Helper function to get the `TyAndLayout` of a `std::sys::pal::windows::c` type.
    fn windows_ty_layout(&self, name: &str) -> TyAndLayout<'tcx> {
        let this = self.eval_context_ref();
        path_ty_layout(this, &["std", "sys", "pal", "windows", "c", name])
    }

    /// Helper function to get the layout of `[T; size]` for the given `libc` element type.
    fn libc_array_ty_layout(&self, name: &str, size: u64) -> TyAndLayout<'tcx> {
        let this = self.eval_context_ref();
        let elem_ty_layout = this.libc_ty_layout(name);
        let array_ty = Ty::new_array(*this.tcx, elem_ty_layout.ty, size);
        this.layout_of(array_ty).unwrap()
    }

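    /// Projects to the field of `base` with the given name, if such a field exists.
    /// `base` must have a struct or union (non-enum) ADT type.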
    fn try_project_field_named<P: Projectable<'tcx, Provenance>>(
        &self,
        base: &P,
        name: &str,
    ) -> InterpResult<'tcx, Option<P>> {
        let this = self.eval_context_ref();
        let adt = base.layout().ty.ty_adt_def().unwrap();
        for (idx, field) in adt.non_enum_variant().fields.iter_enumerated() {
            if field.name.as_str() == name {
                return interp_ok(Some(this.project_field(base, idx)?));
            }
        }
        interp_ok(None)
    }

    fn project_field_named<P: Projectable<'tcx, Provenance>>(
        &self,
        base: &P,
        name: &str,
    ) -> InterpResult<'tcx, P> {
        interp_ok(
            self.try_project_field_named(base, name)?
                .unwrap_or_else(|| bug!("no field named {} in type {}", name, base.layout().ty)),
        )
    }

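    /// Writes an integer to `dest`, using signed or unsigned encoding depending on the
    /// destination type. The destination must have a scalar layout.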
    fn write_int(
        &mut self,
        i: impl Into<i128>,
        dest: &impl Writeable<'tcx, Provenance>,
    ) -> InterpResult<'tcx> {
        assert!(
            dest.layout().backend_repr.is_scalar(),
            "write_int on non-scalar type {}",
            dest.layout().ty
        );
        let val = if dest.layout().backend_repr.is_signed() {
            Scalar::from_int(i, dest.layout().size)
        } else {
            Scalar::from_uint(u128::try_from(i.into()).unwrap(), dest.layout().size)
        };
        self.eval_context_mut().write_scalar(val, dest)
    }

    /// Write the first N fields of the given place.
    fn write_int_fields(
        &mut self,
        values: &[i128],
        dest: &impl Writeable<'tcx, Provenance>,
    ) -> InterpResult<'tcx> {
        let this = self.eval_context_mut();
        for (idx, &val) in values.iter().enumerate() {
            let idx = FieldIdx::from_usize(idx);
            let field = this.project_field(dest, idx)?;
            this.write_int(val, &field)?;
        }
        interp_ok(())
    }

    /// Write the given fields of the given place, looked up by name.
    fn write_int_fields_named(
        &mut self,
        values: &[(&str, i128)],
        dest: &impl Writeable<'tcx, Provenance>,
    ) -> InterpResult<'tcx> {
        let this = self.eval_context_mut();
        for &(name, val) in values.iter() {
            let field = this.project_field_named(dest, name)?;
            this.write_int(val, &field)?;
        }
        interp_ok(())
    }

    /// Write a 0 of the appropriate size to `dest`.
    fn write_null(&mut self, dest: &impl Writeable<'tcx, Provenance>) -> InterpResult<'tcx> {
        self.write_int(0, dest)
    }

    /// Test if this pointer equals 0.
    fn ptr_is_null(&self, ptr: Pointer) -> InterpResult<'tcx, bool> {
        interp_ok(ptr.addr().bytes() == 0)
    }

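    /// Fills `len` bytes at `ptr` with random data, using the host RNG when communication with
    /// the host is enabled and the machine's own RNG otherwise.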
    fn gen_random(&mut self, ptr: Pointer, len: u64) -> InterpResult<'tcx> {
        // A zero-length request is a NOP; do not touch `ptr` at all in that case
        // (it may be null or dangling).
        if len == 0 {
            return interp_ok(());
        }
        let this = self.eval_context_mut();

        let mut data = vec![0; usize::try_from(len).unwrap()];

        if this.machine.communicate() {
            // Fill the buffer using the host's RNG.
            getrandom::fill(&mut data)
                .map_err(|err| err_unsup_format!("host getrandom failed: {}", err))?;
        } else {
            let rng = this.machine.rng.get_mut();
            rng.fill_bytes(&mut data);
        }

        this.write_bytes_ptr(ptr, data.iter().copied())
    }

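    /// Calls a function: pushes the stack frame for `f` with the given arguments and return
    /// place so that it gets executed next. `cont` determines what happens when the function
    /// returns. If `dest` is `None`, a unit return place is used.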
    fn call_function(
        &mut self,
        f: ty::Instance<'tcx>,
        caller_abi: ExternAbi,
        args: &[ImmTy<'tcx>],
        dest: Option<&MPlaceTy<'tcx>>,
        cont: ReturnContinuation,
    ) -> InterpResult<'tcx> {
        let this = self.eval_context_mut();

        // Get the MIR for the callee.
        let mir = this.load_mir(f.def, None)?;
        let dest = match dest {
            Some(dest) => dest.clone(),
            None => MPlaceTy::fake_alloc_zst(this.machine.layouts.unit),
        };

        // Compute the ABI from the caller's perspective.
        let sig = this.tcx.mk_fn_sig(
            args.iter().map(|a| a.layout.ty),
            dest.layout.ty,
            /*c_variadic*/ false,
            Safety::Safe,
            caller_abi,
        );
        let caller_fn_abi = this.fn_abi_of_fn_ptr(ty::Binder::dummy(sig), ty::List::empty())?;

        this.init_stack_frame(
            f,
            mir,
            caller_fn_abi,
            &args.iter().map(|a| FnArg::Copy(a.clone().into())).collect::<Vec<_>>(),
            false,
            &dest.into(),
            cont,
        )
    }

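    /// Visits the memory covered by `place` (of the given `size`) and calls `action` on each
    /// maximal range, passing `true` for frozen (interior-immutable) ranges and `false` for
    /// ranges covered by an `UnsafeCell` (or anything that has to be treated like one, such as
    /// unions and multi-variant enums).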
    fn visit_freeze_sensitive(
        &self,
        place: &MPlaceTy<'tcx>,
        size: Size,
        mut action: impl FnMut(AllocRange, bool) -> InterpResult<'tcx>,
    ) -> InterpResult<'tcx> {
        let this = self.eval_context_ref();
        trace!("visit_frozen(place={:?}, size={:?})", *place, size);
        debug_assert_eq!(
            size,
            this.size_and_align_of_val(place)?
                .map(|(size, _)| size)
                .unwrap_or_else(|| place.layout.size)
        );
        // Track how far we have gotten: everything below `cur_addr` has already been reported
        // to `action`.
        let start_addr = place.ptr().addr();
        let mut cur_addr = start_addr;
        // Called whenever we find an `UnsafeCell` at the given pointer with the given size:
        // reports the frozen gap before the cell (if any) and the cell itself, then advances
        // `cur_addr` past the cell.
        let mut unsafe_cell_action = |unsafe_cell_ptr: &Pointer, unsafe_cell_size: Size| {
            let unsafe_cell_addr = unsafe_cell_ptr.addr();
            assert!(unsafe_cell_addr >= cur_addr);
            let frozen_size = unsafe_cell_addr - cur_addr;
            // Everything between `cur_addr` and this `UnsafeCell` is frozen.
            if frozen_size != Size::ZERO {
                action(alloc_range(cur_addr - start_addr, frozen_size), /*frozen*/ true)?;
            }
            cur_addr += frozen_size;
            // The `UnsafeCell` itself is not frozen.
            if unsafe_cell_size != Size::ZERO {
                action(alloc_range(cur_addr - start_addr, unsafe_cell_size), /*frozen*/ false)?;
            }
            cur_addr += unsafe_cell_size;
            interp_ok(())
        };
        // Run the visitor.
        {
            let mut visitor = UnsafeCellVisitor {
                ecx: this,
                unsafe_cell_action: |place| {
                    trace!("unsafe_cell_action on {:?}", place.ptr());
                    // Determine the dynamic size; fall back to the static layout size if it is
                    // not known.
                    let unsafe_cell_size = this
                        .size_and_align_of_val(place)?
                        .map(|(size, _)| size)
                        .unwrap_or_else(|| place.layout.size);
                    // Zero-sized cells contribute nothing.
                    if unsafe_cell_size != Size::ZERO {
                        unsafe_cell_action(&place.ptr(), unsafe_cell_size)
                    } else {
                        interp_ok(())
                    }
                },
            };
            visitor.visit_value(place)?;
        }
        // The tail after the last `UnsafeCell` is frozen as well; report it by pretending there
        // is a zero-sized `UnsafeCell` at the very end.
        unsafe_cell_action(&place.ptr().wrapping_offset(size, this), Size::ZERO)?;
        return interp_ok(());

        /// Visits all values reachable from a place, invoking `unsafe_cell_action` for every
        /// `UnsafeCell` (and everything that has to be treated like one) it encounters.
        struct UnsafeCellVisitor<'ecx, 'tcx, F>
        where
            F: FnMut(&MPlaceTy<'tcx>) -> InterpResult<'tcx>,
        {
            ecx: &'ecx MiriInterpCx<'tcx>,
            unsafe_cell_action: F,
        }

        impl<'ecx, 'tcx, F> ValueVisitor<'tcx, MiriMachine<'tcx>> for UnsafeCellVisitor<'ecx, 'tcx, F>
        where
            F: FnMut(&MPlaceTy<'tcx>) -> InterpResult<'tcx>,
        {
            type V = MPlaceTy<'tcx>;

            #[inline(always)]
            fn ecx(&self) -> &MiriInterpCx<'tcx> {
                self.ecx
            }

            fn aggregate_field_iter(
                memory_index: &IndexVec<FieldIdx, u32>,
            ) -> impl Iterator<Item = FieldIdx> + 'static {
                let inverse_memory_index = memory_index.invert_bijective_mapping();
                inverse_memory_index.into_iter()
            }

            // Hook to detect `UnsafeCell`.
            fn visit_value(&mut self, v: &MPlaceTy<'tcx>) -> InterpResult<'tcx> {
                trace!("UnsafeCellVisitor: {:?} {:?}", *v, v.layout.ty);
                let is_unsafe_cell = match v.layout.ty.kind() {
                    ty::Adt(adt, _) =>
                        Some(adt.did()) == self.ecx.tcx.lang_items().unsafe_cell_type(),
                    _ => false,
                };
                if is_unsafe_cell {
                    // An `UnsafeCell`: report it, no need to recurse further.
                    (self.unsafe_cell_action)(v)
                } else if self.ecx.type_is_freeze(v.layout.ty) {
                    // A `Freeze` type cannot contain an `UnsafeCell`; nothing to do.
                    interp_ok(())
                } else if matches!(v.layout.fields, FieldsShape::Union(..)) {
                    // A non-frozen union: we cannot tell which field is active, so
                    // conservatively treat the whole union like an `UnsafeCell`.
                    (self.unsafe_cell_action)(v)
                } else {
                    match v.layout.variants {
                        Variants::Multiple { .. } => {
                            // A multi-variant value: without reading the discriminant from
                            // memory we cannot know the active variant, so conservatively
                            // treat the whole value like an `UnsafeCell`.
                            (self.unsafe_cell_action)(v)
                        }
                        Variants::Single { .. } | Variants::Empty => {
                            // Recurse into the fields to find the `UnsafeCell`s.
                            self.walk_value(v)
                        }
                    }
                }
            }

            fn visit_union(
                &mut self,
                _v: &MPlaceTy<'tcx>,
                _fields: NonZero<usize>,
            ) -> InterpResult<'tcx> {
                bug!("we should have already handled unions in `visit_value`")
            }
        }
    }

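    /// Helper function used inside shims of foreign functions: rejects the operation `name`
    /// (aborting execution) if communication with the host is not enabled.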
    fn check_no_isolation(&self, name: &str) -> InterpResult<'tcx> {
        if !self.eval_context_ref().machine.communicate() {
            self.reject_in_isolation(name, RejectOpWith::Abort)?;
        }
        interp_ok(())
    }

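    /// Helper function used inside shims of foreign functions to reject the operation `op_name`
    /// under isolation. The `reject_with` strategy decides whether to abort execution or to
    /// merely warn (with or without a backtrace).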
    fn reject_in_isolation(&self, op_name: &str, reject_with: RejectOpWith) -> InterpResult<'tcx> {
        let this = self.eval_context_ref();
        match reject_with {
            RejectOpWith::Abort => isolation_abort_error(op_name),
            RejectOpWith::WarningWithoutBacktrace => {
                // Deduplicate the warning: emit it only once per operation name.
                static DEDUP: Mutex<FxHashSet<String>> =
                    Mutex::new(FxHashSet::with_hasher(rustc_hash::FxBuildHasher));
                let mut emitted_warnings = DEDUP.lock().unwrap();
                if !emitted_warnings.contains(op_name) {
                    // First time we are seeing this operation.
                    emitted_warnings.insert(op_name.to_owned());
                    this.tcx
                        .dcx()
                        .warn(format!("{op_name} was made to return an error due to isolation"));
                }

                interp_ok(())
            }
            RejectOpWith::Warning => {
                this.emit_diagnostic(NonHaltingDiagnostic::RejectedIsolatedOp(op_name.to_string()));
                interp_ok(())
            }
            RejectOpWith::NoWarning => interp_ok(()),
        }
    }

    /// Helper function used inside shims of foreign functions to assert that the target OS
    /// is `target_os`.
    fn assert_target_os(&self, target_os: Os, name: &str) {
        assert_eq!(
            self.eval_context_ref().tcx.sess.target.os,
            target_os,
            "`{name}` is only available on the `{target_os}` target OS",
        )
    }

    /// Helper function used inside shims of foreign functions to check that the target OS
    /// is one of `target_oses`.
    fn check_target_os(&self, target_oses: &[Os], name: Symbol) -> InterpResult<'tcx> {
        let target_os = &self.eval_context_ref().tcx.sess.target.os;
        if !target_oses.contains(target_os) {
            throw_unsup_format!("`{name}` is not supported on {target_os}");
        }
        interp_ok(())
    }

    /// Helper function used inside shims of foreign functions to assert that the target OS
    /// is part of the UNIX family.
    fn assert_target_os_is_unix(&self, name: &str) {
        assert!(self.target_os_is_unix(), "`{name}` is only available for unix targets");
    }

    fn target_os_is_unix(&self) -> bool {
        self.eval_context_ref().tcx.sess.target.families.iter().any(|f| f == "unix")
    }

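    /// Reads a pointer from `op` and turns it into a place with the given pointee layout.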
    fn deref_pointer_as(
        &self,
        op: &impl Projectable<'tcx, Provenance>,
        layout: TyAndLayout<'tcx>,
    ) -> InterpResult<'tcx, MPlaceTy<'tcx>> {
        let this = self.eval_context_ref();
        let ptr = this.read_pointer(op)?;
        interp_ok(this.ptr_to_mplace(ptr, layout))
    }

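    /// Dereferences `op` as a pointer to a value of type `base_layout`, then projects to the
    /// value of type `value_layout` at the given byte `offset` inside it.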
    fn deref_pointer_and_offset(
        &self,
        op: &impl Projectable<'tcx, Provenance>,
        offset: u64,
        base_layout: TyAndLayout<'tcx>,
        value_layout: TyAndLayout<'tcx>,
    ) -> InterpResult<'tcx, MPlaceTy<'tcx>> {
        let this = self.eval_context_ref();
        let op_place = this.deref_pointer_as(op, base_layout)?;
        let offset = Size::from_bytes(offset);

        // Ensure the access is within the bounds of the base type.
        assert!(base_layout.size >= offset + value_layout.size);
        let value_place = op_place.offset(offset, value_layout, this)?;
        interp_ok(value_place)
    }

    fn deref_pointer_and_read(
        &self,
        op: &impl Projectable<'tcx, Provenance>,
        offset: u64,
        base_layout: TyAndLayout<'tcx>,
        value_layout: TyAndLayout<'tcx>,
    ) -> InterpResult<'tcx, Scalar> {
        let this = self.eval_context_ref();
        let value_place = this.deref_pointer_and_offset(op, offset, base_layout, value_layout)?;
        this.read_scalar(&value_place)
    }

    fn deref_pointer_and_write(
        &mut self,
        op: &impl Projectable<'tcx, Provenance>,
        offset: u64,
        value: impl Into<Scalar>,
        base_layout: TyAndLayout<'tcx>,
        value_layout: TyAndLayout<'tcx>,
    ) -> InterpResult<'tcx, ()> {
        let this = self.eval_context_mut();
        let value_place = this.deref_pointer_and_offset(op, offset, base_layout, value_layout)?;
        this.write_scalar(value, &value_place)
    }

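    /// Parses a `timespec` struct and returns it as a `std::time::Duration`. Returns `None` if
    /// the value is invalid (negative, or nanoseconds not below 10^9).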
    fn read_timespec(&mut self, tp: &MPlaceTy<'tcx>) -> InterpResult<'tcx, Option<Duration>> {
        let this = self.eval_context_mut();
        let seconds_place = this.project_field(tp, FieldIdx::ZERO)?;
        let seconds_scalar = this.read_scalar(&seconds_place)?;
        let seconds = seconds_scalar.to_target_isize(this)?;
        let nanoseconds_place = this.project_field(tp, FieldIdx::ONE)?;
        let nanoseconds_scalar = this.read_scalar(&nanoseconds_place)?;
        let nanoseconds = nanoseconds_scalar.to_target_isize(this)?;

        interp_ok(
            try {
                // tv_sec must be non-negative.
                let seconds: u64 = seconds.try_into().ok()?;
                // tv_nsec must be non-negative.
                let nanoseconds: u32 = nanoseconds.try_into().ok()?;
                if nanoseconds >= 1_000_000_000 {
                    // tv_nsec must not be greater than 999,999,999.
                    None?
                }
                Duration::new(seconds, nanoseconds)
            },
        )
    }

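    /// Reads the bytes of a byte slice (a pointer/length scalar pair), stripping provenance.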
    fn read_byte_slice<'a>(&'a self, slice: &ImmTy<'tcx>) -> InterpResult<'tcx, &'a [u8]>
    where
        'tcx: 'a,
    {
        let this = self.eval_context_ref();
        let (ptr, len) = slice.to_scalar_pair();
        let ptr = ptr.to_pointer(this)?;
        let len = len.to_target_usize(this)?;
        let bytes = this.read_bytes_ptr_strip_provenance(ptr, Size::from_bytes(len))?;
        interp_ok(bytes)
    }

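    /// Reads a null-terminated sequence of bytes starting at `ptr`, not including the
    /// terminating null byte.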
    fn read_c_str<'a>(&'a self, ptr: Pointer) -> InterpResult<'tcx, &'a [u8]>
    where
        'tcx: 'a,
    {
        let this = self.eval_context_ref();
        let size1 = Size::from_bytes(1);

        // Step 1: determine the length.
        let mut len = Size::ZERO;
        loop {
            let alloc = this.get_ptr_alloc(ptr.wrapping_offset(len, this), size1)?.unwrap(); // not a ZST, so we will get a result
            let byte = alloc.read_integer(alloc_range(Size::ZERO, size1))?.to_u8()?;
            if byte == 0 {
                break;
            } else {
                len += size1;
            }
        }

        // Step 2: get the bytes.
        this.read_bytes_ptr_strip_provenance(ptr, len)
    }

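    /// Writes a null-terminated byte string to `ptr`, provided the buffer is at least `size`
    /// bytes long. Returns whether the write succeeded and the length of the string including
    /// the null terminator; nothing is written if it does not fit.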
    fn write_c_str(
        &mut self,
        c_str: &[u8],
        ptr: Pointer,
        size: u64,
    ) -> InterpResult<'tcx, (bool, u64)> {
        // If `size` is smaller than `c_str.len() + 1`, writing the string plus the required
        // null terminator would be out of bounds, so bail out early.
        let string_length = u64::try_from(c_str.len()).unwrap();
        let string_length = string_length.strict_add(1);
        if size < string_length {
            return interp_ok((false, string_length));
        }
        self.eval_context_mut()
            .write_bytes_ptr(ptr, c_str.iter().copied().chain(iter::once(0u8)))?;
        interp_ok((true, string_length))
    }

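    /// Reads a sequence of characters of width `size` until the first null terminator,
    /// requiring `ptr` to be aligned to `align`. The null terminator is not included in the
    /// result.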
    fn read_c_str_with_char_size<T>(
        &self,
        mut ptr: Pointer,
        size: Size,
        align: Align,
    ) -> InterpResult<'tcx, Vec<T>>
    where
        T: TryFrom<u128>,
        <T as TryFrom<u128>>::Error: std::fmt::Debug,
    {
        assert_ne!(size, Size::ZERO);

        let this = self.eval_context_ref();

        this.check_ptr_align(ptr, align)?;

        let mut wchars = Vec::new();
        loop {
            let alloc = this.get_ptr_alloc(ptr, size)?.unwrap(); // not a ZST, so we will get a result
            let wchar_int = alloc.read_integer(alloc_range(Size::ZERO, size))?.to_bits(size)?;
            if wchar_int == 0 {
                break;
            } else {
                wchars.push(wchar_int.try_into().unwrap());
                ptr = ptr.wrapping_offset(size, this);
            }
        }

        interp_ok(wchars)
    }

    /// Reads a sequence of u16 until the first null terminator.
    fn read_wide_str(&self, ptr: Pointer) -> InterpResult<'tcx, Vec<u16>> {
        self.read_c_str_with_char_size(ptr, Size::from_bytes(2), Align::from_bytes(2).unwrap())
    }

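    /// Writes a null-terminated UTF-16 string to `ptr`, provided the buffer can hold at least
    /// `size` UTF-16 code units. Returns whether the write succeeded and the length of the
    /// string in code units, including the null terminator; nothing is written if it does not
    /// fit.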
    fn write_wide_str(
        &mut self,
        wide_str: &[u16],
        ptr: Pointer,
        size: u64,
    ) -> InterpResult<'tcx, (bool, u64)> {
        // If `size` is smaller than `wide_str.len() + 1`, writing the string plus the required
        // null terminator would be out of bounds, so bail out early.
        let string_length = u64::try_from(wide_str.len()).unwrap();
        let string_length = string_length.strict_add(1);
        if size < string_length {
            return interp_ok((false, string_length));
        }

        // Store the UTF-16 string, code unit by code unit.
        let size2 = Size::from_bytes(2);
        let this = self.eval_context_mut();
        this.check_ptr_align(ptr, Align::from_bytes(2).unwrap())?;
        let mut alloc = this.get_ptr_alloc_mut(ptr, size2 * string_length)?.unwrap(); // not a ZST, so we will get a result
        for (offset, wchar) in wide_str.iter().copied().chain(iter::once(0x0000)).enumerate() {
            let offset = u64::try_from(offset).unwrap();
            alloc.write_scalar(alloc_range(size2 * offset, size2), Scalar::from_u16(wchar))?;
        }
        interp_ok((true, string_length))
    }

    /// Reads a sequence of `wchar_t` until the first null terminator.
    /// Always returns a `Vec<u32>` no matter the size of `wchar_t`.
    fn read_wchar_t_str(&self, ptr: Pointer) -> InterpResult<'tcx, Vec<u32>> {
        let this = self.eval_context_ref();
        let wchar_t = if this.tcx.sess.target.os == Os::Windows {
            // There is no libc on Windows, so hard-code the type: `wchar_t` is 16 bits there.
            this.machine.layouts.u16
        } else {
            this.libc_ty_layout("wchar_t")
        };
        self.read_c_str_with_char_size(ptr, wchar_t.size, wchar_t.align.abi)
    }

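    /// Returns whether the current frame (taking inlining into account) belongs to the `std`
    /// crate (or its test crate `std_miri_test`).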
    fn frame_in_std(&self) -> bool {
        let this = self.eval_context_ref();
        let frame = this.frame();
        // Try to determine the instance this code was inlined from, if any.
        let instance: Option<_> = try {
            let scope = frame.current_source_info()?.scope;
            let inlined_parent = frame.body().source_scopes[scope].inlined_parent_scope?;
            let source = &frame.body().source_scopes[inlined_parent];
            source.inlined.expect("inlined_parent_scope points to scope without inline info").0
        };
        // Fall back to the instance of the frame itself.
        let instance = instance.unwrap_or(frame.instance());
        // Check which crate that instance comes from.
        let frame_crate = this.tcx.def_path(instance.def_id()).krate;
        let crate_name = this.tcx.crate_name(frame_crate);
        let crate_name = crate_name.as_str();
        crate_name == "std" || crate_name == "std_miri_test"
    }

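    /// Marks the allocation behind the given place as immutable. The place must point to an
    /// actual allocation (not an integer address).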
    fn mark_immutable(&mut self, mplace: &MPlaceTy<'tcx>) {
        let this = self.eval_context_mut();
        let provenance = mplace.ptr().into_pointer_or_addr().unwrap().provenance;
        this.alloc_mark_immutable(provenance.get_alloc_id().unwrap()).unwrap();
    }

    /// Returns an integer type that is twice as wide as `ty`.
    fn get_twice_wide_int_ty(&self, ty: Ty<'tcx>) -> Ty<'tcx> {
        let this = self.eval_context_ref();
        match ty.kind() {
            // Unsigned
            ty::Uint(UintTy::U8) => this.tcx.types.u16,
            ty::Uint(UintTy::U16) => this.tcx.types.u32,
            ty::Uint(UintTy::U32) => this.tcx.types.u64,
            ty::Uint(UintTy::U64) => this.tcx.types.u128,
            // Signed
            ty::Int(IntTy::I8) => this.tcx.types.i16,
            ty::Int(IntTy::I16) => this.tcx.types.i32,
            ty::Int(IntTy::I32) => this.tcx.types.i64,
            ty::Int(IntTy::I64) => this.tcx.types.i128,
            _ => span_bug!(this.cur_span(), "unexpected type: {ty:?}"),
        }
    }

    /// Checks that the target feature `target_feature` is enabled.
    ///
    /// If it is not enabled, emits a UB error stating that the feature is required by
    /// `intrinsic`.
    fn expect_target_feature_for_intrinsic(
        &self,
        intrinsic: Symbol,
        target_feature: &str,
    ) -> InterpResult<'tcx, ()> {
        let this = self.eval_context_ref();
        if !this.tcx.sess.unstable_target_features.contains(&Symbol::intern(target_feature)) {
            throw_ub_format!(
                "attempted to call intrinsic `{intrinsic}` that requires missing target feature {target_feature}"
            );
        }
        interp_ok(())
    }

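    /// Collects all function pointers stored in statics whose `#[link_section]` name is
    /// accepted by `include_name`. Arrays of function pointers are flattened into individual
    /// entries.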
    fn lookup_link_section(
        &mut self,
        include_name: impl Fn(&str) -> bool,
    ) -> InterpResult<'tcx, Vec<ImmTy<'tcx>>> {
        let this = self.eval_context_mut();
        let tcx = this.tcx.tcx;

        let mut array = vec![];

        iter_exported_symbols(tcx, |_cnum, def_id| {
            let attrs = tcx.codegen_fn_attrs(def_id);
            let Some(link_section) = attrs.link_section else {
                return interp_ok(());
            };
            if include_name(link_section.as_str()) {
                let instance = ty::Instance::mono(tcx, def_id);
                let const_val = this.eval_global(instance).unwrap_or_else(|err| {
                    panic!(
                        "failed to evaluate static in required link_section: {def_id:?}\n{err:?}"
                    )
                });
                match const_val.layout.ty.kind() {
                    ty::FnPtr(..) => {
                        array.push(this.read_immediate(&const_val)?);
                    }
                    ty::Array(elem_ty, _) if matches!(elem_ty.kind(), ty::FnPtr(..)) => {
                        let mut elems = this.project_array_fields(&const_val)?;
                        while let Some((_idx, elem)) = elems.next(this)? {
                            array.push(this.read_immediate(&elem)?);
                        }
                    }
                    _ =>
                        throw_unsup_format!(
                            "only function pointers and arrays of function pointers are supported in well-known linker sections"
                        ),
                }
            }
            interp_ok(())
        })?;

        interp_ok(array)
    }

    /// Returns the mangled form of an internal rustc symbol, caching the result.
    fn mangle_internal_symbol<'a>(&'a mut self, name: &'static str) -> &'a str
    where
        'tcx: 'a,
    {
        let this = self.eval_context_mut();
        let tcx = *this.tcx;
        this.machine
            .mangle_internal_symbol_cache
            .entry(name)
            .or_insert_with(|| mangle_internal_symbol(tcx, name))
    }
}

impl<'tcx> MiriMachine<'tcx> {
    /// Returns the span of the current execution position in the topmost user-relevant frame
    /// of the active thread.
    pub fn current_user_relevant_span(&self) -> Span {
        self.threads.active_thread_ref().current_user_relevant_span()
    }

    /// Returns the span of the caller of the currently executing code: the topmost
    /// user-relevant frame, but at least one frame below the top of the stack.
    pub fn caller_span(&self) -> Span {
        // Go down at least to the caller of the current frame (`len - 2`), or further if the
        // topmost user-relevant frame is even lower.
        let frame_idx = self.top_user_relevant_frame().unwrap();
        let frame_idx = cmp::min(frame_idx, self.stack().len().saturating_sub(2));
        self.stack()[frame_idx].current_span()
    }

    fn stack(&self) -> &[Frame<'tcx, Provenance, machine::FrameExtra<'tcx>>] {
        self.threads.active_thread_stack()
    }

    fn top_user_relevant_frame(&self) -> Option<usize> {
        self.threads.active_thread_ref().top_user_relevant_frame()
    }

    /// Rates how "user-relevant" the given frame is: frames that require a caller location
    /// get 0, frames from a local crate get the maximum rating, and all other frames get 1.
    pub fn user_relevance(&self, frame: &Frame<'tcx, Provenance>) -> u8 {
        if frame.instance().def.requires_caller_location(self.tcx) {
            return 0;
        }
        if self.is_local(frame.instance()) {
            u8::MAX
        } else {
            1
        }
    }
}

pub fn isolation_abort_error<'tcx>(name: &str) -> InterpResult<'tcx> {
    throw_machine_stop!(TerminationInfo::UnsupportedInIsolation(format!(
        "{name} not available when isolation is enabled",
    )))
}

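/// Converts `b` into a SIMD mask element of the given `size`: all-ones for `true`, all-zeros
/// for `false`.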
pub(crate) fn bool_to_simd_element(b: bool, size: Size) -> Scalar {
    // In two's complement, -1 has all bits set, so `from_int` produces the all-ones pattern of
    // the requested size.
    let val = if b { -1 } else { 0 };
    Scalar::from_int(val, size)
}

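/// Helper for shims that follow the Windows "required buffer size" convention: converts the
/// `(success, len)` pair returned by `write_c_str`/`write_wide_str` into the `u32` return value
/// such APIs expect.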
pub(crate) fn windows_check_buffer_size((success, len): (bool, u64)) -> u32 {
    if success {
        // Return the number of characters written, not counting the null terminator.
        u32::try_from(len.strict_sub(1)).unwrap()
    } else {
        // Return the buffer size required to hold the string, including the null terminator.
        u32::try_from(len).unwrap()
    }
}

pub trait ToUsize {
    fn to_usize(self) -> usize;
}

impl ToUsize for u32 {
    fn to_usize(self) -> usize {
        self.try_into().unwrap()
    }
}

pub trait ToU64 {
    fn to_u64(self) -> u64;
}

impl ToU64 for usize {
    fn to_u64(self) -> u64 {
        self.try_into().unwrap()
    }
}

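/// Forwards to `rustc_const_eval::enter_trace_span!`, supplying `MiriMachine<'static>` as the
/// machine type.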
#[macro_export]
macro_rules! enter_trace_span {
    ($($tt:tt)*) => {
        rustc_const_eval::enter_trace_span!($crate::MiriMachine<'static>, $($tt)*)
    };
}