use std::assert_matches::assert_matches;
use std::borrow::{Borrow, Cow};
use std::cell::Cell;
use std::collections::VecDeque;
use std::{fmt, ptr};

use rustc_abi::{Align, HasDataLayout, Size};
use rustc_ast::Mutability;
use rustc_data_structures::fx::{FxHashSet, FxIndexMap};
use rustc_middle::mir::display_allocation;
use rustc_middle::ty::{self, Instance, Ty, TyCtxt};
use rustc_middle::{bug, throw_ub_format};
use tracing::{debug, instrument, trace};

use super::{
    AllocBytes, AllocId, AllocInit, AllocMap, AllocRange, Allocation, CheckAlignMsg,
    CheckInAllocMsg, CtfeProvenance, GlobalAlloc, InterpCx, InterpResult, Machine, MayLeak,
    Misalignment, Pointer, PointerArithmetic, Provenance, Scalar, alloc_range, err_ub,
    err_ub_custom, interp_ok, throw_ub, throw_ub_custom, throw_unsup, throw_unsup_format,
};
use crate::const_eval::ConstEvalErrKind;
use crate::fluent_generated as fluent;

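/// The kind of memory an allocation lives in: interpreter-managed stack slots,
/// `caller_location` allocations, or a machine-defined kind.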
#[derive(Debug, PartialEq, Copy, Clone)]
pub enum MemoryKind<T> {
    Stack,
    CallerLocation,
    Machine(T),
}

impl<T: MayLeak> MayLeak for MemoryKind<T> {
    #[inline]
    fn may_leak(self) -> bool {
        match self {
            MemoryKind::Stack => false,
            MemoryKind::CallerLocation => true,
            MemoryKind::Machine(k) => k.may_leak(),
        }
    }
}

impl<T: fmt::Display> fmt::Display for MemoryKind<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            MemoryKind::Stack => write!(f, "stack variable"),
            MemoryKind::CallerLocation => write!(f, "caller location"),
            MemoryKind::Machine(m) => write!(f, "{m}"),
        }
    }
}

#[derive(Copy, Clone, PartialEq, Debug)]
pub enum AllocKind {
    LiveData,
    Function,
    VTable,
    Dead,
}

#[derive(Copy, Clone, PartialEq, Debug)]
pub struct AllocInfo {
    pub size: Size,
    pub align: Align,
    pub kind: AllocKind,
    pub mutbl: Mutability,
}

impl AllocInfo {
    fn new(size: Size, align: Align, kind: AllocKind, mutbl: Mutability) -> Self {
        Self { size, align, kind, mutbl }
    }
}

#[derive(Debug, Copy, Clone)]
pub enum FnVal<'tcx, Other> {
    Instance(Instance<'tcx>),
    Other(Other),
}

impl<'tcx, Other> FnVal<'tcx, Other> {
    pub fn as_instance(self) -> InterpResult<'tcx, Instance<'tcx>> {
        match self {
            FnVal::Instance(instance) => interp_ok(instance),
            FnVal::Other(_) => {
                throw_unsup_format!("'foreign' function pointers are not supported in this context")
            }
        }
    }
}

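/// The interpreter's memory: the map of live allocations, extra data for "other" function
/// pointers, bookkeeping for deallocated allocations, and a flag recording whether validation
/// is in progress (which skips some machine access hooks).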
pub struct Memory<'tcx, M: Machine<'tcx>> {
    pub(super) alloc_map: M::MemoryMap,

    extra_fn_ptr_map: FxIndexMap<AllocId, M::ExtraFnVal>,

    pub(super) dead_alloc_map: FxIndexMap<AllocId, (Size, Align)>,

    validation_in_progress: Cell<bool>,
}

#[derive(Copy, Clone)]
pub struct AllocRef<'a, 'tcx, Prov: Provenance, Extra, Bytes: AllocBytes = Box<[u8]>> {
    alloc: &'a Allocation<Prov, Extra, Bytes>,
    range: AllocRange,
    tcx: TyCtxt<'tcx>,
    alloc_id: AllocId,
}
pub struct AllocRefMut<'a, 'tcx, Prov: Provenance, Extra, Bytes: AllocBytes = Box<[u8]>> {
    alloc: &'a mut Allocation<Prov, Extra, Bytes>,
    range: AllocRange,
    tcx: TyCtxt<'tcx>,
    alloc_id: AllocId,
}

impl<'tcx, M: Machine<'tcx>> Memory<'tcx, M> {
    pub fn new() -> Self {
        Memory {
            alloc_map: M::MemoryMap::default(),
            extra_fn_ptr_map: FxIndexMap::default(),
            dead_alloc_map: FxIndexMap::default(),
            validation_in_progress: Cell::new(false),
        }
    }

    pub fn alloc_map(&self) -> &M::MemoryMap {
        &self.alloc_map
    }
}

impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
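    /// Turns a pointer with `CtfeProvenance` (as obtained from the `tcx`) into a pointer usable
    /// by the current machine. Thread-local statics are rejected outright, `extern` statics are
    /// handed to the machine, and anything else must be a global or a registered function
    /// pointer before `M::adjust_alloc_root_pointer` gets the final word.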
    #[inline]
    pub fn global_root_pointer(
        &self,
        ptr: Pointer<CtfeProvenance>,
    ) -> InterpResult<'tcx, Pointer<M::Provenance>> {
        let alloc_id = ptr.provenance.alloc_id();
        match self.tcx.try_get_global_alloc(alloc_id) {
            Some(GlobalAlloc::Static(def_id)) if self.tcx.is_thread_local_static(def_id) => {
                bug!("global memory cannot point to thread-local static")
            }
            Some(GlobalAlloc::Static(def_id)) if self.tcx.is_foreign_item(def_id) => {
                return M::extern_static_pointer(self, def_id);
            }
            None => {
                assert!(
                    self.memory.extra_fn_ptr_map.contains_key(&alloc_id),
                    "{alloc_id:?} is neither global nor a function pointer"
                );
            }
            _ => {}
        }
        M::adjust_alloc_root_pointer(self, ptr, M::GLOBAL_KIND.map(MemoryKind::Machine))
    }

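    /// Returns a pointer to the given function value, reserving a fresh `AllocId` and recording
    /// the extra data in `extra_fn_ptr_map` for "other" (machine-defined) function values.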
    pub fn fn_ptr(&mut self, fn_val: FnVal<'tcx, M::ExtraFnVal>) -> Pointer<M::Provenance> {
        let id = match fn_val {
            FnVal::Instance(instance) => {
                let salt = M::get_global_alloc_salt(self, Some(instance));
                self.tcx.reserve_and_set_fn_alloc(instance, salt)
            }
            FnVal::Other(extra) => {
                let id = self.tcx.reserve_alloc_id();
                let old = self.memory.extra_fn_ptr_map.insert(id, extra);
                assert!(old.is_none());
                id
            }
        };
        self.global_root_pointer(Pointer::from(id)).unwrap()
    }

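    /// Allocates new memory of the given size, alignment, kind, and initialization state,
    /// returning a pointer to the fresh allocation.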
    pub fn allocate_ptr(
        &mut self,
        size: Size,
        align: Align,
        kind: MemoryKind<M::MemoryKind>,
        init: AllocInit,
    ) -> InterpResult<'tcx, Pointer<M::Provenance>> {
        let params = self.machine.get_default_alloc_params();
        let alloc = if M::PANIC_ON_ALLOC_FAIL {
            Allocation::new(size, align, init, params)
        } else {
            Allocation::try_new(size, align, init, params)?
        };
        self.insert_allocation(alloc, kind)
    }

    pub fn allocate_bytes_ptr(
        &mut self,
        bytes: &[u8],
        align: Align,
        kind: MemoryKind<M::MemoryKind>,
        mutability: Mutability,
    ) -> InterpResult<'tcx, Pointer<M::Provenance>> {
        let params = self.machine.get_default_alloc_params();
        let alloc = Allocation::from_bytes(bytes, align, mutability, params);
        self.insert_allocation(alloc, kind)
    }

    pub fn insert_allocation(
        &mut self,
        alloc: Allocation<M::Provenance, (), M::Bytes>,
        kind: MemoryKind<M::MemoryKind>,
    ) -> InterpResult<'tcx, Pointer<M::Provenance>> {
        assert!(alloc.size() <= self.max_size_of_val());
        let id = self.tcx.reserve_alloc_id();
        debug_assert_ne!(
            Some(kind),
            M::GLOBAL_KIND.map(MemoryKind::Machine),
            "dynamically allocating global memory"
        );
        let extra = M::init_local_allocation(self, id, kind, alloc.size(), alloc.align)?;
        let alloc = alloc.with_extra(extra);
        self.memory.alloc_map.insert(id, (kind, alloc));
        M::adjust_alloc_root_pointer(self, Pointer::from(id), Some(kind))
    }

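    /// Reallocation: allocates a new block of `new_size` bytes, copies over
    /// `min(old_size, new_size)` bytes from the old allocation, and deallocates the old one.
    /// The pointer must point to the start of its allocation.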
    pub fn reallocate_ptr(
        &mut self,
        ptr: Pointer<Option<M::Provenance>>,
        old_size_and_align: Option<(Size, Align)>,
        new_size: Size,
        new_align: Align,
        kind: MemoryKind<M::MemoryKind>,
        init_growth: AllocInit,
    ) -> InterpResult<'tcx, Pointer<M::Provenance>> {
        let (alloc_id, offset, _prov) = self.ptr_get_alloc_id(ptr, 0)?;
        if offset.bytes() != 0 {
            throw_ub_custom!(
                fluent::const_eval_realloc_or_alloc_with_offset,
                ptr = format!("{ptr:?}"),
                kind = "realloc"
            );
        }

        let new_ptr = self.allocate_ptr(new_size, new_align, kind, init_growth)?;
        let old_size = match old_size_and_align {
            Some((size, _align)) => size,
            None => self.get_alloc_raw(alloc_id)?.size(),
        };
        self.mem_copy(ptr, new_ptr.into(), old_size.min(new_size), true)?;
        self.deallocate_ptr(ptr, old_size_and_align, kind)?;

        interp_ok(new_ptr)
    }

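    /// Marks a heap allocation created by `const_allocate` as global: the pointer must point to
    /// the start of a heap allocation that is not already global, and the allocation becomes
    /// immutable.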
    pub fn make_const_heap_ptr_global(
        &mut self,
        ptr: Pointer<Option<CtfeProvenance>>,
    ) -> InterpResult<'tcx>
    where
        M: Machine<'tcx, MemoryKind = crate::const_eval::MemoryKind, Provenance = CtfeProvenance>,
    {
        let (alloc_id, offset, _) = self.ptr_get_alloc_id(ptr, 0)?;
        if offset.bytes() != 0 {
            return Err(ConstEvalErrKind::ConstMakeGlobalWithOffset(ptr)).into();
        }

        if matches!(self.tcx.try_get_global_alloc(alloc_id), Some(_)) {
            return Err(ConstEvalErrKind::ConstMakeGlobalPtrIsNonHeap(ptr)).into();
        }

        let (kind, alloc) = self
            .memory
            .alloc_map
            .get_mut_or(alloc_id, || Err(ConstEvalErrKind::ConstMakeGlobalWithDanglingPtr(ptr)))?;

        match kind {
            MemoryKind::Stack | MemoryKind::CallerLocation => {
                return Err(ConstEvalErrKind::ConstMakeGlobalPtrIsNonHeap(ptr)).into();
            }
            MemoryKind::Machine(crate::const_eval::MemoryKind::Heap { was_made_global }) => {
                if *was_made_global {
                    return Err(ConstEvalErrKind::ConstMakeGlobalPtrAlreadyMadeGlobal(alloc_id))
                        .into();
                }
                *was_made_global = true;
            }
        }

        alloc.mutability = Mutability::Not;

        interp_ok(())
    }

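    /// Deallocates the allocation `ptr` points to. The pointer must have no offset, the
    /// allocation must be mutable and of the expected kind, and (if provided) the size and
    /// alignment must match the allocation's.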
    #[instrument(skip(self), level = "debug")]
    pub fn deallocate_ptr(
        &mut self,
        ptr: Pointer<Option<M::Provenance>>,
        old_size_and_align: Option<(Size, Align)>,
        kind: MemoryKind<M::MemoryKind>,
    ) -> InterpResult<'tcx> {
        let (alloc_id, offset, prov) = self.ptr_get_alloc_id(ptr, 0)?;
        trace!("deallocating: {alloc_id:?}");

        if offset.bytes() != 0 {
            throw_ub_custom!(
                fluent::const_eval_realloc_or_alloc_with_offset,
                ptr = format!("{ptr:?}"),
                kind = "dealloc",
            );
        }

        let Some((alloc_kind, mut alloc)) = self.memory.alloc_map.remove(&alloc_id) else {
            return Err(match self.tcx.try_get_global_alloc(alloc_id) {
                Some(GlobalAlloc::Function { .. }) => {
                    err_ub_custom!(
                        fluent::const_eval_invalid_dealloc,
                        alloc_id = alloc_id,
                        kind = "fn",
                    )
                }
                Some(GlobalAlloc::VTable(..)) => {
                    err_ub_custom!(
                        fluent::const_eval_invalid_dealloc,
                        alloc_id = alloc_id,
                        kind = "vtable",
                    )
                }
                Some(GlobalAlloc::TypeId { .. }) => {
                    err_ub_custom!(
                        fluent::const_eval_invalid_dealloc,
                        alloc_id = alloc_id,
                        kind = "typeid",
                    )
                }
                Some(GlobalAlloc::Static(..) | GlobalAlloc::Memory(..)) => {
                    err_ub_custom!(
                        fluent::const_eval_invalid_dealloc,
                        alloc_id = alloc_id,
                        kind = "static_mem"
                    )
                }
                None => err_ub!(PointerUseAfterFree(alloc_id, CheckInAllocMsg::MemoryAccess)),
            })
            .into();
        };

        if alloc.mutability.is_not() {
            throw_ub_custom!(fluent::const_eval_dealloc_immutable, alloc = alloc_id,);
        }
        if alloc_kind != kind {
            throw_ub_custom!(
                fluent::const_eval_dealloc_kind_mismatch,
                alloc = alloc_id,
                alloc_kind = format!("{alloc_kind}"),
                kind = format!("{kind}"),
            );
        }
        if let Some((size, align)) = old_size_and_align {
            if size != alloc.size() || align != alloc.align {
                throw_ub_custom!(
                    fluent::const_eval_dealloc_incorrect_layout,
                    alloc = alloc_id,
                    size = alloc.size().bytes(),
                    align = alloc.align.bytes(),
                    size_found = size.bytes(),
                    align_found = align.bytes(),
                )
            }
        }

        let size = alloc.size();
        M::before_memory_deallocation(
            self.tcx,
            &mut self.machine,
            &mut alloc.extra,
            ptr,
            (alloc_id, prov),
            size,
            alloc.align,
            kind,
        )?;

        let old = self.memory.dead_alloc_map.insert(alloc_id, (size, alloc.align));
        if old.is_some() {
            bug!("Nothing can be deallocated twice");
        }

        interp_ok(())
    }

    #[inline(always)]
    fn get_ptr_access(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        size: Size,
    ) -> InterpResult<'tcx, Option<(AllocId, Size, M::ProvenanceExtra)>> {
        let size = i64::try_from(size.bytes()).unwrap();
        Self::check_and_deref_ptr(
            self,
            ptr,
            size,
            CheckInAllocMsg::MemoryAccess,
            |this, alloc_id, offset, prov| {
                let (size, align) =
                    this.get_live_alloc_size_and_align(alloc_id, CheckInAllocMsg::MemoryAccess)?;
                interp_ok((size, align, (alloc_id, offset, prov)))
            },
        )
    }

    #[inline(always)]
    pub fn check_ptr_access(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        size: Size,
        msg: CheckInAllocMsg,
    ) -> InterpResult<'tcx> {
        let size = i64::try_from(size.bytes()).unwrap();
        Self::check_and_deref_ptr(self, ptr, size, msg, |this, alloc_id, _, _| {
            let (size, align) = this.get_live_alloc_size_and_align(alloc_id, msg)?;
            interp_ok((size, align, ()))
        })?;
        interp_ok(())
    }

    pub fn check_ptr_access_signed(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        size: i64,
        msg: CheckInAllocMsg,
    ) -> InterpResult<'tcx> {
        Self::check_and_deref_ptr(self, ptr, size, msg, |this, alloc_id, _, _| {
            let (size, align) = this.get_live_alloc_size_and_align(alloc_id, msg)?;
            interp_ok((size, align, ()))
        })?;
        interp_ok(())
    }

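    /// Low-level helper that checks whether a `size`-byte access (forwards for `size >= 0`,
    /// backwards for negative sizes) through `ptr` stays within a live allocation and, on
    /// success, returns whatever the `alloc_size` closure produced for it. Zero-sized accesses
    /// trivially succeed and return `None`.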
    fn check_and_deref_ptr<T, R: Borrow<Self>>(
        this: R,
        ptr: Pointer<Option<M::Provenance>>,
        size: i64,
        msg: CheckInAllocMsg,
        alloc_size: impl FnOnce(
            R,
            AllocId,
            Size,
            M::ProvenanceExtra,
        ) -> InterpResult<'tcx, (Size, Align, T)>,
    ) -> InterpResult<'tcx, Option<T>> {
        if size == 0 {
            return interp_ok(None);
        }

        interp_ok(match this.borrow().ptr_try_get_alloc_id(ptr, size) {
            Err(addr) => {
                throw_ub!(DanglingIntPointer { addr, inbounds_size: size, msg });
            }
            Ok((alloc_id, offset, prov)) => {
                let tcx = this.borrow().tcx;
                let (alloc_size, _alloc_align, ret_val) = alloc_size(this, alloc_id, offset, prov)?;
                let offset = offset.bytes();
                let (begin, end) = if size >= 0 {
                    (Some(offset), offset.checked_add(size as u64))
                } else {
                    (offset.checked_sub(size.unsigned_abs()), Some(offset))
                };
                let in_bounds = begin.is_some() && end.is_some_and(|e| e <= alloc_size.bytes());
                if !in_bounds {
                    throw_ub!(PointerOutOfBounds {
                        alloc_id,
                        alloc_size,
                        ptr_offset: tcx.sign_extend_to_target_isize(offset),
                        inbounds_size: size,
                        msg,
                    })
                }

                Some(ret_val)
            }
        })
    }

    pub(super) fn check_misalign(
        &self,
        misaligned: Option<Misalignment>,
        msg: CheckAlignMsg,
    ) -> InterpResult<'tcx> {
        if let Some(misaligned) = misaligned {
            throw_ub!(AlignmentCheckFailed(misaligned, msg))
        }
        interp_ok(())
    }

    pub(super) fn is_ptr_misaligned(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        align: Align,
    ) -> Option<Misalignment> {
        if !M::enforce_alignment(self) || align.bytes() == 1 {
            return None;
        }

        #[inline]
        fn is_offset_misaligned(offset: u64, align: Align) -> Option<Misalignment> {
            if offset.is_multiple_of(align.bytes()) {
                None
            } else {
                let offset_pow2 = 1 << offset.trailing_zeros();
                Some(Misalignment { has: Align::from_bytes(offset_pow2).unwrap(), required: align })
            }
        }

        match self.ptr_try_get_alloc_id(ptr, 0) {
            Err(addr) => is_offset_misaligned(addr, align),
            Ok((alloc_id, offset, _prov)) => {
                let alloc_info = self.get_alloc_info(alloc_id);
                if let Some(misalign) = M::alignment_check(
                    self,
                    alloc_id,
                    alloc_info.align,
                    alloc_info.kind,
                    offset,
                    align,
                ) {
                    Some(misalign)
                } else if M::Provenance::OFFSET_IS_ADDR {
                    is_offset_misaligned(ptr.addr().bytes(), align)
                } else {
                    if alloc_info.align.bytes() < align.bytes() {
                        Some(Misalignment { has: alloc_info.align, required: align })
                    } else {
                        is_offset_misaligned(offset.bytes(), align)
                    }
                }
            }
        }
    }

    pub fn check_ptr_align(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        align: Align,
    ) -> InterpResult<'tcx> {
        self.check_misalign(self.is_ptr_misaligned(ptr, align), CheckAlignMsg::AccessedPtr)
    }
}

impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
    pub fn remove_unreachable_allocs(&mut self, reachable_allocs: &FxHashSet<AllocId>) {
        #[allow(rustc::potential_query_instability)]
        self.memory.dead_alloc_map.retain(|id, _| reachable_allocs.contains(id));
    }
}

impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
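    /// Helper to obtain a global (`tcx`) allocation, evaluating static initializers on demand.
    /// The machine gets to inspect and adjust the allocation before it is returned; most callers
    /// should go through `get_alloc_raw` or `get_alloc_raw_mut` instead.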
    fn get_global_alloc(
        &self,
        id: AllocId,
        is_write: bool,
    ) -> InterpResult<'tcx, Cow<'tcx, Allocation<M::Provenance, M::AllocExtra, M::Bytes>>> {
        let (alloc, def_id) = match self.tcx.try_get_global_alloc(id) {
            Some(GlobalAlloc::Memory(mem)) => {
                (mem, None)
            }
            Some(GlobalAlloc::Function { .. }) => throw_ub!(DerefFunctionPointer(id)),
            Some(GlobalAlloc::VTable(..)) => throw_ub!(DerefVTablePointer(id)),
            Some(GlobalAlloc::TypeId { .. }) => throw_ub!(DerefTypeIdPointer(id)),
            None => throw_ub!(PointerUseAfterFree(id, CheckInAllocMsg::MemoryAccess)),
            Some(GlobalAlloc::Static(def_id)) => {
                assert!(self.tcx.is_static(def_id));
                assert!(!self.tcx.is_thread_local_static(def_id));
                if self.tcx.is_foreign_item(def_id) {
                    throw_unsup!(ExternStatic(def_id));
                }

                let val = self.ctfe_query(|tcx| tcx.eval_static_initializer(def_id))?;
                (val, Some(def_id))
            }
        };
        M::before_access_global(self.tcx, &self.machine, id, alloc, def_id, is_write)?;
        M::adjust_global_allocation(
            self,
            id,
            alloc.inner(),
        )
    }

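    /// Gives raw access to the `Allocation`, without bounds or alignment checks.
    /// The caller is responsible for any needed checks; use the higher-level `get_ptr_alloc`
    /// where possible.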
    pub fn get_alloc_raw(
        &self,
        id: AllocId,
    ) -> InterpResult<'tcx, &Allocation<M::Provenance, M::AllocExtra, M::Bytes>> {
        let a = self.memory.alloc_map.get_or(id, || {
            let alloc = self.get_global_alloc(id, false).report_err().map_err(Err)?;
            match alloc {
                Cow::Borrowed(alloc) => {
                    Err(Ok(alloc))
                }
                Cow::Owned(alloc) => {
                    let kind = M::GLOBAL_KIND.expect(
                        "I got a global allocation that I have to copy but the machine does \
                            not expect that to happen",
                    );
                    Ok((MemoryKind::Machine(kind), alloc))
                }
            }
        });
        match a {
            Ok(a) => interp_ok(&a.1),
            Err(a) => a.into(),
        }
    }

    pub fn get_alloc_bytes_unchecked_raw(&self, id: AllocId) -> InterpResult<'tcx, *const u8> {
        let alloc = self.get_alloc_raw(id)?;
        interp_ok(alloc.get_bytes_unchecked_raw())
    }

    pub fn get_ptr_alloc<'a>(
        &'a self,
        ptr: Pointer<Option<M::Provenance>>,
        size: Size,
    ) -> InterpResult<'tcx, Option<AllocRef<'a, 'tcx, M::Provenance, M::AllocExtra, M::Bytes>>>
    {
        let size_i64 = i64::try_from(size.bytes()).unwrap();
        let ptr_and_alloc = Self::check_and_deref_ptr(
            self,
            ptr,
            size_i64,
            CheckInAllocMsg::MemoryAccess,
            |this, alloc_id, offset, prov| {
                let alloc = this.get_alloc_raw(alloc_id)?;
                interp_ok((alloc.size(), alloc.align, (alloc_id, offset, prov, alloc)))
            },
        )?;
        if !self.memory.validation_in_progress.get() {
            if let Ok((alloc_id, ..)) = self.ptr_try_get_alloc_id(ptr, size_i64) {
                M::before_alloc_access(self.tcx, &self.machine, alloc_id)?;
            }
        }

        if let Some((alloc_id, offset, prov, alloc)) = ptr_and_alloc {
            let range = alloc_range(offset, size);
            if !self.memory.validation_in_progress.get() {
                M::before_memory_read(
                    self.tcx,
                    &self.machine,
                    &alloc.extra,
                    ptr,
                    (alloc_id, prov),
                    range,
                )?;
            }
            interp_ok(Some(AllocRef { alloc, range, tcx: *self.tcx, alloc_id }))
        } else {
            interp_ok(None)
        }
    }

    pub fn get_alloc_extra<'a>(&'a self, id: AllocId) -> InterpResult<'tcx, &'a M::AllocExtra> {
        interp_ok(&self.get_alloc_raw(id)?.extra)
    }

    pub fn get_alloc_mutability<'a>(&'a self, id: AllocId) -> InterpResult<'tcx, Mutability> {
        interp_ok(self.get_alloc_raw(id)?.mutability)
    }

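    /// Gives raw, mutable access to the `Allocation`, without bounds or alignment checks.
    /// If the allocation is a global, it is copied into local memory first; writes to
    /// read-only allocations are rejected here.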
    pub fn get_alloc_raw_mut(
        &mut self,
        id: AllocId,
    ) -> InterpResult<'tcx, (&mut Allocation<M::Provenance, M::AllocExtra, M::Bytes>, &mut M)> {
        if self.memory.alloc_map.get_mut(id).is_none() {
            let alloc = self.get_global_alloc(id, true)?;
            let kind = M::GLOBAL_KIND.expect(
                "I got a global allocation that I have to copy but the machine does \
                    not expect that to happen",
            );
            self.memory.alloc_map.insert(id, (MemoryKind::Machine(kind), alloc.into_owned()));
        }

        let (_kind, alloc) = self.memory.alloc_map.get_mut(id).unwrap();
        if alloc.mutability.is_not() {
            throw_ub!(WriteToReadOnly(id))
        }
        interp_ok((alloc, &mut self.machine))
    }

    pub fn get_alloc_bytes_unchecked_raw_mut(
        &mut self,
        id: AllocId,
    ) -> InterpResult<'tcx, *mut u8> {
        let alloc = self.get_alloc_raw_mut(id)?.0;
        interp_ok(alloc.get_bytes_unchecked_raw_mut())
    }

    pub fn get_ptr_alloc_mut<'a>(
        &'a mut self,
        ptr: Pointer<Option<M::Provenance>>,
        size: Size,
    ) -> InterpResult<'tcx, Option<AllocRefMut<'a, 'tcx, M::Provenance, M::AllocExtra, M::Bytes>>>
    {
        let tcx = self.tcx;
        let validation_in_progress = self.memory.validation_in_progress.get();

        let size_i64 = i64::try_from(size.bytes()).unwrap();
        let ptr_and_alloc = Self::check_and_deref_ptr(
            self,
            ptr,
            size_i64,
            CheckInAllocMsg::MemoryAccess,
            |this, alloc_id, offset, prov| {
                let (alloc, machine) = this.get_alloc_raw_mut(alloc_id)?;
                interp_ok((alloc.size(), alloc.align, (alloc_id, offset, prov, alloc, machine)))
            },
        )?;

        if let Some((alloc_id, offset, prov, alloc, machine)) = ptr_and_alloc {
            let range = alloc_range(offset, size);
            if !validation_in_progress {
                M::before_alloc_access(tcx, machine, alloc_id)?;
                M::before_memory_write(
                    tcx,
                    machine,
                    &mut alloc.extra,
                    ptr,
                    (alloc_id, prov),
                    range,
                )?;
            }
            interp_ok(Some(AllocRefMut { alloc, range, tcx: *tcx, alloc_id }))
        } else {
            interp_ok(None)
        }
    }

    pub fn get_alloc_extra_mut<'a>(
        &'a mut self,
        id: AllocId,
    ) -> InterpResult<'tcx, (&'a mut M::AllocExtra, &'a mut M)> {
        let (alloc, machine) = self.get_alloc_raw_mut(id)?;
        interp_ok((&mut alloc.extra, machine))
    }

    pub fn is_alloc_live(&self, id: AllocId) -> bool {
        self.memory.alloc_map.contains_key_ref(&id)
            || self.memory.extra_fn_ptr_map.contains_key(&id)
            || self.tcx.try_get_global_alloc(id).is_some()
    }

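    /// Obtains the size, alignment, kind, and mutability of any allocation: live local
    /// allocations, function pointers, global allocations, and (via `dead_alloc_map`)
    /// deallocated ones.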
    pub fn get_alloc_info(&self, id: AllocId) -> AllocInfo {
        if let Some((_, alloc)) = self.memory.alloc_map.get(id) {
            return AllocInfo::new(
                alloc.size(),
                alloc.align,
                AllocKind::LiveData,
                alloc.mutability,
            );
        }

        if let Some(fn_val) = self.get_fn_alloc(id) {
            let align = match fn_val {
                FnVal::Instance(instance) => {
                    self.tcx.codegen_instance_attrs(instance.def).alignment.unwrap_or(Align::ONE)
                }
                FnVal::Other(_) => Align::ONE,
            };

            return AllocInfo::new(Size::ZERO, align, AllocKind::Function, Mutability::Not);
        }

        if let Some(global_alloc) = self.tcx.try_get_global_alloc(id) {
            let (size, align) = global_alloc.size_and_align(*self.tcx, self.typing_env);
            let mutbl = global_alloc.mutability(*self.tcx, self.typing_env);
            let kind = match global_alloc {
                GlobalAlloc::TypeId { .. }
                | GlobalAlloc::Static { .. }
                | GlobalAlloc::Memory { .. } => AllocKind::LiveData,
                GlobalAlloc::Function { .. } => bug!("We already checked function pointers above"),
                GlobalAlloc::VTable { .. } => AllocKind::VTable,
            };
            return AllocInfo::new(size, align, kind, mutbl);
        }

        let (size, align) = *self
            .memory
            .dead_alloc_map
            .get(&id)
            .expect("deallocated pointers should all be recorded in `dead_alloc_map`");
        AllocInfo::new(size, align, AllocKind::Dead, Mutability::Not)
    }

    fn get_live_alloc_size_and_align(
        &self,
        id: AllocId,
        msg: CheckInAllocMsg,
    ) -> InterpResult<'tcx, (Size, Align)> {
        let info = self.get_alloc_info(id);
        if matches!(info.kind, AllocKind::Dead) {
            throw_ub!(PointerUseAfterFree(id, msg))
        }
        interp_ok((info.size, info.align))
    }

    fn get_fn_alloc(&self, id: AllocId) -> Option<FnVal<'tcx, M::ExtraFnVal>> {
        if let Some(extra) = self.memory.extra_fn_ptr_map.get(&id) {
            Some(FnVal::Other(*extra))
        } else {
            match self.tcx.try_get_global_alloc(id) {
                Some(GlobalAlloc::Function { instance, .. }) => Some(FnVal::Instance(instance)),
                _ => None,
            }
        }
    }

    pub fn get_ptr_type_id(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
    ) -> InterpResult<'tcx, (Ty<'tcx>, u64)> {
        let (alloc_id, offset, _meta) = self.ptr_get_alloc_id(ptr, 0)?;
        let GlobalAlloc::TypeId { ty } = self.tcx.global_alloc(alloc_id) else {
            throw_ub_format!("invalid `TypeId` value: not all bytes carry type id metadata")
        };
        interp_ok((ty, offset.bytes()))
    }

    pub fn get_ptr_fn(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
    ) -> InterpResult<'tcx, FnVal<'tcx, M::ExtraFnVal>> {
        trace!("get_ptr_fn({:?})", ptr);
        let (alloc_id, offset, _prov) = self.ptr_get_alloc_id(ptr, 0)?;
        if offset.bytes() != 0 {
            throw_ub!(InvalidFunctionPointer(Pointer::new(alloc_id, offset)))
        }
        self.get_fn_alloc(alloc_id)
            .ok_or_else(|| err_ub!(InvalidFunctionPointer(Pointer::new(alloc_id, offset))))
            .into()
    }

    pub fn get_ptr_vtable_ty(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        expected_trait: Option<&'tcx ty::List<ty::PolyExistentialPredicate<'tcx>>>,
    ) -> InterpResult<'tcx, Ty<'tcx>> {
        trace!("get_ptr_vtable({:?})", ptr);
        let (alloc_id, offset, _tag) = self.ptr_get_alloc_id(ptr, 0)?;
        if offset.bytes() != 0 {
            throw_ub!(InvalidVTablePointer(Pointer::new(alloc_id, offset)))
        }
        let Some(GlobalAlloc::VTable(ty, vtable_dyn_type)) =
            self.tcx.try_get_global_alloc(alloc_id)
        else {
            throw_ub!(InvalidVTablePointer(Pointer::new(alloc_id, offset)))
        };
        if let Some(expected_dyn_type) = expected_trait {
            self.check_vtable_for_type(vtable_dyn_type, expected_dyn_type)?;
        }
        interp_ok(ty)
    }

    pub fn alloc_mark_immutable(&mut self, id: AllocId) -> InterpResult<'tcx> {
        self.get_alloc_raw_mut(id)?.0.mutability = Mutability::Not;
        interp_ok(())
    }

    pub fn visit_reachable_allocs(
        &mut self,
        start: Vec<AllocId>,
        mut visit: impl FnMut(&mut Self, AllocId, &AllocInfo) -> InterpResult<'tcx>,
    ) -> InterpResult<'tcx> {
        let mut done = FxHashSet::default();
        let mut todo = start;
        while let Some(id) = todo.pop() {
            if !done.insert(id) {
                continue;
            }
            let info = self.get_alloc_info(id);

            if matches!(info.kind, AllocKind::LiveData) {
                let alloc = self.get_alloc_raw(id)?;
                for prov in alloc.provenance().provenances() {
                    if let Some(id) = prov.get_alloc_id() {
                        todo.push(id);
                    }
                }
            }

            visit(self, id, &info)?;
        }
        interp_ok(())
    }

    #[must_use]
    pub fn dump_alloc<'a>(&'a self, id: AllocId) -> DumpAllocs<'a, 'tcx, M> {
        self.dump_allocs(vec![id])
    }

    #[must_use]
    pub fn dump_allocs<'a>(&'a self, mut allocs: Vec<AllocId>) -> DumpAllocs<'a, 'tcx, M> {
        allocs.sort();
        allocs.dedup();
        DumpAllocs { ecx: self, allocs }
    }

    pub fn print_alloc_bytes_for_diagnostics(&self, id: AllocId) -> String {
        let alloc = self.get_alloc_raw(id).unwrap();
        let mut bytes = String::new();
        if alloc.size() != Size::ZERO {
            bytes = "\n".into();
            rustc_middle::mir::pretty::write_allocation_bytes(*self.tcx, alloc, &mut bytes, " ")
                .unwrap();
        }
        bytes
    }

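    /// Finds leaked allocations: removes and returns all allocations that are not reachable
    /// from `static_roots` or from global allocations and whose kind does not permit leaking.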
    pub fn take_leaked_allocations(
        &mut self,
        static_roots: impl FnOnce(&Self) -> &[AllocId],
    ) -> Vec<(AllocId, MemoryKind<M::MemoryKind>, Allocation<M::Provenance, M::AllocExtra, M::Bytes>)>
    {
        let reachable = {
            let mut reachable = FxHashSet::default();
            let global_kind = M::GLOBAL_KIND.map(MemoryKind::Machine);
            let mut todo: Vec<_> =
                self.memory.alloc_map.filter_map_collect(move |&id, &(kind, _)| {
                    if Some(kind) == global_kind { Some(id) } else { None }
                });
            todo.extend(static_roots(self));
            while let Some(id) = todo.pop() {
                if reachable.insert(id) {
                    if let Some((_, alloc)) = self.memory.alloc_map.get(id) {
                        todo.extend(
                            alloc.provenance().provenances().filter_map(|prov| prov.get_alloc_id()),
                        );
                    }
                }
            }
            reachable
        };

        let leaked: Vec<_> = self.memory.alloc_map.filter_map_collect(|&id, &(kind, _)| {
            if kind.may_leak() || reachable.contains(&id) { None } else { Some(id) }
        });
        let mut result = Vec::new();
        for &id in leaked.iter() {
            let (kind, alloc) = self.memory.alloc_map.remove(&id).unwrap();
            result.push((id, kind, alloc));
        }
        result
    }

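    /// Runs the closure with the `validation_in_progress` flag set, so that memory accesses
    /// performed during validation skip the machine's access hooks. The flag must not already
    /// be set.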
    pub fn run_for_validation_mut<R>(&mut self, f: impl FnOnce(&mut Self) -> R) -> R {
        assert!(
            self.memory.validation_in_progress.replace(true) == false,
            "`validation_in_progress` was already set"
        );
        let res = f(self);
        assert!(
            self.memory.validation_in_progress.replace(false) == true,
            "`validation_in_progress` was unset by someone else"
        );
        res
    }

    pub fn run_for_validation_ref<R>(&self, f: impl FnOnce(&Self) -> R) -> R {
        assert!(
            self.memory.validation_in_progress.replace(true) == false,
            "`validation_in_progress` was already set"
        );
        let res = f(self);
        assert!(
            self.memory.validation_in_progress.replace(false) == true,
            "`validation_in_progress` was unset by someone else"
        );
        res
    }

    pub(super) fn validation_in_progress(&self) -> bool {
        self.memory.validation_in_progress.get()
    }
}

#[doc(hidden)]
pub struct DumpAllocs<'a, 'tcx, M: Machine<'tcx>> {
    ecx: &'a InterpCx<'tcx, M>,
    allocs: Vec<AllocId>,
}

impl<'a, 'tcx, M: Machine<'tcx>> std::fmt::Debug for DumpAllocs<'a, 'tcx, M> {
    fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        fn write_allocation_track_relocs<'tcx, Prov: Provenance, Extra, Bytes: AllocBytes>(
            fmt: &mut std::fmt::Formatter<'_>,
            tcx: TyCtxt<'tcx>,
            allocs_to_print: &mut VecDeque<AllocId>,
            alloc: &Allocation<Prov, Extra, Bytes>,
        ) -> std::fmt::Result {
            for alloc_id in alloc.provenance().provenances().filter_map(|prov| prov.get_alloc_id())
            {
                allocs_to_print.push_back(alloc_id);
            }
            write!(fmt, "{}", display_allocation(tcx, alloc))
        }

        let mut allocs_to_print: VecDeque<_> = self.allocs.iter().copied().collect();
        let mut allocs_printed = FxHashSet::default();

        while let Some(id) = allocs_to_print.pop_front() {
            if !allocs_printed.insert(id) {
                continue;
            }

            write!(fmt, "{id:?}")?;
            match self.ecx.memory.alloc_map.get(id) {
                Some((kind, alloc)) => {
                    write!(fmt, " ({kind}, ")?;
                    write_allocation_track_relocs(
                        &mut *fmt,
                        *self.ecx.tcx,
                        &mut allocs_to_print,
                        alloc,
                    )?;
                }
                None => {
                    match self.ecx.tcx.try_get_global_alloc(id) {
                        Some(GlobalAlloc::Memory(alloc)) => {
                            write!(fmt, " (unchanged global, ")?;
                            write_allocation_track_relocs(
                                &mut *fmt,
                                *self.ecx.tcx,
                                &mut allocs_to_print,
                                alloc.inner(),
                            )?;
                        }
                        Some(GlobalAlloc::Function { instance, .. }) => {
                            write!(fmt, " (fn: {instance})")?;
                        }
                        Some(GlobalAlloc::VTable(ty, dyn_ty)) => {
                            write!(fmt, " (vtable: impl {dyn_ty} for {ty})")?;
                        }
                        Some(GlobalAlloc::TypeId { ty }) => {
                            write!(fmt, " (typeid for {ty})")?;
                        }
                        Some(GlobalAlloc::Static(did)) => {
                            write!(fmt, " (static: {})", self.ecx.tcx.def_path_str(did))?;
                        }
                        None => {
                            write!(fmt, " (deallocated)")?;
                        }
                    }
                }
            }
            writeln!(fmt)?;
        }
        Ok(())
    }
}

impl<'a, 'tcx, Prov: Provenance, Extra, Bytes: AllocBytes>
    AllocRefMut<'a, 'tcx, Prov, Extra, Bytes>
{
    pub fn as_ref<'b>(&'b self) -> AllocRef<'b, 'tcx, Prov, Extra, Bytes> {
        AllocRef { alloc: self.alloc, range: self.range, tcx: self.tcx, alloc_id: self.alloc_id }
    }

    pub fn write_scalar(&mut self, range: AllocRange, val: Scalar<Prov>) -> InterpResult<'tcx> {
        let range = self.range.subrange(range);
        debug!("write_scalar at {:?}{range:?}: {val:?}", self.alloc_id);

        self.alloc
            .write_scalar(&self.tcx, range, val)
            .map_err(|e| e.to_interp_error(self.alloc_id))
            .into()
    }

    pub fn write_ptr_sized(&mut self, offset: Size, val: Scalar<Prov>) -> InterpResult<'tcx> {
        self.write_scalar(alloc_range(offset, self.tcx.data_layout().pointer_size()), val)
    }

    pub fn write_uninit(&mut self, range: AllocRange) -> InterpResult<'tcx> {
        let range = self.range.subrange(range);

        self.alloc
            .write_uninit(&self.tcx, range)
            .map_err(|e| e.to_interp_error(self.alloc_id))
            .into()
    }

    pub fn write_uninit_full(&mut self) -> InterpResult<'tcx> {
        self.alloc
            .write_uninit(&self.tcx, self.range)
            .map_err(|e| e.to_interp_error(self.alloc_id))
            .into()
    }

    pub fn clear_provenance(&mut self) -> InterpResult<'tcx> {
        self.alloc
            .clear_provenance(&self.tcx, self.range)
            .map_err(|e| e.to_interp_error(self.alloc_id))
            .into()
    }
}

impl<'a, 'tcx, Prov: Provenance, Extra, Bytes: AllocBytes> AllocRef<'a, 'tcx, Prov, Extra, Bytes> {
    pub fn read_scalar(
        &self,
        range: AllocRange,
        read_provenance: bool,
    ) -> InterpResult<'tcx, Scalar<Prov>> {
        let range = self.range.subrange(range);
        self.alloc
            .read_scalar(&self.tcx, range, read_provenance)
            .map_err(|e| e.to_interp_error(self.alloc_id))
            .into()
    }

    pub fn read_integer(&self, range: AllocRange) -> InterpResult<'tcx, Scalar<Prov>> {
        self.read_scalar(range, false)
    }

    pub fn read_pointer(&self, offset: Size) -> InterpResult<'tcx, Scalar<Prov>> {
        self.read_scalar(
            alloc_range(offset, self.tcx.data_layout().pointer_size()),
            true,
        )
    }

    pub fn get_bytes_strip_provenance<'b>(&'b self) -> InterpResult<'tcx, &'a [u8]> {
        self.alloc
            .get_bytes_strip_provenance(&self.tcx, self.range)
            .map_err(|e| e.to_interp_error(self.alloc_id))
            .into()
    }

    pub fn has_provenance(&self) -> bool {
        !self.alloc.provenance().range_empty(self.range, &self.tcx)
    }
}

impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
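    /// Reads the given number of bytes from memory, stripping provenance. Returns them as a
    /// slice; a zero-sized read yields an empty slice and performs no access.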
    pub fn read_bytes_ptr_strip_provenance(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        size: Size,
    ) -> InterpResult<'tcx, &[u8]> {
        let Some(alloc_ref) = self.get_ptr_alloc(ptr, size)? else {
            return interp_ok(&[]);
        };
        interp_ok(
            alloc_ref
                .alloc
                .get_bytes_strip_provenance(&alloc_ref.tcx, alloc_ref.range)
                .map_err(|e| e.to_interp_error(alloc_ref.alloc_id))?,
        )
    }

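    /// Writes the given stream of bytes into memory. The iterator must provide an exact size
    /// hint; zero-length writes perform no memory access.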
    pub fn write_bytes_ptr(
        &mut self,
        ptr: Pointer<Option<M::Provenance>>,
        src: impl IntoIterator<Item = u8>,
    ) -> InterpResult<'tcx> {
        let mut src = src.into_iter();
        let (lower, upper) = src.size_hint();
        let len = upper.expect("can only write bounded iterators");
        assert_eq!(lower, len, "can only write iterators with a precise length");

        let size = Size::from_bytes(len);
        let Some(alloc_ref) = self.get_ptr_alloc_mut(ptr, size)? else {
            assert_matches!(src.next(), None, "iterator said it was empty but returned an element");
            return interp_ok(());
        };

        let alloc_id = alloc_ref.alloc_id;
        let bytes = alloc_ref
            .alloc
            .get_bytes_unchecked_for_overwrite(&alloc_ref.tcx, alloc_ref.range)
            .map_err(move |e| e.to_interp_error(alloc_id))?;
        for dest in bytes {
            *dest = src.next().expect("iterator was shorter than it said it would be");
        }
        assert_matches!(src.next(), None, "iterator was longer than it said it would be");
        interp_ok(())
    }

    pub fn mem_copy(
        &mut self,
        src: Pointer<Option<M::Provenance>>,
        dest: Pointer<Option<M::Provenance>>,
        size: Size,
        nonoverlapping: bool,
    ) -> InterpResult<'tcx> {
        self.mem_copy_repeatedly(src, dest, size, 1, nonoverlapping)
    }

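    /// Copies `size` bytes from `src` to `dest`, `num_copies` times at consecutive destination
    /// offsets, transferring provenance and initialization state along with the raw bytes.
    /// Overlap is only checked when `nonoverlapping` is set, and multi-copy requires it.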
    pub fn mem_copy_repeatedly(
        &mut self,
        src: Pointer<Option<M::Provenance>>,
        dest: Pointer<Option<M::Provenance>>,
        size: Size,
        num_copies: u64,
        nonoverlapping: bool,
    ) -> InterpResult<'tcx> {
        let tcx = self.tcx;
        let src_parts = self.get_ptr_access(src, size)?;
        let dest_parts = self.get_ptr_access(dest, size * num_copies)?;
        if let Ok((alloc_id, ..)) = self.ptr_try_get_alloc_id(src, size.bytes().try_into().unwrap())
        {
            M::before_alloc_access(tcx, &self.machine, alloc_id)?;
        }

        let Some((src_alloc_id, src_offset, src_prov)) = src_parts else {
            return interp_ok(());
        };
        let src_alloc = self.get_alloc_raw(src_alloc_id)?;
        let src_range = alloc_range(src_offset, size);
        assert!(!self.memory.validation_in_progress.get(), "we can't be copying during validation");

        M::before_memory_read(
            tcx,
            &self.machine,
            &src_alloc.extra,
            src,
            (src_alloc_id, src_prov),
            src_range,
        )?;
        let Some((dest_alloc_id, dest_offset, dest_prov)) = dest_parts else {
            return interp_ok(());
        };

        let src_bytes = src_alloc.get_bytes_unchecked(src_range).as_ptr();
        let provenance = src_alloc
            .provenance()
            .prepare_copy(src_range, dest_offset, num_copies, self)
            .map_err(|e| e.to_interp_error(src_alloc_id))?;
        let init = src_alloc.init_mask().prepare_copy(src_range);

        let (dest_alloc, machine) = self.get_alloc_raw_mut(dest_alloc_id)?;
        let dest_range = alloc_range(dest_offset, size * num_copies);
        M::before_alloc_access(tcx, machine, dest_alloc_id)?;
        M::before_memory_write(
            tcx,
            machine,
            &mut dest_alloc.extra,
            dest,
            (dest_alloc_id, dest_prov),
            dest_range,
        )?;
        let dest_bytes = dest_alloc
            .get_bytes_unchecked_for_overwrite_ptr(&tcx, dest_range)
            .map_err(|e| e.to_interp_error(dest_alloc_id))?
            .as_mut_ptr();

        if init.no_bytes_init() {
            dest_alloc
                .write_uninit(&tcx, dest_range)
                .map_err(|e| e.to_interp_error(dest_alloc_id))?;
            return interp_ok(());
        }

        unsafe {
            if src_alloc_id == dest_alloc_id {
                if nonoverlapping {
                    if (src_offset <= dest_offset && src_offset + size > dest_offset)
                        || (dest_offset <= src_offset && dest_offset + size > src_offset)
                    {
                        throw_ub_custom!(fluent::const_eval_copy_nonoverlapping_overlapping);
                    }
                }
            }
            if num_copies > 1 {
                assert!(nonoverlapping, "multi-copy only supported in non-overlapping mode");
            }

            let size_in_bytes = size.bytes_usize();
            if size_in_bytes == 1 {
                debug_assert!(num_copies >= 1);
                let value = *src_bytes;
                dest_bytes.write_bytes(value, (size * num_copies).bytes_usize());
            } else if src_alloc_id == dest_alloc_id {
                let mut dest_ptr = dest_bytes;
                for _ in 0..num_copies {
                    ptr::copy(src_bytes, dest_ptr, size_in_bytes);
                    dest_ptr = dest_ptr.add(size_in_bytes);
                }
            } else {
                let mut dest_ptr = dest_bytes;
                for _ in 0..num_copies {
                    ptr::copy_nonoverlapping(src_bytes, dest_ptr, size_in_bytes);
                    dest_ptr = dest_ptr.add(size_in_bytes);
                }
            }
        }

        dest_alloc.init_mask_apply_copy(
            init,
            alloc_range(dest_offset, size),
            num_copies,
        );
        dest_alloc.provenance_apply_copy(provenance);

        interp_ok(())
    }
}

impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
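    /// Tests if a scalar may be null: integers are compared against zero directly, while a
    /// pointer counts as definitely non-null if it is in bounds of its allocation or its offset
    /// breaks the allocation's alignment; otherwise we conservatively answer "maybe null".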
    pub fn scalar_may_be_null(&self, scalar: Scalar<M::Provenance>) -> InterpResult<'tcx, bool> {
        match scalar.try_to_scalar_int() {
            Ok(int) => interp_ok(int.is_null()),
            Err(_) => {
                let ptr = scalar.to_pointer(self)?;
                match self.ptr_try_get_alloc_id(ptr, 0) {
                    Ok((alloc_id, offset, _)) => {
                        let info = self.get_alloc_info(alloc_id);
                        if offset <= info.size {
                            return interp_ok(false);
                        }
                        if !offset.bytes().is_multiple_of(info.align.bytes()) {
                            return interp_ok(false);
                        }
                        interp_ok(true)
                    }
                    Err(_offset) => bug!("a non-int scalar is always a pointer"),
                }
            }
        }
    }

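    /// Turns a pointer into its `(AllocId, offset, extra)` parts, provided the machine can
    /// resolve its provenance; otherwise returns the pointer's absolute address as the `Err`
    /// value. `size` describes the intended access and is forwarded to the machine.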
    pub fn ptr_try_get_alloc_id(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        size: i64,
    ) -> Result<(AllocId, Size, M::ProvenanceExtra), u64> {
        match ptr.into_pointer_or_addr() {
            Ok(ptr) => match M::ptr_get_alloc(self, ptr, size) {
                Some((alloc_id, offset, extra)) => Ok((alloc_id, offset, extra)),
                None => {
                    assert!(M::Provenance::OFFSET_IS_ADDR);
                    let (_, addr) = ptr.into_raw_parts();
                    Err(addr.bytes())
                }
            },
            Err(addr) => Err(addr.bytes()),
        }
    }

    #[inline(always)]
    pub fn ptr_get_alloc_id(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        size: i64,
    ) -> InterpResult<'tcx, (AllocId, Size, M::ProvenanceExtra)> {
        self.ptr_try_get_alloc_id(ptr, size)
            .map_err(|offset| {
                err_ub!(DanglingIntPointer {
                    addr: offset,
                    inbounds_size: size,
                    msg: CheckInAllocMsg::Dereferenceable
                })
            })
            .into()
    }
}