use std::assert_matches::assert_matches;
use std::borrow::{Borrow, Cow};
use std::cell::Cell;
use std::collections::VecDeque;
use std::{fmt, ptr};

use rustc_abi::{Align, HasDataLayout, Size};
use rustc_ast::Mutability;
use rustc_data_structures::fx::{FxHashSet, FxIndexMap};
use rustc_middle::bug;
use rustc_middle::mir::display_allocation;
use rustc_middle::ty::{self, Instance, Ty, TyCtxt};
use tracing::{debug, instrument, trace};

use super::{
    AllocBytes, AllocId, AllocInit, AllocMap, AllocRange, Allocation, CheckAlignMsg,
    CheckInAllocMsg, CtfeProvenance, GlobalAlloc, InterpCx, InterpResult, Machine, MayLeak,
    Misalignment, Pointer, PointerArithmetic, Provenance, Scalar, alloc_range, err_ub,
    err_ub_custom, interp_ok, throw_ub, throw_ub_custom, throw_unsup, throw_unsup_format,
};
use crate::fluent_generated as fluent;

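/// The kind of memory an allocation lives in: interpreter stack memory, the
/// caller-location allocation, or a machine-defined kind `T`.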
#[derive(Debug, PartialEq, Copy, Clone)]
pub enum MemoryKind<T> {
    Stack,
    CallerLocation,
    Machine(T),
}

impl<T: MayLeak> MayLeak for MemoryKind<T> {
    #[inline]
    fn may_leak(self) -> bool {
        match self {
            MemoryKind::Stack => false,
            MemoryKind::CallerLocation => true,
            MemoryKind::Machine(k) => k.may_leak(),
        }
    }
}

impl<T: fmt::Display> fmt::Display for MemoryKind<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            MemoryKind::Stack => write!(f, "stack variable"),
            MemoryKind::CallerLocation => write!(f, "caller location"),
            MemoryKind::Machine(m) => write!(f, "{m}"),
        }
    }
}

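/// The kind of allocation an `AllocId` refers to, as reported by `get_alloc_info`.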
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum AllocKind {
    LiveData,
    Function,
    VTable,
    Dead,
}

#[derive(Copy, Clone, PartialEq, Debug)]
pub struct AllocInfo {
    pub size: Size,
    pub align: Align,
    pub kind: AllocKind,
    pub mutbl: Mutability,
}

impl AllocInfo {
    fn new(size: Size, align: Align, kind: AllocKind, mutbl: Mutability) -> Self {
        Self { size, align, kind, mutbl }
    }
}

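/// A function value callable by the interpreter: either a monomorphized `Instance`
/// or a machine-defined "extra" function value.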
#[derive(Debug, Copy, Clone)]
pub enum FnVal<'tcx, Other> {
    Instance(Instance<'tcx>),
    Other(Other),
}

impl<'tcx, Other> FnVal<'tcx, Other> {
    pub fn as_instance(self) -> InterpResult<'tcx, Instance<'tcx>> {
        match self {
            FnVal::Instance(instance) => interp_ok(instance),
            FnVal::Other(_) => {
                throw_unsup_format!("'foreign' function pointers are not supported in this context")
            }
        }
    }
}

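/// The interpreter's memory state: all locally created allocations, plus the
/// bookkeeping for extra function pointers and already-deallocated allocations.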
pub struct Memory<'tcx, M: Machine<'tcx>> {
    pub(super) alloc_map: M::MemoryMap,

    extra_fn_ptr_map: FxIndexMap<AllocId, M::ExtraFnVal>,

    pub(super) dead_alloc_map: FxIndexMap<AllocId, (Size, Align)>,

    validation_in_progress: Cell<bool>,
}

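/// A reference to a sub-range of an allocation, bundled with everything needed to
/// read from it. `AllocRefMut` below is the mutable counterpart.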
#[derive(Copy, Clone)]
pub struct AllocRef<'a, 'tcx, Prov: Provenance, Extra, Bytes: AllocBytes = Box<[u8]>> {
    alloc: &'a Allocation<Prov, Extra, Bytes>,
    range: AllocRange,
    tcx: TyCtxt<'tcx>,
    alloc_id: AllocId,
}

pub struct AllocRefMut<'a, 'tcx, Prov: Provenance, Extra, Bytes: AllocBytes = Box<[u8]>> {
    alloc: &'a mut Allocation<Prov, Extra, Bytes>,
    range: AllocRange,
    tcx: TyCtxt<'tcx>,
    alloc_id: AllocId,
}

impl<'tcx, M: Machine<'tcx>> Memory<'tcx, M> {
    pub fn new() -> Self {
        Memory {
            alloc_map: M::MemoryMap::default(),
            extra_fn_ptr_map: FxIndexMap::default(),
            dead_alloc_map: FxIndexMap::default(),
            validation_in_progress: Cell::new(false),
        }
    }

    pub fn alloc_map(&self) -> &M::MemoryMap {
        &self.alloc_map
    }
}

impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
    #[inline]
    pub fn global_root_pointer(
        &self,
        ptr: Pointer<CtfeProvenance>,
    ) -> InterpResult<'tcx, Pointer<M::Provenance>> {
        let alloc_id = ptr.provenance.alloc_id();
        match self.tcx.try_get_global_alloc(alloc_id) {
            Some(GlobalAlloc::Static(def_id)) if self.tcx.is_thread_local_static(def_id) => {
                bug!("global memory cannot point to thread-local static")
            }
            Some(GlobalAlloc::Static(def_id)) if self.tcx.is_foreign_item(def_id) => {
                return M::extern_static_pointer(self, def_id);
            }
            None => {
                assert!(
                    self.memory.extra_fn_ptr_map.contains_key(&alloc_id),
                    "{alloc_id:?} is neither global nor a function pointer"
                );
            }
            _ => {}
        }
        M::adjust_alloc_root_pointer(self, ptr, M::GLOBAL_KIND.map(MemoryKind::Machine))
    }

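    /// Creates a pointer to the given function value, registering machine-defined
    /// "extra" function values in `extra_fn_ptr_map` as needed.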
    pub fn fn_ptr(&mut self, fn_val: FnVal<'tcx, M::ExtraFnVal>) -> Pointer<M::Provenance> {
        let id = match fn_val {
            FnVal::Instance(instance) => {
                let salt = M::get_global_alloc_salt(self, Some(instance));
                self.tcx.reserve_and_set_fn_alloc(instance, salt)
            }
            FnVal::Other(extra) => {
                let id = self.tcx.reserve_alloc_id();
                let old = self.memory.extra_fn_ptr_map.insert(id, extra);
                assert!(old.is_none());
                id
            }
        };
        self.global_root_pointer(Pointer::from(id)).unwrap()
    }

    pub fn allocate_ptr(
        &mut self,
        size: Size,
        align: Align,
        kind: MemoryKind<M::MemoryKind>,
        init: AllocInit,
    ) -> InterpResult<'tcx, Pointer<M::Provenance>> {
        let params = self.machine.get_default_alloc_params();
        let alloc = if M::PANIC_ON_ALLOC_FAIL {
            Allocation::new(size, align, init, params)
        } else {
            Allocation::try_new(size, align, init, params)?
        };
        self.insert_allocation(alloc, kind)
    }

    pub fn allocate_bytes_ptr(
        &mut self,
        bytes: &[u8],
        align: Align,
        kind: MemoryKind<M::MemoryKind>,
        mutability: Mutability,
    ) -> InterpResult<'tcx, Pointer<M::Provenance>> {
        let params = self.machine.get_default_alloc_params();
        let alloc = Allocation::from_bytes(bytes, align, mutability, params);
        self.insert_allocation(alloc, kind)
    }

    pub fn insert_allocation(
        &mut self,
        alloc: Allocation<M::Provenance, (), M::Bytes>,
        kind: MemoryKind<M::MemoryKind>,
    ) -> InterpResult<'tcx, Pointer<M::Provenance>> {
        assert!(alloc.size() <= self.max_size_of_val());
        let id = self.tcx.reserve_alloc_id();
        debug_assert_ne!(
            Some(kind),
            M::GLOBAL_KIND.map(MemoryKind::Machine),
            "dynamically allocating global memory"
        );
        let extra = M::init_local_allocation(self, id, kind, alloc.size(), alloc.align)?;
        let alloc = alloc.with_extra(extra);
        self.memory.alloc_map.insert(id, (kind, alloc));
        M::adjust_alloc_root_pointer(self, Pointer::from(id), Some(kind))
    }
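
    /// Resizes an allocation: allocates new memory, copies over the overlapping prefix
    /// of the old contents, and deallocates the old allocation. The pointer must point
    /// to the start of the allocation.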
    pub fn reallocate_ptr(
        &mut self,
        ptr: Pointer<Option<M::Provenance>>,
        old_size_and_align: Option<(Size, Align)>,
        new_size: Size,
        new_align: Align,
        kind: MemoryKind<M::MemoryKind>,
        init_growth: AllocInit,
    ) -> InterpResult<'tcx, Pointer<M::Provenance>> {
        let (alloc_id, offset, _prov) = self.ptr_get_alloc_id(ptr, 0)?;
        if offset.bytes() != 0 {
            throw_ub_custom!(
                fluent::const_eval_realloc_or_alloc_with_offset,
                ptr = format!("{ptr:?}"),
                kind = "realloc"
            );
        }

        let new_ptr = self.allocate_ptr(new_size, new_align, kind, init_growth)?;
        let old_size = match old_size_and_align {
            Some((size, _align)) => size,
            None => self.get_alloc_raw(alloc_id)?.size(),
        };
        self.mem_copy(ptr, new_ptr.into(), old_size.min(new_size), true)?;
        self.deallocate_ptr(ptr, old_size_and_align, kind)?;

        interp_ok(new_ptr)
    }
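
    /// Deallocates an entire allocation, reporting UB for pointers with a nonzero
    /// offset, for global/function/vtable allocations, and for kind or layout mismatches.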
    #[instrument(skip(self), level = "debug")]
    pub fn deallocate_ptr(
        &mut self,
        ptr: Pointer<Option<M::Provenance>>,
        old_size_and_align: Option<(Size, Align)>,
        kind: MemoryKind<M::MemoryKind>,
    ) -> InterpResult<'tcx> {
        let (alloc_id, offset, prov) = self.ptr_get_alloc_id(ptr, 0)?;
        trace!("deallocating: {alloc_id:?}");

        if offset.bytes() != 0 {
            throw_ub_custom!(
                fluent::const_eval_realloc_or_alloc_with_offset,
                ptr = format!("{ptr:?}"),
                kind = "dealloc",
            );
        }

        let Some((alloc_kind, mut alloc)) = self.memory.alloc_map.remove(&alloc_id) else {
            return Err(match self.tcx.try_get_global_alloc(alloc_id) {
                Some(GlobalAlloc::Function { .. }) => {
                    err_ub_custom!(
                        fluent::const_eval_invalid_dealloc,
                        alloc_id = alloc_id,
                        kind = "fn",
                    )
                }
                Some(GlobalAlloc::VTable(..)) => {
                    err_ub_custom!(
                        fluent::const_eval_invalid_dealloc,
                        alloc_id = alloc_id,
                        kind = "vtable",
                    )
                }
                Some(GlobalAlloc::Static(..) | GlobalAlloc::Memory(..)) => {
                    err_ub_custom!(
                        fluent::const_eval_invalid_dealloc,
                        alloc_id = alloc_id,
                        kind = "static_mem"
                    )
                }
                None => err_ub!(PointerUseAfterFree(alloc_id, CheckInAllocMsg::MemoryAccess)),
            })
            .into();
        };

        if alloc.mutability.is_not() {
            throw_ub_custom!(fluent::const_eval_dealloc_immutable, alloc = alloc_id,);
        }
        if alloc_kind != kind {
            throw_ub_custom!(
                fluent::const_eval_dealloc_kind_mismatch,
                alloc = alloc_id,
                alloc_kind = format!("{alloc_kind}"),
                kind = format!("{kind}"),
            );
        }
        if let Some((size, align)) = old_size_and_align {
            if size != alloc.size() || align != alloc.align {
                throw_ub_custom!(
                    fluent::const_eval_dealloc_incorrect_layout,
                    alloc = alloc_id,
                    size = alloc.size().bytes(),
                    align = alloc.align.bytes(),
                    size_found = size.bytes(),
                    align_found = align.bytes(),
                )
            }
        }

        let size = alloc.size();
        M::before_memory_deallocation(
            self.tcx,
            &mut self.machine,
            &mut alloc.extra,
            ptr,
            (alloc_id, prov),
            size,
            alloc.align,
            kind,
        )?;

        let old = self.memory.dead_alloc_map.insert(alloc_id, (size, alloc.align));
        if old.is_some() {
            bug!("Nothing can be deallocated twice");
        }

        interp_ok(())
    }
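
    /// Resolves a pointer for an access of `size` bytes, returning its allocation id,
    /// offset, and provenance extra, or `None` for zero-sized accesses.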
    #[inline(always)]
    fn get_ptr_access(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        size: Size,
    ) -> InterpResult<'tcx, Option<(AllocId, Size, M::ProvenanceExtra)>> {
        let size = i64::try_from(size.bytes()).unwrap();
        Self::check_and_deref_ptr(
            self,
            ptr,
            size,
            CheckInAllocMsg::MemoryAccess,
            |this, alloc_id, offset, prov| {
                let (size, align) =
                    this.get_live_alloc_size_and_align(alloc_id, CheckInAllocMsg::MemoryAccess)?;
                interp_ok((size, align, (alloc_id, offset, prov)))
            },
        )
    }

    #[inline(always)]
    pub fn check_ptr_access(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        size: Size,
        msg: CheckInAllocMsg,
    ) -> InterpResult<'tcx> {
        let size = i64::try_from(size.bytes()).unwrap();
        Self::check_and_deref_ptr(self, ptr, size, msg, |this, alloc_id, _, _| {
            let (size, align) = this.get_live_alloc_size_and_align(alloc_id, msg)?;
            interp_ok((size, align, ()))
        })?;
        interp_ok(())
    }

    pub fn check_ptr_access_signed(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        size: i64,
        msg: CheckInAllocMsg,
    ) -> InterpResult<'tcx> {
        Self::check_and_deref_ptr(self, ptr, size, msg, |this, alloc_id, _, _| {
            let (size, align) = this.get_live_alloc_size_and_align(alloc_id, msg)?;
            interp_ok((size, align, ()))
        })?;
        interp_ok(())
    }
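
    /// Shared helper for the access checks above: resolves `ptr`, asks `alloc_size`
    /// for the allocation's size and a caller-defined return value, and reports UB if
    /// the (possibly negative) `size`-byte range is out of bounds. Zero-sized accesses
    /// are always allowed and return `None`.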
    fn check_and_deref_ptr<T, R: Borrow<Self>>(
        this: R,
        ptr: Pointer<Option<M::Provenance>>,
        size: i64,
        msg: CheckInAllocMsg,
        alloc_size: impl FnOnce(
            R,
            AllocId,
            Size,
            M::ProvenanceExtra,
        ) -> InterpResult<'tcx, (Size, Align, T)>,
    ) -> InterpResult<'tcx, Option<T>> {
        if size == 0 {
            return interp_ok(None);
        }

        interp_ok(match this.borrow().ptr_try_get_alloc_id(ptr, size) {
            Err(addr) => {
                throw_ub!(DanglingIntPointer { addr, inbounds_size: size, msg });
            }
            Ok((alloc_id, offset, prov)) => {
                let tcx = this.borrow().tcx;
                let (alloc_size, _alloc_align, ret_val) = alloc_size(this, alloc_id, offset, prov)?;
                let offset = offset.bytes();
                let (begin, end) = if size >= 0 {
                    (Some(offset), offset.checked_add(size as u64))
                } else {
                    (offset.checked_sub(size.unsigned_abs()), Some(offset))
                };
                let in_bounds = begin.is_some() && end.is_some_and(|e| e <= alloc_size.bytes());
                if !in_bounds {
                    throw_ub!(PointerOutOfBounds {
                        alloc_id,
                        alloc_size,
                        ptr_offset: tcx.sign_extend_to_target_isize(offset),
                        inbounds_size: size,
                        msg,
                    })
                }

                Some(ret_val)
            }
        })
    }

    pub(super) fn check_misalign(
        &self,
        misaligned: Option<Misalignment>,
        msg: CheckAlignMsg,
    ) -> InterpResult<'tcx> {
        if let Some(misaligned) = misaligned {
            throw_ub!(AlignmentCheckFailed(misaligned, msg))
        }
        interp_ok(())
    }

    pub(super) fn is_ptr_misaligned(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        align: Align,
    ) -> Option<Misalignment> {
        if !M::enforce_alignment(self) || align.bytes() == 1 {
            return None;
        }

        #[inline]
        fn is_offset_misaligned(offset: u64, align: Align) -> Option<Misalignment> {
            if offset.is_multiple_of(align.bytes()) {
                None
            } else {
                let offset_pow2 = 1 << offset.trailing_zeros();
                Some(Misalignment { has: Align::from_bytes(offset_pow2).unwrap(), required: align })
            }
        }

        match self.ptr_try_get_alloc_id(ptr, 0) {
            Err(addr) => is_offset_misaligned(addr, align),
            Ok((alloc_id, offset, _prov)) => {
                let alloc_info = self.get_alloc_info(alloc_id);
                if let Some(misalign) = M::alignment_check(
                    self,
                    alloc_id,
                    alloc_info.align,
                    alloc_info.kind,
                    offset,
                    align,
                ) {
                    Some(misalign)
                } else if M::Provenance::OFFSET_IS_ADDR {
                    is_offset_misaligned(ptr.addr().bytes(), align)
                } else {
                    if alloc_info.align.bytes() < align.bytes() {
                        Some(Misalignment { has: alloc_info.align, required: align })
                    } else {
                        is_offset_misaligned(offset.bytes(), align)
                    }
                }
            }
        }
    }

    pub fn check_ptr_align(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        align: Align,
    ) -> InterpResult<'tcx> {
        self.check_misalign(self.is_ptr_misaligned(ptr, align), CheckAlignMsg::AccessedPtr)
    }
}

impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
    pub fn remove_unreachable_allocs(&mut self, reachable_allocs: &FxHashSet<AllocId>) {
        #[allow(rustc::potential_query_instability)]
        self.memory.dead_alloc_map.retain(|id, _| reachable_allocs.contains(id));
    }
}

impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
    fn get_global_alloc(
        &self,
        id: AllocId,
        is_write: bool,
    ) -> InterpResult<'tcx, Cow<'tcx, Allocation<M::Provenance, M::AllocExtra, M::Bytes>>> {
        let (alloc, def_id) = match self.tcx.try_get_global_alloc(id) {
            Some(GlobalAlloc::Memory(mem)) => (mem, None),
            Some(GlobalAlloc::Function { .. }) => throw_ub!(DerefFunctionPointer(id)),
            Some(GlobalAlloc::VTable(..)) => throw_ub!(DerefVTablePointer(id)),
            None => throw_ub!(PointerUseAfterFree(id, CheckInAllocMsg::MemoryAccess)),
            Some(GlobalAlloc::Static(def_id)) => {
                assert!(self.tcx.is_static(def_id));
                assert!(!self.tcx.is_thread_local_static(def_id));
                if self.tcx.is_foreign_item(def_id) {
                    throw_unsup!(ExternStatic(def_id));
                }

                let val = self.ctfe_query(|tcx| tcx.eval_static_initializer(def_id))?;
                (val, Some(def_id))
            }
        };
        M::before_access_global(self.tcx, &self.machine, id, alloc, def_id, is_write)?;
        M::adjust_global_allocation(
            self,
            id,
            alloc.inner(),
        )
    }
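
    /// Gives raw, read-only access to the `Allocation` behind `id`, copying a global
    /// allocation into the local `alloc_map` only when the machine adjusts it into an
    /// owned value.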
    pub fn get_alloc_raw(
        &self,
        id: AllocId,
    ) -> InterpResult<'tcx, &Allocation<M::Provenance, M::AllocExtra, M::Bytes>> {
        let a = self.memory.alloc_map.get_or(id, || {
            let alloc = self.get_global_alloc(id, false).report_err().map_err(Err)?;
            match alloc {
                Cow::Borrowed(alloc) => Err(Ok(alloc)),
                Cow::Owned(alloc) => {
                    let kind = M::GLOBAL_KIND.expect(
                        "I got a global allocation that I have to copy but the machine does \
                         not expect that to happen",
                    );
                    Ok((MemoryKind::Machine(kind), alloc))
                }
            }
        });
        match a {
            Ok(a) => interp_ok(&a.1),
            Err(a) => a.into(),
        }
    }

    pub fn get_alloc_bytes_unchecked_raw(&self, id: AllocId) -> InterpResult<'tcx, *const u8> {
        let alloc = self.get_alloc_raw(id)?;
        interp_ok(alloc.get_bytes_unchecked_raw())
    }

    pub fn get_ptr_alloc<'a>(
        &'a self,
        ptr: Pointer<Option<M::Provenance>>,
        size: Size,
    ) -> InterpResult<'tcx, Option<AllocRef<'a, 'tcx, M::Provenance, M::AllocExtra, M::Bytes>>>
    {
        let size_i64 = i64::try_from(size.bytes()).unwrap();
        let ptr_and_alloc = Self::check_and_deref_ptr(
            self,
            ptr,
            size_i64,
            CheckInAllocMsg::MemoryAccess,
            |this, alloc_id, offset, prov| {
                let alloc = this.get_alloc_raw(alloc_id)?;
                interp_ok((alloc.size(), alloc.align, (alloc_id, offset, prov, alloc)))
            },
        )?;
        if !self.memory.validation_in_progress.get() {
            if let Ok((alloc_id, ..)) = self.ptr_try_get_alloc_id(ptr, size_i64) {
                M::before_alloc_access(self.tcx, &self.machine, alloc_id)?;
            }
        }

        if let Some((alloc_id, offset, prov, alloc)) = ptr_and_alloc {
            let range = alloc_range(offset, size);
            if !self.memory.validation_in_progress.get() {
                M::before_memory_read(
                    self.tcx,
                    &self.machine,
                    &alloc.extra,
                    ptr,
                    (alloc_id, prov),
                    range,
                )?;
            }
            interp_ok(Some(AllocRef { alloc, range, tcx: *self.tcx, alloc_id }))
        } else {
            interp_ok(None)
        }
    }

    pub fn get_alloc_extra<'a>(&'a self, id: AllocId) -> InterpResult<'tcx, &'a M::AllocExtra> {
        interp_ok(&self.get_alloc_raw(id)?.extra)
    }

    pub fn get_alloc_mutability<'a>(&'a self, id: AllocId) -> InterpResult<'tcx, Mutability> {
        interp_ok(self.get_alloc_raw(id)?.mutability)
    }
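
    /// Gives raw, mutable access to the `Allocation` behind `id`, copying a global
    /// allocation into the local `alloc_map` first if necessary. Also returns the
    /// machine so callers can keep using it while the allocation is borrowed.
    /// Writing to a read-only allocation reports UB.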
    pub fn get_alloc_raw_mut(
        &mut self,
        id: AllocId,
    ) -> InterpResult<'tcx, (&mut Allocation<M::Provenance, M::AllocExtra, M::Bytes>, &mut M)> {
        if self.memory.alloc_map.get_mut(id).is_none() {
            let alloc = self.get_global_alloc(id, true)?;
            let kind = M::GLOBAL_KIND.expect(
                "I got a global allocation that I have to copy but the machine does \
                 not expect that to happen",
            );
            self.memory.alloc_map.insert(id, (MemoryKind::Machine(kind), alloc.into_owned()));
        }

        let (_kind, alloc) = self.memory.alloc_map.get_mut(id).unwrap();
        if alloc.mutability.is_not() {
            throw_ub!(WriteToReadOnly(id))
        }
        interp_ok((alloc, &mut self.machine))
    }

    pub fn get_alloc_bytes_unchecked_raw_mut(
        &mut self,
        id: AllocId,
    ) -> InterpResult<'tcx, *mut u8> {
        let alloc = self.get_alloc_raw_mut(id)?.0;
        interp_ok(alloc.get_bytes_unchecked_raw_mut())
    }

    pub fn get_ptr_alloc_mut<'a>(
        &'a mut self,
        ptr: Pointer<Option<M::Provenance>>,
        size: Size,
    ) -> InterpResult<'tcx, Option<AllocRefMut<'a, 'tcx, M::Provenance, M::AllocExtra, M::Bytes>>>
    {
        let tcx = self.tcx;
        let validation_in_progress = self.memory.validation_in_progress.get();

        let size_i64 = i64::try_from(size.bytes()).unwrap();
        let ptr_and_alloc = Self::check_and_deref_ptr(
            self,
            ptr,
            size_i64,
            CheckInAllocMsg::MemoryAccess,
            |this, alloc_id, offset, prov| {
                let (alloc, machine) = this.get_alloc_raw_mut(alloc_id)?;
                interp_ok((alloc.size(), alloc.align, (alloc_id, offset, prov, alloc, machine)))
            },
        )?;

        if let Some((alloc_id, offset, prov, alloc, machine)) = ptr_and_alloc {
            let range = alloc_range(offset, size);
            if !validation_in_progress {
                M::before_alloc_access(tcx, machine, alloc_id)?;
                M::before_memory_write(
                    tcx,
                    machine,
                    &mut alloc.extra,
                    ptr,
                    (alloc_id, prov),
                    range,
                )?;
            }
            interp_ok(Some(AllocRefMut { alloc, range, tcx: *tcx, alloc_id }))
        } else {
            interp_ok(None)
        }
    }

    pub fn get_alloc_extra_mut<'a>(
        &'a mut self,
        id: AllocId,
    ) -> InterpResult<'tcx, (&'a mut M::AllocExtra, &'a mut M)> {
        let (alloc, machine) = self.get_alloc_raw_mut(id)?;
        interp_ok((&mut alloc.extra, machine))
    }

    pub fn is_alloc_live(&self, id: AllocId) -> bool {
        self.memory.alloc_map.contains_key_ref(&id)
            || self.memory.extra_fn_ptr_map.contains_key(&id)
            || self.tcx.try_get_global_alloc(id).is_some()
    }
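
    /// Returns size, alignment, kind, and mutability for any allocation, whether it
    /// is live local memory, a function, a global, or already deallocated.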
    pub fn get_alloc_info(&self, id: AllocId) -> AllocInfo {
        if let Some((_, alloc)) = self.memory.alloc_map.get(id) {
            return AllocInfo::new(
                alloc.size(),
                alloc.align,
                AllocKind::LiveData,
                alloc.mutability,
            );
        }

        if let Some(fn_val) = self.get_fn_alloc(id) {
            let align = match fn_val {
                FnVal::Instance(instance) => {
                    self.tcx.codegen_fn_attrs(instance.def_id()).alignment.unwrap_or(Align::ONE)
                }
                FnVal::Other(_) => Align::ONE,
            };

            return AllocInfo::new(Size::ZERO, align, AllocKind::Function, Mutability::Not);
        }

        if let Some(global_alloc) = self.tcx.try_get_global_alloc(id) {
            let (size, align) = global_alloc.size_and_align(*self.tcx, self.typing_env);
            let mutbl = global_alloc.mutability(*self.tcx, self.typing_env);
            let kind = match global_alloc {
                GlobalAlloc::Static { .. } | GlobalAlloc::Memory { .. } => AllocKind::LiveData,
                GlobalAlloc::Function { .. } => bug!("We already checked function pointers above"),
                GlobalAlloc::VTable { .. } => AllocKind::VTable,
            };
            return AllocInfo::new(size, align, kind, mutbl);
        }

        let (size, align) = *self
            .memory
            .dead_alloc_map
            .get(&id)
            .expect("deallocated pointers should all be recorded in `dead_alloc_map`");
        AllocInfo::new(size, align, AllocKind::Dead, Mutability::Not)
    }

    fn get_live_alloc_size_and_align(
        &self,
        id: AllocId,
        msg: CheckInAllocMsg,
    ) -> InterpResult<'tcx, (Size, Align)> {
        let info = self.get_alloc_info(id);
        if matches!(info.kind, AllocKind::Dead) {
            throw_ub!(PointerUseAfterFree(id, msg))
        }
        interp_ok((info.size, info.align))
    }

    fn get_fn_alloc(&self, id: AllocId) -> Option<FnVal<'tcx, M::ExtraFnVal>> {
        if let Some(extra) = self.memory.extra_fn_ptr_map.get(&id) {
            Some(FnVal::Other(*extra))
        } else {
            match self.tcx.try_get_global_alloc(id) {
                Some(GlobalAlloc::Function { instance, .. }) => Some(FnVal::Instance(instance)),
                _ => None,
            }
        }
    }

    pub fn get_ptr_fn(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
    ) -> InterpResult<'tcx, FnVal<'tcx, M::ExtraFnVal>> {
        trace!("get_ptr_fn({:?})", ptr);
        let (alloc_id, offset, _prov) = self.ptr_get_alloc_id(ptr, 0)?;
        if offset.bytes() != 0 {
            throw_ub!(InvalidFunctionPointer(Pointer::new(alloc_id, offset)))
        }
        self.get_fn_alloc(alloc_id)
            .ok_or_else(|| err_ub!(InvalidFunctionPointer(Pointer::new(alloc_id, offset))))
            .into()
    }

    pub fn get_ptr_vtable_ty(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        expected_trait: Option<&'tcx ty::List<ty::PolyExistentialPredicate<'tcx>>>,
    ) -> InterpResult<'tcx, Ty<'tcx>> {
        trace!("get_ptr_vtable({:?})", ptr);
        let (alloc_id, offset, _tag) = self.ptr_get_alloc_id(ptr, 0)?;
        if offset.bytes() != 0 {
            throw_ub!(InvalidVTablePointer(Pointer::new(alloc_id, offset)))
        }
        let Some(GlobalAlloc::VTable(ty, vtable_dyn_type)) =
            self.tcx.try_get_global_alloc(alloc_id)
        else {
            throw_ub!(InvalidVTablePointer(Pointer::new(alloc_id, offset)))
        };
        if let Some(expected_dyn_type) = expected_trait {
            self.check_vtable_for_type(vtable_dyn_type, expected_dyn_type)?;
        }
        interp_ok(ty)
    }

    pub fn alloc_mark_immutable(&mut self, id: AllocId) -> InterpResult<'tcx> {
        self.get_alloc_raw_mut(id)?.0.mutability = Mutability::Not;
        interp_ok(())
    }

    pub fn visit_reachable_allocs(
        &mut self,
        start: Vec<AllocId>,
        mut visit: impl FnMut(&mut Self, AllocId, &AllocInfo) -> InterpResult<'tcx>,
    ) -> InterpResult<'tcx> {
        let mut done = FxHashSet::default();
        let mut todo = start;
        while let Some(id) = todo.pop() {
            if !done.insert(id) {
                continue;
            }
            let info = self.get_alloc_info(id);

            if matches!(info.kind, AllocKind::LiveData) {
                let alloc = self.get_alloc_raw(id)?;
                for prov in alloc.provenance().provenances() {
                    if let Some(id) = prov.get_alloc_id() {
                        todo.push(id);
                    }
                }
            }

            visit(self, id, &info)?;
        }
        interp_ok(())
    }

    #[must_use]
    pub fn dump_alloc<'a>(&'a self, id: AllocId) -> DumpAllocs<'a, 'tcx, M> {
        self.dump_allocs(vec![id])
    }

    #[must_use]
    pub fn dump_allocs<'a>(&'a self, mut allocs: Vec<AllocId>) -> DumpAllocs<'a, 'tcx, M> {
        allocs.sort();
        allocs.dedup();
        DumpAllocs { ecx: self, allocs }
    }

    pub fn print_alloc_bytes_for_diagnostics(&self, id: AllocId) -> String {
        let alloc = self.get_alloc_raw(id).unwrap();
        let mut bytes = String::new();
        if alloc.size() != Size::ZERO {
            bytes = "\n".into();
            rustc_middle::mir::pretty::write_allocation_bytes(*self.tcx, alloc, &mut bytes, " ")
                .unwrap();
        }
        bytes
    }
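
    /// Removes and returns all allocations that are neither reachable from a global
    /// or static root nor allowed to leak, so the caller can report them.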
    pub fn take_leaked_allocations(
        &mut self,
        static_roots: impl FnOnce(&Self) -> &[AllocId],
    ) -> Vec<(AllocId, MemoryKind<M::MemoryKind>, Allocation<M::Provenance, M::AllocExtra, M::Bytes>)>
    {
        let reachable = {
            let mut reachable = FxHashSet::default();
            let global_kind = M::GLOBAL_KIND.map(MemoryKind::Machine);
            let mut todo: Vec<_> =
                self.memory.alloc_map.filter_map_collect(move |&id, &(kind, _)| {
                    if Some(kind) == global_kind { Some(id) } else { None }
                });
            todo.extend(static_roots(self));
            while let Some(id) = todo.pop() {
                if reachable.insert(id) {
                    if let Some((_, alloc)) = self.memory.alloc_map.get(id) {
                        todo.extend(
                            alloc.provenance().provenances().filter_map(|prov| prov.get_alloc_id()),
                        );
                    }
                }
            }
            reachable
        };

        let leaked: Vec<_> = self.memory.alloc_map.filter_map_collect(|&id, &(kind, _)| {
            if kind.may_leak() || reachable.contains(&id) { None } else { Some(id) }
        });
        let mut result = Vec::new();
        for &id in leaked.iter() {
            let (kind, alloc) = self.memory.alloc_map.remove(&id).unwrap();
            result.push((id, kind, alloc));
        }
        result
    }

    pub fn run_for_validation_mut<R>(&mut self, f: impl FnOnce(&mut Self) -> R) -> R {
        assert!(
            self.memory.validation_in_progress.replace(true) == false,
            "`validation_in_progress` was already set"
        );
        let res = f(self);
        assert!(
            self.memory.validation_in_progress.replace(false) == true,
            "`validation_in_progress` was unset by someone else"
        );
        res
    }

    pub fn run_for_validation_ref<R>(&self, f: impl FnOnce(&Self) -> R) -> R {
        assert!(
            self.memory.validation_in_progress.replace(true) == false,
            "`validation_in_progress` was already set"
        );
        let res = f(self);
        assert!(
            self.memory.validation_in_progress.replace(false) == true,
            "`validation_in_progress` was unset by someone else"
        );
        res
    }

    pub(super) fn validation_in_progress(&self) -> bool {
        self.memory.validation_in_progress.get()
    }
}

#[doc(hidden)]
pub struct DumpAllocs<'a, 'tcx, M: Machine<'tcx>> {
    ecx: &'a InterpCx<'tcx, M>,
    allocs: Vec<AllocId>,
}

impl<'a, 'tcx, M: Machine<'tcx>> std::fmt::Debug for DumpAllocs<'a, 'tcx, M> {
    fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        fn write_allocation_track_relocs<'tcx, Prov: Provenance, Extra, Bytes: AllocBytes>(
            fmt: &mut std::fmt::Formatter<'_>,
            tcx: TyCtxt<'tcx>,
            allocs_to_print: &mut VecDeque<AllocId>,
            alloc: &Allocation<Prov, Extra, Bytes>,
        ) -> std::fmt::Result {
            for alloc_id in alloc.provenance().provenances().filter_map(|prov| prov.get_alloc_id())
            {
                allocs_to_print.push_back(alloc_id);
            }
            write!(fmt, "{}", display_allocation(tcx, alloc))
        }

        let mut allocs_to_print: VecDeque<_> = self.allocs.iter().copied().collect();
        let mut allocs_printed = FxHashSet::default();

        while let Some(id) = allocs_to_print.pop_front() {
            if !allocs_printed.insert(id) {
                continue;
            }

            write!(fmt, "{id:?}")?;
            match self.ecx.memory.alloc_map.get(id) {
                Some((kind, alloc)) => {
                    write!(fmt, " ({kind}, ")?;
                    write_allocation_track_relocs(
                        &mut *fmt,
                        *self.ecx.tcx,
                        &mut allocs_to_print,
                        alloc,
                    )?;
                }
                None => {
                    match self.ecx.tcx.try_get_global_alloc(id) {
                        Some(GlobalAlloc::Memory(alloc)) => {
                            write!(fmt, " (unchanged global, ")?;
                            write_allocation_track_relocs(
                                &mut *fmt,
                                *self.ecx.tcx,
                                &mut allocs_to_print,
                                alloc.inner(),
                            )?;
                        }
                        Some(GlobalAlloc::Function { instance, .. }) => {
                            write!(fmt, " (fn: {instance})")?;
                        }
                        Some(GlobalAlloc::VTable(ty, dyn_ty)) => {
                            write!(fmt, " (vtable: impl {dyn_ty} for {ty})")?;
                        }
                        Some(GlobalAlloc::Static(did)) => {
                            write!(fmt, " (static: {})", self.ecx.tcx.def_path_str(did))?;
                        }
                        None => {
                            write!(fmt, " (deallocated)")?;
                        }
                    }
                }
            }
            writeln!(fmt)?;
        }
        Ok(())
    }
}

impl<'a, 'tcx, Prov: Provenance, Extra, Bytes: AllocBytes>
    AllocRefMut<'a, 'tcx, Prov, Extra, Bytes>
{
    pub fn as_ref<'b>(&'b self) -> AllocRef<'b, 'tcx, Prov, Extra, Bytes> {
        AllocRef { alloc: self.alloc, range: self.range, tcx: self.tcx, alloc_id: self.alloc_id }
    }

    pub fn write_scalar(&mut self, range: AllocRange, val: Scalar<Prov>) -> InterpResult<'tcx> {
        let range = self.range.subrange(range);
        debug!("write_scalar at {:?}{range:?}: {val:?}", self.alloc_id);

        self.alloc
            .write_scalar(&self.tcx, range, val)
            .map_err(|e| e.to_interp_error(self.alloc_id))
            .into()
    }

    pub fn write_ptr_sized(&mut self, offset: Size, val: Scalar<Prov>) -> InterpResult<'tcx> {
        self.write_scalar(alloc_range(offset, self.tcx.data_layout().pointer_size()), val)
    }

    pub fn write_uninit(&mut self, range: AllocRange) -> InterpResult<'tcx> {
        let range = self.range.subrange(range);

        self.alloc
            .write_uninit(&self.tcx, range)
            .map_err(|e| e.to_interp_error(self.alloc_id))
            .into()
    }

    pub fn write_uninit_full(&mut self) -> InterpResult<'tcx> {
        self.alloc
            .write_uninit(&self.tcx, self.range)
            .map_err(|e| e.to_interp_error(self.alloc_id))
            .into()
    }

    pub fn clear_provenance(&mut self) -> InterpResult<'tcx> {
        self.alloc
            .clear_provenance(&self.tcx, self.range)
            .map_err(|e| e.to_interp_error(self.alloc_id))
            .into()
    }
}

impl<'a, 'tcx, Prov: Provenance, Extra, Bytes: AllocBytes> AllocRef<'a, 'tcx, Prov, Extra, Bytes> {
    pub fn read_scalar(
        &self,
        range: AllocRange,
        read_provenance: bool,
    ) -> InterpResult<'tcx, Scalar<Prov>> {
        let range = self.range.subrange(range);
        self.alloc
            .read_scalar(&self.tcx, range, read_provenance)
            .map_err(|e| e.to_interp_error(self.alloc_id))
            .into()
    }

    pub fn read_integer(&self, range: AllocRange) -> InterpResult<'tcx, Scalar<Prov>> {
        self.read_scalar(range, false)
    }

    pub fn read_pointer(&self, offset: Size) -> InterpResult<'tcx, Scalar<Prov>> {
        self.read_scalar(
            alloc_range(offset, self.tcx.data_layout().pointer_size()),
            true,
        )
    }

    pub fn get_bytes_strip_provenance<'b>(&'b self) -> InterpResult<'tcx, &'a [u8]> {
        self.alloc
            .get_bytes_strip_provenance(&self.tcx, self.range)
            .map_err(|e| e.to_interp_error(self.alloc_id))
            .into()
    }

    pub fn has_provenance(&self) -> bool {
        !self.alloc.provenance().range_empty(self.range, &self.tcx)
    }
}

impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
    pub fn read_bytes_ptr_strip_provenance(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        size: Size,
    ) -> InterpResult<'tcx, &[u8]> {
        let Some(alloc_ref) = self.get_ptr_alloc(ptr, size)? else {
            return interp_ok(&[]);
        };
        interp_ok(
            alloc_ref
                .alloc
                .get_bytes_strip_provenance(&alloc_ref.tcx, alloc_ref.range)
                .map_err(|e| e.to_interp_error(alloc_ref.alloc_id))?,
        )
    }

    pub fn write_bytes_ptr(
        &mut self,
        ptr: Pointer<Option<M::Provenance>>,
        src: impl IntoIterator<Item = u8>,
    ) -> InterpResult<'tcx> {
        let mut src = src.into_iter();
        let (lower, upper) = src.size_hint();
        let len = upper.expect("can only write bounded iterators");
        assert_eq!(lower, len, "can only write iterators with a precise length");

        let size = Size::from_bytes(len);
        let Some(alloc_ref) = self.get_ptr_alloc_mut(ptr, size)? else {
            assert_matches!(src.next(), None, "iterator said it was empty but returned an element");
            return interp_ok(());
        };

        let alloc_id = alloc_ref.alloc_id;
        let bytes = alloc_ref
            .alloc
            .get_bytes_unchecked_for_overwrite(&alloc_ref.tcx, alloc_ref.range)
            .map_err(move |e| e.to_interp_error(alloc_id))?;
        for dest in bytes {
            *dest = src.next().expect("iterator was shorter than it said it would be");
        }
        assert_matches!(src.next(), None, "iterator was longer than it said it would be");
        interp_ok(())
    }

    pub fn mem_copy(
        &mut self,
        src: Pointer<Option<M::Provenance>>,
        dest: Pointer<Option<M::Provenance>>,
        size: Size,
        nonoverlapping: bool,
    ) -> InterpResult<'tcx> {
        self.mem_copy_repeatedly(src, dest, size, 1, nonoverlapping)
    }
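
    /// Copies `size` bytes from `src` to `dest`, repeating the copy `num_copies`
    /// times (repeated copies must be non-overlapping). Provenance and initialization
    /// masks are copied along with the bytes.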
    pub fn mem_copy_repeatedly(
        &mut self,
        src: Pointer<Option<M::Provenance>>,
        dest: Pointer<Option<M::Provenance>>,
        size: Size,
        num_copies: u64,
        nonoverlapping: bool,
    ) -> InterpResult<'tcx> {
        let tcx = self.tcx;
        let src_parts = self.get_ptr_access(src, size)?;
        let dest_parts = self.get_ptr_access(dest, size * num_copies)?;
        if let Ok((alloc_id, ..)) = self.ptr_try_get_alloc_id(src, size.bytes().try_into().unwrap())
        {
            M::before_alloc_access(tcx, &self.machine, alloc_id)?;
        }

        let Some((src_alloc_id, src_offset, src_prov)) = src_parts else {
            return interp_ok(());
        };
        let src_alloc = self.get_alloc_raw(src_alloc_id)?;
        let src_range = alloc_range(src_offset, size);
        assert!(!self.memory.validation_in_progress.get(), "we can't be copying during validation");

        M::before_memory_read(
            tcx,
            &self.machine,
            &src_alloc.extra,
            src,
            (src_alloc_id, src_prov),
            src_range,
        )?;
        let Some((dest_alloc_id, dest_offset, dest_prov)) = dest_parts else {
            return interp_ok(());
        };

        let src_bytes = src_alloc.get_bytes_unchecked(src_range).as_ptr();
        let provenance = src_alloc
            .provenance()
            .prepare_copy(src_range, dest_offset, num_copies, self)
            .map_err(|e| e.to_interp_error(src_alloc_id))?;
        let init = src_alloc.init_mask().prepare_copy(src_range);

        let (dest_alloc, machine) = self.get_alloc_raw_mut(dest_alloc_id)?;
        let dest_range = alloc_range(dest_offset, size * num_copies);
        M::before_alloc_access(tcx, machine, dest_alloc_id)?;
        M::before_memory_write(
            tcx,
            machine,
            &mut dest_alloc.extra,
            dest,
            (dest_alloc_id, dest_prov),
            dest_range,
        )?;
        let dest_bytes = dest_alloc
            .get_bytes_unchecked_for_overwrite_ptr(&tcx, dest_range)
            .map_err(|e| e.to_interp_error(dest_alloc_id))?
            .as_mut_ptr();

        if init.no_bytes_init() {
            dest_alloc
                .write_uninit(&tcx, dest_range)
                .map_err(|e| e.to_interp_error(dest_alloc_id))?;
            return interp_ok(());
        }

        unsafe {
            if src_alloc_id == dest_alloc_id {
                if nonoverlapping {
                    if (src_offset <= dest_offset && src_offset + size > dest_offset)
                        || (dest_offset <= src_offset && dest_offset + size > src_offset)
                    {
                        throw_ub_custom!(fluent::const_eval_copy_nonoverlapping_overlapping);
                    }
                }
            }
            if num_copies > 1 {
                assert!(nonoverlapping, "multi-copy only supported in non-overlapping mode");
            }

            let size_in_bytes = size.bytes_usize();
            if size_in_bytes == 1 {
                debug_assert!(num_copies >= 1);
                let value = *src_bytes;
                dest_bytes.write_bytes(value, (size * num_copies).bytes_usize());
            } else if src_alloc_id == dest_alloc_id {
                let mut dest_ptr = dest_bytes;
                for _ in 0..num_copies {
                    ptr::copy(src_bytes, dest_ptr, size_in_bytes);
                    dest_ptr = dest_ptr.add(size_in_bytes);
                }
            } else {
                let mut dest_ptr = dest_bytes;
                for _ in 0..num_copies {
                    ptr::copy_nonoverlapping(src_bytes, dest_ptr, size_in_bytes);
                    dest_ptr = dest_ptr.add(size_in_bytes);
                }
            }
        }

        dest_alloc.init_mask_apply_copy(
            init,
            alloc_range(dest_offset, size),
            num_copies,
        );
        dest_alloc.provenance_apply_copy(provenance);

        interp_ok(())
    }
}

impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
    pub fn scalar_may_be_null(&self, scalar: Scalar<M::Provenance>) -> InterpResult<'tcx, bool> {
        match scalar.try_to_scalar_int() {
            Ok(int) => interp_ok(int.is_null()),
            Err(_) => {
                let ptr = scalar.to_pointer(self)?;
                match self.ptr_try_get_alloc_id(ptr, 0) {
                    Ok((alloc_id, offset, _)) => {
                        let info = self.get_alloc_info(alloc_id);
                        if offset <= info.size {
                            return interp_ok(false);
                        }
                        if !offset.bytes().is_multiple_of(info.align.bytes()) {
                            return interp_ok(false);
                        }
                        interp_ok(true)
                    }
                    Err(_offset) => bug!("a non-int scalar is always a pointer"),
                }
            }
        }
    }
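
    /// Splits a pointer into its (allocation id, offset, provenance extra) parts, or
    /// returns the raw address if it has no usable provenance. `size` describes the
    /// intended access and is passed through to the machine.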
    pub fn ptr_try_get_alloc_id(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        size: i64,
    ) -> Result<(AllocId, Size, M::ProvenanceExtra), u64> {
        match ptr.into_pointer_or_addr() {
            Ok(ptr) => match M::ptr_get_alloc(self, ptr, size) {
                Some((alloc_id, offset, extra)) => Ok((alloc_id, offset, extra)),
                None => {
                    assert!(M::Provenance::OFFSET_IS_ADDR);
                    let (_, addr) = ptr.into_raw_parts();
                    Err(addr.bytes())
                }
            },
            Err(addr) => Err(addr.bytes()),
        }
    }

    #[inline(always)]
    pub fn ptr_get_alloc_id(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        size: i64,
    ) -> InterpResult<'tcx, (AllocId, Size, M::ProvenanceExtra)> {
        self.ptr_try_get_alloc_id(ptr, size)
            .map_err(|offset| {
                err_ub!(DanglingIntPointer {
                    addr: offset,
                    inbounds_size: size,
                    msg: CheckInAllocMsg::Dereferenceable
                })
            })
            .into()
    }
}