use std::assert_matches::assert_matches;
use std::borrow::{Borrow, Cow};
use std::cell::Cell;
use std::collections::VecDeque;
use std::{fmt, ptr};

use rustc_abi::{Align, HasDataLayout, Size};
use rustc_ast::Mutability;
use rustc_data_structures::fx::{FxHashSet, FxIndexMap};
use rustc_middle::bug;
use rustc_middle::mir::display_allocation;
use rustc_middle::ty::{self, Instance, Ty, TyCtxt};
use tracing::{debug, instrument, trace};

use super::{
    AllocBytes, AllocId, AllocInit, AllocMap, AllocRange, Allocation, CheckAlignMsg,
    CheckInAllocMsg, CtfeProvenance, GlobalAlloc, InterpCx, InterpResult, Machine, MayLeak,
    Misalignment, Pointer, PointerArithmetic, Provenance, Scalar, alloc_range, err_ub,
    err_ub_custom, interp_ok, throw_ub, throw_ub_custom, throw_unsup, throw_unsup_format,
};
use crate::fluent_generated as fluent;

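/// The kinds of memory the interpreter distinguishes: stack slots, the allocation
/// backing `caller_location`, and any machine-specific kinds added via `Machine(T)`.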
#[derive(Debug, PartialEq, Copy, Clone)]
pub enum MemoryKind<T> {
    Stack,
    CallerLocation,
    Machine(T),
}

impl<T: MayLeak> MayLeak for MemoryKind<T> {
    #[inline]
    fn may_leak(self) -> bool {
        match self {
            MemoryKind::Stack => false,
            MemoryKind::CallerLocation => true,
            MemoryKind::Machine(k) => k.may_leak(),
        }
    }
}

impl<T: fmt::Display> fmt::Display for MemoryKind<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            MemoryKind::Stack => write!(f, "stack variable"),
            MemoryKind::CallerLocation => write!(f, "caller location"),
            MemoryKind::Machine(m) => write!(f, "{m}"),
        }
    }
}

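/// What lives behind an `AllocId`, as reported by `get_alloc_info`: live data, a
/// function, a vtable, or an allocation that has already been deallocated.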
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum AllocKind {
    LiveData,
    Function,
    VTable,
    Dead,
}

#[derive(Copy, Clone, PartialEq, Debug)]
pub struct AllocInfo {
    pub size: Size,
    pub align: Align,
    pub kind: AllocKind,
    pub mutbl: Mutability,
}

impl AllocInfo {
    fn new(size: Size, align: Align, kind: AllocKind, mutbl: Mutability) -> Self {
        Self { size, align, kind, mutbl }
    }
}

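/// A function a pointer may refer to: either a properly monomorphized `Instance`,
/// or an "extra" function value the machine registered itself (see `fn_ptr`).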
#[derive(Debug, Copy, Clone)]
pub enum FnVal<'tcx, Other> {
    Instance(Instance<'tcx>),
    Other(Other),
}

impl<'tcx, Other> FnVal<'tcx, Other> {
    pub fn as_instance(self) -> InterpResult<'tcx, Instance<'tcx>> {
        match self {
            FnVal::Instance(instance) => interp_ok(instance),
            FnVal::Other(_) => {
                throw_unsup_format!("'foreign' function pointers are not supported in this context")
            }
        }
    }
}

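/// The interpreter's memory state: the map of live allocations, machine-registered
/// extra function pointers, the size/alignment of already-freed allocations (so
/// use-after-free can still be reported precisely), and a flag recording whether
/// validation is currently running.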
pub struct Memory<'tcx, M: Machine<'tcx>> {
    pub(super) alloc_map: M::MemoryMap,

    extra_fn_ptr_map: FxIndexMap<AllocId, M::ExtraFnVal>,

    pub(super) dead_alloc_map: FxIndexMap<AllocId, (Size, Align)>,

    validation_in_progress: Cell<bool>,
}

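/// A shared reference to a range within an allocation, bundled with the `TyCtxt` and
/// the `AllocId` so reads can be performed and errors attributed to the right
/// allocation; `AllocRefMut` below is the mutable counterpart used for writes.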
#[derive(Copy, Clone)]
pub struct AllocRef<'a, 'tcx, Prov: Provenance, Extra, Bytes: AllocBytes = Box<[u8]>> {
    alloc: &'a Allocation<Prov, Extra, Bytes>,
    range: AllocRange,
    tcx: TyCtxt<'tcx>,
    alloc_id: AllocId,
}
pub struct AllocRefMut<'a, 'tcx, Prov: Provenance, Extra, Bytes: AllocBytes = Box<[u8]>> {
    alloc: &'a mut Allocation<Prov, Extra, Bytes>,
    range: AllocRange,
    tcx: TyCtxt<'tcx>,
    alloc_id: AllocId,
}

impl<'tcx, M: Machine<'tcx>> Memory<'tcx, M> {
    pub fn new() -> Self {
        Memory {
            alloc_map: M::MemoryMap::default(),
            extra_fn_ptr_map: FxIndexMap::default(),
            dead_alloc_map: FxIndexMap::default(),
            validation_in_progress: Cell::new(false),
        }
    }

    pub fn alloc_map(&self) -> &M::MemoryMap {
        &self.alloc_map
    }
}

impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
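    /// Converts a pointer into CTFE "global" memory into a machine pointer, letting the
    /// machine adjust its provenance. Thread-local statics must never show up here, and
    /// extern statics are forwarded to the machine's `extern_static_pointer` hook.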
    #[inline]
    pub fn global_root_pointer(
        &self,
        ptr: Pointer<CtfeProvenance>,
    ) -> InterpResult<'tcx, Pointer<M::Provenance>> {
        let alloc_id = ptr.provenance.alloc_id();
        match self.tcx.try_get_global_alloc(alloc_id) {
            Some(GlobalAlloc::Static(def_id)) if self.tcx.is_thread_local_static(def_id) => {
                bug!("global memory cannot point to thread-local static")
            }
            Some(GlobalAlloc::Static(def_id)) if self.tcx.is_foreign_item(def_id) => {
                return M::extern_static_pointer(self, def_id);
            }
            None => {
                assert!(
                    self.memory.extra_fn_ptr_map.contains_key(&alloc_id),
                    "{alloc_id:?} is neither global nor a function pointer"
                );
            }
            _ => {}
        }
        M::adjust_alloc_root_pointer(self, ptr, M::GLOBAL_KIND.map(MemoryKind::Machine))
    }

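    /// Creates a function pointer for the given function value; machine-specific "extra"
    /// function values get a fresh `AllocId` recorded in `extra_fn_ptr_map`.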
    pub fn fn_ptr(&mut self, fn_val: FnVal<'tcx, M::ExtraFnVal>) -> Pointer<M::Provenance> {
        let id = match fn_val {
            FnVal::Instance(instance) => {
                let salt = M::get_global_alloc_salt(self, Some(instance));
                self.tcx.reserve_and_set_fn_alloc(instance, salt)
            }
            FnVal::Other(extra) => {
                let id = self.tcx.reserve_alloc_id();
                let old = self.memory.extra_fn_ptr_map.insert(id, extra);
                assert!(old.is_none());
                id
            }
        };
        self.global_root_pointer(Pointer::from(id)).unwrap()
    }

    pub fn allocate_ptr(
        &mut self,
        size: Size,
        align: Align,
        kind: MemoryKind<M::MemoryKind>,
        init: AllocInit,
    ) -> InterpResult<'tcx, Pointer<M::Provenance>> {
        let params = self.machine.get_default_alloc_params();
        let alloc = if M::PANIC_ON_ALLOC_FAIL {
            Allocation::new(size, align, init, params)
        } else {
            Allocation::try_new(size, align, init, params)?
        };
        self.insert_allocation(alloc, kind)
    }

    pub fn allocate_bytes_ptr(
        &mut self,
        bytes: &[u8],
        align: Align,
        kind: MemoryKind<M::MemoryKind>,
        mutability: Mutability,
    ) -> InterpResult<'tcx, Pointer<M::Provenance>> {
        let params = self.machine.get_default_alloc_params();
        let alloc = Allocation::from_bytes(bytes, align, mutability, params);
        self.insert_allocation(alloc, kind)
    }

    pub fn insert_allocation(
        &mut self,
        alloc: Allocation<M::Provenance, (), M::Bytes>,
        kind: MemoryKind<M::MemoryKind>,
    ) -> InterpResult<'tcx, Pointer<M::Provenance>> {
        assert!(alloc.size() <= self.max_size_of_val());
        let id = self.tcx.reserve_alloc_id();
        debug_assert_ne!(
            Some(kind),
            M::GLOBAL_KIND.map(MemoryKind::Machine),
            "dynamically allocating global memory"
        );
        let extra = M::init_local_allocation(self, id, kind, alloc.size(), alloc.align)?;
        let alloc = alloc.with_extra(extra);
        self.memory.alloc_map.insert(id, (kind, alloc));
        M::adjust_alloc_root_pointer(self, Pointer::from(id), Some(kind))
    }

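    /// Reallocates by allocating a new block, copying the overlapping prefix of the old
    /// contents, and deallocating the old block. The pointer must point to the start of
    /// its allocation; `init_growth` controls how any newly added bytes start out.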
    pub fn reallocate_ptr(
        &mut self,
        ptr: Pointer<Option<M::Provenance>>,
        old_size_and_align: Option<(Size, Align)>,
        new_size: Size,
        new_align: Align,
        kind: MemoryKind<M::MemoryKind>,
        init_growth: AllocInit,
    ) -> InterpResult<'tcx, Pointer<M::Provenance>> {
        let (alloc_id, offset, _prov) = self.ptr_get_alloc_id(ptr, 0)?;
        if offset.bytes() != 0 {
            throw_ub_custom!(
                fluent::const_eval_realloc_or_alloc_with_offset,
                ptr = format!("{ptr:?}"),
                kind = "realloc"
            );
        }

        let new_ptr = self.allocate_ptr(new_size, new_align, kind, init_growth)?;
        let old_size = match old_size_and_align {
            Some((size, _align)) => size,
            None => self.get_alloc_raw(alloc_id)?.size(),
        };
        self.mem_copy(ptr, new_ptr.into(), old_size.min(new_size), true)?;
        self.deallocate_ptr(ptr, old_size_and_align, kind)?;

        interp_ok(new_ptr)
    }

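    /// Deallocates a local allocation. It is UB to deallocate global memory, to pass a
    /// pointer with a nonzero offset, to mismatch the `MemoryKind`, or (if given) the
    /// size/alignment; the freed allocation is recorded in `dead_alloc_map`.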
    #[instrument(skip(self), level = "debug")]
    pub fn deallocate_ptr(
        &mut self,
        ptr: Pointer<Option<M::Provenance>>,
        old_size_and_align: Option<(Size, Align)>,
        kind: MemoryKind<M::MemoryKind>,
    ) -> InterpResult<'tcx> {
        let (alloc_id, offset, prov) = self.ptr_get_alloc_id(ptr, 0)?;
        trace!("deallocating: {alloc_id:?}");

        if offset.bytes() != 0 {
            throw_ub_custom!(
                fluent::const_eval_realloc_or_alloc_with_offset,
                ptr = format!("{ptr:?}"),
                kind = "dealloc",
            );
        }

        let Some((alloc_kind, mut alloc)) = self.memory.alloc_map.remove(&alloc_id) else {
            return Err(match self.tcx.try_get_global_alloc(alloc_id) {
                Some(GlobalAlloc::Function { .. }) => {
                    err_ub_custom!(
                        fluent::const_eval_invalid_dealloc,
                        alloc_id = alloc_id,
                        kind = "fn",
                    )
                }
                Some(GlobalAlloc::VTable(..)) => {
                    err_ub_custom!(
                        fluent::const_eval_invalid_dealloc,
                        alloc_id = alloc_id,
                        kind = "vtable",
                    )
                }
                Some(GlobalAlloc::Static(..) | GlobalAlloc::Memory(..)) => {
                    err_ub_custom!(
                        fluent::const_eval_invalid_dealloc,
                        alloc_id = alloc_id,
                        kind = "static_mem"
                    )
                }
                None => err_ub!(PointerUseAfterFree(alloc_id, CheckInAllocMsg::MemoryAccess)),
            })
            .into();
        };

        if alloc.mutability.is_not() {
            throw_ub_custom!(fluent::const_eval_dealloc_immutable, alloc = alloc_id,);
        }
        if alloc_kind != kind {
            throw_ub_custom!(
                fluent::const_eval_dealloc_kind_mismatch,
                alloc = alloc_id,
                alloc_kind = format!("{alloc_kind}"),
                kind = format!("{kind}"),
            );
        }
        if let Some((size, align)) = old_size_and_align {
            if size != alloc.size() || align != alloc.align {
                throw_ub_custom!(
                    fluent::const_eval_dealloc_incorrect_layout,
                    alloc = alloc_id,
                    size = alloc.size().bytes(),
                    align = alloc.align.bytes(),
                    size_found = size.bytes(),
                    align_found = align.bytes(),
                )
            }
        }

        let size = alloc.size();
        M::before_memory_deallocation(
            self.tcx,
            &mut self.machine,
            &mut alloc.extra,
            ptr,
            (alloc_id, prov),
            size,
            alloc.align,
            kind,
        )?;

        let old = self.memory.dead_alloc_map.insert(alloc_id, (size, alloc.align));
        if old.is_some() {
            bug!("Nothing can be deallocated twice");
        }

        interp_ok(())
    }

    #[inline(always)]
    fn get_ptr_access(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        size: Size,
    ) -> InterpResult<'tcx, Option<(AllocId, Size, M::ProvenanceExtra)>> {
        let size = i64::try_from(size.bytes()).unwrap();
        Self::check_and_deref_ptr(
            self,
            ptr,
            size,
            CheckInAllocMsg::MemoryAccess,
            |this, alloc_id, offset, prov| {
                let (size, align) =
                    this.get_live_alloc_size_and_align(alloc_id, CheckInAllocMsg::MemoryAccess)?;
                interp_ok((size, align, (alloc_id, offset, prov)))
            },
        )
    }

    #[inline(always)]
    pub fn check_ptr_access(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        size: Size,
        msg: CheckInAllocMsg,
    ) -> InterpResult<'tcx> {
        let size = i64::try_from(size.bytes()).unwrap();
        Self::check_and_deref_ptr(self, ptr, size, msg, |this, alloc_id, _, _| {
            let (size, align) = this.get_live_alloc_size_and_align(alloc_id, msg)?;
            interp_ok((size, align, ()))
        })?;
        interp_ok(())
    }

    pub fn check_ptr_access_signed(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        size: i64,
        msg: CheckInAllocMsg,
    ) -> InterpResult<'tcx> {
        Self::check_and_deref_ptr(self, ptr, size, msg, |this, alloc_id, _, _| {
            let (size, align) = this.get_live_alloc_size_and_align(alloc_id, msg)?;
            interp_ok((size, align, ()))
        })?;
        interp_ok(())
    }

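    /// Shared implementation of the `check_ptr_access*` methods: resolves `ptr` to an
    /// allocation via the `alloc_size` callback and checks that the signed `size`-byte
    /// range starting at it is in bounds. Accesses of size 0 are always allowed and
    /// return `None`; `size` may be negative to check a range *ending* at the pointer.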
    fn check_and_deref_ptr<T, R: Borrow<Self>>(
        this: R,
        ptr: Pointer<Option<M::Provenance>>,
        size: i64,
        msg: CheckInAllocMsg,
        alloc_size: impl FnOnce(
            R,
            AllocId,
            Size,
            M::ProvenanceExtra,
        ) -> InterpResult<'tcx, (Size, Align, T)>,
    ) -> InterpResult<'tcx, Option<T>> {
        if size == 0 {
            return interp_ok(None);
        }

        interp_ok(match this.borrow().ptr_try_get_alloc_id(ptr, size) {
            Err(addr) => {
                throw_ub!(DanglingIntPointer { addr, inbounds_size: size, msg });
            }
            Ok((alloc_id, offset, prov)) => {
                let tcx = this.borrow().tcx;
                let (alloc_size, _alloc_align, ret_val) = alloc_size(this, alloc_id, offset, prov)?;
                let offset = offset.bytes();
                let (begin, end) = if size >= 0 {
                    (Some(offset), offset.checked_add(size as u64))
                } else {
                    (offset.checked_sub(size.unsigned_abs()), Some(offset))
                };
                let in_bounds = begin.is_some() && end.is_some_and(|e| e <= alloc_size.bytes());
                if !in_bounds {
                    throw_ub!(PointerOutOfBounds {
                        alloc_id,
                        alloc_size,
                        ptr_offset: tcx.sign_extend_to_target_isize(offset),
                        inbounds_size: size,
                        msg,
                    })
                }

                Some(ret_val)
            }
        })
    }

    pub(super) fn check_misalign(
        &self,
        misaligned: Option<Misalignment>,
        msg: CheckAlignMsg,
    ) -> InterpResult<'tcx> {
        if let Some(misaligned) = misaligned {
            throw_ub!(AlignmentCheckFailed(misaligned, msg))
        }
        interp_ok(())
    }

    pub(super) fn is_ptr_misaligned(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        align: Align,
    ) -> Option<Misalignment> {
        if !M::enforce_alignment(self) || align.bytes() == 1 {
            return None;
        }

        #[inline]
        fn is_offset_misaligned(offset: u64, align: Align) -> Option<Misalignment> {
            if offset % align.bytes() == 0 {
                None
            } else {
                let offset_pow2 = 1 << offset.trailing_zeros();
                Some(Misalignment { has: Align::from_bytes(offset_pow2).unwrap(), required: align })
            }
        }

        match self.ptr_try_get_alloc_id(ptr, 0) {
            Err(addr) => is_offset_misaligned(addr, align),
            Ok((alloc_id, offset, _prov)) => {
                let alloc_info = self.get_alloc_info(alloc_id);
                if let Some(misalign) = M::alignment_check(
                    self,
                    alloc_id,
                    alloc_info.align,
                    alloc_info.kind,
                    offset,
                    align,
                ) {
                    Some(misalign)
                } else if M::Provenance::OFFSET_IS_ADDR {
                    is_offset_misaligned(ptr.addr().bytes(), align)
                } else {
                    if alloc_info.align.bytes() < align.bytes() {
                        Some(Misalignment { has: alloc_info.align, required: align })
                    } else {
                        is_offset_misaligned(offset.bytes(), align)
                    }
                }
            }
        }
    }

    pub fn check_ptr_align(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        align: Align,
    ) -> InterpResult<'tcx> {
        self.check_misalign(self.is_ptr_misaligned(ptr, align), CheckAlignMsg::AccessedPtr)
    }
}

impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
    pub fn remove_unreachable_allocs(&mut self, reachable_allocs: &FxHashSet<AllocId>) {
        #[allow(rustc::potential_query_instability)]
        self.memory.dead_alloc_map.retain(|id, _| reachable_allocs.contains(id));
    }
}

impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
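    /// Looks up an allocation in the global (tcx) memory, evaluating static initializers
    /// on demand. Thread-local statics are rejected outright, extern statics are
    /// unsupported here, and the machine's `before_access_global` hook gets the final say.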
    fn get_global_alloc(
        &self,
        id: AllocId,
        is_write: bool,
    ) -> InterpResult<'tcx, Cow<'tcx, Allocation<M::Provenance, M::AllocExtra, M::Bytes>>> {
        let (alloc, def_id) = match self.tcx.try_get_global_alloc(id) {
            Some(GlobalAlloc::Memory(mem)) => {
                (mem, None)
            }
            Some(GlobalAlloc::Function { .. }) => throw_ub!(DerefFunctionPointer(id)),
            Some(GlobalAlloc::VTable(..)) => throw_ub!(DerefVTablePointer(id)),
            None => throw_ub!(PointerUseAfterFree(id, CheckInAllocMsg::MemoryAccess)),
            Some(GlobalAlloc::Static(def_id)) => {
                assert!(self.tcx.is_static(def_id));
                assert!(!self.tcx.is_thread_local_static(def_id));
                if self.tcx.is_foreign_item(def_id) {
                    throw_unsup!(ExternStatic(def_id));
                }

                let val = self.ctfe_query(|tcx| tcx.eval_static_initializer(def_id))?;
                (val, Some(def_id))
            }
        };
        M::before_access_global(self.tcx, &self.machine, id, alloc, def_id, is_write)?;
        M::adjust_global_allocation(
            self,
            id,
            alloc.inner(),
        )
    }

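    /// Grants raw, shared access to an `Allocation`, with no bounds checks and no access
    /// hooks. Read-only globals are served straight from the tcx as `Cow::Borrowed`;
    /// only globals the machine adjusts get copied into the local `alloc_map`.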
    fn get_alloc_raw(
        &self,
        id: AllocId,
    ) -> InterpResult<'tcx, &Allocation<M::Provenance, M::AllocExtra, M::Bytes>> {
        let a = self.memory.alloc_map.get_or(id, || {
            let alloc = self.get_global_alloc(id, false).report_err().map_err(Err)?;
            match alloc {
                Cow::Borrowed(alloc) => {
                    Err(Ok(alloc))
                }
                Cow::Owned(alloc) => {
                    let kind = M::GLOBAL_KIND.expect(
                        "I got a global allocation that I have to copy but the machine does \
                            not expect that to happen",
                    );
                    Ok((MemoryKind::Machine(kind), alloc))
                }
            }
        });
        match a {
            Ok(a) => interp_ok(&a.1),
            Err(a) => a.into(),
        }
    }

    pub fn get_alloc_bytes_unchecked_raw(&self, id: AllocId) -> InterpResult<'tcx, *const u8> {
        let alloc = self.get_alloc_raw(id)?;
        interp_ok(alloc.get_bytes_unchecked_raw())
    }

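    /// Bounds-checked read access: returns an `AllocRef` covering `size` bytes at `ptr`,
    /// or `None` for a zero-sized access. Unless validation is in progress, this also
    /// runs the machine's `before_alloc_access` and `before_memory_read` hooks.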
    pub fn get_ptr_alloc<'a>(
        &'a self,
        ptr: Pointer<Option<M::Provenance>>,
        size: Size,
    ) -> InterpResult<'tcx, Option<AllocRef<'a, 'tcx, M::Provenance, M::AllocExtra, M::Bytes>>>
    {
        let size_i64 = i64::try_from(size.bytes()).unwrap();
        let ptr_and_alloc = Self::check_and_deref_ptr(
            self,
            ptr,
            size_i64,
            CheckInAllocMsg::MemoryAccess,
            |this, alloc_id, offset, prov| {
                let alloc = this.get_alloc_raw(alloc_id)?;
                interp_ok((alloc.size(), alloc.align, (alloc_id, offset, prov, alloc)))
            },
        )?;
        if !self.memory.validation_in_progress.get() {
            if let Ok((alloc_id, ..)) = self.ptr_try_get_alloc_id(ptr, size_i64) {
                M::before_alloc_access(self.tcx, &self.machine, alloc_id)?;
            }
        }

        if let Some((alloc_id, offset, prov, alloc)) = ptr_and_alloc {
            let range = alloc_range(offset, size);
            if !self.memory.validation_in_progress.get() {
                M::before_memory_read(
                    self.tcx,
                    &self.machine,
                    &alloc.extra,
                    ptr,
                    (alloc_id, prov),
                    range,
                )?;
            }
            interp_ok(Some(AllocRef { alloc, range, tcx: *self.tcx, alloc_id }))
        } else {
            interp_ok(None)
        }
    }

    pub fn get_alloc_extra<'a>(&'a self, id: AllocId) -> InterpResult<'tcx, &'a M::AllocExtra> {
        interp_ok(&self.get_alloc_raw(id)?.extra)
    }

    pub fn get_alloc_mutability<'a>(&'a self, id: AllocId) -> InterpResult<'tcx, Mutability> {
        interp_ok(self.get_alloc_raw(id)?.mutability)
    }

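    /// Grants raw, mutable access to an `Allocation`, with no bounds checks and no access
    /// hooks. The first mutable access to a global copies it into the local `alloc_map`
    /// (requiring the machine to have a `GLOBAL_KIND`); writing to a read-only
    /// allocation is reported as UB.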
    fn get_alloc_raw_mut(
        &mut self,
        id: AllocId,
    ) -> InterpResult<'tcx, (&mut Allocation<M::Provenance, M::AllocExtra, M::Bytes>, &mut M)> {
        if self.memory.alloc_map.get_mut(id).is_none() {
            let alloc = self.get_global_alloc(id, true)?;
            let kind = M::GLOBAL_KIND.expect(
                "I got a global allocation that I have to copy but the machine does \
                    not expect that to happen",
            );
            self.memory.alloc_map.insert(id, (MemoryKind::Machine(kind), alloc.into_owned()));
        }

        let (_kind, alloc) = self.memory.alloc_map.get_mut(id).unwrap();
        if alloc.mutability.is_not() {
            throw_ub!(WriteToReadOnly(id))
        }
        interp_ok((alloc, &mut self.machine))
    }

    pub fn get_alloc_bytes_unchecked_raw_mut(
        &mut self,
        id: AllocId,
    ) -> InterpResult<'tcx, *mut u8> {
        let alloc = self.get_alloc_raw_mut(id)?.0;
        interp_ok(alloc.get_bytes_unchecked_raw_mut())
    }

    pub fn get_ptr_alloc_mut<'a>(
        &'a mut self,
        ptr: Pointer<Option<M::Provenance>>,
        size: Size,
    ) -> InterpResult<'tcx, Option<AllocRefMut<'a, 'tcx, M::Provenance, M::AllocExtra, M::Bytes>>>
    {
        let tcx = self.tcx;
        let validation_in_progress = self.memory.validation_in_progress.get();

        let size_i64 = i64::try_from(size.bytes()).unwrap();
        let ptr_and_alloc = Self::check_and_deref_ptr(
            self,
            ptr,
            size_i64,
            CheckInAllocMsg::MemoryAccess,
            |this, alloc_id, offset, prov| {
                let (alloc, machine) = this.get_alloc_raw_mut(alloc_id)?;
                interp_ok((alloc.size(), alloc.align, (alloc_id, offset, prov, alloc, machine)))
            },
        )?;

        if let Some((alloc_id, offset, prov, alloc, machine)) = ptr_and_alloc {
            let range = alloc_range(offset, size);
            if !validation_in_progress {
                M::before_alloc_access(tcx, machine, alloc_id)?;
                M::before_memory_write(
                    tcx,
                    machine,
                    &mut alloc.extra,
                    ptr,
                    (alloc_id, prov),
                    range,
                )?;
            }
            interp_ok(Some(AllocRefMut { alloc, range, tcx: *tcx, alloc_id }))
        } else {
            interp_ok(None)
        }
    }

    pub fn get_alloc_extra_mut<'a>(
        &'a mut self,
        id: AllocId,
    ) -> InterpResult<'tcx, (&'a mut M::AllocExtra, &'a mut M)> {
        let (alloc, machine) = self.get_alloc_raw_mut(id)?;
        interp_ok((&mut alloc.extra, machine))
    }

    pub fn is_alloc_live(&self, id: AllocId) -> bool {
        self.memory.alloc_map.contains_key_ref(&id)
            || self.memory.extra_fn_ptr_map.contains_key(&id)
            || self.tcx.try_get_global_alloc(id).is_some()
    }

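    /// Returns the size, alignment, kind, and mutability of the allocation behind `id`,
    /// consulting (in order) live local allocations, registered function pointers,
    /// global allocations, and finally the `dead_alloc_map`.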
    pub fn get_alloc_info(&self, id: AllocId) -> AllocInfo {
        if let Some((_, alloc)) = self.memory.alloc_map.get(id) {
            return AllocInfo::new(
                alloc.size(),
                alloc.align,
                AllocKind::LiveData,
                alloc.mutability,
            );
        }

        if let Some(fn_val) = self.get_fn_alloc(id) {
            let align = match fn_val {
                FnVal::Instance(instance) => {
                    self.tcx.codegen_fn_attrs(instance.def_id()).alignment.unwrap_or(Align::ONE)
                }
                FnVal::Other(_) => Align::ONE,
            };

            return AllocInfo::new(Size::ZERO, align, AllocKind::Function, Mutability::Not);
        }

        if let Some(global_alloc) = self.tcx.try_get_global_alloc(id) {
            let (size, align) = global_alloc.size_and_align(*self.tcx, self.typing_env);
            let mutbl = global_alloc.mutability(*self.tcx, self.typing_env);
            let kind = match global_alloc {
                GlobalAlloc::Static { .. } | GlobalAlloc::Memory { .. } => AllocKind::LiveData,
                GlobalAlloc::Function { .. } => bug!("We already checked function pointers above"),
                GlobalAlloc::VTable { .. } => AllocKind::VTable,
            };
            return AllocInfo::new(size, align, kind, mutbl);
        }

        let (size, align) = *self
            .memory
            .dead_alloc_map
            .get(&id)
            .expect("deallocated pointers should all be recorded in `dead_alloc_map`");
        AllocInfo::new(size, align, AllocKind::Dead, Mutability::Not)
    }

    fn get_live_alloc_size_and_align(
        &self,
        id: AllocId,
        msg: CheckInAllocMsg,
    ) -> InterpResult<'tcx, (Size, Align)> {
        let info = self.get_alloc_info(id);
        if matches!(info.kind, AllocKind::Dead) {
            throw_ub!(PointerUseAfterFree(id, msg))
        }
        interp_ok((info.size, info.align))
    }

    fn get_fn_alloc(&self, id: AllocId) -> Option<FnVal<'tcx, M::ExtraFnVal>> {
        if let Some(extra) = self.memory.extra_fn_ptr_map.get(&id) {
            Some(FnVal::Other(*extra))
        } else {
            match self.tcx.try_get_global_alloc(id) {
                Some(GlobalAlloc::Function { instance, .. }) => Some(FnVal::Instance(instance)),
                _ => None,
            }
        }
    }

    pub fn get_ptr_fn(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
    ) -> InterpResult<'tcx, FnVal<'tcx, M::ExtraFnVal>> {
        trace!("get_ptr_fn({:?})", ptr);
        let (alloc_id, offset, _prov) = self.ptr_get_alloc_id(ptr, 0)?;
        if offset.bytes() != 0 {
            throw_ub!(InvalidFunctionPointer(Pointer::new(alloc_id, offset)))
        }
        self.get_fn_alloc(alloc_id)
            .ok_or_else(|| err_ub!(InvalidFunctionPointer(Pointer::new(alloc_id, offset))))
            .into()
    }

    pub fn get_ptr_vtable_ty(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        expected_trait: Option<&'tcx ty::List<ty::PolyExistentialPredicate<'tcx>>>,
    ) -> InterpResult<'tcx, Ty<'tcx>> {
        trace!("get_ptr_vtable({:?})", ptr);
        let (alloc_id, offset, _tag) = self.ptr_get_alloc_id(ptr, 0)?;
        if offset.bytes() != 0 {
            throw_ub!(InvalidVTablePointer(Pointer::new(alloc_id, offset)))
        }
        let Some(GlobalAlloc::VTable(ty, vtable_dyn_type)) =
            self.tcx.try_get_global_alloc(alloc_id)
        else {
            throw_ub!(InvalidVTablePointer(Pointer::new(alloc_id, offset)))
        };
        if let Some(expected_dyn_type) = expected_trait {
            self.check_vtable_for_type(vtable_dyn_type, expected_dyn_type)?;
        }
        interp_ok(ty)
    }

    pub fn alloc_mark_immutable(&mut self, id: AllocId) -> InterpResult<'tcx> {
        self.get_alloc_raw_mut(id)?.0.mutability = Mutability::Not;
        interp_ok(())
    }

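    /// Prepares the given allocations, and everything reachable from them through
    /// provenance, for being handed to native code: their provenance and base addresses
    /// are exposed, and mutable allocations are prepared for writes performed outside
    /// the interpreter (via `prepare_for_native_write`).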
    pub fn prepare_for_native_call(&mut self, ids: Vec<AllocId>) -> InterpResult<'tcx> {
        let mut done = FxHashSet::default();
        let mut todo = ids;
        while let Some(id) = todo.pop() {
            if !done.insert(id) {
                continue;
            }
            let info = self.get_alloc_info(id);

            if !matches!(info.kind, AllocKind::LiveData) {
                continue;
            }

            let alloc = self.get_alloc_raw(id)?;
            for prov in alloc.provenance().provenances() {
                M::expose_provenance(self, prov)?;
                if let Some(id) = prov.get_alloc_id() {
                    todo.push(id);
                }
            }
            std::hint::black_box(alloc.get_bytes_unchecked_raw().expose_provenance());

            if info.mutbl.is_mut() {
                self.get_alloc_raw_mut(id)?
                    .0
                    .prepare_for_native_write()
                    .map_err(|e| e.to_interp_error(id))?;
            }
        }
        interp_ok(())
    }

    #[must_use]
    pub fn dump_alloc<'a>(&'a self, id: AllocId) -> DumpAllocs<'a, 'tcx, M> {
        self.dump_allocs(vec![id])
    }

    #[must_use]
    pub fn dump_allocs<'a>(&'a self, mut allocs: Vec<AllocId>) -> DumpAllocs<'a, 'tcx, M> {
        allocs.sort();
        allocs.dedup();
        DumpAllocs { ecx: self, allocs }
    }

    pub fn print_alloc_bytes_for_diagnostics(&self, id: AllocId) -> String {
        let alloc = self.get_alloc_raw(id).unwrap();
        let mut bytes = String::new();
        if alloc.size() != Size::ZERO {
            bytes = "\n".into();
            rustc_middle::mir::pretty::write_allocation_bytes(*self.tcx, alloc, &mut bytes, " ")
                .unwrap();
        }
        bytes
    }

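    /// Finds allocations that were leaked: their `MemoryKind` does not permit leaking and
    /// they are unreachable from both global memory and the provided static roots. The
    /// leaked allocations are removed from memory and returned.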
    pub fn take_leaked_allocations(
        &mut self,
        static_roots: impl FnOnce(&Self) -> &[AllocId],
    ) -> Vec<(AllocId, MemoryKind<M::MemoryKind>, Allocation<M::Provenance, M::AllocExtra, M::Bytes>)>
    {
        let reachable = {
            let mut reachable = FxHashSet::default();
            let global_kind = M::GLOBAL_KIND.map(MemoryKind::Machine);
            let mut todo: Vec<_> =
                self.memory.alloc_map.filter_map_collect(move |&id, &(kind, _)| {
                    if Some(kind) == global_kind { Some(id) } else { None }
                });
            todo.extend(static_roots(self));
            while let Some(id) = todo.pop() {
                if reachable.insert(id) {
                    if let Some((_, alloc)) = self.memory.alloc_map.get(id) {
                        todo.extend(
                            alloc.provenance().provenances().filter_map(|prov| prov.get_alloc_id()),
                        );
                    }
                }
            }
            reachable
        };

        let leaked: Vec<_> = self.memory.alloc_map.filter_map_collect(|&id, &(kind, _)| {
            if kind.may_leak() || reachable.contains(&id) { None } else { Some(id) }
        });
        let mut result = Vec::new();
        for &id in leaked.iter() {
            let (kind, alloc) = self.memory.alloc_map.remove(&id).unwrap();
            result.push((id, kind, alloc));
        }
        result
    }

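    /// Runs the given closure with the `validation_in_progress` flag set, which suppresses
    /// the usual per-access machine hooks while validation traverses memory. The `_mut`
    /// and `_ref` variants below differ only in whether the closure gets `&mut self` or
    /// `&self`.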
    pub fn run_for_validation_mut<R>(&mut self, f: impl FnOnce(&mut Self) -> R) -> R {
        assert!(
            self.memory.validation_in_progress.replace(true) == false,
            "`validation_in_progress` was already set"
        );
        let res = f(self);
        assert!(
            self.memory.validation_in_progress.replace(false) == true,
            "`validation_in_progress` was unset by someone else"
        );
        res
    }

    pub fn run_for_validation_ref<R>(&self, f: impl FnOnce(&Self) -> R) -> R {
        assert!(
            self.memory.validation_in_progress.replace(true) == false,
            "`validation_in_progress` was already set"
        );
        let res = f(self);
        assert!(
            self.memory.validation_in_progress.replace(false) == true,
            "`validation_in_progress` was unset by someone else"
        );
        res
    }

    pub(super) fn validation_in_progress(&self) -> bool {
        self.memory.validation_in_progress.get()
    }
}

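/// Helper type returned by `dump_alloc`/`dump_allocs`; its `Debug` impl prints the
/// requested allocations (and, transitively, everything they point to) for diagnostics.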
#[doc(hidden)]
pub struct DumpAllocs<'a, 'tcx, M: Machine<'tcx>> {
    ecx: &'a InterpCx<'tcx, M>,
    allocs: Vec<AllocId>,
}

impl<'a, 'tcx, M: Machine<'tcx>> std::fmt::Debug for DumpAllocs<'a, 'tcx, M> {
    fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        fn write_allocation_track_relocs<'tcx, Prov: Provenance, Extra, Bytes: AllocBytes>(
            fmt: &mut std::fmt::Formatter<'_>,
            tcx: TyCtxt<'tcx>,
            allocs_to_print: &mut VecDeque<AllocId>,
            alloc: &Allocation<Prov, Extra, Bytes>,
        ) -> std::fmt::Result {
            for alloc_id in alloc.provenance().provenances().filter_map(|prov| prov.get_alloc_id())
            {
                allocs_to_print.push_back(alloc_id);
            }
            write!(fmt, "{}", display_allocation(tcx, alloc))
        }

        let mut allocs_to_print: VecDeque<_> = self.allocs.iter().copied().collect();
        let mut allocs_printed = FxHashSet::default();

        while let Some(id) = allocs_to_print.pop_front() {
            if !allocs_printed.insert(id) {
                continue;
            }

            write!(fmt, "{id:?}")?;
            match self.ecx.memory.alloc_map.get(id) {
                Some((kind, alloc)) => {
                    write!(fmt, " ({kind}, ")?;
                    write_allocation_track_relocs(
                        &mut *fmt,
                        *self.ecx.tcx,
                        &mut allocs_to_print,
                        alloc,
                    )?;
                }
                None => {
                    match self.ecx.tcx.try_get_global_alloc(id) {
                        Some(GlobalAlloc::Memory(alloc)) => {
                            write!(fmt, " (unchanged global, ")?;
                            write_allocation_track_relocs(
                                &mut *fmt,
                                *self.ecx.tcx,
                                &mut allocs_to_print,
                                alloc.inner(),
                            )?;
                        }
                        Some(GlobalAlloc::Function { instance, .. }) => {
                            write!(fmt, " (fn: {instance})")?;
                        }
                        Some(GlobalAlloc::VTable(ty, dyn_ty)) => {
                            write!(fmt, " (vtable: impl {dyn_ty} for {ty})")?;
                        }
                        Some(GlobalAlloc::Static(did)) => {
                            write!(fmt, " (static: {})", self.ecx.tcx.def_path_str(did))?;
                        }
                        None => {
                            write!(fmt, " (deallocated)")?;
                        }
                    }
                }
            }
            writeln!(fmt)?;
        }
        Ok(())
    }
}

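/// Write accessors on a mutable allocation reference; all ranges are interpreted
/// relative to (and checked against) the range the `AllocRefMut` was created for.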
impl<'a, 'tcx, Prov: Provenance, Extra, Bytes: AllocBytes>
    AllocRefMut<'a, 'tcx, Prov, Extra, Bytes>
{
    pub fn as_ref<'b>(&'b self) -> AllocRef<'b, 'tcx, Prov, Extra, Bytes> {
        AllocRef { alloc: self.alloc, range: self.range, tcx: self.tcx, alloc_id: self.alloc_id }
    }

    pub fn write_scalar(&mut self, range: AllocRange, val: Scalar<Prov>) -> InterpResult<'tcx> {
        let range = self.range.subrange(range);
        debug!("write_scalar at {:?}{range:?}: {val:?}", self.alloc_id);

        self.alloc
            .write_scalar(&self.tcx, range, val)
            .map_err(|e| e.to_interp_error(self.alloc_id))
            .into()
    }

    pub fn write_ptr_sized(&mut self, offset: Size, val: Scalar<Prov>) -> InterpResult<'tcx> {
        self.write_scalar(alloc_range(offset, self.tcx.data_layout().pointer_size), val)
    }

    pub fn write_uninit(&mut self, range: AllocRange) -> InterpResult<'tcx> {
        let range = self.range.subrange(range);

        self.alloc
            .write_uninit(&self.tcx, range)
            .map_err(|e| e.to_interp_error(self.alloc_id))
            .into()
    }

    pub fn write_uninit_full(&mut self) -> InterpResult<'tcx> {
        self.alloc
            .write_uninit(&self.tcx, self.range)
            .map_err(|e| e.to_interp_error(self.alloc_id))
            .into()
    }

    pub fn clear_provenance(&mut self) -> InterpResult<'tcx> {
        self.alloc
            .clear_provenance(&self.tcx, self.range)
            .map_err(|e| e.to_interp_error(self.alloc_id))
            .into()
    }
}

impl<'a, 'tcx, Prov: Provenance, Extra, Bytes: AllocBytes> AllocRef<'a, 'tcx, Prov, Extra, Bytes> {
    pub fn read_scalar(
        &self,
        range: AllocRange,
        read_provenance: bool,
    ) -> InterpResult<'tcx, Scalar<Prov>> {
        let range = self.range.subrange(range);
        self.alloc
            .read_scalar(&self.tcx, range, read_provenance)
            .map_err(|e| e.to_interp_error(self.alloc_id))
            .into()
    }

    pub fn read_integer(&self, range: AllocRange) -> InterpResult<'tcx, Scalar<Prov>> {
        self.read_scalar(range, false)
    }

    pub fn read_pointer(&self, offset: Size) -> InterpResult<'tcx, Scalar<Prov>> {
        self.read_scalar(
            alloc_range(offset, self.tcx.data_layout().pointer_size),
            true,
        )
    }

    pub fn get_bytes_strip_provenance<'b>(&'b self) -> InterpResult<'tcx, &'a [u8]> {
        self.alloc
            .get_bytes_strip_provenance(&self.tcx, self.range)
            .map_err(|e| e.to_interp_error(self.alloc_id))
            .into()
    }

    pub fn has_provenance(&self) -> bool {
        !self.alloc.provenance().range_empty(self.range, &self.tcx)
    }
}

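/// Higher-level memory reads, writes, and copies on whole pointer ranges.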
impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
    pub fn read_bytes_ptr_strip_provenance(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        size: Size,
    ) -> InterpResult<'tcx, &[u8]> {
        let Some(alloc_ref) = self.get_ptr_alloc(ptr, size)? else {
            return interp_ok(&[]);
        };
        interp_ok(
            alloc_ref
                .alloc
                .get_bytes_strip_provenance(&alloc_ref.tcx, alloc_ref.range)
                .map_err(|e| e.to_interp_error(alloc_ref.alloc_id))?,
        )
    }

    pub fn write_bytes_ptr(
        &mut self,
        ptr: Pointer<Option<M::Provenance>>,
        src: impl IntoIterator<Item = u8>,
    ) -> InterpResult<'tcx> {
        let mut src = src.into_iter();
        let (lower, upper) = src.size_hint();
        let len = upper.expect("can only write bounded iterators");
        assert_eq!(lower, len, "can only write iterators with a precise length");

        let size = Size::from_bytes(len);
        let Some(alloc_ref) = self.get_ptr_alloc_mut(ptr, size)? else {
            assert_matches!(src.next(), None, "iterator said it was empty but returned an element");
            return interp_ok(());
        };

        let alloc_id = alloc_ref.alloc_id;
        let bytes = alloc_ref
            .alloc
            .get_bytes_unchecked_for_overwrite(&alloc_ref.tcx, alloc_ref.range)
            .map_err(move |e| e.to_interp_error(alloc_id))?;
        for dest in bytes {
            *dest = src.next().expect("iterator was shorter than it said it would be");
        }
        assert_matches!(src.next(), None, "iterator was longer than it said it would be");
        interp_ok(())
    }

    pub fn mem_copy(
        &mut self,
        src: Pointer<Option<M::Provenance>>,
        dest: Pointer<Option<M::Provenance>>,
        size: Size,
        nonoverlapping: bool,
    ) -> InterpResult<'tcx> {
        self.mem_copy_repeatedly(src, dest, size, 1, nonoverlapping)
    }

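    /// Performs `num_copies` copies of `size` bytes from `src` into consecutive
    /// destinations starting at `dest` (a plain `memcpy` when `num_copies == 1`),
    /// carrying over provenance and initialization masks; `nonoverlapping` makes
    /// overlap within the same allocation UB.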
    pub fn mem_copy_repeatedly(
        &mut self,
        src: Pointer<Option<M::Provenance>>,
        dest: Pointer<Option<M::Provenance>>,
        size: Size,
        num_copies: u64,
        nonoverlapping: bool,
    ) -> InterpResult<'tcx> {
        let tcx = self.tcx;
        let src_parts = self.get_ptr_access(src, size)?;
        let dest_parts = self.get_ptr_access(dest, size * num_copies)?;

        if let Ok((alloc_id, ..)) = self.ptr_try_get_alloc_id(src, size.bytes().try_into().unwrap())
        {
            M::before_alloc_access(tcx, &self.machine, alloc_id)?;
        }

        let Some((src_alloc_id, src_offset, src_prov)) = src_parts else {
            return interp_ok(());
        };
        let src_alloc = self.get_alloc_raw(src_alloc_id)?;
        let src_range = alloc_range(src_offset, size);
        assert!(!self.memory.validation_in_progress.get(), "we can't be copying during validation");

        M::before_memory_read(
            tcx,
            &self.machine,
            &src_alloc.extra,
            src,
            (src_alloc_id, src_prov),
            src_range,
        )?;
        let Some((dest_alloc_id, dest_offset, dest_prov)) = dest_parts else {
            return interp_ok(());
        };

        let src_bytes = src_alloc.get_bytes_unchecked(src_range).as_ptr();
        let provenance = src_alloc
            .provenance()
            .prepare_copy(src_range, dest_offset, num_copies, self)
            .map_err(|e| e.to_interp_error(src_alloc_id))?;
        let init = src_alloc.init_mask().prepare_copy(src_range);

        let (dest_alloc, machine) = self.get_alloc_raw_mut(dest_alloc_id)?;
        let dest_range = alloc_range(dest_offset, size * num_copies);
        M::before_alloc_access(tcx, machine, dest_alloc_id)?;
        M::before_memory_write(
            tcx,
            machine,
            &mut dest_alloc.extra,
            dest,
            (dest_alloc_id, dest_prov),
            dest_range,
        )?;
        let dest_bytes = dest_alloc
            .get_bytes_unchecked_for_overwrite_ptr(&tcx, dest_range)
            .map_err(|e| e.to_interp_error(dest_alloc_id))?
            .as_mut_ptr();

        if init.no_bytes_init() {
            dest_alloc
                .write_uninit(&tcx, dest_range)
                .map_err(|e| e.to_interp_error(dest_alloc_id))?;
            return interp_ok(());
        }

        unsafe {
            if src_alloc_id == dest_alloc_id {
                if nonoverlapping {
                    if (src_offset <= dest_offset && src_offset + size > dest_offset)
                        || (dest_offset <= src_offset && dest_offset + size > src_offset)
                    {
                        throw_ub_custom!(fluent::const_eval_copy_nonoverlapping_overlapping);
                    }
                }
            }
            if num_copies > 1 {
                assert!(nonoverlapping, "multi-copy only supported in non-overlapping mode");
            }

            let size_in_bytes = size.bytes_usize();
            if size_in_bytes == 1 {
                debug_assert!(num_copies >= 1);
                let value = *src_bytes;
                dest_bytes.write_bytes(value, (size * num_copies).bytes_usize());
            } else if src_alloc_id == dest_alloc_id {
                let mut dest_ptr = dest_bytes;
                for _ in 0..num_copies {
                    ptr::copy(src_bytes, dest_ptr, size_in_bytes);
                    dest_ptr = dest_ptr.add(size_in_bytes);
                }
            } else {
                let mut dest_ptr = dest_bytes;
                for _ in 0..num_copies {
                    ptr::copy_nonoverlapping(src_bytes, dest_ptr, size_in_bytes);
                    dest_ptr = dest_ptr.add(size_in_bytes);
                }
            }
        }

        dest_alloc.init_mask_apply_copy(
            init,
            alloc_range(dest_offset, size),
            num_copies,
        );
        dest_alloc.provenance_apply_copy(provenance);

        interp_ok(())
    }
}

impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
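    /// Tests whether a scalar "may be null": plain integers are compared with zero, while
    /// for pointers the allocation's size and alignment are used to rule out null where
    /// possible (in-bounds pointers, and suitably misaligned offsets, cannot be null).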
    pub fn scalar_may_be_null(&self, scalar: Scalar<M::Provenance>) -> InterpResult<'tcx, bool> {
        match scalar.try_to_scalar_int() {
            Ok(int) => interp_ok(int.is_null()),
            Err(_) => {
                let ptr = scalar.to_pointer(self)?;
                match self.ptr_try_get_alloc_id(ptr, 0) {
                    Ok((alloc_id, offset, _)) => {
                        let info = self.get_alloc_info(alloc_id);
                        if offset <= info.size {
                            return interp_ok(false);
                        }
                        if offset.bytes() % info.align.bytes() != 0 {
                            return interp_ok(false);
                        }
                        interp_ok(true)
                    }
                    Err(_offset) => bug!("a non-int scalar is always a pointer"),
                }
            }
        }
    }

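    /// Splits a pointer into its `AllocId`, offset, and provenance-extra parts if it has
    /// provenance, or returns its absolute address as the `Err` case otherwise. `size`
    /// is the signed size of the intended access and is passed through to the machine.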
    pub fn ptr_try_get_alloc_id(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        size: i64,
    ) -> Result<(AllocId, Size, M::ProvenanceExtra), u64> {
        match ptr.into_pointer_or_addr() {
            Ok(ptr) => match M::ptr_get_alloc(self, ptr, size) {
                Some((alloc_id, offset, extra)) => Ok((alloc_id, offset, extra)),
                None => {
                    assert!(M::Provenance::OFFSET_IS_ADDR);
                    let (_, addr) = ptr.into_parts();
                    Err(addr.bytes())
                }
            },
            Err(addr) => Err(addr.bytes()),
        }
    }

    #[inline(always)]
    pub fn ptr_get_alloc_id(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        size: i64,
    ) -> InterpResult<'tcx, (AllocId, Size, M::ProvenanceExtra)> {
        self.ptr_try_get_alloc_id(ptr, size)
            .map_err(|offset| {
                err_ub!(DanglingIntPointer {
                    addr: offset,
                    inbounds_size: size,
                    msg: CheckInAllocMsg::Dereferenceable
                })
            })
            .into()
    }
}