mod init_mask;
mod provenance_map;

use std::borrow::Cow;
use std::hash::Hash;
use std::ops::{Deref, DerefMut, Range};
use std::{fmt, hash, ptr};

use either::{Left, Right};
use init_mask::*;
pub use init_mask::{InitChunk, InitChunkIter};
use provenance_map::*;
use rustc_abi::{Align, HasDataLayout, Size};
use rustc_ast::Mutability;
use rustc_data_structures::intern::Interned;
use rustc_macros::HashStable;
use rustc_serialize::{Decodable, Decoder, Encodable, Encoder};

use super::{
    AllocId, BadBytesAccess, CtfeProvenance, InterpErrorKind, InterpResult, Pointer,
    PointerArithmetic, Provenance, ResourceExhaustionInfo, Scalar, ScalarSizeMismatch,
    UndefinedBehaviorInfo, UnsupportedOpInfo, interp_ok, read_target_uint, write_target_uint,
};
use crate::ty;

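/// Functionality required for the bytes of an `Allocation`: a cloneable, dereferenceable byte
/// buffer that can be built from existing bytes or zero-allocated, and that exposes raw pointers
/// to its contents. `AllocParams` carries any extra data needed to construct the buffer.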
pub trait AllocBytes: Clone + fmt::Debug + Deref<Target = [u8]> + DerefMut<Target = [u8]> {
    type AllocParams;

    fn from_bytes<'a>(
        slice: impl Into<Cow<'a, [u8]>>,
        _align: Align,
        _params: Self::AllocParams,
    ) -> Self;

    fn zeroed(size: Size, _align: Align, _params: Self::AllocParams) -> Option<Self>;

    fn as_mut_ptr(&mut self) -> *mut u8;

    fn as_ptr(&self) -> *const u8;
}

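/// Default `Bytes` type for an allocation: a boxed byte slice, with no extra parameters.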
impl AllocBytes for Box<[u8]> {
    type AllocParams = ();

    fn from_bytes<'a>(slice: impl Into<Cow<'a, [u8]>>, _align: Align, _params: ()) -> Self {
        Box::<[u8]>::from(slice.into())
    }

    fn zeroed(size: Size, _align: Align, _params: ()) -> Option<Self> {
        let bytes = Box::<[u8]>::try_new_zeroed_slice(size.bytes().try_into().ok()?).ok()?;
        // SAFETY: the buffer was allocated zeroed, and zeroed bytes are a valid `[u8]`.
        let bytes = unsafe { bytes.assume_init() };
        Some(bytes)
    }

    fn as_mut_ptr(&mut self) -> *mut u8 {
        Box::as_mut_ptr(self).cast()
    }

    fn as_ptr(&self) -> *const u8 {
        Box::as_ptr(self).cast()
    }
}

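/// An actual allocation in the interpreter: a contiguous byte buffer together with its
/// provenance map (which bytes carry pointer provenance), its initialization mask (which bytes
/// have been written), the required alignment, the mutability, and arbitrary extra state.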
#[derive(Clone, Eq, PartialEq)]
#[derive(HashStable)]
pub struct Allocation<Prov: Provenance = CtfeProvenance, Extra = (), Bytes = Box<[u8]>> {
    bytes: Bytes,
    provenance: ProvenanceMap<Prov>,
    init_mask: InitMask,
    pub align: Align,
    pub mutability: Mutability,
    pub extra: Extra,
}

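/// Helper for the `Encodable`/`Decodable` impls of `Allocation`: packs the alignment,
/// mutability, and an "all bytes are zero" flag into one byte (bits 0..=5 hold `log2(align)`,
/// bit 6 the mutability, bit 7 the all-zero flag).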
struct AllocFlags {
    align: Align,
    mutability: Mutability,
    all_zero: bool,
}

impl<E: Encoder> Encodable<E> for AllocFlags {
    fn encode(&self, encoder: &mut E) {
        const {
            let max_supported_align_repr = u8::MAX >> 2;
            let max_supported_align = 1 << max_supported_align_repr;
            assert!(Align::MAX.bytes() <= max_supported_align)
        }

        let mut flags = self.align.bytes().trailing_zeros() as u8;
        flags |= match self.mutability {
            Mutability::Not => 0,
            Mutability::Mut => 1 << 6,
        };
        flags |= (self.all_zero as u8) << 7;
        flags.encode(encoder);
    }
}

impl<D: Decoder> Decodable<D> for AllocFlags {
    fn decode(decoder: &mut D) -> Self {
        let flags: u8 = Decodable::decode(decoder);
        let align = flags & 0b0011_1111;
        let mutability = flags & 0b0100_0000;
        let all_zero = flags & 0b1000_0000;

        let align = Align::from_bytes(1 << align).unwrap();
        let mutability = match mutability {
            0 => Mutability::Not,
            _ => Mutability::Mut,
        };
        let all_zero = all_zero > 0;

        AllocFlags { align, mutability, all_zero }
    }
}

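/// Returns `true` if all bytes in `buf` are zero. The first byte is checked separately as a
/// fast path for buffers that are clearly not all-zero; the fold over the remaining bytes uses
/// `&` rather than short-circuiting `&&` so the loop stays branch-free.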
#[inline]
fn all_zero(buf: &[u8]) -> bool {
    if buf.is_empty() {
        return true;
    }
    if buf[0] != 0 {
        return false;
    }

    buf.iter().fold(true, |acc, b| acc & (*b == 0))
}

impl<Prov: Provenance, Extra, E: Encoder> Encodable<E> for Allocation<Prov, Extra, Box<[u8]>>
where
    ProvenanceMap<Prov>: Encodable<E>,
    Extra: Encodable<E>,
{
    fn encode(&self, encoder: &mut E) {
        let all_zero = all_zero(&self.bytes);
        AllocFlags { align: self.align, mutability: self.mutability, all_zero }.encode(encoder);

        encoder.emit_usize(self.bytes.len());
        if !all_zero {
            encoder.emit_raw_bytes(&self.bytes);
        }
        self.provenance.encode(encoder);
        self.init_mask.encode(encoder);
        self.extra.encode(encoder);
    }
}

impl<Prov: Provenance, Extra, D: Decoder> Decodable<D> for Allocation<Prov, Extra, Box<[u8]>>
where
    ProvenanceMap<Prov>: Decodable<D>,
    Extra: Decodable<D>,
{
    fn decode(decoder: &mut D) -> Self {
        let AllocFlags { align, mutability, all_zero } = Decodable::decode(decoder);

        let len = decoder.read_usize();
        let bytes = if all_zero { vec![0u8; len] } else { decoder.read_raw_bytes(len).to_vec() };
        let bytes = <Box<[u8]> as AllocBytes>::from_bytes(bytes, align, ());

        let provenance = Decodable::decode(decoder);
        let init_mask = Decodable::decode(decoder);
        let extra = Decodable::decode(decoder);

        Self { bytes, provenance, init_mask, align, mutability, extra }
    }
}

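/// Bound on how many bytes of a large allocation take part in its hash: buffers longer than
/// `MAX_HASHED_BUFFER_LEN` are hashed via their length plus their first and last
/// `MAX_BYTES_TO_HASH` bytes (see the `Hash` impl below).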
const MAX_BYTES_TO_HASH: usize = 64;

const MAX_HASHED_BUFFER_LEN: usize = 2 * MAX_BYTES_TO_HASH;

impl hash::Hash for Allocation {
    fn hash<H: hash::Hasher>(&self, state: &mut H) {
        let Self {
            bytes,
            provenance,
            init_mask,
            align,
            mutability,
            extra: (),
        } = self;

        let byte_count = bytes.len();
        if byte_count > MAX_HASHED_BUFFER_LEN {
            byte_count.hash(state);

            bytes[..MAX_BYTES_TO_HASH].hash(state);
            bytes[byte_count - MAX_BYTES_TO_HASH..].hash(state);
        } else {
            bytes.hash(state);
        }

        provenance.hash(state);
        init_mask.hash(state);
        align.hash(state);
        mutability.hash(state);
    }
}

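/// An interned allocation, as used for constants. `inner()` returns the underlying
/// `Allocation`; the `Debug` impl is deliberately terse since allocations can be huge.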
#[derive(Copy, Clone, PartialEq, Eq, Hash, HashStable)]
#[rustc_pass_by_value]
pub struct ConstAllocation<'tcx>(pub Interned<'tcx, Allocation>);

impl<'tcx> fmt::Debug for ConstAllocation<'tcx> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "ConstAllocation {{ .. }}")
    }
}

impl<'tcx> ConstAllocation<'tcx> {
    pub fn inner(self) -> &'tcx Allocation {
        self.0.0
    }
}

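/// Errors that can occur while accessing the bytes of an `Allocation`. They are turned into
/// interpreter errors by `to_interp_error`, which attaches the `AllocId` of the accessed
/// allocation.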
#[derive(Debug)]
pub enum AllocError {
    ScalarSizeMismatch(ScalarSizeMismatch),
    ReadPointerAsInt(Option<BadBytesAccess>),
    OverwritePartialPointer(Size),
    ReadPartialPointer(Size),
    InvalidUninitBytes(Option<BadBytesAccess>),
}
pub type AllocResult<T = ()> = Result<T, AllocError>;

impl From<ScalarSizeMismatch> for AllocError {
    fn from(s: ScalarSizeMismatch) -> Self {
        AllocError::ScalarSizeMismatch(s)
    }
}

impl AllocError {
    pub fn to_interp_error<'tcx>(self, alloc_id: AllocId) -> InterpErrorKind<'tcx> {
        use AllocError::*;
        match self {
            ScalarSizeMismatch(s) => {
                InterpErrorKind::UndefinedBehavior(UndefinedBehaviorInfo::ScalarSizeMismatch(s))
            }
            ReadPointerAsInt(info) => InterpErrorKind::Unsupported(
                UnsupportedOpInfo::ReadPointerAsInt(info.map(|b| (alloc_id, b))),
            ),
            OverwritePartialPointer(offset) => InterpErrorKind::Unsupported(
                UnsupportedOpInfo::OverwritePartialPointer(Pointer::new(alloc_id, offset)),
            ),
            ReadPartialPointer(offset) => InterpErrorKind::Unsupported(
                UnsupportedOpInfo::ReadPartialPointer(Pointer::new(alloc_id, offset)),
            ),
            InvalidUninitBytes(info) => InterpErrorKind::UndefinedBehavior(
                UndefinedBehaviorInfo::InvalidUninitBytes(info.map(|b| (alloc_id, b))),
            ),
        }
    }
}

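/// A byte range inside an allocation, given by its start offset and size.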
#[derive(Copy, Clone)]
pub struct AllocRange {
    pub start: Size,
    pub size: Size,
}

impl fmt::Debug for AllocRange {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "[{:#x}..{:#x}]", self.start.bytes(), self.end().bytes())
    }
}

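/// Free-standing constructor for `AllocRange`, mainly to keep call sites short.
///
/// Illustrative sketch (not a runnable doctest for this internal crate):
/// ```ignore (illustrative)
/// let r = alloc_range(Size::from_bytes(4), Size::from_bytes(2));
/// assert_eq!(r.end(), Size::from_bytes(6));
/// ```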
#[inline(always)]
pub fn alloc_range(start: Size, size: Size) -> AllocRange {
    AllocRange { start, size }
}

impl From<Range<Size>> for AllocRange {
    #[inline]
    fn from(r: Range<Size>) -> Self {
        alloc_range(r.start, r.end - r.start)
    }
}

impl From<Range<usize>> for AllocRange {
    #[inline]
    fn from(r: Range<usize>) -> Self {
        AllocRange::from(Size::from_bytes(r.start)..Size::from_bytes(r.end))
    }
}

impl AllocRange {
    #[inline(always)]
    pub fn end(self) -> Size {
        self.start + self.size
    }

    /// Returns `subrange` interpreted relative to `self`; panics if it does not fit in `self`.
    #[inline]
    pub fn subrange(self, subrange: AllocRange) -> AllocRange {
        let sub_start = self.start + subrange.start;
        let range = alloc_range(sub_start, subrange.size);
        assert!(range.end() <= self.end(), "access outside the bounds for given AllocRange");
        range
    }
}

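/// Whether a newly created allocation should have its bytes left uninitialized or set to zero.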
pub enum AllocInit {
    Uninit,
    Zero,
}

impl<Prov: Provenance, Bytes: AllocBytes> Allocation<Prov, (), Bytes> {
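    /// Creates an allocation that holds the given bytes, with no provenance and a fully
    /// initialized mask.
    ///
    /// Illustrative sketch, assuming the default `Box<[u8]>` bytes type (whose `AllocParams`
    /// is `()`):
    /// ```ignore (illustrative)
    /// let alloc: Allocation = Allocation::from_bytes(&[1u8, 2, 3][..], Align::ONE, Mutability::Not, ());
    /// assert_eq!(alloc.len(), 3);
    /// ```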
    pub fn from_bytes<'a>(
        slice: impl Into<Cow<'a, [u8]>>,
        align: Align,
        mutability: Mutability,
        params: <Bytes as AllocBytes>::AllocParams,
    ) -> Self {
        let bytes = Bytes::from_bytes(slice, align, params);
        let size = Size::from_bytes(bytes.len());
        Self {
            bytes,
            provenance: ProvenanceMap::new(),
            init_mask: InitMask::new(size, true),
            align,
            mutability,
            extra: (),
        }
    }

    pub fn from_bytes_byte_aligned_immutable<'a>(
        slice: impl Into<Cow<'a, [u8]>>,
        params: <Bytes as AllocBytes>::AllocParams,
    ) -> Self {
        Allocation::from_bytes(slice, Align::ONE, Mutability::Not, params)
    }

    fn new_inner<R>(
        size: Size,
        align: Align,
        init: AllocInit,
        params: <Bytes as AllocBytes>::AllocParams,
        fail: impl FnOnce() -> R,
    ) -> Result<Self, R> {
        let bytes = Bytes::zeroed(size, align, params).ok_or_else(fail)?;

        Ok(Allocation {
            bytes,
            provenance: ProvenanceMap::new(),
            init_mask: InitMask::new(
                size,
                match init {
                    AllocInit::Uninit => false,
                    AllocInit::Zero => true,
                },
            ),
            align,
            mutability: Mutability::Mut,
            extra: (),
        })
    }

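    /// Tries to create a new allocation of `size` bytes; if the host cannot provide the memory,
    /// this reports a delayed bug and returns a "memory exhausted" interpreter error.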
    pub fn try_new<'tcx>(
        size: Size,
        align: Align,
        init: AllocInit,
        params: <Bytes as AllocBytes>::AllocParams,
    ) -> InterpResult<'tcx, Self> {
        Self::new_inner(size, align, init, params, || {
            ty::tls::with(|tcx| tcx.dcx().delayed_bug("exhausted memory during interpretation"));
            InterpErrorKind::ResourceExhaustion(ResourceExhaustionInfo::MemoryExhausted)
        })
        .into()
    }

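    /// Like `try_new`, but panics instead of returning an error if the host runs out of memory.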
    pub fn new(
        size: Size,
        align: Align,
        init: AllocInit,
        params: <Bytes as AllocBytes>::AllocParams,
    ) -> Self {
        match Self::new_inner(size, align, init, params, || {
            panic!(
                "interpreter ran out of memory: cannot create allocation of {} bytes",
                size.bytes()
            );
        }) {
            Ok(x) => x,
            Err(x) => x,
        }
    }

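    /// Attaches extra state to this allocation, turning an `Allocation<Prov, ()>` into an
    /// `Allocation<Prov, Extra>` without touching bytes, provenance, or the init mask.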
    pub fn with_extra<Extra>(self, extra: Extra) -> Allocation<Prov, Extra, Bytes> {
        Allocation {
            bytes: self.bytes,
            provenance: self.provenance,
            init_mask: self.init_mask,
            align: self.align,
            mutability: self.mutability,
            extra,
        }
    }
}

impl Allocation {
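    /// Adjusts a `tcx`-level allocation for use by a specific interpreter machine: `alloc_bytes`
    /// rebuilds the byte buffer, and `adjust_ptr` maps every pointer stored in the allocation,
    /// rewriting both its offset bytes and its provenance entry.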
    pub fn adjust_from_tcx<'tcx, Prov: Provenance, Bytes: AllocBytes>(
        &self,
        cx: &impl HasDataLayout,
        alloc_bytes: impl FnOnce(&[u8], Align) -> InterpResult<'tcx, Bytes>,
        mut adjust_ptr: impl FnMut(Pointer<CtfeProvenance>) -> InterpResult<'tcx, Pointer<Prov>>,
    ) -> InterpResult<'tcx, Allocation<Prov, (), Bytes>> {
        // Copy the data.
        let mut bytes = alloc_bytes(&*self.bytes, self.align)?;
        // Adjust provenance of pointers stored in this allocation.
        let mut new_provenance = Vec::with_capacity(self.provenance.ptrs().len());
        let ptr_size = cx.data_layout().pointer_size().bytes_usize();
        let endian = cx.data_layout().endian;
        for &(offset, alloc_id) in self.provenance.ptrs().iter() {
            let idx = offset.bytes_usize();
            let ptr_bytes = &mut bytes[idx..idx + ptr_size];
            let bits = read_target_uint(endian, ptr_bytes).unwrap();
            let (ptr_prov, ptr_offset) =
                adjust_ptr(Pointer::new(alloc_id, Size::from_bytes(bits)))?.into_raw_parts();
            write_target_uint(endian, ptr_bytes, ptr_offset.bytes().into()).unwrap();
            new_provenance.push((offset, ptr_prov));
        }
        // Create the adjusted allocation.
        interp_ok(Allocation {
            bytes,
            provenance: ProvenanceMap::from_presorted_ptrs(new_provenance),
            init_mask: self.init_mask.clone(),
            align: self.align,
            mutability: self.mutability,
            extra: self.extra,
        })
    }
}

impl<Prov: Provenance, Extra, Bytes: AllocBytes> Allocation<Prov, Extra, Bytes> {
    pub fn len(&self) -> usize {
        self.bytes.len()
    }

    pub fn size(&self) -> Size {
        Size::from_bytes(self.len())
    }

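    /// Looks directly at the raw bytes, ignoring both provenance and the init mask; as the name
    /// says, this is only meant for use outside the interpreter, where exposing uninitialized or
    /// pointer bytes cannot affect the interpreted program.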
    pub fn inspect_with_uninit_and_ptr_outside_interpreter(&self, range: Range<usize>) -> &[u8] {
        &self.bytes[range]
    }

    pub fn init_mask(&self) -> &InitMask {
        &self.init_mask
    }

    pub fn provenance(&self) -> &ProvenanceMap<Prov> {
        &self.provenance
    }
}

impl<Prov: Provenance, Extra, Bytes: AllocBytes> Allocation<Prov, Extra, Bytes> {
    #[inline]
    pub fn get_bytes_unchecked(&self, range: AllocRange) -> &[u8] {
        &self.bytes[range.start.bytes_usize()..range.end().bytes_usize()]
    }

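    /// Checked read of a byte range as plain bytes: fails if any byte is uninitialized, or
    /// (unless offsets double as addresses for this provenance type) if the range overlaps a
    /// pointer, reporting the offending sub-range in the error.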
    #[inline]
    pub fn get_bytes_strip_provenance(
        &self,
        cx: &impl HasDataLayout,
        range: AllocRange,
    ) -> AllocResult<&[u8]> {
        self.init_mask.is_range_initialized(range).map_err(|uninit_range| {
            AllocError::InvalidUninitBytes(Some(BadBytesAccess {
                access: range,
                bad: uninit_range,
            }))
        })?;
        if !Prov::OFFSET_IS_ADDR && !self.provenance.range_empty(range, cx) {
            // Find the provenance and report it as the bad part of the access.
            let (offset, _prov) = self
                .provenance
                .range_ptrs_get(range, cx)
                .first()
                .copied()
                .expect("there must be provenance somewhere here");
            let start = offset.max(range.start); // the pointer might begin before `range`
            let end = (offset + cx.pointer_size()).min(range.end()); // the pointer might end after `range`
            return Err(AllocError::ReadPointerAsInt(Some(BadBytesAccess {
                access: range,
                bad: AllocRange::from(start..end),
            })));
        }
        Ok(self.get_bytes_unchecked(range))
    }

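    /// Returns mutable access to a byte range that is about to be completely overwritten: the
    /// range is marked as initialized and any provenance in it is cleared before the mutable
    /// slice is handed out.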
    pub fn get_bytes_unchecked_for_overwrite(
        &mut self,
        cx: &impl HasDataLayout,
        range: AllocRange,
    ) -> AllocResult<&mut [u8]> {
        self.mark_init(range, true);
        self.provenance.clear(range, cx)?;

        Ok(&mut self.bytes[range.start.bytes_usize()..range.end().bytes_usize()])
    }

    /// Variant of `get_bytes_unchecked_for_overwrite` that returns a raw pointer instead of a
    /// borrow; it performs its own bounds check since raw pointer arithmetic is not checked.
    pub fn get_bytes_unchecked_for_overwrite_ptr(
        &mut self,
        cx: &impl HasDataLayout,
        range: AllocRange,
    ) -> AllocResult<*mut [u8]> {
        self.mark_init(range, true);
        self.provenance.clear(range, cx)?;

        assert!(range.end().bytes_usize() <= self.bytes.len()); // need to do our own bounds check
        let begin_ptr = self.bytes.as_mut_ptr().wrapping_add(range.start.bytes_usize());
        let len = range.end().bytes_usize() - range.start.bytes_usize();
        Ok(ptr::slice_from_raw_parts_mut(begin_ptr, len))
    }

    pub fn get_bytes_unchecked_raw_mut(&mut self) -> *mut u8 {
        assert!(Prov::OFFSET_IS_ADDR);
        self.bytes.as_mut_ptr()
    }

    pub fn get_bytes_unchecked_raw(&self) -> *const u8 {
        assert!(Prov::OFFSET_IS_ADDR);
        self.bytes.as_ptr()
    }
}

impl<Prov: Provenance, Extra, Bytes: AllocBytes> Allocation<Prov, Extra, Bytes> {
    fn mark_init(&mut self, range: AllocRange, is_init: bool) {
        if range.size.bytes() == 0 {
            return;
        }
        assert!(self.mutability == Mutability::Mut);
        self.init_mask.set_range(range, is_init);
    }

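    /// Reads a scalar from the given range, which must be fully initialized. With
    /// `read_provenance`, the range must be pointer-sized and the result may carry provenance;
    /// without it, the bytes are read as a plain integer, and encountering provenance is an
    /// error unless offsets double as addresses for this provenance type.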
    pub fn read_scalar(
        &self,
        cx: &impl HasDataLayout,
        range: AllocRange,
        read_provenance: bool,
    ) -> AllocResult<Scalar<Prov>> {
        // First and foremost, if anything is uninit, bail.
        if let Err(bad) = self.init_mask.is_range_initialized(range) {
            return Err(AllocError::InvalidUninitBytes(Some(BadBytesAccess {
                access: range,
                bad,
            })));
        }

        // Get the integer part of the result; provenance still has to be checked below.
        let bytes = self.get_bytes_unchecked(range);
        let bits = read_target_uint(cx.data_layout().endian, bytes).unwrap();

        if read_provenance {
            assert_eq!(range.size, cx.data_layout().pointer_size());

            // The easy case: provenance sits exactly where we are reading, so data and
            // provenance can simply be put back together.
            if let Some(prov) = self.provenance.get_ptr(range.start) {
                let ptr = Pointer::new(prov, Size::from_bytes(bits));
                return Ok(Scalar::from_pointer(ptr, cx));
            }

            if Prov::OFFSET_IS_ADDR {
                // Offsets double as addresses, so join the provenance of every byte we read.
                let mut prov = self.provenance.get(range.start, cx);
                for offset in Size::from_bytes(1)..range.size {
                    let this_prov = self.provenance.get(range.start + offset, cx);
                    prov = Prov::join(prov, this_prov);
                }
                let ptr = Pointer::new(prov, Size::from_bytes(bits));
                return Ok(Scalar::from_maybe_pointer(ptr, cx));
            } else {
                // No exact pointer at the start: the read only works if there is no provenance
                // at all in the range; otherwise this is a partial pointer read.
                if self.provenance.range_empty(range, cx) {
                    return Ok(Scalar::from_uint(bits, range.size));
                }
                return Err(AllocError::ReadPartialPointer(range.start));
            }
        } else {
            // Not reading a pointer: succeed if provenance can be ignored or there is none.
            if Prov::OFFSET_IS_ADDR || self.provenance.range_empty(range, cx) {
                return Ok(Scalar::from_uint(bits, range.size));
            }
            return Err(AllocError::ReadPointerAsInt(None));
        }
    }

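    /// Writes a scalar into the given range, updating the init mask and recording provenance if
    /// a pointer is stored. The allocation must be mutable, and pointers can only be written at
    /// pointer size.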
    pub fn write_scalar(
        &mut self,
        cx: &impl HasDataLayout,
        range: AllocRange,
        val: Scalar<Prov>,
    ) -> AllocResult {
        assert!(self.mutability == Mutability::Mut);

        // Split the scalar into its raw bits, plus (for pointers) the provenance to store
        // alongside them.
        let (bytes, provenance) = match val.to_bits_or_ptr_internal(range.size)? {
            Right(ptr) => {
                let (provenance, offset) = ptr.into_raw_parts();
                (u128::from(offset.bytes()), Some(provenance))
            }
            Left(data) => (data, None),
        };

        let endian = cx.data_layout().endian;
        let dst = self.get_bytes_unchecked_for_overwrite(cx, range)?;
        write_target_uint(endian, dst, bytes).unwrap();

        // See if we have to also store some provenance.
        if let Some(provenance) = provenance {
            assert_eq!(range.size, cx.data_layout().pointer_size());
            self.provenance.insert_ptr(range.start, provenance, cx);
        }

        Ok(())
    }

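    /// Marks the given range as uninitialized and removes all provenance from it.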
    pub fn write_uninit(&mut self, cx: &impl HasDataLayout, range: AllocRange) -> AllocResult {
        self.mark_init(range, false);
        self.provenance.clear(range, cx)?;
        Ok(())
    }

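    /// Marks the given range (or the entire allocation if `None`) as initialized and replaces
    /// its provenance with wildcard provenance; as the name suggests, this models the effect of
    /// a write performed by native code.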
    pub fn process_native_write(&mut self, cx: &impl HasDataLayout, range: Option<AllocRange>) {
        let range = range.unwrap_or_else(|| AllocRange {
            start: Size::ZERO,
            size: Size::from_bytes(self.len()),
        });
        self.mark_init(range, true);
        self.provenance.write_wildcards(cx, range);
    }

    pub fn clear_provenance(&mut self, cx: &impl HasDataLayout, range: AllocRange) -> AllocResult {
        self.provenance.clear(range, cx)?;
        Ok(())
    }

    /// Applies a previously prepared provenance copy.
    pub fn provenance_apply_copy(&mut self, copy: ProvenanceCopy<Prov>) {
        self.provenance.apply_copy(copy)
    }

    /// Applies a previously prepared copy of the init mask.
    pub fn init_mask_apply_copy(&mut self, copy: InitCopy, range: AllocRange, repeat: u64) {
        self.init_mask.apply_copy(copy, range, repeat)
    }
}