rustc_middle/mir/interpret/allocation/provenance_map.rs

use std::cmp;
use std::ops::Range;

use rustc_abi::{HasDataLayout, Size};
use rustc_data_structures::sorted_map::SortedMap;
use rustc_macros::HashStable;
use rustc_serialize::{Decodable, Decoder, Encodable, Encoder};
use tracing::trace;

use super::{AllocError, AllocRange, AllocResult, CtfeProvenance, Provenance, alloc_range};

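/// Stores the provenance information of pointers stored in memory.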
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
#[derive(HashStable)]
pub struct ProvenanceMap<Prov = CtfeProvenance> {
    /// Provenance in this map applies from the given offset for an entire pointer-sized
    /// range of bytes; entries are therefore always at least a pointer size apart.
    ptrs: SortedMap<Size, Prov>,
    /// Provenance in this map applies to a single byte only. It is only used (and only
    /// `Some`) when `Prov::OFFSET_IS_ADDR` is true, and is kept disjoint from `ptrs`.
    bytes: Option<Box<SortedMap<Size, Prov>>>,
}

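// Serialization only supports provenance without `OFFSET_IS_ADDR` (such as the default
// `CtfeProvenance`); in that case `bytes` is always `None`, so only `ptrs` is encoded.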
impl<D: Decoder, Prov: Provenance + Decodable<D>> Decodable<D> for ProvenanceMap<Prov> {
    fn decode(d: &mut D) -> Self {
        assert!(!Prov::OFFSET_IS_ADDR);
        Self { ptrs: Decodable::decode(d), bytes: None }
    }
}
impl<S: Encoder, Prov: Provenance + Encodable<S>> Encodable<S> for ProvenanceMap<Prov> {
    fn encode(&self, s: &mut S) {
        let Self { ptrs, bytes } = self;
        assert!(!Prov::OFFSET_IS_ADDR);
        debug_assert!(bytes.is_none());
        ptrs.encode(s)
    }
}

impl<Prov> ProvenanceMap<Prov> {
    pub fn new() -> Self {
        ProvenanceMap { ptrs: SortedMap::new(), bytes: None }
    }

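    /// The caller must guarantee that the given provenance list is already sorted
    /// by offset and contains no duplicates.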
    pub fn from_presorted_ptrs(r: Vec<(Size, Prov)>) -> Self {
        ProvenanceMap { ptrs: SortedMap::from_presorted_elements(r), bytes: None }
    }
}

impl ProvenanceMap {
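    /// Gives access to the pointer-sized provenance entries. Only available for
    /// `CtfeProvenance` maps, which never carry bytewise provenance.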
    #[inline]
    pub fn ptrs(&self) -> &SortedMap<Size, CtfeProvenance> {
        debug_assert!(self.bytes.is_none()); // `CtfeProvenance` never has bytewise provenance
        &self.ptrs
    }
}

impl<Prov: Provenance> ProvenanceMap<Prov> {
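    /// Widens `range` so that the returned key range covers every pointer-sized entry that
    /// overlaps it: an entry starting up to `pointer_size - 1` bytes before `range.start`
    /// still overlaps its first byte.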
    fn adjusted_range_ptrs(range: AllocRange, cx: &impl HasDataLayout) -> Range<Size> {
        let adjusted_start = Size::from_bytes(
            range.start.bytes().saturating_sub(cx.data_layout().pointer_size().bytes() - 1),
        );
        adjusted_start..range.end()
    }

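    /// Returns all pointer-sized provenance entries overlapping the given range.
    /// If the range has length 0, returns entries that cross the edge between `start - 1`
    /// and `start`.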
    pub(super) fn range_ptrs_get(
        &self,
        range: AllocRange,
        cx: &impl HasDataLayout,
    ) -> &[(Size, Prov)] {
        self.ptrs.range(Self::adjusted_range_ptrs(range, cx))
    }

    /// Equivalent to `range_ptrs_get(range, cx).is_empty()`.
    pub(super) fn range_ptrs_is_empty(&self, range: AllocRange, cx: &impl HasDataLayout) -> bool {
        self.ptrs.range_is_empty(Self::adjusted_range_ptrs(range, cx))
    }

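    /// Returns all bytewise provenance in the given range.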
    fn range_bytes_get(&self, range: AllocRange) -> &[(Size, Prov)] {
        if let Some(bytes) = self.bytes.as_ref() {
            bytes.range(range.start..range.end())
        } else {
            &[]
        }
    }

    /// Equivalent to `range_bytes_get(range).is_empty()`.
    fn range_bytes_is_empty(&self, range: AllocRange) -> bool {
        self.bytes.as_ref().is_none_or(|bytes| bytes.range_is_empty(range.start..range.end()))
    }

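    /// Gets the provenance of a single byte, whether it stems from a pointer-sized entry
    /// or from bytewise provenance.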
    pub fn get(&self, offset: Size, cx: &impl HasDataLayout) -> Option<Prov> {
        let prov = self.range_ptrs_get(alloc_range(offset, Size::from_bytes(1)), cx);
        debug_assert!(prov.len() <= 1);
        if let Some(entry) = prov.first() {
            // A pointer-sized entry overlapping this byte determines its provenance;
            // bytewise provenance is kept disjoint from it.
            debug_assert!(self.bytes.as_ref().is_none_or(|b| !b.contains_key(&offset)));
            Some(entry.1)
        } else {
            // Otherwise, fall back to the bytewise provenance, if any.
            self.bytes.as_ref().and_then(|b| b.get(&offset).copied())
        }
    }

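    /// Returns the provenance of a pointer-sized entry starting exactly at `offset`, if any.
    /// Bytewise provenance is not considered.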
    pub fn get_ptr(&self, offset: Size) -> Option<Prov> {
        self.ptrs.get(&offset).copied()
    }

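    /// Returns `true` if no provenance (neither pointer-sized nor bytewise) overlaps
    /// the given range.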
    pub fn range_empty(&self, range: AllocRange, cx: &impl HasDataLayout) -> bool {
        self.range_ptrs_is_empty(range, cx) && self.range_bytes_is_empty(range)
    }

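    /// Yields all the provenance values stored in this map.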
    pub fn provenances(&self) -> impl Iterator<Item = Prov> {
        let bytes = self.bytes.iter().flat_map(|b| b.values());
        self.ptrs.values().chain(bytes).copied()
    }

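    /// Inserts pointer-sized provenance at `offset`. The affected pointer-sized range must
    /// not carry any provenance yet (only checked in debug builds).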
    pub fn insert_ptr(&mut self, offset: Size, prov: Prov, cx: &impl HasDataLayout) {
        debug_assert!(self.range_empty(alloc_range(offset, cx.data_layout().pointer_size()), cx));
        self.ptrs.insert(offset, prov);
    }

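    /// Removes all provenance inside the given range.
    /// If a pointer-sized entry straddles an edge of the range, its out-of-range part is
    /// preserved bytewise when `Prov::OFFSET_IS_ADDR`; otherwise this returns an error.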
    pub fn clear(&mut self, range: AllocRange, cx: &impl HasDataLayout) -> AllocResult {
        let start = range.start;
        let end = range.end();
        // Clearing the bytewise part is easy: just drop the entries in the range.
        if Prov::OFFSET_IS_ADDR {
            if let Some(bytes) = self.bytes.as_mut() {
                bytes.remove_range(start..end);
            }
        } else {
            debug_assert!(self.bytes.is_none());
        }

        let pointer_size = cx.data_layout().pointer_size();

        // For the pointer-sized part, find the first (inclusive) and last (exclusive) byte
        // of provenance that overlaps the given range.
        let (first, last) = {
            if self.range_ptrs_is_empty(range, cx) {
                // No provenance overlaps this range; we are done.
                return Ok(());
            }

            let provenance = self.range_ptrs_get(range, cx);
            (provenance.first().unwrap().0, provenance.last().unwrap().0 + pointer_size)
        };

        // Handle a pointer that straddles the start of the range.
        if first < start {
            if !Prov::OFFSET_IS_ADDR {
                // Provenance cannot be split into pieces smaller than a pointer.
                return Err(AllocError::OverwritePartialPointer(first));
            }
            // Keep the part outside the range as bytewise provenance.
            let prov = self.ptrs[&first];
            let bytes = self.bytes.get_or_insert_with(Box::default);
            for offset in first..start {
                bytes.insert(offset, prov);
            }
        }
        // Handle a pointer that straddles the end of the range.
        if last > end {
            let begin_of_last = last - pointer_size;
            if !Prov::OFFSET_IS_ADDR {
                // Provenance cannot be split into pieces smaller than a pointer.
                return Err(AllocError::OverwritePartialPointer(begin_of_last));
            }
            // Keep the part outside the range as bytewise provenance.
            let prov = self.ptrs[&begin_of_last];
            let bytes = self.bytes.get_or_insert_with(Box::default);
            for offset in end..last {
                bytes.insert(offset, prov);
            }
        }

        // Remove all pointer-sized provenance overlapping the range.
        self.ptrs.remove_range(first..last);

        Ok(())
    }

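    /// Overwrites all provenance in the given range with wildcard provenance.
    /// Pointers partially overwritten keep the provenance of their remaining bytes bytewise.
    /// Requires `Prov::OFFSET_IS_ADDR`; panics otherwise.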
    pub fn write_wildcards(&mut self, cx: &impl HasDataLayout, range: AllocRange) {
        assert!(
            Prov::OFFSET_IS_ADDR,
            "writing wildcard provenance is not supported when `OFFSET_IS_ADDR` is false"
        );
        let wildcard = Prov::WILDCARD.unwrap();

        let bytes = self.bytes.get_or_insert_with(Box::default);

        // Remove the pointer-sized entries overlapping the range, but keep the parts of
        // partially overwritten pointers that lie outside the range as bytewise provenance.
        let ptr_range = Self::adjusted_range_ptrs(range, cx);
        let ptrs = self.ptrs.range(ptr_range.clone());
        if let Some((offset, prov)) = ptrs.first() {
            for byte_ofs in *offset..range.start {
                bytes.insert(byte_ofs, *prov);
            }
        }
        if let Some((offset, prov)) = ptrs.last() {
            for byte_ofs in range.end()..*offset + cx.data_layout().pointer_size() {
                bytes.insert(byte_ofs, *prov);
            }
        }
        self.ptrs.remove_range(ptr_range);

        // Overwrite every byte in the range with wildcard provenance.
        for offset in range.start..range.end() {
            bytes.insert(offset, wildcard);
        }
    }
}

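/// A partial, owned list of provenance to transfer into another allocation.
///
/// Offsets are already adjusted to the destination allocation.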
pub struct ProvenanceCopy<Prov> {
    dest_ptrs: Option<Box<[(Size, Prov)]>>,
    dest_bytes: Option<Box<[(Size, Prov)]>>,
}

impl<Prov: Provenance> ProvenanceMap<Prov> {
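    /// Prepares provenance for copying `count` repetitions of the `src` range to an area
    /// starting at offset `dest`. The result can later be applied with `apply_copy`.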
    pub fn prepare_copy(
        &self,
        src: AllocRange,
        dest: Size,
        count: u64,
        cx: &impl HasDataLayout,
    ) -> AllocResult<ProvenanceCopy<Prov>> {
        let shift_offset = move |idx, offset| {
            // Compute the start of the `idx`-th copy in the destination...
            let dest_offset = dest + src.size * idx;
            // ...and shift `offset` from the source range into that copy.
            (offset - src.start) + dest_offset
        };
        let ptr_size = cx.data_layout().pointer_size();

        // Pointer-sized provenance: collect the entries that lie entirely within `src`.
        // This only makes sense if we are copying at least one pointer worth of bytes.
        let mut dest_ptrs_box = None;
        if src.size >= ptr_size {
            let adjusted_end = Size::from_bytes(src.end().bytes() - (ptr_size.bytes() - 1));
            let ptrs = self.ptrs.range(src.start..adjusted_end);
            // Materialize all `count` shifted copies up front so that `apply_copy` can later
            // bulk-insert them with `insert_presorted`.
            let mut dest_ptrs = Vec::with_capacity(ptrs.len() * (count as usize));
            for i in 0..count {
                dest_ptrs
                    .extend(ptrs.iter().map(|&(offset, reloc)| (shift_offset(i, offset), reloc)));
            }
            debug_assert_eq!(dest_ptrs.len(), dest_ptrs.capacity());
            dest_ptrs_box = Some(dest_ptrs.into_boxed_slice());
        };

        // Bytewise provenance: the existing bytewise entries in the range, plus pointer-sized
        // entries that only partially overlap the start or end of the range.
        let mut dest_bytes_box = None;
        let begin_overlap = self.range_ptrs_get(alloc_range(src.start, Size::ZERO), cx).first();
        let end_overlap = self.range_ptrs_get(alloc_range(src.end(), Size::ZERO), cx).first();
        if !Prov::OFFSET_IS_ADDR {
            // Without `OFFSET_IS_ADDR` there is no bytewise provenance, and a partially
            // copied pointer cannot be represented.
            if let Some(entry) = begin_overlap {
                return Err(AllocError::ReadPartialPointer(entry.0));
            }
            if let Some(entry) = end_overlap {
                return Err(AllocError::ReadPartialPointer(entry.0));
            }
            debug_assert!(self.bytes.is_none());
        } else {
            let mut bytes = Vec::new();
            // If a pointer overlaps the start of the range, keep its in-range part bytewise.
            if let Some(entry) = begin_overlap {
                trace!("start overlapping entry: {entry:?}");
                // For very small copies, make sure we do not run past the end of `src`.
                let entry_end = cmp::min(entry.0 + ptr_size, src.end());
                for offset in src.start..entry_end {
                    bytes.push((offset, entry.1));
                }
            } else {
                trace!("no start overlapping entry");
            }

            // Then the bytewise provenance that is already stored for this range.
            bytes.extend(self.range_bytes_get(src));

            // Finally, a pointer that overlaps the end of the range.
            if let Some(entry) = end_overlap {
                trace!("end overlapping entry: {entry:?}");
                // For very small copies, make sure we do not start before `src` does.
                let entry_start = cmp::max(entry.0, src.start);
                for offset in entry_start..src.end() {
                    if bytes.last().is_none_or(|bytes_entry| bytes_entry.0 < offset) {
                        bytes.push((offset, entry.1));
                    } else {
                        // This offset was already covered above, which can only happen when the
                        // start and end of the range overlap the same pointer.
                        assert!(entry.0 <= src.start);
                    }
                }
            } else {
                trace!("no end overlapping entry");
            }
            trace!("byte provenances: {bytes:?}");

            // Materialize all `count` shifted copies of the bytewise provenance as well.
            let mut dest_bytes = Vec::with_capacity(bytes.len() * (count as usize));
            for i in 0..count {
                dest_bytes
                    .extend(bytes.iter().map(|&(offset, reloc)| (shift_offset(i, offset), reloc)));
            }
            debug_assert_eq!(dest_bytes.len(), dest_bytes.capacity());
            dest_bytes_box = Some(dest_bytes.into_boxed_slice());
        }

        Ok(ProvenanceCopy { dest_ptrs: dest_ptrs_box, dest_bytes: dest_bytes_box })
    }

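    /// Applies a provenance copy prepared by `prepare_copy`.
    /// The affected destination range is expected to be free of provenance.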
    pub fn apply_copy(&mut self, copy: ProvenanceCopy<Prov>) {
        if let Some(dest_ptrs) = copy.dest_ptrs {
            self.ptrs.insert_presorted(dest_ptrs.into());
        }
        if Prov::OFFSET_IS_ADDR {
            if let Some(dest_bytes) = copy.dest_bytes
                && !dest_bytes.is_empty()
            {
                self.bytes.get_or_insert_with(Box::default).insert_presorted(dest_bytes.into());
            }
        } else {
            debug_assert!(copy.dest_bytes.is_none());
        }
    }
}