// miri/borrow_tracker/tree_borrows/tree.rs

1//! In this file we handle the "Tree" part of Tree Borrows, i.e. all tree
2//! traversal functions, optimizations to trim branches, and keeping track of
3//! the relative position of the access to each node being updated. This of course
4//! also includes the definition of the tree structure.
5//!
6//! Functions here manipulate permissions but are oblivious to them: as
7//! the internals of `Permission` are private, the update process is a black
8//! box. All we need to know here are
9//! - the fact that updates depend only on the old state, the status of protectors,
10//!   and the relative position of the access;
11//! - idempotency properties asserted in `perms.rs` (for optimizations)
12
13use std::ops::Range;
14use std::{cmp, fmt, mem};
15
16use rustc_abi::Size;
17use rustc_data_structures::fx::FxHashSet;
18use rustc_span::Span;
19use smallvec::SmallVec;
20
21use super::diagnostics::AccessCause;
22use super::wildcard::WildcardState;
23use crate::borrow_tracker::tree_borrows::Permission;
24use crate::borrow_tracker::tree_borrows::diagnostics::{
25    self, NodeDebugInfo, TbError, TransitionError, no_valid_exposed_references_error,
26};
27use crate::borrow_tracker::tree_borrows::foreign_access_skipping::IdempotentForeignAccess;
28use crate::borrow_tracker::tree_borrows::perms::PermTransition;
29use crate::borrow_tracker::tree_borrows::unimap::{UniIndex, UniKeyMap, UniValMap};
30use crate::borrow_tracker::{AccessKind, GlobalState, ProtectorKind};
31use crate::*;
32
33mod tests;
34
35/// Data for a reference at single *location*.
/// Data for a reference at a single *location*.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub(super) struct LocationState {
    /// A location is "accessed" when it is child-accessed for the first time (and the initial
    /// retag initializes the location for the range covered by the type), and it then stays
    /// accessed forever.
    /// For accessed locations, "permission" is the current permission. However, for
    /// non-accessed locations, we still need to track the "future initial permission": this will
    /// start out to be `default_initial_perm`, but foreign accesses need to be taken into account.
    /// Crucially however, while transitions to `Disabled` would usually be UB if this location is
    /// protected, that is *not* the case for non-accessed locations. Instead we just have a latent
    /// "future initial permission" of `Disabled`, causing UB only if an access is ever actually
    /// performed.
    /// Note that the tree root is also always accessed, as if the allocation was a write access.
    accessed: bool,
    /// This pointer's current permission / future initial permission.
    permission: Permission,
    /// See `foreign_access_skipping.rs`.
    /// Stores an idempotent foreign access for this location and its children.
    /// For correctness, this must not be too strong, and the recorded idempotent foreign access
    /// of all children must be at least as strong as this. For performance, it should be as strong as possible.
    idempotent_foreign_access: IdempotentForeignAccess,
}
58
impl LocationState {
    /// Constructs a new initial state. It has neither been accessed, nor been subjected
    /// to any foreign access yet.
    /// The permission is not allowed to be `Unique` (asserted: it must be an initial
    /// permission or `Disabled`).
    /// `sifa` is the (strongest) idempotent foreign access, see `foreign_access_skipping.rs`
    pub fn new_non_accessed(permission: Permission, sifa: IdempotentForeignAccess) -> Self {
        assert!(permission.is_initial() || permission.is_disabled());
        Self { permission, accessed: false, idempotent_foreign_access: sifa }
    }

    /// Constructs a new initial state. It has not yet been subjected
    /// to any foreign access. However, it is already marked as having been accessed.
    /// `sifa` is the (strongest) idempotent foreign access, see `foreign_access_skipping.rs`
    pub fn new_accessed(permission: Permission, sifa: IdempotentForeignAccess) -> Self {
        Self { permission, accessed: true, idempotent_foreign_access: sifa }
    }

    /// Check if the location has been accessed, i.e. if it has
    /// ever been accessed through a child pointer.
    pub fn accessed(&self) -> bool {
        self.accessed
    }

    /// The current permission (for accessed locations) or future initial
    /// permission (for non-accessed ones).
    pub fn permission(&self) -> Permission {
        self.permission
    }

    /// Performs an access on this index and updates node,
    /// perm and wildcard_state to reflect the transition.
    ///
    /// Returns `Err` if the transition is forbidden (a child access through an
    /// insufficient permission, or a protected accessed location becoming
    /// `Disabled`); the error is raised by the inner `perform_access` call.
    fn perform_transition(
        &mut self,
        idx: UniIndex,
        nodes: &mut UniValMap<Node>,
        wildcard_accesses: &mut UniValMap<WildcardState>,
        access_kind: AccessKind,
        access_cause: AccessCause,
        access_range: Option<AllocRange>,
        relatedness: AccessRelatedness,
        span: Span,
        location_range: Range<u64>,
        protected: bool,
    ) -> Result<(), TransitionError> {
        // Call this function now (i.e. only if we know `relatedness`), which
        // ensures it is only called when `skip_if_known_noop` returns
        // `Recurse`, due to the contract of `traverse_this_parents_children_other`.
        self.record_new_access(access_kind, relatedness);

        let transition = self.perform_access(access_kind, relatedness, protected)?;
        if !transition.is_noop() {
            let node = nodes.get_mut(idx).unwrap();
            // Record the event as part of the history.
            node.debug_info.history.push(diagnostics::Event {
                transition,
                is_foreign: relatedness.is_foreign(),
                access_cause,
                access_range,
                transition_range: location_range,
                span,
            });

            // We need to update the wildcard state, if the permission
            // of an exposed pointer changes.
            if node.is_exposed {
                let access_type = self.permission.strongest_allowed_child_access(protected);
                WildcardState::update_exposure(idx, access_type, nodes, wildcard_accesses);
            }
        }
        Ok(())
    }

    /// Apply the effect of an access to one location, including
    /// - applying `Permission::perform_access` to the inner `Permission`,
    /// - emitting protector UB if the location is accessed,
    /// - updating the accessed status (child accesses produce accessed locations).
    fn perform_access(
        &mut self,
        access_kind: AccessKind,
        rel_pos: AccessRelatedness,
        protected: bool,
    ) -> Result<PermTransition, TransitionError> {
        let old_perm = self.permission;
        let transition = Permission::perform_access(access_kind, rel_pos, old_perm, protected)
            .ok_or(TransitionError::ChildAccessForbidden(old_perm))?;
        // A child (non-foreign) access marks the location as accessed.
        self.accessed |= !rel_pos.is_foreign();
        self.permission = transition.applied(old_perm).unwrap();
        // Why do only accessed locations cause protector errors?
        // Consider two mutable references `x`, `y` into disjoint parts of
        // the same allocation. A priori, these may actually both be used to
        // access the entire allocation, as long as only reads occur. However,
        // a write to `y` needs to somehow record that `x` can no longer be used
        // on that location at all. For these non-accessed locations (i.e., locations
        // that haven't been accessed with `x` yet), we track the "future initial state":
        // it defaults to whatever the initial state of the tag is,
        // but the access to `y` moves that "future initial state" of `x` to `Disabled`.
        // However, usually a `Reserved -> Disabled` transition would be UB due to the protector!
        // So clearly protectors shouldn't fire for such "future initial state" transitions.
        //
        // See the test `two_mut_protected_same_alloc` in `tests/pass/tree_borrows/tree-borrows.rs`
        // for an example of safe code that would be UB if we forgot to check `self.accessed`.
        if protected && self.accessed && transition.produces_disabled() {
            return Err(TransitionError::ProtectedDisabled(old_perm));
        }
        Ok(transition)
    }

    /// Like `perform_access`, but ignores the concrete error cause and also uses state-passing
    /// rather than a mutable reference. As such, it returns `Some(x)` if the transition succeeded,
    /// or `None` if there was an error.
    #[cfg(test)]
    fn perform_access_no_fluff(
        mut self,
        access_kind: AccessKind,
        rel_pos: AccessRelatedness,
        protected: bool,
    ) -> Option<Self> {
        match self.perform_access(access_kind, rel_pos, protected) {
            Ok(_) => Some(self),
            Err(_) => None,
        }
    }

    /// Tree traversal optimizations. See `foreign_access_skipping.rs`.
    /// This checks if such a foreign access can be skipped.
    /// Child accesses always return `Recurse`.
    fn skip_if_known_noop(
        &self,
        access_kind: AccessKind,
        rel_pos: AccessRelatedness,
    ) -> ContinueTraversal {
        if rel_pos.is_foreign() {
            let happening_now = IdempotentForeignAccess::from_foreign(access_kind);
            let mut new_access_noop =
                self.idempotent_foreign_access.can_skip_foreign_access(happening_now);
            if self.permission.is_disabled() {
                // A foreign access to a `Disabled` tag will have almost no observable effect.
                // It's a theorem that `Disabled` nodes have no protected accessed children,
                // and so this foreign access will never trigger any protector.
                // (Intuition: You're either protected accessed, and thus can't become Disabled
                // or you're already Disabled protected, but not accessed, and then can't
                // become accessed since that requires a child access, which Disabled blocks.)
                // Further, the children will never be able to read or write again, since they
                // have a `Disabled` parent. So this only affects diagnostics, such that the
                // blocking write will still be identified directly, just at a different tag.
                new_access_noop = true;
            }
            if self.permission.is_frozen() && access_kind == AccessKind::Read {
                // A foreign read to a `Frozen` tag will have almost no observable effect.
                // It's a theorem that `Frozen` nodes have no `Unique` children, so all children
                // already survive foreign reads. Foreign reads in general have almost no
                // effect, the only further thing they could do is make protected `Reserved`
                // nodes become conflicted, i.e. make them reject child writes for the further
                // duration of their protector. But such a child write is already rejected
                // because this node is frozen. So this only affects diagnostics, but the
                // blocking read will still be identified directly, just at a different tag.
                new_access_noop = true;
            }
            if new_access_noop {
                // Abort traversal if the new access is indeed guaranteed
                // to be noop.
                // No need to update `self.idempotent_foreign_access`,
                // the type of the current streak among nonempty read-only
                // or nonempty with at least one write has not changed.
                ContinueTraversal::SkipSelfAndChildren
            } else {
                // Otherwise propagate this time, and also record the
                // access that just occurred so that we can skip the propagation
                // next time.
                ContinueTraversal::Recurse
            }
        } else {
            // A child access occurred, this breaks the streak of foreign
            // accesses in a row and the sequence since the previous child access
            // is now empty.
            ContinueTraversal::Recurse
        }
    }

    /// Records a new access, so that future access can potentially be skipped
    /// by `skip_if_known_noop`. This must be called on child accesses, and otherwise
    /// should be called on foreign accesses for increased performance. It should not be called
    /// when `skip_if_known_noop` indicated skipping, since it then is a no-op.
    /// See `foreign_access_skipping.rs`
    fn record_new_access(&mut self, access_kind: AccessKind, rel_pos: AccessRelatedness) {
        // Enforce the contract above: we must not be called when skipping was indicated.
        debug_assert!(matches!(
            self.skip_if_known_noop(access_kind, rel_pos),
            ContinueTraversal::Recurse
        ));
        self.idempotent_foreign_access
            .record_new(IdempotentForeignAccess::from_acc_and_rel(access_kind, rel_pos));
    }
}
249
250impl fmt::Display for LocationState {
251    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
252        write!(f, "{}", self.permission)?;
253        if !self.accessed {
254            write!(f, "?")?;
255        }
256        Ok(())
257    }
258}
/// The state of the full tree for a particular location: for all nodes, the local permissions
/// of that node, and the tracking for wildcard accesses.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct LocationTree {
    /// Maps a tag to a perm, with possible lazy initialization.
    ///
    /// NOTE: not all tags registered in `Tree::nodes` are necessarily in all
    /// ranges of `perms`, because `perms` is in part lazily initialized.
    /// Just because `nodes.get(key)` is `Some(_)` does not mean you can safely
    /// `unwrap` any `perm.get(key)`.
    ///
    /// We do uphold the fact that `keys(perms)` is a subset of `keys(nodes)`.
    pub perms: UniValMap<LocationState>,
    /// Maps a tag and a location to its wildcard access tracking information,
    /// with possible lazy initialization.
    ///
    /// If this allocation doesn't have any exposed nodes, then this map doesn't get
    /// initialized. This way we only need to allocate the map if we need it.
    ///
    /// NOTE: same guarantees on entry initialization as for `perms`.
    pub wildcard_accesses: UniValMap<WildcardState>,
}
/// Tree structure with both parents and children since we want to be
/// able to traverse the tree efficiently in both directions.
#[derive(Clone, Debug)]
pub struct Tree {
    /// Mapping from tags to keys. The key obtained can then be used in
    /// any of the `UniValMap` relative to this allocation, i.e.
    /// `nodes`, `LocationTree::perms` and `LocationTree::wildcard_accesses`
    /// of the same `Tree`.
    /// The parent-child relationship in `Node` is encoded in terms of these same
    /// keys, so traversing the entire tree needs exactly one access to
    /// `tag_mapping`.
    pub(super) tag_mapping: UniKeyMap<BorTag>,
    /// All nodes of this tree.
    pub(super) nodes: UniValMap<Node>,
    /// Associates with each location its state and wildcard access tracking.
    pub(super) locations: DedupRangeMap<LocationTree>,
    /// The index of the root node.
    pub(super) root: UniIndex,
}
300
/// A node in the borrow tree. Each node is uniquely identified by a tag via
/// the `nodes` map of `Tree`.
#[derive(Clone, Debug)]
pub(super) struct Node {
    /// The tag of this node.
    pub tag: BorTag,
    /// All tags except the root have a parent tag.
    pub parent: Option<UniIndex>,
    /// If the pointer was reborrowed, it has children.
    // FIXME: bench to compare this to FxHashSet and to other SmallVec sizes
    pub children: SmallVec<[UniIndex; 4]>,
    /// Either `Reserved`, `Frozen`, or `Disabled`, it is the permission this tag will
    /// lazily be initialized to on the first access.
    /// It is only ever `Disabled` for a tree root, since the root is initialized to `Unique` by
    /// its own separate mechanism.
    default_initial_perm: Permission,
    /// The default initial (strongest) idempotent foreign access.
    /// This participates in the invariant for `LocationState::idempotent_foreign_access`
    /// in cases where there is no location state yet. See `foreign_access_skipping.rs`,
    /// and `LocationState::idempotent_foreign_access` for more information.
    default_initial_idempotent_foreign_access: IdempotentForeignAccess,
    /// Whether a wildcard access could happen through this node.
    pub is_exposed: bool,
    /// Some extra information useful only for debugging purposes.
    pub debug_info: NodeDebugInfo,
}
327
/// Data given to the transition function (`f_propagate` / `f_continue`)
/// for a single node visit during a tree traversal.
struct NodeAppArgs<'visit> {
    /// The index of the current node.
    idx: UniIndex,
    /// Relative position of the access.
    rel_pos: AccessRelatedness,
    /// The node map of this tree.
    nodes: &'visit mut UniValMap<Node>,
    /// The permissions map of this tree.
    loc: &'visit mut LocationTree,
}
/// Data given to the error handler so it can build a diagnostic.
struct ErrHandlerArgs<'node, InErr> {
    /// Kind of error that occurred.
    error_kind: InErr,
    /// Tag that triggered the error (not the tag that was accessed,
    /// rather the parent tag that had insufficient permissions or the
    /// non-parent tag that had a protector).
    conflicting_info: &'node NodeDebugInfo,
    /// Information about the tag that was accessed just before the
    /// error was triggered.
    accessed_info: &'node NodeDebugInfo,
}
/// Internal contents of `Tree` with the minimum of mutable access for
/// the purposes of the tree traversal functions: the permissions (`perms`) can be
/// updated but not the tree structure (`tag_mapping` and `nodes`).
struct TreeVisitor<'tree> {
    tag_mapping: &'tree UniKeyMap<BorTag>,
    // `nodes` is mutable only for debug-info/history updates, never structure.
    nodes: &'tree mut UniValMap<Node>,
    loc: &'tree mut LocationTree,
}
359
/// Whether to continue exploring the children recursively or not.
enum ContinueTraversal {
    /// Visit this node and then its children.
    Recurse,
    /// The access is known to be a no-op here and below:
    /// skip this node and its entire subtree.
    SkipSelfAndChildren,
}
365
/// Whether a traversal should also visit the subtree below the accessed node.
#[derive(Clone, Copy)]
pub enum ChildrenVisitMode {
    /// Push the accessed node's children for (foreign-access) visitation.
    VisitChildrenOfAccessed,
    /// Do not visit the children of the accessed node at all.
    SkipChildrenOfAccessed,
}
371
/// Marker for hand-rolled recursion: whether a stack entry is being seen for
/// the first time (children not yet pushed) or after its children were handled.
enum RecursionState {
    BeforeChildren,
    AfterChildren,
}
376
/// Stack of nodes left to explore in a tree traversal.
/// See the docs of `traverse_this_parents_children_other` for details on the
/// traversal order.
struct TreeVisitorStack<NodeContinue, NodeApp, ErrHandler> {
    /// Identifier of the original access.
    initial: UniIndex,
    /// Function describing whether to continue at a tag.
    /// This is only invoked for foreign accesses.
    f_continue: NodeContinue,
    /// Function to apply to each tag.
    f_propagate: NodeApp,
    /// Handler to add the required context to diagnostics.
    err_builder: ErrHandler,
    /// Mutable state of the visit: the tags left to handle.
    /// Every tag pushed should eventually be handled,
    /// and the precise order is relevant for diagnostics.
    /// Since the traversal is piecewise bottom-up, we need to
    /// remember whether we're here initially, or after visiting all children.
    /// The last element indicates this.
    /// This is just an artifact of how you hand-roll recursion,
    /// it does not have a deeper meaning otherwise.
    stack: Vec<(UniIndex, AccessRelatedness, RecursionState)>,
}
400
impl<NodeContinue, NodeApp, InnErr, OutErr, ErrHandler>
    TreeVisitorStack<NodeContinue, NodeApp, ErrHandler>
where
    NodeContinue: Fn(&NodeAppArgs<'_>) -> ContinueTraversal,
    NodeApp: Fn(NodeAppArgs<'_>) -> Result<(), InnErr>,
    ErrHandler: Fn(ErrHandlerArgs<'_, InnErr>) -> OutErr,
{
    /// Asks `f_continue` whether the subtree rooted at `idx` should be
    /// visited at all. Only called for foreign accesses.
    fn should_continue_at(
        &self,
        this: &mut TreeVisitor<'_>,
        idx: UniIndex,
        rel_pos: AccessRelatedness,
    ) -> ContinueTraversal {
        let args = NodeAppArgs { idx, rel_pos, nodes: this.nodes, loc: this.loc };
        (self.f_continue)(&args)
    }

    /// Applies `f_propagate` at `idx`; on error, wraps the inner error with
    /// diagnostic context (conflicting and accessed node info) via `err_builder`.
    fn propagate_at(
        &mut self,
        this: &mut TreeVisitor<'_>,
        idx: UniIndex,
        rel_pos: AccessRelatedness,
    ) -> Result<(), OutErr> {
        (self.f_propagate)(NodeAppArgs { idx, rel_pos, nodes: this.nodes, loc: this.loc }).map_err(
            |error_kind| {
                (self.err_builder)(ErrHandlerArgs {
                    error_kind,
                    conflicting_info: &this.nodes.get(idx).unwrap().debug_info,
                    accessed_info: &this.nodes.get(self.initial).unwrap().debug_info,
                })
            },
        )
    }

    /// Visits the accessed node and all its ancestors (the child-access part of
    /// the traversal), while pushing all "cousin" subtrees — and, depending on
    /// `visit_children`, the accessed node's own children — onto the stack for
    /// later foreign-access processing by `finish_foreign_accesses`.
    fn go_upwards_from_accessed(
        &mut self,
        this: &mut TreeVisitor<'_>,
        accessed_node: UniIndex,
        visit_children: ChildrenVisitMode,
    ) -> Result<(), OutErr> {
        // We want to visit the accessed node's children first.
        // However, we will below walk up our parents and push their children (our cousins)
        // onto the stack. To ensure correct iteration order, this method thus finishes
        // by reversing the stack. This only works if the stack is empty initially.
        assert!(self.stack.is_empty());
        // First, handle accessed node. A bunch of things need to
        // be handled differently here compared to the further parents
        // of `accessed_node`.
        {
            self.propagate_at(this, accessed_node, AccessRelatedness::LocalAccess)?;
            if matches!(visit_children, ChildrenVisitMode::VisitChildrenOfAccessed) {
                let accessed_node = this.nodes.get(accessed_node).unwrap();
                // We `rev()` here because we reverse the entire stack later.
                for &child in accessed_node.children.iter().rev() {
                    self.stack.push((
                        child,
                        AccessRelatedness::ForeignAccess,
                        RecursionState::BeforeChildren,
                    ));
                }
            }
        }
        // Then, handle the accessed node's parents. Here, we need to
        // make sure we only mark the "cousin" subtrees for later visitation,
        // not the subtree that contains the accessed node.
        let mut last_node = accessed_node;
        while let Some(current) = this.nodes.get(last_node).unwrap().parent {
            self.propagate_at(this, current, AccessRelatedness::LocalAccess)?;
            let node = this.nodes.get(current).unwrap();
            // We `rev()` here because we reverse the entire stack later.
            for &child in node.children.iter().rev() {
                // Skip the subtree we just came up from.
                if last_node == child {
                    continue;
                }
                self.stack.push((
                    child,
                    AccessRelatedness::ForeignAccess,
                    RecursionState::BeforeChildren,
                ));
            }
            last_node = current;
        }
        // Reverse the stack, as discussed above.
        self.stack.reverse();
        Ok(())
    }

    /// Processes the stack of pending foreign accesses bottom-up: all of a
    /// node's children are handled before the node itself, and `f_continue`
    /// may prune entire subtrees whose visit would be a no-op.
    fn finish_foreign_accesses(&mut self, this: &mut TreeVisitor<'_>) -> Result<(), OutErr> {
        while let Some((idx, rel_pos, step)) = self.stack.last_mut() {
            let idx = *idx;
            let rel_pos = *rel_pos;
            match *step {
                // How to do bottom-up traversal, 101: Before you handle a node, you handle all children.
                // For this, you must first find the children, which is what this code here does.
                RecursionState::BeforeChildren => {
                    // Next time we come back will be when all the children are handled.
                    *step = RecursionState::AfterChildren;
                    // Now push the children, except if we are told to skip this subtree.
                    let handle_children = self.should_continue_at(this, idx, rel_pos);
                    match handle_children {
                        ContinueTraversal::Recurse => {
                            let node = this.nodes.get(idx).unwrap();
                            for &child in node.children.iter() {
                                self.stack.push((child, rel_pos, RecursionState::BeforeChildren));
                            }
                        }
                        ContinueTraversal::SkipSelfAndChildren => {
                            // skip self
                            self.stack.pop();
                            continue;
                        }
                    }
                }
                // All the children are handled, let's actually visit this node
                RecursionState::AfterChildren => {
                    self.stack.pop();
                    self.propagate_at(this, idx, rel_pos)?;
                }
            }
        }
        Ok(())
    }

    /// Creates a visitor stack with an empty worklist, remembering the
    /// accessed node and the three traversal callbacks.
    fn new(
        initial: UniIndex,
        f_continue: NodeContinue,
        f_propagate: NodeApp,
        err_builder: ErrHandler,
    ) -> Self {
        Self { initial, f_continue, f_propagate, err_builder, stack: Vec::new() }
    }
}
533
impl<'tree> TreeVisitor<'tree> {
    /// Applies `f_propagate` to every vertex of the tree in a piecewise bottom-up way: First, visit
    /// all ancestors of `start` (starting with `start` itself), then children of `start`, then the rest,
    /// going bottom-up in each of these two "pieces" / sections.
    /// This ensures that errors are triggered in the following order
    /// - first invalid accesses with insufficient permissions, closest to the accessed node first,
    /// - then protector violations, bottom-up, starting with the children of the accessed node, and then
    ///   going upwards and outwards.
    ///
    /// The following graphic visualizes it, with numbers indicating visitation order and `start` being
    /// the node that is visited first ("1"):
    ///
    /// ```text
    ///         3
    ///        /|
    ///       / |
    ///      9  2
    ///      |  |\
    ///      |  | \
    ///      8  1  7
    ///        / \
    ///       4   6
    ///           |
    ///           5
    /// ```
    ///
    /// `f_propagate` should follow the following format: for a given `Node` it updates its
    /// `Permission` depending on the position relative to `start` (given by an
    /// `AccessRelatedness`).
    /// `f_continue` is called earlier on foreign nodes, and describes whether to even start
    /// visiting the subtree at that node. If it e.g. returns `SkipSelfAndChildren` on node 6
    /// above, then nodes 5 _and_ 6 would not be visited by `f_propagate`. It is not used for
    /// nodes undergoing a child access (nodes 1, 2, 3).
    ///
    /// Finally, remember that the iteration order is not relevant for UB, it only affects
    /// diagnostics. It also affects tree traversal optimizations built on top of this, so
    /// those need to be reviewed carefully as well whenever this changes.
    fn traverse_this_parents_children_other<InnErr, OutErr>(
        mut self,
        start: BorTag,
        f_continue: impl Fn(&NodeAppArgs<'_>) -> ContinueTraversal,
        f_propagate: impl Fn(NodeAppArgs<'_>) -> Result<(), InnErr>,
        err_builder: impl Fn(ErrHandlerArgs<'_, InnErr>) -> OutErr,
    ) -> Result<(), OutErr> {
        let start_idx = self.tag_mapping.get(&start).unwrap();
        let mut stack = TreeVisitorStack::new(start_idx, f_continue, f_propagate, err_builder);
        // Visits the accessed node itself, and all its parents, i.e. all nodes
        // undergoing a child access. Also pushes the children and the other
        // cousin nodes (i.e. all nodes undergoing a foreign access) to the stack
        // to be processed later.
        stack.go_upwards_from_accessed(
            &mut self,
            start_idx,
            ChildrenVisitMode::VisitChildrenOfAccessed,
        )?;
        // Now visit all the foreign nodes we remembered earlier.
        // For this we go bottom-up, but also allow f_continue to skip entire
        // subtrees from being visited if it would be a NOP.
        stack.finish_foreign_accesses(&mut self)
    }

    /// Like `traverse_this_parents_children_other`, but skips the children of `start`.
    fn traverse_nonchildren<InnErr, OutErr>(
        mut self,
        start: BorTag,
        f_continue: impl Fn(&NodeAppArgs<'_>) -> ContinueTraversal,
        f_propagate: impl Fn(NodeAppArgs<'_>) -> Result<(), InnErr>,
        err_builder: impl Fn(ErrHandlerArgs<'_, InnErr>) -> OutErr,
    ) -> Result<(), OutErr> {
        let start_idx = self.tag_mapping.get(&start).unwrap();
        let mut stack = TreeVisitorStack::new(start_idx, f_continue, f_propagate, err_builder);
        // Visits the accessed node itself, and all its parents, i.e. all nodes
        // undergoing a child access. Also pushes the other cousin nodes to the
        // stack, but not the children of the accessed node.
        stack.go_upwards_from_accessed(
            &mut self,
            start_idx,
            ChildrenVisitMode::SkipChildrenOfAccessed,
        )?;
        // Now visit all the foreign nodes we remembered earlier.
        // For this we go bottom-up, but also allow f_continue to skip entire
        // subtrees from being visited if it would be a NOP.
        stack.finish_foreign_accesses(&mut self)
    }
}
619
impl Tree {
    /// Create a new tree, with only a root pointer.
    /// The root covers the whole allocation of size `size` and is immediately
    /// `Unique` and accessed on all in-bounds locations.
    pub fn new(root_tag: BorTag, size: Size, span: Span) -> Self {
        // The root has `Disabled` as the default permission,
        // so that any access out of bounds is invalid.
        let root_default_perm = Permission::new_disabled();
        let mut tag_mapping = UniKeyMap::default();
        let root_idx = tag_mapping.insert(root_tag);
        let nodes = {
            let mut nodes = UniValMap::<Node>::default();
            let mut debug_info = NodeDebugInfo::new(root_tag, root_default_perm, span);
            // name the root so that all allocations contain one named pointer
            debug_info.add_name("root of the allocation");
            nodes.insert(
                root_idx,
                Node {
                    tag: root_tag,
                    parent: None,
                    children: SmallVec::default(),
                    default_initial_perm: root_default_perm,
                    // The root may never be skipped, all accesses will be local.
                    default_initial_idempotent_foreign_access: IdempotentForeignAccess::None,
                    is_exposed: false,
                    debug_info,
                },
            );
            nodes
        };
        let rperms = {
            let mut perms = UniValMap::default();
            // We manually set it to `Unique` on all in-bounds positions.
            // We also ensure that it is accessed, so that no `Unique` but
            // not yet accessed nodes exist. Essentially, we pretend there
            // was a write that initialized these to `Unique`.
            perms.insert(
                root_idx,
                LocationState::new_accessed(
                    Permission::new_unique(),
                    IdempotentForeignAccess::None,
                ),
            );
            // No exposed nodes yet, so wildcard tracking starts out empty.
            let wildcard_accesses = UniValMap::default();
            DedupRangeMap::new(size, LocationTree { perms, wildcard_accesses })
        };
        Self { root: root_idx, nodes, locations: rperms, tag_mapping }
    }
}
667
impl<'tcx> Tree {
    /// Insert a new tag in the tree.
    ///
    /// `inside_perms` defines the initial permissions for a block of memory starting at
    /// `base_offset`. These may or may not be already marked as "accessed".
    /// `outside_perm` defines the initial permission for the rest of the allocation.
    /// These are definitely not "accessed".
    ///
    /// `new_tag` becomes a child of `parent_tag`, which must already be in the tree.
    /// `protected` indicates whether the new tag is protected, which influences the
    /// strongest idempotent foreign access (SIFA) recorded for it.
    pub(super) fn new_child(
        &mut self,
        base_offset: Size,
        parent_tag: BorTag,
        new_tag: BorTag,
        inside_perms: DedupRangeMap<LocationState>,
        outside_perm: Permission,
        protected: bool,
        span: Span,
    ) -> InterpResult<'tcx> {
        let idx = self.tag_mapping.insert(new_tag);
        let parent_idx = self.tag_mapping.get(&parent_tag).unwrap();
        // The out-of-range permission must be a freshly created one.
        assert!(outside_perm.is_initial());

        let default_strongest_idempotent =
            outside_perm.strongest_idempotent_foreign_access(protected);
        // Create the node
        self.nodes.insert(
            idx,
            Node {
                tag: new_tag,
                parent: Some(parent_idx),
                children: SmallVec::default(),
                default_initial_perm: outside_perm,
                default_initial_idempotent_foreign_access: default_strongest_idempotent,
                is_exposed: false,
                debug_info: NodeDebugInfo::new(new_tag, outside_perm, span),
            },
        );
        let parent_node = self.nodes.get_mut(parent_idx).unwrap();
        // Register new_tag as a child of parent_tag
        parent_node.children.push(idx);

        // We need to know the weakest SIFA for `update_idempotent_foreign_access_after_retag`.
        let mut min_sifa = default_strongest_idempotent;
        // Copy the per-location permissions from `inside_perms` into the
        // per-location maps of the tree, shifted by `base_offset`.
        for (Range { start, end }, &perm) in
            inside_perms.iter(Size::from_bytes(0), inside_perms.size())
        {
            // Sanity check: initial permissions only, and the recorded SIFA
            // must agree with what the permission itself implies.
            assert!(perm.permission.is_initial());
            assert_eq!(
                perm.idempotent_foreign_access,
                perm.permission.strongest_idempotent_foreign_access(protected)
            );

            min_sifa = cmp::min(min_sifa, perm.idempotent_foreign_access);
            for (_range, loc) in self
                .locations
                .iter_mut(Size::from_bytes(start) + base_offset, Size::from_bytes(end - start))
            {
                loc.perms.insert(idx, perm);
            }
        }

        // We need to ensure the consistency of the wildcard access tracking data structure.
        // For this, we insert the correct entry for this tag based on its parent, if it exists.
        for (_range, loc) in self.locations.iter_mut_all() {
            if let Some(parent_access) = loc.wildcard_accesses.get(parent_idx) {
                loc.wildcard_accesses.insert(idx, parent_access.for_new_child());
            }
        }

        // Inserting the new perms might have broken the SIFA invariant (see
        // `foreign_access_skipping.rs`) if the SIFA we inserted is weaker than that of some parent.
        // We now weaken the recorded SIFA for our parents, until the invariant is restored. We
        // could weaken them all to `None`, but it is more efficient to compute the SIFA for the new
        // permission statically, and use that. For this we need the *minimum* SIFA (`None` needs
        // more fixup than `Write`).
        self.update_idempotent_foreign_access_after_retag(parent_idx, min_sifa);

        interp_ok(())
    }

    /// Restores the SIFA "children are stronger"/"parents are weaker" invariant after a retag:
    /// reduce the SIFA of `current` and its parents to be no stronger than `strongest_allowed`.
    /// See `foreign_access_skipping.rs` and [`Tree::new_child`].
    fn update_idempotent_foreign_access_after_retag(
        &mut self,
        mut current: UniIndex,
        strongest_allowed: IdempotentForeignAccess,
    ) {
        if strongest_allowed == IdempotentForeignAccess::Write {
            // Nothing is stronger than `Write`.
            return;
        }
        // We walk the tree upwards, until the invariant is restored
        loop {
            let current_node = self.nodes.get_mut(current).unwrap();
            // Call `ensure_no_stronger_than` on all SIFAs for this node: the per-location SIFA, as well
            // as the default SIFA for not-yet-initialized locations.
            // Record whether we did any change; if not, the invariant is restored and we can stop the traversal.
            let mut any_change = false;
            for (_range, loc) in self.locations.iter_mut_all() {
                // Check if this node has a state for this location (or range of locations).
                if let Some(perm) = loc.perms.get_mut(current) {
                    // Update the per-location SIFA, recording if it changed.
                    any_change |=
                        perm.idempotent_foreign_access.ensure_no_stronger_than(strongest_allowed);
                }
            }
            // Now update `default_initial_idempotent_foreign_access`, which stores the default SIFA for not-yet-initialized locations.
            any_change |= current_node
                .default_initial_idempotent_foreign_access
                .ensure_no_stronger_than(strongest_allowed);

            if any_change {
                // Something was weakened here, so the parent's SIFA might also
                // violate the invariant: keep walking upwards.
                let Some(next) = self.nodes.get(current).unwrap().parent else {
                    // We have arrived at the root.
                    break;
                };
                current = next;
                continue;
            } else {
                // No change at this node means the invariant already holds
                // from here on upwards.
                break;
            }
        }
    }

    /// Deallocation requires
    /// - a pointer that permits write accesses
    /// - the absence of Strong Protectors anywhere in the allocation
    ///
    /// First performs a write access over `access_range`, then checks that no
    /// strongly protected node (with an accessed, non-`Cell` permission) exists.
    pub fn dealloc(
        &mut self,
        prov: ProvenanceExtra,
        access_range: AllocRange,
        global: &GlobalState,
        alloc_id: AllocId, // diagnostics
        span: Span,        // diagnostics
    ) -> InterpResult<'tcx> {
        self.perform_access(
            prov,
            Some((access_range, AccessKind::Write, diagnostics::AccessCause::Dealloc)),
            global,
            alloc_id,
            span,
        )?;

        // Check if this breaks any strong protector.
        // (Weak protectors are already handled by `perform_access`.)
        for (loc_range, loc) in self.locations.iter_mut(access_range.start, access_range.size) {
            // The order in which we check if any nodes are invalidated only
            // matters to diagnostics, so we use the root as a default tag.
            let start_tag = match prov {
                ProvenanceExtra::Concrete(tag) => tag,
                ProvenanceExtra::Wildcard => self.nodes.get(self.root).unwrap().tag,
            };
            TreeVisitor { nodes: &mut self.nodes, tag_mapping: &self.tag_mapping, loc }
                .traverse_this_parents_children_other(
                    start_tag,
                    // Visit all children, skipping none.
                    |_| ContinueTraversal::Recurse,
                    |args: NodeAppArgs<'_>| -> Result<(), TransitionError> {
                        let node = args.nodes.get(args.idx).unwrap();
                        let perm = args.loc.perms.entry(args.idx);

                        // Not-yet-initialized locations fall back to the node's default state.
                        let perm =
                            perm.get().copied().unwrap_or_else(|| node.default_location_state());
                        if global.borrow().protected_tags.get(&node.tag)
                            == Some(&ProtectorKind::StrongProtector)
                            // Don't check for protector if it is a Cell (see `unsafe_cell_deallocate` in `interior_mutability.rs`).
                            // Related to https://github.com/rust-lang/rust/issues/55005.
                            && !perm.permission.is_cell()
                            // Only trigger UB if the accessed bit is set, i.e. if the protector is actually protecting this offset. See #4579.
                            && perm.accessed
                        {
                            Err(TransitionError::ProtectedDealloc)
                        } else {
                            Ok(())
                        }
                    },
                    |args: ErrHandlerArgs<'_, TransitionError>| -> InterpErrorKind<'tcx> {
                        let ErrHandlerArgs { error_kind, conflicting_info, accessed_info } = args;
                        TbError {
                            conflicting_info,
                            access_cause: diagnostics::AccessCause::Dealloc,
                            alloc_id,
                            error_offset: loc_range.start,
                            error_kind,
                            accessed_info: match prov {
                                ProvenanceExtra::Concrete(_) => Some(accessed_info),
                                // `accessed_info` contains the info of `start_tag`.
                                // On a wildcard access this is not the info of the accessed tag
                                // (as we don't know the accessed tag).
                                ProvenanceExtra::Wildcard => None,
                            },
                        }
                        .build()
                    },
                )?;
        }
        interp_ok(())
    }

    /// Map the per-node and per-location `LocationState::perform_access`
    /// to each location of the first component of `access_range_and_kind`,
    /// on every tag of the allocation.
    ///
    /// If `access_range_and_kind` is `None`, this is interpreted as the special
    /// access that is applied on protector release:
    /// - the access will be applied only to accessed locations of the allocation,
    /// - it will not be visible to children,
    /// - it will be recorded as a `FnExit` diagnostic access
    /// - and it will be a read except if the location is `Unique`, i.e. has been written to,
    ///   in which case it will be a write.
    ///
    /// `LocationState::perform_access` will take care of raising transition
    /// errors and updating the `accessed` status of each location,
    /// this traversal adds to that:
    /// - inserting into the map locations that do not exist yet,
    /// - trimming the traversal,
    /// - recording the history.
    pub fn perform_access(
        &mut self,
        prov: ProvenanceExtra,
        access_range_and_kind: Option<(AllocRange, AccessKind, diagnostics::AccessCause)>,
        global: &GlobalState,
        alloc_id: AllocId, // diagnostics
        span: Span,        // diagnostics
    ) -> InterpResult<'tcx> {
        // Wildcard accesses take a completely different traversal path.
        let ProvenanceExtra::Concrete(tag) = prov else {
            return self.perform_wildcard_access(access_range_and_kind, global, alloc_id, span);
        };
        use std::ops::Range;
        // Performs the per-node work:
        // - insert the permission if it does not exist
        // - perform the access
        // - record the transition
        // to which some optimizations are added:
        // - skip the traversal of the children in some cases
        // - do not record noop transitions
        //
        // `perms_range` is only for diagnostics (it is the range of
        // the `RangeMap` on which we are currently working).
        let node_skipper = |access_kind: AccessKind, args: &NodeAppArgs<'_>| -> ContinueTraversal {
            let node = args.nodes.get(args.idx).unwrap();
            let perm = args.loc.perms.get(args.idx);

            let old_state = perm.copied().unwrap_or_else(|| node.default_location_state());
            old_state.skip_if_known_noop(access_kind, args.rel_pos)
        };
        let node_app = |perms_range: Range<u64>,
                        access_kind: AccessKind,
                        access_cause: diagnostics::AccessCause,
                        args: NodeAppArgs<'_>|
         -> Result<(), TransitionError> {
            let node = args.nodes.get_mut(args.idx).unwrap();
            let mut perm = args.loc.perms.entry(args.idx);

            // Lazily materialize the location state from the node's default.
            let state = perm.or_insert(node.default_location_state());

            let protected = global.borrow().protected_tags.contains_key(&node.tag);
            state.perform_transition(
                args.idx,
                args.nodes,
                &mut args.loc.wildcard_accesses,
                access_kind,
                access_cause,
                /* access_range */ access_range_and_kind.map(|x| x.0),
                args.rel_pos,
                span,
                perms_range,
                protected,
            )
        };

        // Error handler in case `node_app` goes wrong.
        // Wraps the faulty transition in more context for diagnostics.
        let err_handler = |perms_range: Range<u64>,
                           access_cause: diagnostics::AccessCause,
                           args: ErrHandlerArgs<'_, TransitionError>|
         -> InterpErrorKind<'tcx> {
            let ErrHandlerArgs { error_kind, conflicting_info, accessed_info } = args;
            TbError {
                conflicting_info,
                access_cause,
                alloc_id,
                error_offset: perms_range.start,
                error_kind,
                accessed_info: Some(accessed_info),
            }
            .build()
        };

        if let Some((access_range, access_kind, access_cause)) = access_range_and_kind {
            // Default branch: this is a "normal" access through a known range.
            // We iterate over affected locations and traverse the tree for each of them.
            for (loc_range, loc) in self.locations.iter_mut(access_range.start, access_range.size) {
                TreeVisitor { nodes: &mut self.nodes, tag_mapping: &self.tag_mapping, loc }
                    .traverse_this_parents_children_other(
                        tag,
                        |args| node_skipper(access_kind, args),
                        |args| node_app(loc_range.clone(), access_kind, access_cause, args),
                        |args| err_handler(loc_range.clone(), access_cause, args),
                    )?;
            }
        } else {
            // This is a special access through the entire allocation.
            // It actually only affects `accessed` locations, so we need
            // to filter on those before initiating the traversal.
            //
            // In addition this implicit access should not be visible to children,
            // thus the use of `traverse_nonchildren`.
            // See the test case `returned_mut_is_usable` from
            // `tests/pass/tree_borrows/tree-borrows.rs` for an example of
            // why this is important.
            for (loc_range, loc) in self.locations.iter_mut_all() {
                let idx = self.tag_mapping.get(&tag).unwrap();
                // Only visit accessed permissions
                if let Some(p) = loc.perms.get(idx)
                    && let Some(access_kind) = p.permission.protector_end_access()
                    && p.accessed
                {
                    let access_cause = diagnostics::AccessCause::FnExit(access_kind);
                    TreeVisitor { nodes: &mut self.nodes, tag_mapping: &self.tag_mapping, loc }
                        .traverse_nonchildren(
                            tag,
                            |args| node_skipper(access_kind, args),
                            |args| node_app(loc_range.clone(), access_kind, access_cause, args),
                            |args| err_handler(loc_range.clone(), access_cause, args),
                        )?;
                }
            }
        }
        interp_ok(())
    }
}
1000
/// Integration with the BorTag garbage collector
impl Tree {
    /// Remove from the tree all tags that are neither live (in `live_tags`)
    /// nor needed to preserve the semantics of the remaining tags.
    pub fn remove_unreachable_tags(&mut self, live_tags: &FxHashSet<BorTag>) {
        self.remove_useless_children(self.root, live_tags);
        // Right after the GC runs is a good moment to check if we can
        // merge some adjacent ranges that were made equal by the removal of some
        // tags (this does not necessarily mean that they have identical internal representations,
        // see the `PartialEq` impl for `UniValMap`)
        self.locations.merge_adjacent_thorough();
    }

    /// Checks if a node is useless and should be GC'ed.
    /// A node is useless if it has no children and also the tag is no longer live.
    fn is_useless(&self, idx: UniIndex, live: &FxHashSet<BorTag>) -> bool {
        let node = self.nodes.get(idx).unwrap();
        node.children.is_empty() && !live.contains(&node.tag)
    }

    /// Checks whether a node can be replaced by its only child.
    /// If so, returns the index of said only child.
    /// If not, returns none.
    fn can_be_replaced_by_single_child(
        &self,
        idx: UniIndex,
        live: &FxHashSet<BorTag>,
    ) -> Option<UniIndex> {
        let node = self.nodes.get(idx).unwrap();

        // Nodes with zero or several children can never be elided.
        let [child_idx] = node.children[..] else { return None };

        // We never want to replace the root node, as it is also kept in `root_ptr_tags`.
        if live.contains(&node.tag) || node.parent.is_none() {
            return None;
        }
        // Since protected nodes are never GC'd (see `borrow_tracker::FrameExtra::visit_provenance`),
        // we know that `node` is not protected because otherwise `live` would
        // have contained `node.tag`.
        let child = self.nodes.get(child_idx).unwrap();
        // Check that for that one child, `can_be_replaced_by_child` holds for the permission
        // on all locations.
        for (_range, loc) in self.locations.iter_all() {
            let parent_perm = loc
                .perms
                .get(idx)
                .map(|x| x.permission)
                .unwrap_or_else(|| node.default_initial_perm);
            let child_perm = loc
                .perms
                .get(child_idx)
                .map(|x| x.permission)
                .unwrap_or_else(|| child.default_initial_perm);
            if !parent_perm.can_be_replaced_by_child(child_perm) {
                return None;
            }
        }

        Some(child_idx)
    }

    /// Properly removes a node.
    /// The node to be removed should not otherwise be usable. It also
    /// should have no children, but this is not checked, so that nodes
    /// whose children were rotated somewhere else can be deleted without
    /// having to first modify them to clear that array.
    fn remove_useless_node(&mut self, this: UniIndex) {
        // Due to the API of UniMap we must make sure to call
        // `UniValMap::remove` for the key of this node on *all* maps that used it
        // (which are `self.nodes` and every range of `self.locations`)
        // before we can safely apply `UniKeyMap::remove` to truly remove
        // this tag from the `tag_mapping`.
        let node = self.nodes.remove(this).unwrap();
        for (_range, loc) in self.locations.iter_mut_all() {
            loc.perms.remove(this);
            loc.wildcard_accesses.remove(this);
        }
        self.tag_mapping.remove(&node.tag);
    }

    /// Traverses the entire tree looking for useless tags.
    /// Removes from the tree all useless child nodes of root.
    /// It will not delete the root itself.
    ///
    /// NOTE: This leaves in the middle of the tree tags that are unreachable but have
    /// reachable children. There is a potential for compacting the tree by reassigning
    /// children of dead tags to the nearest live parent, but it must be done with care
    /// not to remove UB.
    ///
    /// Example: Consider the tree `root - parent - child`, with `parent: Frozen` and
    /// `child: Reserved`. This tree can exist. If we blindly delete `parent` and reassign
    /// `child` to be a direct child of `root` then Writes to `child` are now permitted
    /// whereas they were not when `parent` was still there.
    fn remove_useless_children(&mut self, root: UniIndex, live: &FxHashSet<BorTag>) {
        // To avoid stack overflows, we roll our own stack.
        // Each element in the stack consists of the current tag, and the number of the
        // next child to be processed.

        // The other functions are written using the `TreeVisitorStack`, but that does not work here
        // since we need to 1) do a post-traversal and 2) remove nodes from the tree.
        // Since we do a post-traversal (by deleting nodes only after handling all children),
        // we also need to be a bit smarter than "pop node, push all children."
        let mut stack = vec![(root, 0)];
        while let Some((tag, nth_child)) = stack.last_mut() {
            let node = self.nodes.get(*tag).unwrap();
            if *nth_child < node.children.len() {
                // Visit the child by pushing it to the stack.
                // Also increase `nth_child` so that when we come back to the `tag` node, we
                // look at the next child.
                let next_child = node.children[*nth_child];
                *nth_child += 1;
                stack.push((next_child, 0));
                continue;
            } else {
                // We have processed all children of `node`, so now it is time to process `node` itself.
                // First, get the current children of `node`. To appease the borrow checker,
                // we have to temporarily move the list out of the node, and then put the
                // list of remaining children back in.
                let mut children_of_node =
                    mem::take(&mut self.nodes.get_mut(*tag).unwrap().children);
                // Remove all useless children.
                children_of_node.retain_mut(|idx| {
                    if self.is_useless(*idx, live) {
                        // Delete `idx` node everywhere else.
                        self.remove_useless_node(*idx);
                        // And delete it from children_of_node.
                        false
                    } else {
                        if let Some(nextchild) = self.can_be_replaced_by_single_child(*idx, live) {
                            // `nextchild` is our grandchild, and will become our direct child.
                            // Delete the in-between node, `idx`.
                            self.remove_useless_node(*idx);
                            // Set the new child's parent.
                            self.nodes.get_mut(nextchild).unwrap().parent = Some(*tag);
                            // Save the new child in children_of_node.
                            *idx = nextchild;
                        }
                        // retain it
                        true
                    }
                });
                // Put back the now-filtered vector.
                self.nodes.get_mut(*tag).unwrap().children = children_of_node;

                // We are done, the parent can continue.
                stack.pop();
                continue;
            }
        }
    }
}
1150
/// Methods for wildcard accesses.
impl<'tcx> Tree {
    /// Analogous to `perform_access`, but we do not know from which exposed
    /// reference the access happens.
    ///
    /// The traversal starts at the root, and the access relatedness for each
    /// node is taken from the per-location wildcard tracking state rather
    /// than from the visitor's position relative to the accessed tag.
    pub fn perform_wildcard_access(
        &mut self,
        access_range_and_kind: Option<(AllocRange, AccessKind, diagnostics::AccessCause)>,
        global: &GlobalState,
        alloc_id: AllocId, // diagnostics
        span: Span,        // diagnostics
    ) -> InterpResult<'tcx> {
        #[cfg(feature = "expensive-consistency-checks")]
        self.verify_wildcard_consistency(global);

        if let Some((access_range, access_kind, access_cause)) = access_range_and_kind {
            // This does a traversal starting from the root through the tree updating
            // the permissions of each node.
            // The difference to `perform_access` is that we take the access
            // relatedness from the wildcard tracking state of the node instead of
            // from the visitor itself.
            for (loc_range, loc) in self.locations.iter_mut(access_range.start, access_range.size) {
                let root_tag = self.nodes.get(self.root).unwrap().tag;
                TreeVisitor { loc, nodes: &mut self.nodes, tag_mapping: &self.tag_mapping }
                    .traverse_this_parents_children_other(
                        root_tag,
                        // Skipper: decide whether the subtree below this node can be skipped.
                        |args: &NodeAppArgs<'_>| -> ContinueTraversal {
                            let node = args.nodes.get(args.idx).unwrap();
                            let perm = args.loc.perms.get(args.idx);
                            let wildcard_state = args
                                .loc
                                .wildcard_accesses
                                .get(args.idx)
                                .cloned()
                                .unwrap_or_default();

                            let old_state =
                                perm.copied().unwrap_or_else(|| node.default_location_state());
                            // If we know where, relative to this node, the wildcard access occurs,
                            // then check if we can skip the entire subtree.
                            if let Some(relatedness) =
                                wildcard_state.access_relatedness(access_kind)
                                && let Some(relatedness) = relatedness.to_relatedness()
                            {
                                // We can use the usual SIFA machinery to skip nodes.
                                old_state.skip_if_known_noop(access_kind, relatedness)
                            } else {
                                ContinueTraversal::Recurse
                            }
                        },
                        // Worker: apply the transition implied by the wildcard access.
                        |args| {
                            let node = args.nodes.get_mut(args.idx).unwrap();
                            let mut entry = args.loc.perms.entry(args.idx);
                            let perm = entry.or_insert(node.default_location_state());

                            let protected = global.borrow().protected_tags.contains_key(&node.tag);

                            let Some(wildcard_relatedness) = args
                                .loc
                                .wildcard_accesses
                                .get(args.idx)
                                .and_then(|s| s.access_relatedness(access_kind))
                            else {
                                // There doesn't exist a valid exposed reference for this access to
                                // happen through.
                                // If this fails for one node, then it fails for all nodes.
                                // Since we always check the root first, this means it should always
                                // fail on the root.
                                assert_eq!(self.root, args.idx);
                                return Err(no_valid_exposed_references_error(
                                    alloc_id,
                                    loc_range.start,
                                    access_cause,
                                ));
                            };

                            let Some(relatedness) = wildcard_relatedness.to_relatedness() else {
                                // If the access type is Either, then we do not apply any transition
                                // to this node, but we still update each of its children.
                                // This is an imprecision! In the future, maybe we can still do some sort
                                // of best-effort update here.
                                return Ok(());
                            };
                            // We know the exact relatedness, so we can actually do precise checks.
                            perm.perform_transition(
                                args.idx,
                                args.nodes,
                                &mut args.loc.wildcard_accesses,
                                access_kind,
                                access_cause,
                                Some(access_range),
                                relatedness,
                                span,
                                loc_range.clone(),
                                protected,
                            )
                            .map_err(|trans| {
                                let node = args.nodes.get(args.idx).unwrap();
                                TbError {
                                    conflicting_info: &node.debug_info,
                                    access_cause,
                                    alloc_id,
                                    error_offset: loc_range.start,
                                    error_kind: trans,
                                    // The accessed tag is unknown for a wildcard access.
                                    accessed_info: None,
                                }
                                .build()
                            })
                        },
                        |err| err.error_kind,
                    )?;
            }
        } else {
            // This is for the special access when a protector gets released.
            // Wildcard pointers are never protected, so this is unreachable.
            unreachable!()
        };
        interp_ok(())
    }
}
1270
1271impl Node {
1272    pub fn default_location_state(&self) -> LocationState {
1273        LocationState::new_non_accessed(
1274            self.default_initial_perm,
1275            self.default_initial_idempotent_foreign_access,
1276        )
1277    }
1278}
1279
1280impl VisitProvenance for Tree {
1281    fn visit_provenance(&self, visit: &mut VisitWith<'_>) {
1282        // To ensure that the root never gets removed, we visit it
1283        // (the `root` node of `Tree` is not an `Option<_>`)
1284        visit(None, Some(self.nodes.get(self.root).unwrap().tag));
1285
1286        // We also need to keep around any exposed tags through which
1287        // an access could still happen.
1288        for (_id, node) in self.nodes.iter() {
1289            if node.is_exposed {
1290                visit(None, Some(node.tag))
1291            }
1292        }
1293    }
1294}
1295
/// Relative position of the access with respect to a given node.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum AccessRelatedness {
    /// The access happened either through the node itself or through one of
    /// its transitive children.
    LocalAccess,
    /// The access happened through this node's ancestor, or through a
    /// sibling/cousin/uncle/etc.
    ForeignAccess,
}

impl AccessRelatedness {
    /// Returns `true` iff the access did not come through this node or one
    /// of its transitive children, i.e. it came through an ancestor or a
    /// sibling/cousin/etc.
    pub fn is_foreign(self) -> bool {
        match self {
            AccessRelatedness::ForeignAccess => true,
            AccessRelatedness::LocalAccess => false,
        }
    }
}