rustc_mir_dataflow/impls/
initialized.rs

1use std::assert_matches::assert_matches;
2
3use rustc_abi::VariantIdx;
4use rustc_index::Idx;
5use rustc_index::bit_set::{DenseBitSet, MixedBitSet};
6use rustc_middle::bug;
7use rustc_middle::mir::{
8    self, Body, CallReturnPlaces, Location, SwitchTargetValue, TerminatorEdges,
9};
10use rustc_middle::ty::util::Discr;
11use rustc_middle::ty::{self, TyCtxt};
12use smallvec::SmallVec;
13use tracing::{debug, instrument};
14
15use crate::drop_flag_effects::{DropFlagState, InactiveVariants};
16use crate::move_paths::{HasMoveData, InitIndex, InitKind, LookupResult, MoveData, MovePathIndex};
17use crate::{
18    Analysis, GenKill, MaybeReachable, drop_flag_effects, drop_flag_effects_for_function_entry,
19    drop_flag_effects_for_location, on_all_children_bits, on_lookup_result_bits,
20};
21
22// Used by both `MaybeInitializedPlaces` and `MaybeUninitializedPlaces`.
pub struct MaybePlacesSwitchIntData<'tcx> {
    /// The enum place whose discriminant the `SwitchInt` reads.
    enum_place: mir::Place<'tcx>,
    /// All `(variant, discriminant)` pairs of the enum, in the order yielded by
    /// `AdtDef::discriminants`.
    discriminants: Vec<(VariantIdx, Discr<'tcx>)>,
    /// Cursor into `discriminants`, advanced by `next_discr`.
    index: usize,
}
28
29impl<'tcx> MaybePlacesSwitchIntData<'tcx> {
30    /// Creates a `SmallVec` mapping each target in `targets` to its `VariantIdx`.
31    fn variants(&mut self, targets: &mir::SwitchTargets) -> SmallVec<[VariantIdx; 4]> {
32        self.index = 0;
33        targets.all_values().iter().map(|value| self.next_discr(value.get())).collect()
34    }
35
36    // The discriminant order in the `SwitchInt` targets should match the order yielded by
37    // `AdtDef::discriminants`. We rely on this to match each discriminant in the targets to its
38    // corresponding variant in linear time.
39    fn next_discr(&mut self, value: u128) -> VariantIdx {
40        // An out-of-bounds abort will occur if the discriminant ordering isn't as described above.
41        loop {
42            let (variant, discr) = self.discriminants[self.index];
43            self.index += 1;
44            if discr.val == value {
45                return variant;
46            }
47        }
48    }
49}
50
51impl<'tcx> MaybePlacesSwitchIntData<'tcx> {
52    fn new(
53        tcx: TyCtxt<'tcx>,
54        body: &Body<'tcx>,
55        block: mir::BasicBlock,
56        discr: &mir::Operand<'tcx>,
57    ) -> Option<Self> {
58        let Some(discr) = discr.place() else { return None };
59
60        // Inspect a `SwitchInt`-terminated basic block to see if the condition of that `SwitchInt`
61        // is an enum discriminant.
62        //
63        // We expect such blocks to have a call to `discriminant` as their last statement like so:
64        // ```text
65        // ...
66        // _42 = discriminant(_1)
67        // SwitchInt(_42, ..)
68        // ```
69        // If the basic block matches this pattern, this function gathers the place corresponding
70        // to the enum (`_1` in the example above) as well as the discriminants.
71        let block_data = &body[block];
72        for statement in block_data.statements.iter().rev() {
73            match statement.kind {
74                mir::StatementKind::Assign(box (lhs, mir::Rvalue::Discriminant(enum_place)))
75                    if lhs == discr =>
76                {
77                    match enum_place.ty(body, tcx).ty.kind() {
78                        ty::Adt(enum_def, _) => {
79                            return Some(MaybePlacesSwitchIntData {
80                                enum_place,
81                                discriminants: enum_def.discriminants(tcx).collect(),
82                                index: 0,
83                            });
84                        }
85
86                        // `Rvalue::Discriminant` is also used to get the active yield point for a
87                        // coroutine, but we do not need edge-specific effects in that case. This
88                        // may change in the future.
89                        ty::Coroutine(..) => break,
90
91                        t => bug!("`discriminant` called on unexpected type {:?}", t),
92                    }
93                }
94                mir::StatementKind::Coverage(_) => continue,
95                _ => break,
96            }
97        }
98        None
99    }
100}
101
102/// `MaybeInitializedPlaces` tracks all places that might be
103/// initialized upon reaching a particular point in the control flow
104/// for a function.
105///
106/// For example, in code like the following, we have corresponding
107/// dataflow information shown in the right-hand comments.
108///
109/// ```rust
110/// struct S;
111/// fn foo(pred: bool) {                        // maybe-init:
112///                                             // {}
113///     let a = S; let mut b = S; let c; let d; // {a, b}
114///
115///     if pred {
116///         drop(a);                            // {   b}
117///         b = S;                              // {   b}
118///
119///     } else {
120///         drop(b);                            // {a}
121///         d = S;                              // {a,       d}
122///
123///     }                                       // {a, b,    d}
124///
125///     c = S;                                  // {a, b, c, d}
126/// }
127/// ```
128///
129/// To determine whether a place is *definitely* initialized at a
130/// particular control-flow point, one can take the set-complement
131/// of the data from `MaybeUninitializedPlaces` at the corresponding
132/// control-flow point.
133///
134/// Similarly, at a given `drop` statement, the set-intersection
135/// between this data and `MaybeUninitializedPlaces` yields the set of
136/// places that would require a dynamic drop-flag at that statement.
pub struct MaybeInitializedPlaces<'a, 'tcx> {
    tcx: TyCtxt<'tcx>,
    body: &'a Body<'tcx>,
    move_data: &'a MoveData<'tcx>,
    /// When set, the `otherwise` edge of a `SwitchInt` on an enum discriminant also
    /// kills move paths of variants known to be inactive on that edge.
    /// See `exclude_inactive_in_otherwise`.
    exclude_inactive_in_otherwise: bool,
    /// When set, a `Drop` whose place is fully (maybe-)uninitialized keeps only its
    /// success edge; the unwind edge is treated as unreachable.
    /// See `skipping_unreachable_unwind`.
    skip_unreachable_unwind: bool,
}
144
145impl<'a, 'tcx> MaybeInitializedPlaces<'a, 'tcx> {
146    pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, move_data: &'a MoveData<'tcx>) -> Self {
147        MaybeInitializedPlaces {
148            tcx,
149            body,
150            move_data,
151            exclude_inactive_in_otherwise: false,
152            skip_unreachable_unwind: false,
153        }
154    }
155
156    /// Ensures definitely inactive variants are excluded from the set of initialized places for
157    /// blocks reached through an `otherwise` edge.
158    pub fn exclude_inactive_in_otherwise(mut self) -> Self {
159        self.exclude_inactive_in_otherwise = true;
160        self
161    }
162
163    pub fn skipping_unreachable_unwind(mut self) -> Self {
164        self.skip_unreachable_unwind = true;
165        self
166    }
167
168    pub fn is_unwind_dead(
169        &self,
170        place: mir::Place<'tcx>,
171        state: &<Self as Analysis<'tcx>>::Domain,
172    ) -> bool {
173        if let LookupResult::Exact(path) = self.move_data().rev_lookup.find(place.as_ref()) {
174            let mut maybe_live = false;
175            on_all_children_bits(self.move_data(), path, |child| {
176                maybe_live |= state.contains(child);
177            });
178            !maybe_live
179        } else {
180            false
181        }
182    }
183}
184
// Gives the shared drop-flag helpers access to this analysis's move data.
impl<'a, 'tcx> HasMoveData<'tcx> for MaybeInitializedPlaces<'a, 'tcx> {
    fn move_data(&self) -> &MoveData<'tcx> {
        self.move_data
    }
}
190
191/// `MaybeUninitializedPlaces` tracks all places that might be
192/// uninitialized upon reaching a particular point in the control flow
193/// for a function.
194///
195/// For example, in code like the following, we have corresponding
196/// dataflow information shown in the right-hand comments.
197///
198/// ```rust
199/// struct S;
200/// fn foo(pred: bool) {                        // maybe-uninit:
201///                                             // {a, b, c, d}
202///     let a = S; let mut b = S; let c; let d; // {      c, d}
203///
204///     if pred {
205///         drop(a);                            // {a,    c, d}
206///         b = S;                              // {a,    c, d}
207///
208///     } else {
209///         drop(b);                            // {   b, c, d}
210///         d = S;                              // {   b, c   }
211///
212///     }                                       // {a, b, c, d}
213///
214///     c = S;                                  // {a, b,    d}
215/// }
216/// ```
217///
218/// To determine whether a place is *definitely* uninitialized at a
219/// particular control-flow point, one can take the set-complement
220/// of the data from `MaybeInitializedPlaces` at the corresponding
221/// control-flow point.
222///
223/// Similarly, at a given `drop` statement, the set-intersection
224/// between this data and `MaybeInitializedPlaces` yields the set of
225/// places that would require a dynamic drop-flag at that statement.
pub struct MaybeUninitializedPlaces<'a, 'tcx> {
    tcx: TyCtxt<'tcx>,
    body: &'a Body<'tcx>,
    move_data: &'a MoveData<'tcx>,

    /// When set, edge-specific `SwitchInt` effects mark inactive variants' move paths
    /// as maybe-uninitialized. See `mark_inactive_variants_as_uninit`.
    mark_inactive_variants_as_uninit: bool,
    /// When set, the `otherwise` edge of a `SwitchInt` also marks move paths of
    /// definitely inactive variants as uninitialized. See `include_inactive_in_otherwise`.
    include_inactive_in_otherwise: bool,
    /// Blocks whose `Drop` terminator's unwind edge should be treated as unreachable,
    /// as supplied via `skipping_unreachable_unwind`.
    skip_unreachable_unwind: DenseBitSet<mir::BasicBlock>,
}
235
236impl<'a, 'tcx> MaybeUninitializedPlaces<'a, 'tcx> {
237    pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, move_data: &'a MoveData<'tcx>) -> Self {
238        MaybeUninitializedPlaces {
239            tcx,
240            body,
241            move_data,
242            mark_inactive_variants_as_uninit: false,
243            include_inactive_in_otherwise: false,
244            skip_unreachable_unwind: DenseBitSet::new_empty(body.basic_blocks.len()),
245        }
246    }
247
248    /// Causes inactive enum variants to be marked as "maybe uninitialized" after a switch on an
249    /// enum discriminant.
250    ///
251    /// This is correct in a vacuum but is not the default because it causes problems in the borrow
252    /// checker, where this information gets propagated along `FakeEdge`s.
253    pub fn mark_inactive_variants_as_uninit(mut self) -> Self {
254        self.mark_inactive_variants_as_uninit = true;
255        self
256    }
257
258    /// Ensures definitely inactive variants are included in the set of uninitialized places for
259    /// blocks reached through an `otherwise` edge.
260    pub fn include_inactive_in_otherwise(mut self) -> Self {
261        self.include_inactive_in_otherwise = true;
262        self
263    }
264
265    pub fn skipping_unreachable_unwind(
266        mut self,
267        unreachable_unwind: DenseBitSet<mir::BasicBlock>,
268    ) -> Self {
269        self.skip_unreachable_unwind = unreachable_unwind;
270        self
271    }
272}
273
// Gives the shared drop-flag helpers access to this analysis's move data.
impl<'tcx> HasMoveData<'tcx> for MaybeUninitializedPlaces<'_, 'tcx> {
    fn move_data(&self) -> &MoveData<'tcx> {
        self.move_data
    }
}
279
280/// `EverInitializedPlaces` tracks all places that might have ever been
281/// initialized upon reaching a particular point in the control flow
282/// for a function, without an intervening `StorageDead`.
283///
284/// This dataflow is used to determine if an immutable local variable may
285/// be assigned to.
286///
287/// For example, in code like the following, we have corresponding
288/// dataflow information shown in the right-hand comments.
289///
290/// ```rust
291/// struct S;
292/// fn foo(pred: bool) {                        // ever-init:
293///                                             // {          }
294///     let a = S; let mut b = S; let c; let d; // {a, b      }
295///
296///     if pred {
297///         drop(a);                            // {a, b,     }
298///         b = S;                              // {a, b,     }
299///
300///     } else {
301///         drop(b);                            // {a, b,      }
302///         d = S;                              // {a, b,    d }
303///
304///     }                                       // {a, b,    d }
305///
306///     c = S;                                  // {a, b, c, d }
307/// }
308/// ```
pub struct EverInitializedPlaces<'a, 'tcx> {
    body: &'a Body<'tcx>,
    /// Provides `inits`, `init_loc_map`, and `init_path_map`, which this analysis
    /// tracks instead of move paths.
    move_data: &'a MoveData<'tcx>,
}
313
314impl<'a, 'tcx> EverInitializedPlaces<'a, 'tcx> {
315    pub fn new(body: &'a Body<'tcx>, move_data: &'a MoveData<'tcx>) -> Self {
316        EverInitializedPlaces { body, move_data }
317    }
318}
319
// Gives the shared drop-flag helpers access to this analysis's move data.
impl<'tcx> HasMoveData<'tcx> for EverInitializedPlaces<'_, 'tcx> {
    fn move_data(&self) -> &MoveData<'tcx> {
        self.move_data
    }
}
325
326impl<'a, 'tcx> MaybeInitializedPlaces<'a, 'tcx> {
327    fn update_bits(
328        state: &mut <Self as Analysis<'tcx>>::Domain,
329        path: MovePathIndex,
330        dfstate: DropFlagState,
331    ) {
332        match dfstate {
333            DropFlagState::Absent => state.kill(path),
334            DropFlagState::Present => state.gen_(path),
335        }
336    }
337}
338
339impl<'tcx> MaybeUninitializedPlaces<'_, 'tcx> {
340    fn update_bits(
341        state: &mut <Self as Analysis<'tcx>>::Domain,
342        path: MovePathIndex,
343        dfstate: DropFlagState,
344    ) {
345        match dfstate {
346            DropFlagState::Absent => state.gen_(path),
347            DropFlagState::Present => state.kill(path),
348        }
349    }
350}
351
impl<'tcx> Analysis<'tcx> for MaybeInitializedPlaces<'_, 'tcx> {
    /// There can be many more `MovePathIndex` than there are locals in a MIR body.
    /// We use a mixed bitset to avoid paying too high a memory footprint.
    type Domain = MaybeReachable<MixedBitSet<MovePathIndex>>;

    type SwitchIntData = MaybePlacesSwitchIntData<'tcx>;

    const NAME: &'static str = "maybe_init";

    fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
        // bottom = uninitialized
        MaybeReachable::Unreachable
    }

    /// Marks the start block reachable with only the function-entry places
    /// (the arguments) initialized.
    fn initialize_start_block(&self, _: &mir::Body<'tcx>, state: &mut Self::Domain) {
        *state =
            MaybeReachable::Reachable(MixedBitSet::new_empty(self.move_data().move_paths.len()));
        drop_flag_effects_for_function_entry(self.body, self.move_data, |path, s| {
            // Function entry can only ever *set* drop flags.
            assert!(s == DropFlagState::Present);
            state.gen_(path);
        });
    }

    /// Applies the statement's drop-flag effects, then conservatively marks
    /// mutably-borrowed (or raw-pointed-to) places as maybe-initialized, since
    /// they could be written through the pointer.
    fn apply_primary_statement_effect(
        &mut self,
        state: &mut Self::Domain,
        statement: &mir::Statement<'tcx>,
        location: Location,
    ) {
        drop_flag_effects_for_location(self.body, self.move_data, location, |path, s| {
            Self::update_bits(state, path, s)
        });

        // Mark all places as "maybe init" if they are mutably borrowed. See #90752.
        if self.tcx.sess.opts.unstable_opts.precise_enum_drop_elaboration
            && let Some((_, rvalue)) = statement.kind.as_assign()
            && let mir::Rvalue::Ref(_, mir::BorrowKind::Mut { .. }, place)
                // FIXME: Does `&raw const foo` allow mutation? See #90413.
                | mir::Rvalue::RawPtr(_, place) = rvalue
            && let LookupResult::Exact(mpi) = self.move_data().rev_lookup.find(place.as_ref())
        {
            on_all_children_bits(self.move_data(), mpi, |child| {
                state.gen_(child);
            })
        }
    }

    /// Applies the terminator's drop-flag effects. With `skip_unreachable_unwind`
    /// enabled, a `Drop` of a fully-uninitialized place keeps only its success
    /// edge, since its cleanup path can never run.
    fn apply_primary_terminator_effect<'mir>(
        &mut self,
        state: &mut Self::Domain,
        terminator: &'mir mir::Terminator<'tcx>,
        location: Location,
    ) -> TerminatorEdges<'mir, 'tcx> {
        // Note: `edges` must be computed first because `drop_flag_effects_for_location` can change
        // the result of `is_unwind_dead`.
        let mut edges = terminator.edges();
        if self.skip_unreachable_unwind
            && let mir::TerminatorKind::Drop {
                target,
                unwind,
                place,
                replace: _,
                drop: _,
                async_fut: _,
            } = terminator.kind
            && matches!(unwind, mir::UnwindAction::Cleanup(_))
            && self.is_unwind_dead(place, state)
        {
            edges = TerminatorEdges::Single(target);
        }
        drop_flag_effects_for_location(self.body, self.move_data, location, |path, s| {
            Self::update_bits(state, path, s)
        });
        edges
    }

    fn apply_call_return_effect(
        &mut self,
        state: &mut Self::Domain,
        _block: mir::BasicBlock,
        return_places: CallReturnPlaces<'_, 'tcx>,
    ) {
        return_places.for_each(|place| {
            // when a call returns successfully, that means we need to set
            // the bits for that dest_place to 1 (initialized).
            on_lookup_result_bits(
                self.move_data(),
                self.move_data().rev_lookup.find(place.as_ref()),
                |mpi| {
                    state.gen_(mpi);
                },
            );
        });
    }

    /// Edge-specific `SwitchInt` effects are only computed under
    /// `-Zprecise-enum-drop-elaboration` and when the switch reads an enum
    /// discriminant (see `MaybePlacesSwitchIntData::new`).
    fn get_switch_int_data(
        &mut self,
        block: mir::BasicBlock,
        discr: &mir::Operand<'tcx>,
    ) -> Option<Self::SwitchIntData> {
        if !self.tcx.sess.opts.unstable_opts.precise_enum_drop_elaboration {
            return None;
        }

        MaybePlacesSwitchIntData::new(self.tcx, self.body, block, discr)
    }

    /// On a `SwitchInt` edge that selects a specific variant (or, optionally, on
    /// the `otherwise` edge), kills the move paths of variants known inactive there.
    fn apply_switch_int_edge_effect(
        &mut self,
        data: &mut Self::SwitchIntData,
        state: &mut Self::Domain,
        value: SwitchTargetValue,
        targets: &mir::SwitchTargets,
    ) {
        let inactive_variants = match value {
            SwitchTargetValue::Normal(value) => InactiveVariants::Active(data.next_discr(value)),
            SwitchTargetValue::Otherwise if self.exclude_inactive_in_otherwise => {
                InactiveVariants::Inactives(data.variants(targets))
            }
            _ => return,
        };

        // Kill all move paths that correspond to variants we know to be inactive along this
        // particular outgoing edge of a `SwitchInt`.
        drop_flag_effects::on_all_inactive_variants(
            self.move_data,
            data.enum_place,
            &inactive_variants,
            |mpi| state.kill(mpi),
        );
    }
}
484
/// There can be many more `MovePathIndex` than there are locals in a MIR body.
/// We use a mixed bitset to avoid paying too high a memory footprint.
///
/// A set bit means the corresponding move path is "maybe uninitialized".
pub type MaybeUninitializedPlacesDomain = MixedBitSet<MovePathIndex>;
488
impl<'tcx> Analysis<'tcx> for MaybeUninitializedPlaces<'_, 'tcx> {
    type Domain = MaybeUninitializedPlacesDomain;

    type SwitchIntData = MaybePlacesSwitchIntData<'tcx>;

    const NAME: &'static str = "maybe_uninit";

    fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
        // bottom = initialized (`initialize_start_block` overwrites this on first entry)
        MixedBitSet::new_empty(self.move_data().move_paths.len())
    }

    // sets state bits for Arg places
    fn initialize_start_block(&self, _: &mir::Body<'tcx>, state: &mut Self::Domain) {
        // set all bits to 1 (uninit) before gathering counter-evidence
        state.insert_all();

        drop_flag_effects_for_function_entry(self.body, self.move_data, |path, s| {
            // Function entry can only ever *set* drop flags, i.e. clear "uninit" bits.
            assert!(s == DropFlagState::Present);
            state.remove(path);
        });
    }

    /// Applies the statement's drop-flag effects (mirrored: an absent flag marks
    /// the path maybe-uninitialized).
    fn apply_primary_statement_effect(
        &mut self,
        state: &mut Self::Domain,
        _statement: &mir::Statement<'tcx>,
        location: Location,
    ) {
        drop_flag_effects_for_location(self.body, self.move_data, location, |path, s| {
            Self::update_bits(state, path, s)
        });

        // Unlike in `MaybeInitializedPlaces` above, we don't need to change the state when a
        // mutable borrow occurs. Places cannot become uninitialized through a mutable reference.
    }

    /// Applies the terminator's drop-flag effects. Blocks listed in
    /// `skip_unreachable_unwind` (supplied via `skipping_unreachable_unwind`) must be
    /// `Drop` terminators with a cleanup edge — enforced below — and keep only their
    /// success edge.
    fn apply_primary_terminator_effect<'mir>(
        &mut self,
        state: &mut Self::Domain,
        terminator: &'mir mir::Terminator<'tcx>,
        location: Location,
    ) -> TerminatorEdges<'mir, 'tcx> {
        drop_flag_effects_for_location(self.body, self.move_data, location, |path, s| {
            Self::update_bits(state, path, s)
        });
        if self.skip_unreachable_unwind.contains(location.block) {
            let mir::TerminatorKind::Drop { target, unwind, .. } = terminator.kind else { bug!() };
            assert_matches!(unwind, mir::UnwindAction::Cleanup(_));
            TerminatorEdges::Single(target)
        } else {
            terminator.edges()
        }
    }

    fn apply_call_return_effect(
        &mut self,
        state: &mut Self::Domain,
        _block: mir::BasicBlock,
        return_places: CallReturnPlaces<'_, 'tcx>,
    ) {
        return_places.for_each(|place| {
            // when a call returns successfully, that means we need to set
            // the bits for that dest_place to 0 (initialized).
            on_lookup_result_bits(
                self.move_data(),
                self.move_data().rev_lookup.find(place.as_ref()),
                |mpi| {
                    state.kill(mpi);
                },
            );
        });
    }

    /// Edge-specific `SwitchInt` effects require both `-Zprecise-enum-drop-elaboration`
    /// and the `mark_inactive_variants_as_uninit` opt-in.
    fn get_switch_int_data(
        &mut self,
        block: mir::BasicBlock,
        discr: &mir::Operand<'tcx>,
    ) -> Option<Self::SwitchIntData> {
        if !self.tcx.sess.opts.unstable_opts.precise_enum_drop_elaboration {
            return None;
        }

        if !self.mark_inactive_variants_as_uninit {
            return None;
        }

        MaybePlacesSwitchIntData::new(self.tcx, self.body, block, discr)
    }

    /// On a `SwitchInt` edge that selects a specific variant (or, optionally, on the
    /// `otherwise` edge), marks the move paths of inactive variants as uninitialized.
    fn apply_switch_int_edge_effect(
        &mut self,
        data: &mut Self::SwitchIntData,
        state: &mut Self::Domain,
        value: SwitchTargetValue,
        targets: &mir::SwitchTargets,
    ) {
        let inactive_variants = match value {
            SwitchTargetValue::Normal(value) => InactiveVariants::Active(data.next_discr(value)),
            SwitchTargetValue::Otherwise if self.include_inactive_in_otherwise => {
                InactiveVariants::Inactives(data.variants(targets))
            }
            _ => return,
        };

        // Mark all move paths that correspond to variants other than this one as maybe
        // uninitialized (in reality, they are *definitely* uninitialized).
        drop_flag_effects::on_all_inactive_variants(
            self.move_data,
            data.enum_place,
            &inactive_variants,
            |mpi| state.gen_(mpi),
        );
    }
}
604
/// There can be many more `InitIndex` than there are locals in a MIR body.
/// We use a mixed bitset to avoid paying too high a memory footprint.
///
/// A set bit means the corresponding initialization record may have taken effect.
pub type EverInitializedPlacesDomain = MixedBitSet<InitIndex>;
608
609impl<'tcx> Analysis<'tcx> for EverInitializedPlaces<'_, 'tcx> {
610    type Domain = EverInitializedPlacesDomain;
611
612    const NAME: &'static str = "ever_init";
613
614    fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
615        // bottom = no initialized variables by default
616        MixedBitSet::new_empty(self.move_data().inits.len())
617    }
618
619    fn initialize_start_block(&self, body: &mir::Body<'tcx>, state: &mut Self::Domain) {
620        for arg_init in 0..body.arg_count {
621            state.insert(InitIndex::new(arg_init));
622        }
623    }
624
625    #[instrument(skip(self, state), level = "debug")]
626    fn apply_primary_statement_effect(
627        &mut self,
628        state: &mut Self::Domain,
629        stmt: &mir::Statement<'tcx>,
630        location: Location,
631    ) {
632        let move_data = self.move_data();
633        let init_path_map = &move_data.init_path_map;
634        let init_loc_map = &move_data.init_loc_map;
635        let rev_lookup = &move_data.rev_lookup;
636
637        debug!("initializes move_indexes {:?}", init_loc_map[location]);
638        state.gen_all(init_loc_map[location].iter().copied());
639
640        if let mir::StatementKind::StorageDead(local) = stmt.kind {
641            // End inits for StorageDead, so that an immutable variable can
642            // be reinitialized on the next iteration of the loop.
643            if let Some(move_path_index) = rev_lookup.find_local(local) {
644                debug!(
645                    "clears the ever initialized status of {:?}",
646                    init_path_map[move_path_index]
647                );
648                state.kill_all(init_path_map[move_path_index].iter().copied());
649            }
650        }
651    }
652
653    #[instrument(skip(self, state, terminator), level = "debug")]
654    fn apply_primary_terminator_effect<'mir>(
655        &mut self,
656        state: &mut Self::Domain,
657        terminator: &'mir mir::Terminator<'tcx>,
658        location: Location,
659    ) -> TerminatorEdges<'mir, 'tcx> {
660        let (body, move_data) = (self.body, self.move_data());
661        let term = body[location.block].terminator();
662        let init_loc_map = &move_data.init_loc_map;
663        debug!(?term);
664        debug!("initializes move_indexes {:?}", init_loc_map[location]);
665        state.gen_all(
666            init_loc_map[location]
667                .iter()
668                .filter(|init_index| {
669                    move_data.inits[**init_index].kind != InitKind::NonPanicPathOnly
670                })
671                .copied(),
672        );
673        terminator.edges()
674    }
675
676    fn apply_call_return_effect(
677        &mut self,
678        state: &mut Self::Domain,
679        block: mir::BasicBlock,
680        _return_places: CallReturnPlaces<'_, 'tcx>,
681    ) {
682        let move_data = self.move_data();
683        let init_loc_map = &move_data.init_loc_map;
684
685        let call_loc = self.body.terminator_loc(block);
686        for init_index in &init_loc_map[call_loc] {
687            state.gen_(*init_index);
688        }
689    }
690}