rustc_mir_transform/elaborate_drop.rs

1use std::{fmt, iter, mem};
2
3use rustc_abi::{FIRST_VARIANT, FieldIdx, VariantIdx};
4use rustc_hir::def::DefKind;
5use rustc_hir::lang_items::LangItem;
6use rustc_index::Idx;
7use rustc_middle::mir::*;
8use rustc_middle::ty::adjustment::PointerCoercion;
9use rustc_middle::ty::util::IntTypeExt;
10use rustc_middle::ty::{self, GenericArg, GenericArgsRef, Ty, TyCtxt};
11use rustc_middle::{bug, span_bug, traits};
12use rustc_span::DUMMY_SP;
13use rustc_span::source_map::{Spanned, dummy_spanned};
14use tracing::{debug, instrument};
15
16use crate::patch::MirPatch;
17
18/// Describes how/if a value should be dropped.
19#[derive(Debug)]
20pub(crate) enum DropStyle {
21    /// The value is already dead at the drop location, no drop will be executed.
22    Dead,
23
24    /// The value is known to always be initialized at the drop location, drop will always be
25    /// executed.
26    Static,
27
28    /// Whether the value needs to be dropped depends on its drop flag.
29    Conditional,
30
31    /// An "open" drop is one where only the fields of a value are dropped.
32    ///
33    /// For example, this happens when moving out of a struct field: The rest of the struct will be
34    /// dropped in such an "open" drop. It is also used to generate drop glue for the individual
35    /// components of a value, for example for dropping array elements.
36    Open,
37}
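
// Illustrative example (hypothetical user code): in a function such as
//
//     fn f(cond: bool) {
//         let s = String::new();
//         if cond {
//             drop(s);
//         }
//         // Whether `s` is still initialized here depends on `cond`, so its drop is
//         // `DropStyle::Conditional` and is guarded by a runtime drop flag.
//     }
//
// a value that is never moved gets `DropStyle::Static`, a value that is always moved before the
// drop location is `DropStyle::Dead`, and moving out of a single struct field leads to an "open"
// drop of the remaining fields.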
38
39/// Which drop flags to affect/check with an operation.
40#[derive(Debug)]
41pub(crate) enum DropFlagMode {
42    /// Only affect the top-level drop flag, not that of any contained fields.
43    Shallow,
44    /// Affect all nested drop flags in addition to the top-level one.
45    Deep,
46}
47
48/// Describes if unwinding is necessary and where to unwind to if a panic occurs.
49#[derive(Copy, Clone, Debug)]
50pub(crate) enum Unwind {
51    /// Unwind to this block.
52    To(BasicBlock),
53    /// Already in an unwind path, any panic will cause an abort.
54    InCleanup,
55}
56
57impl Unwind {
58    fn is_cleanup(self) -> bool {
59        match self {
60            Unwind::To(..) => false,
61            Unwind::InCleanup => true,
62        }
63    }
64
65    fn into_action(self) -> UnwindAction {
66        match self {
67            Unwind::To(bb) => UnwindAction::Cleanup(bb),
68            Unwind::InCleanup => UnwindAction::Terminate(UnwindTerminateReason::InCleanup),
69        }
70    }
71
72    fn map<F>(self, f: F) -> Self
73    where
74        F: FnOnce(BasicBlock) -> BasicBlock,
75    {
76        match self {
77            Unwind::To(bb) => Unwind::To(f(bb)),
78            Unwind::InCleanup => Unwind::InCleanup,
79        }
80    }
81}
82
83pub(crate) trait DropElaborator<'a, 'tcx>: fmt::Debug {
84    /// The type representing paths that can be moved out of.
85    ///
86    /// Users can move out of individual fields of a struct, such as `a.b.c`. This type is used to
87    /// represent such move paths. Sometimes tracking individual move paths is not necessary, in
88    /// which case this may be set to (for example) `()`.
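    ///
    /// For illustration (hypothetical user code): given `struct S { a: String, b: (String, String) }`,
    /// the places `s.a` and `s.b.0` correspond to distinct move paths, letting an elaborator that
    /// tracks them determine which parts of `s` still need to be dropped after a partial move.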
89    type Path: Copy + fmt::Debug;
90
91    // Accessors
92
93    fn patch_ref(&self) -> &MirPatch<'tcx>;
94    fn patch(&mut self) -> &mut MirPatch<'tcx>;
95    fn body(&self) -> &'a Body<'tcx>;
96    fn tcx(&self) -> TyCtxt<'tcx>;
97    fn typing_env(&self) -> ty::TypingEnv<'tcx>;
98    fn allow_async_drops(&self) -> bool;
99
100    fn terminator_loc(&self, bb: BasicBlock) -> Location;
101
102    // Drop logic
103
104    /// Returns how `path` should be dropped, given `mode`.
105    fn drop_style(&self, path: Self::Path, mode: DropFlagMode) -> DropStyle;
106
107    /// Returns the drop flag of `path` as a MIR `Operand` (or `None` if `path` has no drop flag).
108    fn get_drop_flag(&mut self, path: Self::Path) -> Option<Operand<'tcx>>;
109
110    /// Modifies the MIR patch so that the drop flag of `path` (if any) is cleared at `location`.
111    ///
112    /// If `mode` is deep, drop flags of all child paths should also be cleared by inserting
113    /// additional statements.
114    fn clear_drop_flag(&mut self, location: Location, path: Self::Path, mode: DropFlagMode);
115
116    // Subpaths
117
118    /// Returns the subpath of a field of `path` (or `None` if there is no dedicated subpath).
119    ///
120    /// If this returns `None`, `field` will not get a dedicated drop flag.
121    fn field_subpath(&self, path: Self::Path, field: FieldIdx) -> Option<Self::Path>;
122
123    /// Returns the subpath of a dereference of `path` (or `None` if there is no dedicated subpath).
124    ///
125    /// If this returns `None`, `*path` will not get a dedicated drop flag.
126    ///
127    /// This is only relevant for `Box<T>`, where the contained `T` can be moved out of the box.
128    fn deref_subpath(&self, path: Self::Path) -> Option<Self::Path>;
129
130    /// Returns the subpath of downcasting `path` to one of its variants.
131    ///
132    /// If this returns `None`, the downcast of `path` will not get a dedicated drop flag.
133    fn downcast_subpath(&self, path: Self::Path, variant: VariantIdx) -> Option<Self::Path>;
134
135    /// Returns the subpath of indexing a fixed-size array `path`.
136    ///
137    /// If this returns `None`, elements of `path` will not get a dedicated drop flag.
138    ///
139    /// This is only relevant for array patterns, which can move out of individual array elements.
140    fn array_subpath(&self, path: Self::Path, index: u64, size: u64) -> Option<Self::Path>;
141}
142
143#[derive(Debug)]
144struct DropCtxt<'a, 'b, 'tcx, D>
145where
146    D: DropElaborator<'b, 'tcx>,
147{
148    elaborator: &'a mut D,
149
150    source_info: SourceInfo,
151
152    place: Place<'tcx>,
153    path: D::Path,
154    succ: BasicBlock,
155    unwind: Unwind,
156    dropline: Option<BasicBlock>,
157}
158
159/// "Elaborates" a drop of `place`/`path` and patches `bb`'s terminator to execute it.
160///
161/// The passed `elaborator` is used to determine what should happen at the drop terminator. It
162/// decides whether the drop can be statically determined or whether it needs a dynamic drop flag,
163/// and whether the drop is "open", i.e. should be expanded to drop all subfields of the dropped
164/// value.
165///
166/// When this returns, the MIR patch in the `elaborator` contains the necessary changes.
167pub(crate) fn elaborate_drop<'b, 'tcx, D>(
168    elaborator: &mut D,
169    source_info: SourceInfo,
170    place: Place<'tcx>,
171    path: D::Path,
172    succ: BasicBlock,
173    unwind: Unwind,
174    bb: BasicBlock,
175    dropline: Option<BasicBlock>,
176) where
177    D: DropElaborator<'b, 'tcx>,
178    'tcx: 'b,
179{
180    DropCtxt { elaborator, source_info, place, path, succ, unwind, dropline }.elaborate_drop(bb)
181}
182
183impl<'a, 'b, 'tcx, D> DropCtxt<'a, 'b, 'tcx, D>
184where
185    D: DropElaborator<'b, 'tcx>,
186    'tcx: 'b,
187{
188    #[instrument(level = "trace", skip(self), ret)]
189    fn place_ty(&self, place: Place<'tcx>) -> Ty<'tcx> {
190        if place.local < self.elaborator.body().local_decls.next_index() {
191            place.ty(self.elaborator.body(), self.tcx()).ty
192        } else {
193            // We don't have a slice with all the locals, since some are in the patch.
194            PlaceTy::from_ty(self.elaborator.patch_ref().local_ty(place.local))
195                .multi_projection_ty(self.elaborator.tcx(), place.projection)
196                .ty
197        }
198    }
199
200    fn tcx(&self) -> TyCtxt<'tcx> {
201        self.elaborator.tcx()
202    }
203
204    // Generates three blocks:
205    // * #1:pin_obj_bb:   call Pin<ObjTy>::new_unchecked(&mut obj)
206    // * #2:call_drop_bb: fut = call obj.<AsyncDrop::drop>() OR call async_drop_in_place<T>(obj)
207    // * #3:drop_term_bb: drop (obj, fut, ...)
208    // We keep the async drop unexpanded (no poll loop) here; it is expanded later, in the
209    //   StateTransform pass, into coroutine states.
210    // `call_destructor_only`: call only `AsyncDrop::drop`, not the full `async_drop_in_place` glue.
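    // The generated control flow, as a summary of the code below:
    //   pin_obj_bb (Pin::new_unchecked)  ->  call_drop_bb (create the drop future `fut`)
    //     ->  drop_term_bb (Drop with `async_fut: Some(fut)`)  ->  succ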
211    fn build_async_drop(
212        &mut self,
213        place: Place<'tcx>,
214        drop_ty: Ty<'tcx>,
215        bb: Option<BasicBlock>,
216        succ: BasicBlock,
217        unwind: Unwind,
218        dropline: Option<BasicBlock>,
219        call_destructor_only: bool,
220    ) -> BasicBlock {
221        let tcx = self.tcx();
222        let span = self.source_info.span;
223
224        let pin_obj_bb = bb.unwrap_or_else(|| {
225            self.elaborator.patch().new_block(BasicBlockData::new(
226                Some(Terminator {
227                    // Temporary terminator, will be replaced by patch
228                    source_info: self.source_info,
229                    kind: TerminatorKind::Return,
230                }),
231                false,
232            ))
233        });
234
235        let (fut_ty, drop_fn_def_id, trait_args) = if call_destructor_only {
236            // Resolving obj.<AsyncDrop::drop>()
237            let trait_ref =
238                ty::TraitRef::new(tcx, tcx.require_lang_item(LangItem::AsyncDrop, span), [drop_ty]);
239            let (drop_trait, trait_args) = match tcx.codegen_select_candidate(
240                ty::TypingEnv::fully_monomorphized().as_query_input(trait_ref),
241            ) {
242                Ok(traits::ImplSource::UserDefined(traits::ImplSourceUserDefinedData {
243                    impl_def_id,
244                    args,
245                    ..
246                })) => (*impl_def_id, *args),
247                impl_source => {
248                    span_bug!(span, "invalid `AsyncDrop` impl_source: {:?}", impl_source);
249                }
250            };
251            // impl_item_refs may be empty if the drop fn is not implemented in 'impl AsyncDrop for ...'
252            // (#140974).
253            // Such code will report an error, so just generate a sync drop here and return.
254            let Some(drop_fn_def_id) = tcx
255                .associated_item_def_ids(drop_trait)
256                .first()
257                .and_then(|def_id| {
258                    if tcx.def_kind(def_id) == DefKind::AssocFn
259                        && tcx.check_args_compatible(*def_id, trait_args)
260                    {
261                        Some(def_id)
262                    } else {
263                        None
264                    }
265                })
266                .copied()
267            else {
268                tcx.dcx().span_delayed_bug(
269                    self.elaborator.body().span,
270                    "AsyncDrop type without correct `async fn drop(...)`.",
271                );
272                self.elaborator.patch().patch_terminator(
273                    pin_obj_bb,
274                    TerminatorKind::Drop {
275                        place,
276                        target: succ,
277                        unwind: unwind.into_action(),
278                        replace: false,
279                        drop: None,
280                        async_fut: None,
281                    },
282                );
283                return pin_obj_bb;
284            };
285            let drop_fn = Ty::new_fn_def(tcx, drop_fn_def_id, trait_args);
286            let sig = drop_fn.fn_sig(tcx);
287            let sig = tcx.instantiate_bound_regions_with_erased(sig);
288            (sig.output(), drop_fn_def_id, trait_args)
289        } else {
290            // Resolving async_drop_in_place<T> function for drop_ty
291            let drop_fn_def_id = tcx.require_lang_item(LangItem::AsyncDropInPlace, span);
292            let trait_args = tcx.mk_args(&[drop_ty.into()]);
293            let sig = tcx.fn_sig(drop_fn_def_id).instantiate(tcx, trait_args);
294            let sig = tcx.instantiate_bound_regions_with_erased(sig);
295            (sig.output(), drop_fn_def_id, trait_args)
296        };
297
298        let fut = Place::from(self.new_temp(fut_ty));
299
300        // #1:pin_obj_bb >>> obj_ref = &mut obj
301        let obj_ref_ty = Ty::new_mut_ref(tcx, tcx.lifetimes.re_erased, drop_ty);
302        let obj_ref_place = Place::from(self.new_temp(obj_ref_ty));
303
304        let term_loc = self.elaborator.terminator_loc(pin_obj_bb);
305        self.elaborator.patch().add_assign(
306            term_loc,
307            obj_ref_place,
308            Rvalue::Ref(
309                tcx.lifetimes.re_erased,
310                BorrowKind::Mut { kind: MutBorrowKind::Default },
311                place,
312            ),
313        );
314
315        // pin_obj_place preparation
316        let pin_obj_new_unchecked_fn = Ty::new_fn_def(
317            tcx,
318            tcx.require_lang_item(LangItem::PinNewUnchecked, span),
319            [GenericArg::from(obj_ref_ty)],
320        );
321        let pin_obj_ty = pin_obj_new_unchecked_fn.fn_sig(tcx).output().no_bound_vars().unwrap();
322        let pin_obj_place = Place::from(self.new_temp(pin_obj_ty));
323        let pin_obj_new_unchecked_fn = Operand::Constant(Box::new(ConstOperand {
324            span,
325            user_ty: None,
326            const_: Const::zero_sized(pin_obj_new_unchecked_fn),
327        }));
328
329        // #3:drop_term_bb
330        let drop_term_bb = self.new_block(
331            unwind,
332            TerminatorKind::Drop {
333                place,
334                target: succ,
335                unwind: unwind.into_action(),
336                replace: false,
337                drop: dropline,
338                async_fut: Some(fut.local),
339            },
340        );
341
342        // #2:call_drop_bb
343        let mut call_statements = Vec::new();
344        let drop_arg = if call_destructor_only {
345            pin_obj_place
346        } else {
347            let ty::Adt(adt_def, adt_args) = pin_obj_ty.kind() else {
348                bug!();
349            };
350            let obj_ptr_ty = Ty::new_mut_ptr(tcx, drop_ty);
351            let unwrap_ty = adt_def.non_enum_variant().fields[FieldIdx::ZERO].ty(tcx, adt_args);
352            let obj_ref_place = Place::from(self.new_temp(unwrap_ty));
353            call_statements.push(self.assign(
354                obj_ref_place,
355                Rvalue::Use(Operand::Copy(tcx.mk_place_field(
356                    pin_obj_place,
357                    FieldIdx::ZERO,
358                    unwrap_ty,
359                ))),
360            ));
361
362            let obj_ptr_place = Place::from(self.new_temp(obj_ptr_ty));
363
364            let addr = Rvalue::RawPtr(RawPtrKind::Mut, tcx.mk_place_deref(obj_ref_place));
365            call_statements.push(self.assign(obj_ptr_place, addr));
366            obj_ptr_place
367        };
368        call_statements
369            .push(Statement::new(self.source_info, StatementKind::StorageLive(fut.local)));
370
371        let call_drop_bb = self.new_block_with_statements(
372            unwind,
373            call_statements,
374            TerminatorKind::Call {
375                func: Operand::function_handle(tcx, drop_fn_def_id, trait_args, span),
376                args: [Spanned { node: Operand::Move(drop_arg), span: DUMMY_SP }].into(),
377                destination: fut,
378                target: Some(drop_term_bb),
379                unwind: unwind.into_action(),
380                call_source: CallSource::Misc,
381                fn_span: self.source_info.span,
382            },
383        );
384
385        // StorageDead(fut) in self.succ block (at the beginning)
386        self.elaborator.patch().add_statement(
387            Location { block: self.succ, statement_index: 0 },
388            StatementKind::StorageDead(fut.local),
389        );
390        // StorageDead(fut) in unwind block (at the beginning)
391        if let Unwind::To(block) = unwind {
392            self.elaborator.patch().add_statement(
393                Location { block, statement_index: 0 },
394                StatementKind::StorageDead(fut.local),
395            );
396        }
397        // StorageDead(fut) in dropline block (at the beginning)
398        if let Some(block) = dropline {
399            self.elaborator.patch().add_statement(
400                Location { block, statement_index: 0 },
401                StatementKind::StorageDead(fut.local),
402            );
403        }
404
405        // #1:pin_obj_bb >>> call Pin<ObjTy>::new_unchecked(&mut obj)
406        self.elaborator.patch().patch_terminator(
407            pin_obj_bb,
408            TerminatorKind::Call {
409                func: pin_obj_new_unchecked_fn,
410                args: [dummy_spanned(Operand::Move(obj_ref_place))].into(),
411                destination: pin_obj_place,
412                target: Some(call_drop_bb),
413                unwind: unwind.into_action(),
414                call_source: CallSource::Misc,
415                fn_span: span,
416            },
417        );
418        pin_obj_bb
419    }
420
421    fn build_drop(&mut self, bb: BasicBlock) {
422        let drop_ty = self.place_ty(self.place);
423        if self.tcx().features().async_drop()
424            && self.elaborator.body().coroutine.is_some()
425            && self.elaborator.allow_async_drops()
426            && !self.elaborator.patch_ref().block(self.elaborator.body(), bb).is_cleanup
427            && drop_ty.needs_async_drop(self.tcx(), self.elaborator.typing_env())
428        {
429            self.build_async_drop(
430                self.place,
431                drop_ty,
432                Some(bb),
433                self.succ,
434                self.unwind,
435                self.dropline,
436                false,
437            );
438        } else {
439            self.elaborator.patch().patch_terminator(
440                bb,
441                TerminatorKind::Drop {
442                    place: self.place,
443                    target: self.succ,
444                    unwind: self.unwind.into_action(),
445                    replace: false,
446                    drop: None,
447                    async_fut: None,
448                },
449            );
450        }
451    }
452
453    /// This elaborates a single drop instruction, located at `bb`, and
454    /// patches over it.
455    ///
456    /// The elaborated drop checks the drop flags to only drop what
457    /// is initialized.
458    ///
459    /// In addition, the relevant drop flags also need to be cleared
460    /// to avoid double-drops. However, in the middle of a complex
461    /// drop, one must avoid clearing some of the flags before they
462    /// are read, as that would cause a memory leak.
463    ///
464    /// In particular, when dropping an ADT, multiple fields may be
465    /// joined together under the `rest` subpath. They are all controlled
466    /// by the primary drop flag, but only the last rest-field dropped
467    /// should clear it (and it must also not clear anything else).
468    //
469    // FIXME: I think we should just control the flags externally,
470    // and then we do not need this machinery.
471    #[instrument(level = "debug")]
472    fn elaborate_drop(&mut self, bb: BasicBlock) {
473        match self.elaborator.drop_style(self.path, DropFlagMode::Deep) {
474            DropStyle::Dead => {
475                self.elaborator
476                    .patch()
477                    .patch_terminator(bb, TerminatorKind::Goto { target: self.succ });
478            }
479            DropStyle::Static => {
480                self.build_drop(bb);
481            }
482            DropStyle::Conditional => {
483                let drop_bb = self.complete_drop(self.succ, self.unwind);
484                self.elaborator
485                    .patch()
486                    .patch_terminator(bb, TerminatorKind::Goto { target: drop_bb });
487            }
488            DropStyle::Open => {
489                let drop_bb = self.open_drop();
490                self.elaborator
491                    .patch()
492                    .patch_terminator(bb, TerminatorKind::Goto { target: drop_bb });
493            }
494        }
495    }
496
497    /// Returns the place and move path for each field of `variant`
498    /// (the move path is `None` if the field is a rest field).
499    fn move_paths_for_fields(
500        &self,
501        base_place: Place<'tcx>,
502        variant_path: D::Path,
503        variant: &'tcx ty::VariantDef,
504        args: GenericArgsRef<'tcx>,
505    ) -> Vec<(Place<'tcx>, Option<D::Path>)> {
506        variant
507            .fields
508            .iter_enumerated()
509            .map(|(field_idx, field)| {
510                let subpath = self.elaborator.field_subpath(variant_path, field_idx);
511                let tcx = self.tcx();
512
513                assert_eq!(self.elaborator.typing_env().typing_mode, ty::TypingMode::PostAnalysis);
514                let field_ty = field.ty(tcx, args);
515                // We silently leave an unnormalized type here to support polymorphic drop
516                // elaboration for users of rustc internal APIs
517                let field_ty = tcx
518                    .try_normalize_erasing_regions(self.elaborator.typing_env(), field_ty)
519                    .unwrap_or(field_ty);
520
521                (tcx.mk_place_field(base_place, field_idx, field_ty), subpath)
522            })
523            .collect()
524    }
525
526    fn drop_subpath(
527        &mut self,
528        place: Place<'tcx>,
529        path: Option<D::Path>,
530        succ: BasicBlock,
531        unwind: Unwind,
532        dropline: Option<BasicBlock>,
533    ) -> BasicBlock {
534        if let Some(path) = path {
535            debug!("drop_subpath: for std field {:?}", place);
536
537            DropCtxt {
538                elaborator: self.elaborator,
539                source_info: self.source_info,
540                path,
541                place,
542                succ,
543                unwind,
544                dropline,
545            }
546            .elaborated_drop_block()
547        } else {
548            debug!("drop_subpath: for rest field {:?}", place);
549
550            DropCtxt {
551                elaborator: self.elaborator,
552                source_info: self.source_info,
553                place,
554                succ,
555                unwind,
556                dropline,
557                // Using `self.path` here to condition the drop on
558                // our own drop flag.
559                path: self.path,
560            }
561            .complete_drop(succ, unwind)
562        }
563    }
564
565    /// Creates one-half of the drop ladder for a list of fields, and returns
566    /// the list of steps in it in reverse order, with the first step
567    /// dropping 0 fields and so on.
568    ///
569    /// `unwind_ladder` is such a list of steps in reverse order,
570    /// which is called if the matching step of the drop glue panics.
571    ///
572    /// `dropline_ladder` is a similar list of steps in reverse order,
573    /// which is called if the matching step of the drop glue contains an async drop
574    /// (expanded later to a Yield) and the containing coroutine is dropped at that point.
575    fn drop_halfladder(
576        &mut self,
577        unwind_ladder: &[Unwind],
578        dropline_ladder: &[Option<BasicBlock>],
579        mut succ: BasicBlock,
580        fields: &[(Place<'tcx>, Option<D::Path>)],
581    ) -> Vec<BasicBlock> {
582        iter::once(succ)
583            .chain(itertools::izip!(fields.iter().rev(), unwind_ladder, dropline_ladder).map(
584                |(&(place, path), &unwind_succ, &dropline_to)| {
585                    succ = self.drop_subpath(place, path, succ, unwind_succ, dropline_to);
586                    succ
587                },
588            ))
589            .collect()
590    }
591
592    fn drop_ladder_bottom(&mut self) -> (BasicBlock, Unwind, Option<BasicBlock>) {
593        // Clear the "master" drop flag at the end. This is needed
594        // because the "master" drop protects the ADT's discriminant,
595        // which is invalidated after the ADT is dropped.
596        (
597            self.drop_flag_reset_block(DropFlagMode::Shallow, self.succ, self.unwind),
598            self.unwind,
599            self.dropline,
600        )
601    }
602
603    /// Creates a full drop ladder, consisting of 2 connected half-drop-ladders
604    ///
605    /// For example, with 3 fields, the drop ladder is
606    ///
607    /// ```text
608    /// .d0:
609    ///     ELAB(drop location.0 [target=.d1, unwind=.c1])
610    /// .d1:
611    ///     ELAB(drop location.1 [target=.d2, unwind=.c2])
612    /// .d2:
613    ///     ELAB(drop location.2 [target=`self.succ`, unwind=`self.unwind`])
614    /// .c1:
615    ///     ELAB(drop location.1 [target=.c2])
616    /// .c2:
617    ///     ELAB(drop location.2 [target=`self.unwind`])
618    /// ```
619    ///
620    /// For possible-async drops in coroutines we also need a dropline ladder
621    /// ```text
622    /// .d0 (mainline):
623    ///     ELAB(drop location.0 [target=.d1, unwind=.c1, drop=.e1])
624    /// .d1 (mainline):
625    ///     ELAB(drop location.1 [target=.d2, unwind=.c2, drop=.e2])
626    /// .d2 (mainline):
627    ///     ELAB(drop location.2 [target=`self.succ`, unwind=`self.unwind`, drop=`self.drop`])
628    /// .c1 (unwind):
629    ///     ELAB(drop location.1 [target=.c2])
630    /// .c2 (unwind):
631    ///     ELAB(drop location.2 [target=`self.unwind`])
632    /// .e1 (dropline):
633    ///     ELAB(drop location.1 [target=.e2, unwind=.c2])
634    /// .e2 (dropline):
635    ///     ELAB(drop location.2 [target=`self.drop`, unwind=`self.unwind`])
636    /// ```
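    ///
    /// As a source-level illustration (hypothetical user code, with hypothetical names), such a
    /// three-field ladder can arise from something like:
    ///
    /// ```text
    /// struct S(String, String, String);
    /// fn f(s: S, take: bool) {
    ///     if take {
    ///         let _first = s.0; // `s.0` is conditionally moved out, so dropping `s`
    ///     }                     // needs per-field drops guarded by drop flags.
    /// }
    /// ```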
637    ///
638    /// NOTE: this does not clear the master drop flag, so you need
639    /// to point succ/unwind on a `drop_ladder_bottom`.
640    fn drop_ladder(
641        &mut self,
642        fields: Vec<(Place<'tcx>, Option<D::Path>)>,
643        succ: BasicBlock,
644        unwind: Unwind,
645        dropline: Option<BasicBlock>,
646    ) -> (BasicBlock, Unwind, Option<BasicBlock>) {
647        debug!("drop_ladder({:?}, {:?})", self, fields);
648        assert!(
649            if unwind.is_cleanup() { dropline.is_none() } else { true },
650            "Dropline is set for cleanup drop ladder"
651        );
652
653        let mut fields = fields;
654        fields.retain(|&(place, _)| {
655            self.place_ty(place).needs_drop(self.tcx(), self.elaborator.typing_env())
656        });
657
658        debug!("drop_ladder - fields needing drop: {:?}", fields);
659
660        let dropline_ladder: Vec<Option<BasicBlock>> = vec![None; fields.len() + 1];
661        let unwind_ladder = vec![Unwind::InCleanup; fields.len() + 1];
662        let unwind_ladder: Vec<_> = if let Unwind::To(succ) = unwind {
663            let halfladder = self.drop_halfladder(&unwind_ladder, &dropline_ladder, succ, &fields);
664            halfladder.into_iter().map(Unwind::To).collect()
665        } else {
666            unwind_ladder
667        };
668        let dropline_ladder: Vec<_> = if let Some(succ) = dropline {
669            let halfladder = self.drop_halfladder(&unwind_ladder, &dropline_ladder, succ, &fields);
670            halfladder.into_iter().map(Some).collect()
671        } else {
672            dropline_ladder
673        };
674
675        let normal_ladder = self.drop_halfladder(&unwind_ladder, &dropline_ladder, succ, &fields);
676
677        (
678            *normal_ladder.last().unwrap(),
679            *unwind_ladder.last().unwrap(),
680            *dropline_ladder.last().unwrap(),
681        )
682    }
683
684    fn open_drop_for_tuple(&mut self, tys: &[Ty<'tcx>]) -> BasicBlock {
685        debug!("open_drop_for_tuple({:?}, {:?})", self, tys);
686
687        let fields = tys
688            .iter()
689            .enumerate()
690            .map(|(i, &ty)| {
691                (
692                    self.tcx().mk_place_field(self.place, FieldIdx::new(i), ty),
693                    self.elaborator.field_subpath(self.path, FieldIdx::new(i)),
694                )
695            })
696            .collect();
697
698        let (succ, unwind, dropline) = self.drop_ladder_bottom();
699        self.drop_ladder(fields, succ, unwind, dropline).0
700    }
701
702    /// Drops the `T` contained in a `Box<T>` if it has not been moved out of.
703    #[instrument(level = "debug", ret)]
704    fn open_drop_for_box_contents(
705        &mut self,
706        adt: ty::AdtDef<'tcx>,
707        args: GenericArgsRef<'tcx>,
708        succ: BasicBlock,
709        unwind: Unwind,
710        dropline: Option<BasicBlock>,
711    ) -> BasicBlock {
712        // drop glue is sent straight to codegen
713        // box cannot be directly dereferenced
714        let unique_ty = adt.non_enum_variant().fields[FieldIdx::ZERO].ty(self.tcx(), args);
715        let unique_variant = unique_ty.ty_adt_def().unwrap().non_enum_variant();
716        let nonnull_ty = unique_variant.fields[FieldIdx::ZERO].ty(self.tcx(), args);
717        let ptr_ty = Ty::new_imm_ptr(self.tcx(), args[0].expect_ty());
718
719        let unique_place = self.tcx().mk_place_field(self.place, FieldIdx::ZERO, unique_ty);
720        let nonnull_place = self.tcx().mk_place_field(unique_place, FieldIdx::ZERO, nonnull_ty);
721
722        let ptr_local = self.new_temp(ptr_ty);
723
724        let interior = self.tcx().mk_place_deref(Place::from(ptr_local));
725        let interior_path = self.elaborator.deref_subpath(self.path);
726
727        let do_drop_bb = self.drop_subpath(interior, interior_path, succ, unwind, dropline);
728
729        let setup_bbd = BasicBlockData::new_stmts(
730            vec![self.assign(
731                Place::from(ptr_local),
732                Rvalue::Cast(CastKind::Transmute, Operand::Copy(nonnull_place), ptr_ty),
733            )],
734            Some(Terminator {
735                kind: TerminatorKind::Goto { target: do_drop_bb },
736                source_info: self.source_info,
737            }),
738            unwind.is_cleanup(),
739        );
740        self.elaborator.patch().new_block(setup_bbd)
741    }
742
743    #[instrument(level = "debug", ret)]
744    fn open_drop_for_adt(
745        &mut self,
746        adt: ty::AdtDef<'tcx>,
747        args: GenericArgsRef<'tcx>,
748    ) -> BasicBlock {
749        if adt.variants().is_empty() {
750            return self.elaborator.patch().new_block(BasicBlockData::new(
751                Some(Terminator {
752                    source_info: self.source_info,
753                    kind: TerminatorKind::Unreachable,
754                }),
755                self.unwind.is_cleanup(),
756            ));
757        }
758
759        let skip_contents = adt.is_union() || adt.is_manually_drop();
760        let contents_drop = if skip_contents {
761            if adt.has_dtor(self.tcx()) && self.elaborator.get_drop_flag(self.path).is_some() {
762                // The top-level drop flag is usually cleared by open_drop_for_adt_contents;
763                // types with destructors would still need an empty drop ladder to clear it.
764
765                // However, these types are only open dropped in `DropShimElaborator`,
766                // which does not have drop flags.
767                // A future box-like "DerefMove" trait would allow this case to happen.
768                span_bug!(self.source_info.span, "open dropping partially moved union");
769            }
770
771            (self.succ, self.unwind, self.dropline)
772        } else {
773            self.open_drop_for_adt_contents(adt, args)
774        };
775
776        if adt.has_dtor(self.tcx()) {
777            let destructor_block = if adt.is_box() {
778                // we need to drop the inside of the box before running the destructor
779                let succ = self.destructor_call_block_sync((contents_drop.0, contents_drop.1));
780                let unwind = contents_drop
781                    .1
782                    .map(|unwind| self.destructor_call_block_sync((unwind, Unwind::InCleanup)));
783                let dropline = contents_drop
784                    .2
785                    .map(|dropline| self.destructor_call_block_sync((dropline, contents_drop.1)));
786                self.open_drop_for_box_contents(adt, args, succ, unwind, dropline)
787            } else {
788                self.destructor_call_block(contents_drop)
789            };
790
791            self.drop_flag_test_block(destructor_block, contents_drop.0, contents_drop.1)
792        } else {
793            contents_drop.0
794        }
795    }
796
797    fn open_drop_for_adt_contents(
798        &mut self,
799        adt: ty::AdtDef<'tcx>,
800        args: GenericArgsRef<'tcx>,
801    ) -> (BasicBlock, Unwind, Option<BasicBlock>) {
802        let (succ, unwind, dropline) = self.drop_ladder_bottom();
803        if !adt.is_enum() {
804            let fields =
805                self.move_paths_for_fields(self.place, self.path, adt.variant(FIRST_VARIANT), args);
806            self.drop_ladder(fields, succ, unwind, dropline)
807        } else {
808            self.open_drop_for_multivariant(adt, args, succ, unwind, dropline)
809        }
810    }
811
812    fn open_drop_for_multivariant(
813        &mut self,
814        adt: ty::AdtDef<'tcx>,
815        args: GenericArgsRef<'tcx>,
816        succ: BasicBlock,
817        unwind: Unwind,
818        dropline: Option<BasicBlock>,
819    ) -> (BasicBlock, Unwind, Option<BasicBlock>) {
820        let mut values = Vec::with_capacity(adt.variants().len());
821        let mut normal_blocks = Vec::with_capacity(adt.variants().len());
822        let mut unwind_blocks =
823            if unwind.is_cleanup() { None } else { Some(Vec::with_capacity(adt.variants().len())) };
824        let mut dropline_blocks =
825            if dropline.is_none() { None } else { Some(Vec::with_capacity(adt.variants().len())) };
826
827        let mut have_otherwise_with_drop_glue = false;
828        let mut have_otherwise = false;
829        let tcx = self.tcx();
830
831        for (variant_index, discr) in adt.discriminants(tcx) {
832            let variant = &adt.variant(variant_index);
833            let subpath = self.elaborator.downcast_subpath(self.path, variant_index);
834
835            if let Some(variant_path) = subpath {
836                let base_place = tcx.mk_place_elem(
837                    self.place,
838                    ProjectionElem::Downcast(Some(variant.name), variant_index),
839                );
840                let fields = self.move_paths_for_fields(base_place, variant_path, variant, args);
841                values.push(discr.val);
842                if let Unwind::To(unwind) = unwind {
843                    // We can't use the half-ladder from the original
844                    // drop ladder, because this breaks the
845                    // "funclet can't have 2 successor funclets"
846                    // requirement from MSVC:
847                    //
848                    //           switch       unwind-switch
849                    //          /      \         /        \
850                    //         v1.0    v2.0  v2.0-unwind  v1.0-unwind
851                    //         |        |      /             |
852                    //    v1.1-unwind  v2.1-unwind           |
853                    //      ^                                |
854                    //       \-------------------------------/
855                    //
856                    // Create a duplicate half-ladder to avoid that. We
857                    // could technically only do this on MSVC, but I
858                    // want to minimize the divergence between MSVC
859                    // and non-MSVC.
860
861                    let unwind_blocks = unwind_blocks.as_mut().unwrap();
862                    let unwind_ladder = vec![Unwind::InCleanup; fields.len() + 1];
863                    let dropline_ladder: Vec<Option<BasicBlock>> = vec![None; fields.len() + 1];
864                    let halfladder =
865                        self.drop_halfladder(&unwind_ladder, &dropline_ladder, unwind, &fields);
866                    unwind_blocks.push(halfladder.last().cloned().unwrap());
867                }
868                let (normal, _, drop_bb) = self.drop_ladder(fields, succ, unwind, dropline);
869                normal_blocks.push(normal);
870                if dropline.is_some() {
871                    dropline_blocks.as_mut().unwrap().push(drop_bb.unwrap());
872                }
873            } else {
874                have_otherwise = true;
875
876                let typing_env = self.elaborator.typing_env();
877                let have_field_with_drop_glue = variant
878                    .fields
879                    .iter()
880                    .any(|field| field.ty(tcx, args).needs_drop(tcx, typing_env));
881                if have_field_with_drop_glue {
882                    have_otherwise_with_drop_glue = true;
883                }
884            }
885        }
886
887        if !have_otherwise {
888            values.pop();
889        } else if !have_otherwise_with_drop_glue {
890            normal_blocks.push(self.goto_block(succ, unwind));
891            if let Unwind::To(unwind) = unwind {
892                unwind_blocks.as_mut().unwrap().push(self.goto_block(unwind, Unwind::InCleanup));
893            }
894        } else {
895            normal_blocks.push(self.drop_block(succ, unwind));
896            if let Unwind::To(unwind) = unwind {
897                unwind_blocks.as_mut().unwrap().push(self.drop_block(unwind, Unwind::InCleanup));
898            }
899        }
900
901        (
902            self.adt_switch_block(adt, normal_blocks, &values, succ, unwind),
903            unwind.map(|unwind| {
904                self.adt_switch_block(
905                    adt,
906                    unwind_blocks.unwrap(),
907                    &values,
908                    unwind,
909                    Unwind::InCleanup,
910                )
911            }),
912            dropline.map(|dropline| {
913                self.adt_switch_block(adt, dropline_blocks.unwrap(), &values, dropline, unwind)
914            }),
915        )
916    }
917
918    fn adt_switch_block(
919        &mut self,
920        adt: ty::AdtDef<'tcx>,
921        blocks: Vec<BasicBlock>,
922        values: &[u128],
923        succ: BasicBlock,
924        unwind: Unwind,
925    ) -> BasicBlock {
926        // If there are multiple variants, then if something
927        // is present within the enum, the discriminant (tracked
928        // by the rest path) must be initialized.
929        //
930        // Additionally, we do not want to switch on the
931        // discriminant after it is freed, because that
932        // way lies only trouble.
933        let discr_ty = adt.repr().discr_type().to_ty(self.tcx());
934        let discr = Place::from(self.new_temp(discr_ty));
935        let discr_rv = Rvalue::Discriminant(self.place);
936        let switch_block = BasicBlockData::new_stmts(
937            vec![self.assign(discr, discr_rv)],
938            Some(Terminator {
939                source_info: self.source_info,
940                kind: TerminatorKind::SwitchInt {
941                    discr: Operand::Move(discr),
942                    targets: SwitchTargets::new(
943                        values.iter().copied().zip(blocks.iter().copied()),
944                        *blocks.last().unwrap(),
945                    ),
946                },
947            }),
948            unwind.is_cleanup(),
949        );
950        let switch_block = self.elaborator.patch().new_block(switch_block);
951        self.drop_flag_test_block(switch_block, succ, unwind)
952    }
953
954    fn destructor_call_block_sync(&mut self, (succ, unwind): (BasicBlock, Unwind)) -> BasicBlock {
955        debug!("destructor_call_block_sync({:?}, {:?})", self, succ);
956        let tcx = self.tcx();
957        let drop_trait = tcx.require_lang_item(LangItem::Drop, DUMMY_SP);
958        let drop_fn = tcx.associated_item_def_ids(drop_trait)[0];
959        let ty = self.place_ty(self.place);
960
961        let ref_ty = Ty::new_mut_ref(tcx, tcx.lifetimes.re_erased, ty);
962        let ref_place = self.new_temp(ref_ty);
963        let unit_temp = Place::from(self.new_temp(tcx.types.unit));
964
965        let result = BasicBlockData::new_stmts(
966            vec![self.assign(
967                Place::from(ref_place),
968                Rvalue::Ref(
969                    tcx.lifetimes.re_erased,
970                    BorrowKind::Mut { kind: MutBorrowKind::Default },
971                    self.place,
972                ),
973            )],
974            Some(Terminator {
975                kind: TerminatorKind::Call {
976                    func: Operand::function_handle(
977                        tcx,
978                        drop_fn,
979                        [ty.into()],
980                        self.source_info.span,
981                    ),
982                    args: [Spanned { node: Operand::Move(Place::from(ref_place)), span: DUMMY_SP }]
983                        .into(),
984                    destination: unit_temp,
985                    target: Some(succ),
986                    unwind: unwind.into_action(),
987                    call_source: CallSource::Misc,
988                    fn_span: self.source_info.span,
989                },
990                source_info: self.source_info,
991            }),
992            unwind.is_cleanup(),
993        );
994
995        self.elaborator.patch().new_block(result)
996    }
997
998    fn destructor_call_block(
999        &mut self,
1000        (succ, unwind, dropline): (BasicBlock, Unwind, Option<BasicBlock>),
1001    ) -> BasicBlock {
1002        debug!("destructor_call_block({:?}, {:?})", self, succ);
1003        let ty = self.place_ty(self.place);
1004        if self.tcx().features().async_drop()
1005            && self.elaborator.body().coroutine.is_some()
1006            && self.elaborator.allow_async_drops()
1007            && !unwind.is_cleanup()
1008            && ty.is_async_drop(self.tcx(), self.elaborator.typing_env())
1009        {
1010            self.build_async_drop(self.place, ty, None, succ, unwind, dropline, true)
1011        } else {
1012            self.destructor_call_block_sync((succ, unwind))
1013        }
1014    }
1015
1016    /// Create a loop that drops an array:
1017    ///
1018    /// ```text
1019    /// loop-block:
1020    ///    can_go = cur == len
1021    ///    if can_go then succ else drop-block
1022    /// drop-block:
1023    ///    ptr = &raw mut P[cur]
1024    ///    cur = cur + 1
1025    ///    drop(ptr)
1026    /// ```
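    ///
    /// Roughly equivalent (illustrative only) to the source-level loop:
    ///
    /// ```text
    /// while cur != len {
    ///     let ptr = &raw mut place[cur];
    ///     cur += 1;
    ///     drop_in_place(ptr);
    /// }
    /// ```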
1027    fn drop_loop(
1028        &mut self,
1029        succ: BasicBlock,
1030        cur: Local,
1031        len: Local,
1032        ety: Ty<'tcx>,
1033        unwind: Unwind,
1034        dropline: Option<BasicBlock>,
1035    ) -> BasicBlock {
1036        let copy = |place: Place<'tcx>| Operand::Copy(place);
1037        let move_ = |place: Place<'tcx>| Operand::Move(place);
1038        let tcx = self.tcx();
1039
1040        let ptr_ty = Ty::new_mut_ptr(tcx, ety);
1041        let ptr = Place::from(self.new_temp(ptr_ty));
1042        let can_go = Place::from(self.new_temp(tcx.types.bool));
1043        let one = self.constant_usize(1);
1044
1045        let drop_block = BasicBlockData::new_stmts(
1046            vec![
1047                self.assign(
1048                    ptr,
1049                    Rvalue::RawPtr(RawPtrKind::Mut, tcx.mk_place_index(self.place, cur)),
1050                ),
1051                self.assign(
1052                    cur.into(),
1053                    Rvalue::BinaryOp(BinOp::Add, Box::new((move_(cur.into()), one))),
1054                ),
1055            ],
1056            Some(Terminator {
1057                source_info: self.source_info,
1058                // this gets overwritten by drop elaboration.
1059                kind: TerminatorKind::Unreachable,
1060            }),
1061            unwind.is_cleanup(),
1062        );
1063        let drop_block = self.elaborator.patch().new_block(drop_block);
1064
1065        let loop_block = BasicBlockData::new_stmts(
1066            vec![self.assign(
1067                can_go,
1068                Rvalue::BinaryOp(BinOp::Eq, Box::new((copy(Place::from(cur)), copy(len.into())))),
1069            )],
1070            Some(Terminator {
1071                source_info: self.source_info,
1072                kind: TerminatorKind::if_(move_(can_go), succ, drop_block),
1073            }),
1074            unwind.is_cleanup(),
1075        );
1076        let loop_block = self.elaborator.patch().new_block(loop_block);
1077
1078        let place = tcx.mk_place_deref(ptr);
1079        if self.tcx().features().async_drop()
1080            && self.elaborator.body().coroutine.is_some()
1081            && self.elaborator.allow_async_drops()
1082            && !unwind.is_cleanup()
1083            && ety.needs_async_drop(self.tcx(), self.elaborator.typing_env())
1084        {
1085            self.build_async_drop(
1086                place,
1087                ety,
1088                Some(drop_block),
1089                loop_block,
1090                unwind,
1091                dropline,
1092                false,
1093            );
1094        } else {
1095            self.elaborator.patch().patch_terminator(
1096                drop_block,
1097                TerminatorKind::Drop {
1098                    place,
1099                    target: loop_block,
1100                    unwind: unwind.into_action(),
1101                    replace: false,
1102                    drop: None,
1103                    async_fut: None,
1104                },
1105            );
1106        }
1107        loop_block
1108    }
1109
1110    fn open_drop_for_array(
1111        &mut self,
1112        array_ty: Ty<'tcx>,
1113        ety: Ty<'tcx>,
1114        opt_size: Option<u64>,
1115    ) -> BasicBlock {
1116        debug!("open_drop_for_array({:?}, {:?}, {:?})", array_ty, ety, opt_size);
1117        let tcx = self.tcx();
1118
1119        if let Some(size) = opt_size {
1120            enum ProjectionKind<Path> {
1121                Drop(std::ops::Range<u64>),
1122                Keep(u64, Path),
1123            }
1124            // Previously, we'd make a projection for every element in the array and create a drop
1125            // ladder if any `array_subpath` was `Some`, i.e. moving out with an array pattern.
1126            // This caused huge memory usage when generating the drops for large arrays, so we instead
1127            // record the *subslices* which are dropped and the *indexes* which are kept.
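            //
            // For example (an illustrative case, not taken from real move data): if
            // `array_subpath` is `Some` only for index 1 of a size-5 array, the loop below
            // produces `[Drop(0..1), Keep(1), Drop(2..5)]`: the leading subslice is dropped, the
            // single kept element is handled through its own move path, and the trailing
            // subslice is dropped.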
1128            let mut drop_ranges = vec![];
1129            let mut dropping = true;
1130            let mut start = 0;
1131            for i in 0..size {
1132                let path = self.elaborator.array_subpath(self.path, i, size);
1133                if dropping && path.is_some() {
1134                    drop_ranges.push(ProjectionKind::Drop(start..i));
1135                    dropping = false;
1136                } else if !dropping && path.is_none() {
1137                    dropping = true;
1138                    start = i;
1139                }
1140                if let Some(path) = path {
1141                    drop_ranges.push(ProjectionKind::Keep(i, path));
1142                }
1143            }
1144            if !drop_ranges.is_empty() {
1145                if dropping {
1146                    drop_ranges.push(ProjectionKind::Drop(start..size));
1147                }
1148                let fields = drop_ranges
1149                    .iter()
1150                    .rev()
1151                    .map(|p| {
1152                        let (project, path) = match p {
1153                            ProjectionKind::Drop(r) => (
1154                                ProjectionElem::Subslice {
1155                                    from: r.start,
1156                                    to: r.end,
1157                                    from_end: false,
1158                                },
1159                                None,
1160                            ),
1161                            &ProjectionKind::Keep(offset, path) => (
1162                                ProjectionElem::ConstantIndex {
1163                                    offset,
1164                                    min_length: size,
1165                                    from_end: false,
1166                                },
1167                                Some(path),
1168                            ),
1169                        };
1170                        (tcx.mk_place_elem(self.place, project), path)
1171                    })
1172                    .collect::<Vec<_>>();
1173                let (succ, unwind, dropline) = self.drop_ladder_bottom();
1174                return self.drop_ladder(fields, succ, unwind, dropline).0;
1175            }
1176        }
1177
1178        let array_ptr_ty = Ty::new_mut_ptr(tcx, array_ty);
1179        let array_ptr = self.new_temp(array_ptr_ty);
1180
1181        let slice_ty = Ty::new_slice(tcx, ety);
1182        let slice_ptr_ty = Ty::new_mut_ptr(tcx, slice_ty);
1183        let slice_ptr = self.new_temp(slice_ptr_ty);
1184
1185        let mut delegate_block = BasicBlockData::new_stmts(
1186            vec![
1187                self.assign(Place::from(array_ptr), Rvalue::RawPtr(RawPtrKind::Mut, self.place)),
1188                self.assign(
1189                    Place::from(slice_ptr),
1190                    Rvalue::Cast(
1191                        CastKind::PointerCoercion(
1192                            PointerCoercion::Unsize,
1193                            CoercionSource::Implicit,
1194                        ),
1195                        Operand::Move(Place::from(array_ptr)),
1196                        slice_ptr_ty,
1197                    ),
1198                ),
1199            ],
1200            None,
1201            self.unwind.is_cleanup(),
1202        );
1203
1204        let array_place = mem::replace(
1205            &mut self.place,
1206            Place::from(slice_ptr).project_deeper(&[PlaceElem::Deref], tcx),
1207        );
1208        let slice_block = self.drop_loop_trio_for_slice(ety);
1209        self.place = array_place;
1210
1211        delegate_block.terminator = Some(Terminator {
1212            source_info: self.source_info,
1213            kind: TerminatorKind::Goto { target: slice_block },
1214        });
1215        self.elaborator.patch().new_block(delegate_block)
1216    }
1217
1218    /// Creates a trio of drop-loops of `place`, which drops its contents, even
1219    /// in the case of a panic or of a coroutine drop.
1220    fn drop_loop_trio_for_slice(&mut self, ety: Ty<'tcx>) -> BasicBlock {
1221        debug!("drop_loop_trio_for_slice({:?})", ety);
1222        let tcx = self.tcx();
1223        let len = self.new_temp(tcx.types.usize);
1224        let cur = self.new_temp(tcx.types.usize);
1225
1226        let unwind = self
1227            .unwind
1228            .map(|unwind| self.drop_loop(unwind, cur, len, ety, Unwind::InCleanup, None));
1229
1230        let dropline =
1231            self.dropline.map(|dropline| self.drop_loop(dropline, cur, len, ety, unwind, None));
1232
1233        let loop_block = self.drop_loop(self.succ, cur, len, ety, unwind, dropline);
1234
1235        let [PlaceElem::Deref] = self.place.projection.as_slice() else {
1236            span_bug!(
1237                self.source_info.span,
1238                "Expected place for slice drop shim to be *_n, but it's {:?}",
1239                self.place,
1240            );
1241        };
1242
1243        let zero = self.constant_usize(0);
1244        let block = BasicBlockData::new_stmts(
1245            vec![
1246                self.assign(
1247                    len.into(),
1248                    Rvalue::UnaryOp(
1249                        UnOp::PtrMetadata,
1250                        Operand::Copy(Place::from(self.place.local)),
1251                    ),
1252                ),
1253                self.assign(cur.into(), Rvalue::Use(zero)),
1254            ],
1255            Some(Terminator {
1256                source_info: self.source_info,
1257                kind: TerminatorKind::Goto { target: loop_block },
1258            }),
1259            unwind.is_cleanup(),
1260        );
1261
1262        let drop_block = self.elaborator.patch().new_block(block);
1263        // FIXME(#34708): handle partially-dropped array/slice elements.
1264        let reset_block = self.drop_flag_reset_block(DropFlagMode::Deep, drop_block, unwind);
1265        self.drop_flag_test_block(reset_block, self.succ, unwind)
1266    }
1267
1268    /// The slow path: create an "open", elaborated drop for a type
1269    /// which is moved-out-of only partially, and patch `bb` to a jump
1270    /// to it. This must not be called on ADTs with a destructor,
1271    /// as these can't be moved-out-of, except for `Box<T>`, which is
1272    /// special-cased.
1273    ///
1274    /// This creates a "drop ladder" that drops the needed fields of the
1275    /// ADT, both in the success case and if one of the destructors fails.
1276    fn open_drop(&mut self) -> BasicBlock {
1277        let ty = self.place_ty(self.place);
1278        match ty.kind() {
1279            ty::Closure(_, args) => self.open_drop_for_tuple(args.as_closure().upvar_tys()),
1280            ty::CoroutineClosure(_, args) => {
1281                self.open_drop_for_tuple(args.as_coroutine_closure().upvar_tys())
1282            }
1283            // Note that `elaborate_drops` only drops the upvars of a coroutine,
1284            // and this is ok because `open_drop` here can only be reached
1285            // within that coroutine's own resume function.
1286            // This should only happen for the self argument on the resume function.
1287            // It effectively only contains upvars until the coroutine transformation runs.
1288            // See librustc_body/transform/coroutine.rs for more details.
1289            ty::Coroutine(_, args) => self.open_drop_for_tuple(args.as_coroutine().upvar_tys()),
1290            ty::Tuple(fields) => self.open_drop_for_tuple(fields),
1291            ty::Adt(def, args) => self.open_drop_for_adt(*def, args),
1292            ty::Dynamic(..) => self.complete_drop(self.succ, self.unwind),
1293            ty::Array(ety, size) => {
1294                let size = size.try_to_target_usize(self.tcx());
1295                self.open_drop_for_array(ty, *ety, size)
1296            }
1297            ty::Slice(ety) => self.drop_loop_trio_for_slice(*ety),
1298
1299            ty::UnsafeBinder(_) => {
1300                // Unsafe binders only need drop elaboration if their inner type isn't `Copy`.
1301                // Typeck enforces that it is, so this should never happen.
1302                self.tcx().dcx().span_delayed_bug(
1303                    self.source_info.span,
1304                    "open drop for unsafe binder shouldn't be encountered",
1305                );
1306                self.elaborator.patch().new_block(BasicBlockData::new(
1307                    Some(Terminator {
1308                        source_info: self.source_info,
1309                        kind: TerminatorKind::Unreachable,
1310                    }),
1311                    self.unwind.is_cleanup(),
1312                ))
1313            }
1314
1315            _ => span_bug!(self.source_info.span, "open drop from non-ADT `{:?}`", ty),
1316        }
1317    }
1318
1319    fn complete_drop(&mut self, succ: BasicBlock, unwind: Unwind) -> BasicBlock {
1320        debug!("complete_drop(succ={:?}, unwind={:?})", succ, unwind);
1321
1322        let drop_block = self.drop_block(succ, unwind);
1323
1324        self.drop_flag_test_block(drop_block, succ, unwind)
1325    }
1326
1327    /// Creates a block that resets the drop flag. If `mode` is deep, all children drop flags will
1328    /// also be cleared.
1329    fn drop_flag_reset_block(
1330        &mut self,
1331        mode: DropFlagMode,
1332        succ: BasicBlock,
1333        unwind: Unwind,
1334    ) -> BasicBlock {
1335        debug!("drop_flag_reset_block({:?},{:?})", self, mode);
1336
1337        if unwind.is_cleanup() {
1338            // The drop flag isn't read again on the unwind path, so don't
1339            // bother setting it.
1340            return succ;
1341        }
1342        let block = self.new_block(unwind, TerminatorKind::Goto { target: succ });
1343        let block_start = Location { block, statement_index: 0 };
1344        self.elaborator.clear_drop_flag(block_start, self.path, mode);
1345        block
1346    }
1347
1348    fn elaborated_drop_block(&mut self) -> BasicBlock {
1349        debug!("elaborated_drop_block({:?})", self);
1350        let blk = self.drop_block_simple(self.succ, self.unwind);
1351        self.elaborate_drop(blk);
1352        blk
1353    }
1354
1355    fn drop_block_simple(&mut self, target: BasicBlock, unwind: Unwind) -> BasicBlock {
1356        let block = TerminatorKind::Drop {
1357            place: self.place,
1358            target,
1359            unwind: unwind.into_action(),
1360            replace: false,
1361            drop: self.dropline,
1362            async_fut: None,
1363        };
1364        self.new_block(unwind, block)
1365    }
1366
1367    fn drop_block(&mut self, target: BasicBlock, unwind: Unwind) -> BasicBlock {
1368        let drop_ty = self.place_ty(self.place);
1369        if self.tcx().features().async_drop()
1370            && self.elaborator.body().coroutine.is_some()
1371            && self.elaborator.allow_async_drops()
1372            && !unwind.is_cleanup()
1373            && drop_ty.needs_async_drop(self.tcx(), self.elaborator.typing_env())
1374        {
1375            self.build_async_drop(
1376                self.place,
1377                drop_ty,
1378                None,
1379                self.succ,
1380                unwind,
1381                self.dropline,
1382                false,
1383            )
1384        } else {
1385            let block = TerminatorKind::Drop {
1386                place: self.place,
1387                target,
1388                unwind: unwind.into_action(),
1389                replace: false,
1390                drop: None,
1391                async_fut: None,
1392            };
1393            self.new_block(unwind, block)
1394        }
1395    }
1396
1397    fn goto_block(&mut self, target: BasicBlock, unwind: Unwind) -> BasicBlock {
1398        let block = TerminatorKind::Goto { target };
1399        self.new_block(unwind, block)
1400    }
1401
1402    /// Returns the block to jump to in order to test the drop flag and execute the drop.
1403    ///
1404    /// Depending on the required `DropStyle`, this might be a generated block with an `if`
1405    /// terminator (for dynamic/open drops), or it might be `on_set` or `on_unset` itself, in case
1406    /// the drop can be statically determined.
1407    fn drop_flag_test_block(
1408        &mut self,
1409        on_set: BasicBlock,
1410        on_unset: BasicBlock,
1411        unwind: Unwind,
1412    ) -> BasicBlock {
1413        let style = self.elaborator.drop_style(self.path, DropFlagMode::Shallow);
1414        debug!(
1415            "drop_flag_test_block({:?},{:?},{:?},{:?}) - {:?}",
1416            self, on_set, on_unset, unwind, style
1417        );
1418
1419        match style {
1420            DropStyle::Dead => on_unset,
1421            DropStyle::Static => on_set,
1422            DropStyle::Conditional | DropStyle::Open => {
1423                let flag = self.elaborator.get_drop_flag(self.path).unwrap();
1424                let term = TerminatorKind::if_(flag, on_set, on_unset);
1425                self.new_block(unwind, term)
1426            }
1427        }
1428    }
1429
1430    fn new_block(&mut self, unwind: Unwind, k: TerminatorKind<'tcx>) -> BasicBlock {
1431        self.elaborator.patch().new_block(BasicBlockData::new(
1432            Some(Terminator { source_info: self.source_info, kind: k }),
1433            unwind.is_cleanup(),
1434        ))
1435    }
1436
1437    fn new_block_with_statements(
1438        &mut self,
1439        unwind: Unwind,
1440        statements: Vec<Statement<'tcx>>,
1441        k: TerminatorKind<'tcx>,
1442    ) -> BasicBlock {
1443        self.elaborator.patch().new_block(BasicBlockData::new_stmts(
1444            statements,
1445            Some(Terminator { source_info: self.source_info, kind: k }),
1446            unwind.is_cleanup(),
1447        ))
1448    }
1449
1450    fn new_temp(&mut self, ty: Ty<'tcx>) -> Local {
1451        self.elaborator.patch().new_temp(ty, self.source_info.span)
1452    }
1453
1454    fn constant_usize(&self, val: u16) -> Operand<'tcx> {
1455        Operand::Constant(Box::new(ConstOperand {
1456            span: self.source_info.span,
1457            user_ty: None,
1458            const_: Const::from_usize(self.tcx(), val.into()),
1459        }))
1460    }
1461
1462    fn assign(&self, lhs: Place<'tcx>, rhs: Rvalue<'tcx>) -> Statement<'tcx> {
1463        Statement::new(self.source_info, StatementKind::Assign(Box::new((lhs, rhs))))
1464    }
1465}