// rustc_mir_transform/coroutine.rs

//! This is the implementation of the pass which transforms coroutines into state machines.
//!
//! MIR generation for coroutines creates a function which has a `self` argument that is
//! passed by value. This argument is effectively a coroutine type which only contains upvars and
//! is only used for this argument inside the MIR for the coroutine.
//! It is passed by value to enable upvars to be moved out of it. Drop elaboration runs on that
//! MIR before this pass and creates drop flags for MIR locals.
//! It will also drop the coroutine argument (which only consists of upvars) if any of the upvars
//! are moved out of it. This pass elaborates the drops of the upvars / coroutine argument in the
//! case that none of the upvars were moved out of it. This is because we cannot have any drops of
//! this coroutine in the MIR, since it is used to create the drop glue for the coroutine. We'd
//! get infinite recursion otherwise.
//!
//! This pass creates the implementation for either the `Coroutine::resume` or `Future::poll`
//! function and the drop shim for the coroutine based on the MIR input.
//! It converts the coroutine argument from `Self` to `&mut Self`, adding derefs in the MIR as
//! needed.
//! It computes the final layout of the coroutine struct, which looks like this:
//!     First the upvars are stored.
//!     They are followed by the coroutine state field.
//!     Finally, the MIR locals which are live across a suspension point are stored.
//!     ```ignore (illustrative)
//!     struct Coroutine {
//!         upvars...,
//!         state: u32,
//!         mir_locals...,
//!     }
//!     ```
//! This pass computes the meaning of the state field and the MIR locals which are live
//! across a suspension point. There are, however, three hardcoded coroutine states:
//!     0 - Coroutine has not been resumed yet
//!     1 - Coroutine has returned / is completed
//!     2 - Coroutine has been poisoned
//!
//! It also rewrites `return x` and `yield y` as setting a new coroutine state and returning
//! `CoroutineState::Complete(x)` and `CoroutineState::Yielded(y)`,
//! or `Poll::Ready(x)` and `Poll::Pending` respectively.
//! MIR locals which are live across a suspension point are moved to the coroutine struct,
//! and references to them are rewritten to point into the coroutine struct.
//!
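//! For example (a rough sketch, not the exact MIR this pass emits), a `yield y` in a
//! plain coroutine conceptually becomes:
//!     ```ignore (illustrative)
//!     // save the locals that are live across this yield into the coroutine struct
//!     state = k;                          // the state reserved for this suspension point
//!     return CoroutineState::Yielded(y);
//!     ```
//! and a `return x` becomes:
//!     ```ignore (illustrative)
//!     state = 1;                          // RETURNED
//!     return CoroutineState::Complete(x);
//!     ```
//!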
//! The pass creates two functions which have a switch on the coroutine state giving
//! the action to take.
//!
//! One of them is the implementation of `Coroutine::resume` / `Future::poll`.
//! For coroutines with state 0 (unresumed) it starts the execution of the coroutine.
//! For coroutines with state 1 (returned) and state 2 (poisoned) it panics.
//! Otherwise it continues the execution from the last suspension point.
//!
//! The other function is the drop glue for the coroutine.
//! For coroutines with state 0 (unresumed) it drops the upvars of the coroutine.
//! For coroutines with state 1 (returned) and state 2 (poisoned) it does nothing.
//! Otherwise it drops all the values in scope at the last suspension point.
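//!
//! Both generated functions therefore have roughly the following shape (illustrative
//! only; the real dispatch is a MIR `SwitchInt` on the state discriminant):
//!     ```ignore (illustrative)
//!     match self.state {
//!         0 => { /* start execution, or drop the upvars */ }
//!         1 | 2 => { /* panic when resumed, do nothing when dropped */ }
//!         k => { /* resume from, or drop at, suspension point `k` */ }
//!     }
//!     ```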

mod by_move_body;
mod drop;
use std::{iter, ops};

pub(super) use by_move_body::coroutine_by_move_body_def_id;
use drop::{
    cleanup_async_drops, create_coroutine_drop_shim, create_coroutine_drop_shim_async,
    create_coroutine_drop_shim_proxy_async, elaborate_coroutine_drops, expand_async_drops,
    has_expandable_async_drops, insert_clean_drop,
};
use rustc_abi::{FieldIdx, VariantIdx};
use rustc_data_structures::fx::FxHashSet;
use rustc_errors::pluralize;
use rustc_hir as hir;
use rustc_hir::lang_items::LangItem;
use rustc_hir::{CoroutineDesugaring, CoroutineKind};
use rustc_index::bit_set::{BitMatrix, DenseBitSet, GrowableBitSet};
use rustc_index::{Idx, IndexVec};
use rustc_middle::mir::visit::{MutVisitor, PlaceContext, Visitor};
use rustc_middle::mir::*;
use rustc_middle::ty::util::Discr;
use rustc_middle::ty::{
    self, CoroutineArgs, CoroutineArgsExt, GenericArgsRef, InstanceKind, Ty, TyCtxt, TypingMode,
};
use rustc_middle::{bug, span_bug};
use rustc_mir_dataflow::impls::{
    MaybeBorrowedLocals, MaybeLiveLocals, MaybeRequiresStorage, MaybeStorageLive,
    always_storage_live_locals,
};
use rustc_mir_dataflow::{
    Analysis, Results, ResultsCursor, ResultsVisitor, visit_reachable_results,
};
use rustc_span::def_id::{DefId, LocalDefId};
use rustc_span::source_map::dummy_spanned;
use rustc_span::symbol::sym;
use rustc_span::{DUMMY_SP, Span};
use rustc_target::spec::PanicStrategy;
use rustc_trait_selection::error_reporting::InferCtxtErrorExt;
use rustc_trait_selection::infer::TyCtxtInferExt as _;
use rustc_trait_selection::traits::{ObligationCause, ObligationCauseCode, ObligationCtxt};
use tracing::{debug, instrument, trace};

use crate::deref_separator::deref_finder;
use crate::{abort_unwinding_calls, errors, pass_manager as pm, simplify};

pub(super) struct StateTransform;

struct RenameLocalVisitor<'tcx> {
    from: Local,
    to: Local,
    tcx: TyCtxt<'tcx>,
}

impl<'tcx> MutVisitor<'tcx> for RenameLocalVisitor<'tcx> {
    fn tcx(&self) -> TyCtxt<'tcx> {
        self.tcx
    }

    fn visit_local(&mut self, local: &mut Local, _: PlaceContext, _: Location) {
        if *local == self.from {
            *local = self.to;
        }
    }

    fn visit_terminator(&mut self, terminator: &mut Terminator<'tcx>, location: Location) {
        match terminator.kind {
            TerminatorKind::Return => {
                // Do not replace the implicit `_0` access here, as that's not possible. The
                // transform already handles `return` correctly.
            }
            _ => self.super_terminator(terminator, location),
        }
    }
}

struct SelfArgVisitor<'tcx> {
    tcx: TyCtxt<'tcx>,
    new_base: Place<'tcx>,
}

impl<'tcx> SelfArgVisitor<'tcx> {
    fn new(tcx: TyCtxt<'tcx>, elem: ProjectionElem<Local, Ty<'tcx>>) -> Self {
        Self { tcx, new_base: Place { local: SELF_ARG, projection: tcx.mk_place_elems(&[elem]) } }
    }
}

impl<'tcx> MutVisitor<'tcx> for SelfArgVisitor<'tcx> {
    fn tcx(&self) -> TyCtxt<'tcx> {
        self.tcx
    }

    fn visit_local(&mut self, local: &mut Local, _: PlaceContext, _: Location) {
        assert_ne!(*local, SELF_ARG);
    }

    fn visit_place(&mut self, place: &mut Place<'tcx>, context: PlaceContext, location: Location) {
        if place.local == SELF_ARG {
            replace_base(place, self.new_base, self.tcx);
        } else {
            self.visit_local(&mut place.local, context, location);

            for elem in place.projection.iter() {
                if let PlaceElem::Index(local) = elem {
                    assert_ne!(local, SELF_ARG);
                }
            }
        }
    }
}

fn replace_base<'tcx>(place: &mut Place<'tcx>, new_base: Place<'tcx>, tcx: TyCtxt<'tcx>) {
    place.local = new_base.local;

    let mut new_projection = new_base.projection.to_vec();
    new_projection.append(&mut place.projection.to_vec());

    place.projection = tcx.mk_place_elems(&new_projection);
}

const SELF_ARG: Local = Local::from_u32(1);
const CTX_ARG: Local = Local::from_u32(2);

/// A `yield` point in the coroutine.
struct SuspensionPoint<'tcx> {
    /// State discriminant used when suspending or resuming at this point.
    state: usize,
    /// The block to jump to after resumption.
    resume: BasicBlock,
    /// Where to move the resume argument after resumption.
    resume_arg: Place<'tcx>,
    /// Which block to jump to if the coroutine is dropped in this state.
    drop: Option<BasicBlock>,
    /// Set of locals that have live storage while at this suspension point.
    storage_liveness: GrowableBitSet<Local>,
}

struct TransformVisitor<'tcx> {
    tcx: TyCtxt<'tcx>,
    coroutine_kind: hir::CoroutineKind,

    // The type of the discriminant in the coroutine struct
    discr_ty: Ty<'tcx>,

    // Mapping from Local to (type of local, variant index, field index) in the coroutine struct
    remap: IndexVec<Local, Option<(Ty<'tcx>, VariantIdx, FieldIdx)>>,

    // A map from a suspension point in a block to the locals which have live storage at that point
    storage_liveness: IndexVec<BasicBlock, Option<DenseBitSet<Local>>>,

    // A list of suspension points, generated during the transform
    suspension_points: Vec<SuspensionPoint<'tcx>>,

    // The set of locals that have no `StorageLive`/`StorageDead` annotations.
    always_live_locals: DenseBitSet<Local>,

    // The original RETURN_PLACE local
    old_ret_local: Local,

    old_yield_ty: Ty<'tcx>,

    old_ret_ty: Ty<'tcx>,
}

impl<'tcx> TransformVisitor<'tcx> {
    fn insert_none_ret_block(&self, body: &mut Body<'tcx>) -> BasicBlock {
        let block = body.basic_blocks.next_index();
        let source_info = SourceInfo::outermost(body.span);

        let none_value = match self.coroutine_kind {
            CoroutineKind::Desugared(CoroutineDesugaring::Async, _) => {
                span_bug!(body.span, "`Future`s are not fused inherently")
            }
            CoroutineKind::Coroutine(_) => span_bug!(body.span, "`Coroutine`s cannot be fused"),
            // `gen` continues to return `None`
            CoroutineKind::Desugared(CoroutineDesugaring::Gen, _) => {
                let option_def_id = self.tcx.require_lang_item(LangItem::Option, body.span);
                make_aggregate_adt(
                    option_def_id,
                    VariantIdx::ZERO,
                    self.tcx.mk_args(&[self.old_yield_ty.into()]),
                    IndexVec::new(),
                )
            }
            // `async gen` continues to return `Poll::Ready(None)`
            CoroutineKind::Desugared(CoroutineDesugaring::AsyncGen, _) => {
                let ty::Adt(_poll_adt, args) = *self.old_yield_ty.kind() else { bug!() };
                let ty::Adt(_option_adt, args) = *args.type_at(0).kind() else { bug!() };
                let yield_ty = args.type_at(0);
                Rvalue::Use(Operand::Constant(Box::new(ConstOperand {
                    span: source_info.span,
                    const_: Const::Unevaluated(
                        UnevaluatedConst::new(
                            self.tcx.require_lang_item(LangItem::AsyncGenFinished, body.span),
                            self.tcx.mk_args(&[yield_ty.into()]),
                        ),
                        self.old_yield_ty,
                    ),
                    user_ty: None,
                })))
            }
        };

        let statements = vec![Statement {
            kind: StatementKind::Assign(Box::new((Place::return_place(), none_value))),
            source_info,
        }];

        body.basic_blocks_mut().push(BasicBlockData {
            statements,
            terminator: Some(Terminator { source_info, kind: TerminatorKind::Return }),
            is_cleanup: false,
        });

        block
    }

    // Make a `CoroutineState` or `Poll` variant assignment.
    //
    // `core::ops::CoroutineState` only has single element tuple variants,
    // so we can just write to the downcasted first field and then set the
    // discriminant to the appropriate variant.
    fn make_state(
        &self,
        val: Operand<'tcx>,
        source_info: SourceInfo,
        is_return: bool,
        statements: &mut Vec<Statement<'tcx>>,
    ) {
        const ZERO: VariantIdx = VariantIdx::ZERO;
        const ONE: VariantIdx = VariantIdx::from_usize(1);
        let rvalue = match self.coroutine_kind {
            CoroutineKind::Desugared(CoroutineDesugaring::Async, _) => {
                let poll_def_id = self.tcx.require_lang_item(LangItem::Poll, source_info.span);
                let args = self.tcx.mk_args(&[self.old_ret_ty.into()]);
                let (variant_idx, operands) = if is_return {
                    (ZERO, IndexVec::from_raw(vec![val])) // Poll::Ready(val)
                } else {
                    (ONE, IndexVec::new()) // Poll::Pending
                };
                make_aggregate_adt(poll_def_id, variant_idx, args, operands)
            }
            CoroutineKind::Desugared(CoroutineDesugaring::Gen, _) => {
                let option_def_id = self.tcx.require_lang_item(LangItem::Option, source_info.span);
                let args = self.tcx.mk_args(&[self.old_yield_ty.into()]);
                let (variant_idx, operands) = if is_return {
                    (ZERO, IndexVec::new()) // None
                } else {
                    (ONE, IndexVec::from_raw(vec![val])) // Some(val)
                };
                make_aggregate_adt(option_def_id, variant_idx, args, operands)
            }
            CoroutineKind::Desugared(CoroutineDesugaring::AsyncGen, _) => {
                if is_return {
                    let ty::Adt(_poll_adt, args) = *self.old_yield_ty.kind() else { bug!() };
                    let ty::Adt(_option_adt, args) = *args.type_at(0).kind() else { bug!() };
                    let yield_ty = args.type_at(0);
                    Rvalue::Use(Operand::Constant(Box::new(ConstOperand {
                        span: source_info.span,
                        const_: Const::Unevaluated(
                            UnevaluatedConst::new(
                                self.tcx.require_lang_item(
                                    LangItem::AsyncGenFinished,
                                    source_info.span,
                                ),
                                self.tcx.mk_args(&[yield_ty.into()]),
                            ),
                            self.old_yield_ty,
                        ),
                        user_ty: None,
                    })))
                } else {
                    Rvalue::Use(val)
                }
            }
            CoroutineKind::Coroutine(_) => {
                let coroutine_state_def_id =
                    self.tcx.require_lang_item(LangItem::CoroutineState, source_info.span);
                let args = self.tcx.mk_args(&[self.old_yield_ty.into(), self.old_ret_ty.into()]);
                let variant_idx = if is_return {
                    ONE // CoroutineState::Complete(val)
                } else {
                    ZERO // CoroutineState::Yielded(val)
                };
                make_aggregate_adt(
                    coroutine_state_def_id,
                    variant_idx,
                    args,
                    IndexVec::from_raw(vec![val]),
                )
            }
        };

        statements.push(Statement {
            kind: StatementKind::Assign(Box::new((Place::return_place(), rvalue))),
            source_info,
        });
    }

    // Create a Place referencing a coroutine struct field
    fn make_field(&self, variant_index: VariantIdx, idx: FieldIdx, ty: Ty<'tcx>) -> Place<'tcx> {
        let self_place = Place::from(SELF_ARG);
        let base = self.tcx.mk_place_downcast_unnamed(self_place, variant_index);
        let mut projection = base.projection.to_vec();
        projection.push(ProjectionElem::Field(idx, ty));

        Place { local: base.local, projection: self.tcx.mk_place_elems(&projection) }
    }

    // Create a statement which changes the discriminant
    fn set_discr(&self, state_disc: VariantIdx, source_info: SourceInfo) -> Statement<'tcx> {
        let self_place = Place::from(SELF_ARG);
        Statement {
            source_info,
            kind: StatementKind::SetDiscriminant {
                place: Box::new(self_place),
                variant_index: state_disc,
            },
        }
    }

    // Create a statement which reads the discriminant into a temporary
    fn get_discr(&self, body: &mut Body<'tcx>) -> (Statement<'tcx>, Place<'tcx>) {
        let temp_decl = LocalDecl::new(self.discr_ty, body.span);
        let local_decls_len = body.local_decls.push(temp_decl);
        let temp = Place::from(local_decls_len);

        let self_place = Place::from(SELF_ARG);
        let assign = Statement {
            source_info: SourceInfo::outermost(body.span),
            kind: StatementKind::Assign(Box::new((temp, Rvalue::Discriminant(self_place)))),
        };
        (assign, temp)
    }
}

impl<'tcx> MutVisitor<'tcx> for TransformVisitor<'tcx> {
    fn tcx(&self) -> TyCtxt<'tcx> {
        self.tcx
    }

    fn visit_local(&mut self, local: &mut Local, _: PlaceContext, _: Location) {
        assert!(!self.remap.contains(*local));
    }

    fn visit_place(
        &mut self,
        place: &mut Place<'tcx>,
        _context: PlaceContext,
        _location: Location,
    ) {
        // Replace a Local in the remap with a coroutine struct access
        if let Some(&Some((ty, variant_index, idx))) = self.remap.get(place.local) {
            replace_base(place, self.make_field(variant_index, idx, ty), self.tcx);
        }
    }

    fn visit_basic_block_data(&mut self, block: BasicBlock, data: &mut BasicBlockData<'tcx>) {
        // Remove StorageLive and StorageDead statements for remapped locals
        for s in &mut data.statements {
            if let StatementKind::StorageLive(l) | StatementKind::StorageDead(l) = s.kind
                && self.remap.contains(l)
            {
                s.make_nop();
            }
        }

        let ret_val = match data.terminator().kind {
            TerminatorKind::Return => {
                Some((true, None, Operand::Move(Place::from(self.old_ret_local)), None))
            }
            TerminatorKind::Yield { ref value, resume, resume_arg, drop } => {
                Some((false, Some((resume, resume_arg)), value.clone(), drop))
            }
            _ => None,
        };

        if let Some((is_return, resume, v, drop)) = ret_val {
            let source_info = data.terminator().source_info;
            // We must assign the value first in case it gets declared dead below
            self.make_state(v, source_info, is_return, &mut data.statements);
            let state = if let Some((resume, mut resume_arg)) = resume {
                // Yield
                let state = CoroutineArgs::RESERVED_VARIANTS + self.suspension_points.len();

                // The resume arg target location might itself be remapped if its base local is
                // live across a yield.
                if let Some(&Some((ty, variant, idx))) = self.remap.get(resume_arg.local) {
                    replace_base(&mut resume_arg, self.make_field(variant, idx, ty), self.tcx);
                }

                let storage_liveness: GrowableBitSet<Local> =
                    self.storage_liveness[block].clone().unwrap().into();

                for i in 0..self.always_live_locals.domain_size() {
                    let l = Local::new(i);
                    let needs_storage_dead = storage_liveness.contains(l)
                        && !self.remap.contains(l)
                        && !self.always_live_locals.contains(l);
                    if needs_storage_dead {
                        data.statements
                            .push(Statement { source_info, kind: StatementKind::StorageDead(l) });
                    }
                }

                self.suspension_points.push(SuspensionPoint {
                    state,
                    resume,
                    resume_arg,
                    drop,
                    storage_liveness,
                });

                VariantIdx::new(state)
            } else {
                // Return
                VariantIdx::new(CoroutineArgs::RETURNED) // state for returned
            };
            data.statements.push(self.set_discr(state, source_info));
            data.terminator_mut().kind = TerminatorKind::Return;
        }

        self.super_basic_block_data(block, data);
    }
}

fn make_aggregate_adt<'tcx>(
    def_id: DefId,
    variant_idx: VariantIdx,
    args: GenericArgsRef<'tcx>,
    operands: IndexVec<FieldIdx, Operand<'tcx>>,
) -> Rvalue<'tcx> {
    Rvalue::Aggregate(Box::new(AggregateKind::Adt(def_id, variant_idx, args, None, None)), operands)
}

fn make_coroutine_state_argument_indirect<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
    let coroutine_ty = body.local_decls.raw[1].ty;

    let ref_coroutine_ty = Ty::new_mut_ref(tcx, tcx.lifetimes.re_erased, coroutine_ty);

    // Replace the by value coroutine argument
    body.local_decls.raw[1].ty = ref_coroutine_ty;

    // Add a deref to accesses of the coroutine state
    SelfArgVisitor::new(tcx, ProjectionElem::Deref).visit_body(body);
}

fn make_coroutine_state_argument_pinned<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
    let ref_coroutine_ty = body.local_decls.raw[1].ty;

    let pin_did = tcx.require_lang_item(LangItem::Pin, body.span);
    let pin_adt_ref = tcx.adt_def(pin_did);
    let args = tcx.mk_args(&[ref_coroutine_ty.into()]);
    let pin_ref_coroutine_ty = Ty::new_adt(tcx, pin_adt_ref, args);

    // Replace the by ref coroutine argument
    body.local_decls.raw[1].ty = pin_ref_coroutine_ty;

    // Add the Pin field access to accesses of the coroutine state
    SelfArgVisitor::new(tcx, ProjectionElem::Field(FieldIdx::ZERO, ref_coroutine_ty))
        .visit_body(body);
}

/// Allocates a new local and replaces all references of `local` with it. Returns the new local.
///
/// `local` will be changed to a new local decl with type `ty`.
///
/// Note that the new local will be uninitialized. It is the caller's responsibility to assign some
/// valid value to it before its first use.
fn replace_local<'tcx>(
    local: Local,
    ty: Ty<'tcx>,
    body: &mut Body<'tcx>,
    tcx: TyCtxt<'tcx>,
) -> Local {
    let new_decl = LocalDecl::new(ty, body.span);
    let new_local = body.local_decls.push(new_decl);
    body.local_decls.swap(local, new_local);

    RenameLocalVisitor { from: local, to: new_local, tcx }.visit_body(body);

    new_local
}

/// Transforms the `body` of the coroutine applying the following transforms:
///
/// - Eliminates all the `get_context` calls that async lowering created.
/// - Replaces all `Local` `ResumeTy` types with `&mut Context<'_>` (`context_mut_ref`).
///
/// The `Local`s that have their types replaced are:
/// - The `resume` argument itself.
/// - The argument to `get_context`.
/// - The yielded value of a `yield`.
///
/// The `ResumeTy` hides a `&mut Context<'_>` behind an unsafe raw pointer, and the
/// `get_context` function is being used to convert that back to a `&mut Context<'_>`.
///
/// Ideally the async lowering would not use the `ResumeTy`/`get_context` indirection,
/// but rather directly use `&mut Context<'_>`, however that would currently
/// lead to higher-kinded lifetime errors.
/// See <https://github.com/rust-lang/rust/issues/105501>.
///
/// The async lowering step and the type / lifetime inference / checking are
/// still using the `ResumeTy` indirection for the time being, and that indirection
/// is removed here. After this transform, the coroutine body only knows about `&mut Context<'_>`.
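///
/// As a rough illustration (not the exact MIR this produces), a call
/// ```ignore (illustrative)
/// cx = get_context(resume_arg); // `resume_arg: ResumeTy`, `cx: &mut Context<'_>`
/// ```
/// becomes a plain assignment
/// ```ignore (illustrative)
/// cx = resume_arg; // `resume_arg` has been retyped to `&mut Context<'_>`
/// ```
/// once the involved locals have had their `ResumeTy` replaced.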
fn transform_async_context<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) -> Ty<'tcx> {
    let context_mut_ref = Ty::new_task_context(tcx);

    // replace the type of the `resume` argument
    replace_resume_ty_local(tcx, body, CTX_ARG, context_mut_ref);

    let get_context_def_id = tcx.require_lang_item(LangItem::GetContext, body.span);

    for bb in body.basic_blocks.indices() {
        let bb_data = &body[bb];
        if bb_data.is_cleanup {
            continue;
        }

        match &bb_data.terminator().kind {
            TerminatorKind::Call { func, .. } => {
                let func_ty = func.ty(body, tcx);
                if let ty::FnDef(def_id, _) = *func_ty.kind()
                    && def_id == get_context_def_id
                {
                    let local = eliminate_get_context_call(&mut body[bb]);
                    replace_resume_ty_local(tcx, body, local, context_mut_ref);
                }
            }
            TerminatorKind::Yield { resume_arg, .. } => {
                replace_resume_ty_local(tcx, body, resume_arg.local, context_mut_ref);
            }
            _ => {}
        }
    }
    context_mut_ref
}

fn eliminate_get_context_call<'tcx>(bb_data: &mut BasicBlockData<'tcx>) -> Local {
    let terminator = bb_data.terminator.take().unwrap();
    let TerminatorKind::Call { args, destination, target, .. } = terminator.kind else {
        bug!();
    };
    let [arg] = *Box::try_from(args).unwrap();
    let local = arg.node.place().unwrap().local;

    let arg = Rvalue::Use(arg.node);
    let assign = Statement {
        source_info: terminator.source_info,
        kind: StatementKind::Assign(Box::new((destination, arg))),
    };
    bb_data.statements.push(assign);
    bb_data.terminator = Some(Terminator {
        source_info: terminator.source_info,
        kind: TerminatorKind::Goto { target: target.unwrap() },
    });
    local
}

#[cfg_attr(not(debug_assertions), allow(unused))]
fn replace_resume_ty_local<'tcx>(
    tcx: TyCtxt<'tcx>,
    body: &mut Body<'tcx>,
    local: Local,
    context_mut_ref: Ty<'tcx>,
) {
    let local_ty = std::mem::replace(&mut body.local_decls[local].ty, context_mut_ref);
    // We have to replace the `ResumeTy` that is used for type and borrow checking
    // with `&mut Context<'_>` in MIR.
    #[cfg(debug_assertions)]
    {
        if let ty::Adt(resume_ty_adt, _) = local_ty.kind() {
            let expected_adt = tcx.adt_def(tcx.require_lang_item(LangItem::ResumeTy, body.span));
            assert_eq!(*resume_ty_adt, expected_adt);
        } else {
            panic!("expected `ResumeTy`, found `{:?}`", local_ty);
        };
    }
}

/// Transforms the `body` of the coroutine applying the following transform:
///
/// - Removes the `resume` argument.
///
/// Ideally the async lowering would not add the `resume` argument.
///
/// The async lowering step and the type / lifetime inference / checking are
/// still using the `resume` argument for the time being. After this transform,
/// the coroutine body doesn't have the `resume` argument.
fn transform_gen_context<'tcx>(body: &mut Body<'tcx>) {
    // This leaves the local representing the `resume` argument in place,
    // but turns it into a regular local variable. This is cheaper than
    // adjusting all local references in the body after removing it.
    body.arg_count = 1;
}

struct LivenessInfo {
    /// Which locals are live across any suspension point.
    saved_locals: CoroutineSavedLocals,

    /// The set of saved locals live at each suspension point.
    live_locals_at_suspension_points: Vec<DenseBitSet<CoroutineSavedLocal>>,

    /// Parallel vec to the above with SourceInfo for each yield terminator.
    source_info_at_suspension_points: Vec<SourceInfo>,

    /// For every saved local, the set of other saved locals that are
    /// storage-live at the same time as this local. We cannot overlap locals in
    /// the layout which have conflicting storage.
    storage_conflicts: BitMatrix<CoroutineSavedLocal, CoroutineSavedLocal>,

    /// For every suspending block, the locals which are storage-live across
    /// that suspension point.
    storage_liveness: IndexVec<BasicBlock, Option<DenseBitSet<Local>>>,
}

/// Computes which locals have to be stored in the state-machine for the
/// given coroutine.
///
/// The basic idea is as follows:
/// - a local is live until we encounter a `StorageDead` statement. In
///   case none exist, the local is considered to be always live.
/// - a local has to be stored if it is either directly used after the
///   suspend point, or if it is live and has been previously borrowed
///   (see the sketch below).
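///
/// A rough illustration of the second rule (for an immovable coroutine, since only
/// those may hold borrows across a suspension point):
/// ```ignore (illustrative)
/// let x = String::new();
/// let r = &x;          // `x` is borrowed before the suspension point
/// yield ();
/// println!("{r}");     // `x` is never named again, but it is still live (through `r`)
///                      // and was borrowed, so it must be saved as well
/// ```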
fn locals_live_across_suspend_points<'tcx>(
    tcx: TyCtxt<'tcx>,
    body: &Body<'tcx>,
    always_live_locals: &DenseBitSet<Local>,
    movable: bool,
) -> LivenessInfo {
    // Calculate when MIR locals have live storage. This gives us an upper bound of their
    // lifetimes.
    let mut storage_live = MaybeStorageLive::new(std::borrow::Cow::Borrowed(always_live_locals))
        .iterate_to_fixpoint(tcx, body, None)
        .into_results_cursor(body);

    // Calculate the MIR locals that have been previously borrowed (even if they are still active).
    let borrowed_locals = MaybeBorrowedLocals.iterate_to_fixpoint(tcx, body, Some("coroutine"));
    let mut borrowed_locals_analysis1 = borrowed_locals.analysis;
    let mut borrowed_locals_analysis2 = borrowed_locals_analysis1.clone(); // trivial
    let borrowed_locals_cursor1 = ResultsCursor::new_borrowing(
        body,
        &mut borrowed_locals_analysis1,
        &borrowed_locals.results,
    );
    let mut borrowed_locals_cursor2 = ResultsCursor::new_borrowing(
        body,
        &mut borrowed_locals_analysis2,
        &borrowed_locals.results,
    );

    // Calculate the MIR locals that we need to keep storage around for.
    let mut requires_storage =
        MaybeRequiresStorage::new(borrowed_locals_cursor1).iterate_to_fixpoint(tcx, body, None);
    let mut requires_storage_cursor = ResultsCursor::new_borrowing(
        body,
        &mut requires_storage.analysis,
        &requires_storage.results,
    );

    // Calculate the liveness of MIR locals ignoring borrows.
    let mut liveness =
        MaybeLiveLocals.iterate_to_fixpoint(tcx, body, Some("coroutine")).into_results_cursor(body);

    let mut storage_liveness_map = IndexVec::from_elem(None, &body.basic_blocks);
    let mut live_locals_at_suspension_points = Vec::new();
    let mut source_info_at_suspension_points = Vec::new();
    let mut live_locals_at_any_suspension_point = DenseBitSet::new_empty(body.local_decls.len());

    for (block, data) in body.basic_blocks.iter_enumerated() {
        if let TerminatorKind::Yield { .. } = data.terminator().kind {
            let loc = Location { block, statement_index: data.statements.len() };

            liveness.seek_to_block_end(block);
            let mut live_locals = liveness.get().clone();

            if !movable {
                // The `liveness` variable contains the liveness of MIR locals ignoring borrows.
                // This is correct for movable coroutines since borrows cannot live across
                // suspension points. However for immovable coroutines we need to account for
                // borrows, so we conservatively assume that all borrowed locals are live until
                // we find a StorageDead statement referencing the locals.
                // To do this we just union our `liveness` result with `borrowed_locals`, which
                // contains all the locals which have been borrowed before this suspension point.
                // If a borrow is converted to a raw reference, we must also assume that it lives
                // forever. Note that the final liveness is still bounded by the storage liveness
                // of the local, which happens using the `intersect` operation below.
                borrowed_locals_cursor2.seek_before_primary_effect(loc);
                live_locals.union(borrowed_locals_cursor2.get());
            }

            // Store the storage liveness for later use so we can restore the state
            // after a suspension point
            storage_live.seek_before_primary_effect(loc);
            storage_liveness_map[block] = Some(storage_live.get().clone());

            // Locals are live at this point only if they are used across
            // suspension points (the `liveness` variable)
            // and their storage is required (the `storage_required` variable)
            requires_storage_cursor.seek_before_primary_effect(loc);
            live_locals.intersect(requires_storage_cursor.get());

            // The coroutine argument is ignored.
            live_locals.remove(SELF_ARG);

            debug!("loc = {:?}, live_locals = {:?}", loc, live_locals);

            // Add the locals live at this suspension point to the set of locals which live across
            // any suspension points
            live_locals_at_any_suspension_point.union(&live_locals);

            live_locals_at_suspension_points.push(live_locals);
            source_info_at_suspension_points.push(data.terminator().source_info);
        }
    }

    debug!("live_locals_anywhere = {:?}", live_locals_at_any_suspension_point);
    let saved_locals = CoroutineSavedLocals(live_locals_at_any_suspension_point);

    // Renumber our liveness_map bitsets to include only the locals we are
    // saving.
    let live_locals_at_suspension_points = live_locals_at_suspension_points
        .iter()
        .map(|live_here| saved_locals.renumber_bitset(live_here))
        .collect();

    let storage_conflicts = compute_storage_conflicts(
        body,
        &saved_locals,
        always_live_locals.clone(),
        &mut requires_storage.analysis,
        &requires_storage.results,
    );

    LivenessInfo {
        saved_locals,
        live_locals_at_suspension_points,
        source_info_at_suspension_points,
        storage_conflicts,
        storage_liveness: storage_liveness_map,
    }
}

/// The set of `Local`s that must be saved across yield points.
///
/// `CoroutineSavedLocal` is indexed in terms of the elements in this set;
/// i.e. `CoroutineSavedLocal::new(1)` corresponds to the second local
/// included in this set.
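///
/// For example (hypothetical numbering, not taken from real MIR), if only `_3` and
/// `_5` are saved across yields:
/// ```ignore (illustrative)
/// saved set = {_3, _5}
/// CoroutineSavedLocal::new(0) <-> _3
/// CoroutineSavedLocal::new(1) <-> _5
/// ```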
struct CoroutineSavedLocals(DenseBitSet<Local>);

impl CoroutineSavedLocals {
    /// Returns an iterator over each `CoroutineSavedLocal` along with the `Local` it corresponds
    /// to.
    fn iter_enumerated(&self) -> impl '_ + Iterator<Item = (CoroutineSavedLocal, Local)> {
        self.iter().enumerate().map(|(i, l)| (CoroutineSavedLocal::from(i), l))
    }

    /// Transforms a `DenseBitSet<Local>` that contains only locals saved across yield points to the
    /// equivalent `DenseBitSet<CoroutineSavedLocal>`.
    fn renumber_bitset(&self, input: &DenseBitSet<Local>) -> DenseBitSet<CoroutineSavedLocal> {
        assert!(self.superset(input), "{:?} not a superset of {:?}", self.0, input);
        let mut out = DenseBitSet::new_empty(self.count());
        for (saved_local, local) in self.iter_enumerated() {
            if input.contains(local) {
                out.insert(saved_local);
            }
        }
        out
    }

    fn get(&self, local: Local) -> Option<CoroutineSavedLocal> {
        if !self.contains(local) {
            return None;
        }

        let idx = self.iter().take_while(|&l| l < local).count();
        Some(CoroutineSavedLocal::new(idx))
    }
}

impl ops::Deref for CoroutineSavedLocals {
    type Target = DenseBitSet<Local>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

/// For every saved local, looks for which locals are StorageLive at the same
/// time. Generates a bitset for every local of all the other locals that may be
/// StorageLive simultaneously with that local. This is used in the layout
/// computation; see `CoroutineLayout` for more.
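///
/// A rough illustration (not tied to the exact MIR):
/// ```ignore (illustrative)
/// {
///     let a = [0u8; 64];
///     yield ();      // `a` is saved across this point
/// }                  // storage of `a` ends here
/// let b = [0u8; 64];
/// yield ();          // `b` is saved; `a` and `b` are never storage-live at the same
///                    // time, so they do not conflict and their fields may overlap
/// ```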
fn compute_storage_conflicts<'mir, 'tcx>(
    body: &'mir Body<'tcx>,
    saved_locals: &'mir CoroutineSavedLocals,
    always_live_locals: DenseBitSet<Local>,
    analysis: &mut MaybeRequiresStorage<'mir, 'tcx>,
    results: &Results<DenseBitSet<Local>>,
) -> BitMatrix<CoroutineSavedLocal, CoroutineSavedLocal> {
    assert_eq!(body.local_decls.len(), saved_locals.domain_size());

    debug!("compute_storage_conflicts({:?})", body.span);
    debug!("always_live = {:?}", always_live_locals);

    // Locals that are always live or ones that need to be stored across
    // suspension points are not eligible for overlap.
    let mut ineligible_locals = always_live_locals;
    ineligible_locals.intersect(&**saved_locals);

    // Compute the storage conflicts for all eligible locals.
    let mut visitor = StorageConflictVisitor {
        body,
        saved_locals,
        local_conflicts: BitMatrix::from_row_n(&ineligible_locals, body.local_decls.len()),
        eligible_storage_live: DenseBitSet::new_empty(body.local_decls.len()),
    };

    visit_reachable_results(body, analysis, results, &mut visitor);

    let local_conflicts = visitor.local_conflicts;

    // Compress the matrix using only stored locals (Local -> CoroutineSavedLocal).
    //
    // NOTE: Today we store a full conflict bitset for every local. Technically
    // this is twice as many bits as we need, since the relation is symmetric.
    // However, in practice these bitsets are not usually large. The layout code
    // also needs to keep track of how many conflicts each local has, so it's
    // simpler to keep it this way for now.
    let mut storage_conflicts = BitMatrix::new(saved_locals.count(), saved_locals.count());
    for (saved_local_a, local_a) in saved_locals.iter_enumerated() {
        if ineligible_locals.contains(local_a) {
            // Conflicts with everything.
            storage_conflicts.insert_all_into_row(saved_local_a);
        } else {
            // Keep overlap information only for stored locals.
            for (saved_local_b, local_b) in saved_locals.iter_enumerated() {
                if local_conflicts.contains(local_a, local_b) {
                    storage_conflicts.insert(saved_local_a, saved_local_b);
                }
            }
        }
    }
    storage_conflicts
}

struct StorageConflictVisitor<'a, 'tcx> {
    body: &'a Body<'tcx>,
    saved_locals: &'a CoroutineSavedLocals,
    // FIXME(tmandry): Consider using sparse bitsets here once we have good
    // benchmarks for coroutines.
    local_conflicts: BitMatrix<Local, Local>,
    // We keep this bitset as a buffer to avoid reallocating memory.
    eligible_storage_live: DenseBitSet<Local>,
}

impl<'a, 'tcx> ResultsVisitor<'tcx, MaybeRequiresStorage<'a, 'tcx>>
    for StorageConflictVisitor<'a, 'tcx>
{
    fn visit_after_early_statement_effect(
        &mut self,
        _analysis: &mut MaybeRequiresStorage<'a, 'tcx>,
        state: &DenseBitSet<Local>,
        _statement: &Statement<'tcx>,
        loc: Location,
    ) {
        self.apply_state(state, loc);
    }

    fn visit_after_early_terminator_effect(
        &mut self,
        _analysis: &mut MaybeRequiresStorage<'a, 'tcx>,
        state: &DenseBitSet<Local>,
        _terminator: &Terminator<'tcx>,
        loc: Location,
    ) {
        self.apply_state(state, loc);
    }
}

impl StorageConflictVisitor<'_, '_> {
    fn apply_state(&mut self, state: &DenseBitSet<Local>, loc: Location) {
        // Ignore unreachable blocks.
        if let TerminatorKind::Unreachable = self.body.basic_blocks[loc.block].terminator().kind {
            return;
        }

        self.eligible_storage_live.clone_from(state);
        self.eligible_storage_live.intersect(&**self.saved_locals);

        for local in self.eligible_storage_live.iter() {
            self.local_conflicts.union_row_with(&self.eligible_storage_live, local);
        }

        if self.eligible_storage_live.count() > 1 {
            trace!("at {:?}, eligible_storage_live={:?}", loc, self.eligible_storage_live);
        }
    }
}

fn compute_layout<'tcx>(
    liveness: LivenessInfo,
    body: &Body<'tcx>,
) -> (
    IndexVec<Local, Option<(Ty<'tcx>, VariantIdx, FieldIdx)>>,
    CoroutineLayout<'tcx>,
    IndexVec<BasicBlock, Option<DenseBitSet<Local>>>,
) {
    let LivenessInfo {
        saved_locals,
        live_locals_at_suspension_points,
        source_info_at_suspension_points,
        storage_conflicts,
        storage_liveness,
    } = liveness;

    // Gather live local types and their indices.
    let mut locals = IndexVec::<CoroutineSavedLocal, _>::new();
    let mut tys = IndexVec::<CoroutineSavedLocal, _>::new();
    for (saved_local, local) in saved_locals.iter_enumerated() {
        debug!("coroutine saved local {:?} => {:?}", saved_local, local);

        locals.push(local);
        let decl = &body.local_decls[local];
        debug!(?decl);

        // Do not `unwrap_crate_local` here, as post-borrowck cleanup may have already cleared
        // the information. This is alright, since `ignore_for_traits` is only relevant when
        // this code runs on pre-cleanup MIR, and `ignore_for_traits = false` is the safer
        // default.
        let ignore_for_traits = match decl.local_info {
            // Do not include raw pointers created from accessing `static` items, as those could
            // well be re-created by another access to the same static.
            ClearCrossCrate::Set(box LocalInfo::StaticRef { is_thread_local, .. }) => {
                !is_thread_local
            }
            // Fake borrows are only read by fake reads, so do not have any reality in
            // post-analysis MIR.
            ClearCrossCrate::Set(box LocalInfo::FakeBorrow) => true,
            _ => false,
        };
        let decl =
            CoroutineSavedTy { ty: decl.ty, source_info: decl.source_info, ignore_for_traits };
        debug!(?decl);

        tys.push(decl);
    }

    // Leave empty variants for the UNRESUMED, RETURNED, and POISONED states.
    // In debuginfo, these will correspond to the beginning (UNRESUMED) or end
    // (RETURNED, POISONED) of the function.
    let body_span = body.source_scopes[OUTERMOST_SOURCE_SCOPE].span;
    let mut variant_source_info: IndexVec<VariantIdx, SourceInfo> = [
        SourceInfo::outermost(body_span.shrink_to_lo()),
        SourceInfo::outermost(body_span.shrink_to_hi()),
        SourceInfo::outermost(body_span.shrink_to_hi()),
    ]
    .iter()
    .copied()
    .collect();

    // Build the coroutine variant field list.
    // Create a map from local indices to coroutine struct indices.
    let mut variant_fields: IndexVec<VariantIdx, IndexVec<FieldIdx, CoroutineSavedLocal>> =
        iter::repeat(IndexVec::new()).take(CoroutineArgs::RESERVED_VARIANTS).collect();
    let mut remap = IndexVec::from_elem_n(None, saved_locals.domain_size());
    for (suspension_point_idx, live_locals) in live_locals_at_suspension_points.iter().enumerate() {
        let variant_index =
            VariantIdx::from(CoroutineArgs::RESERVED_VARIANTS + suspension_point_idx);
        let mut fields = IndexVec::new();
        for (idx, saved_local) in live_locals.iter().enumerate() {
            fields.push(saved_local);
            // Note that if a field is included in multiple variants, we will
            // just use the first one here. That's fine; fields do not move
            // around inside coroutines, so it doesn't matter which variant
            // index we access them by.
            let idx = FieldIdx::from_usize(idx);
            remap[locals[saved_local]] = Some((tys[saved_local].ty, variant_index, idx));
        }
        variant_fields.push(fields);
        variant_source_info.push(source_info_at_suspension_points[suspension_point_idx]);
    }
    debug!("coroutine variant_fields = {:?}", variant_fields);
    debug!("coroutine storage_conflicts = {:#?}", storage_conflicts);

    let mut field_names = IndexVec::from_elem(None, &tys);
    for var in &body.var_debug_info {
        let VarDebugInfoContents::Place(place) = &var.value else { continue };
        let Some(local) = place.as_local() else { continue };
        let Some(&Some((_, variant, field))) = remap.get(local) else {
            continue;
        };

        let saved_local = variant_fields[variant][field];
        field_names.get_or_insert_with(saved_local, || var.name);
    }

    let layout = CoroutineLayout {
        field_tys: tys,
        field_names,
        variant_fields,
        variant_source_info,
        storage_conflicts,
    };
    debug!(?layout);

    (remap, layout, storage_liveness)
}

/// Replaces the entry point of `body` with a block that switches on the coroutine discriminant and
/// dispatches to blocks according to `cases`.
///
/// After this function, the former entry point of the function will be bb1.
fn insert_switch<'tcx>(
    body: &mut Body<'tcx>,
    cases: Vec<(usize, BasicBlock)>,
    transform: &TransformVisitor<'tcx>,
    default_block: BasicBlock,
) {
    let (assign, discr) = transform.get_discr(body);
    let switch_targets =
        SwitchTargets::new(cases.iter().map(|(i, bb)| ((*i) as u128, *bb)), default_block);
    let switch = TerminatorKind::SwitchInt { discr: Operand::Move(discr), targets: switch_targets };

    let source_info = SourceInfo::outermost(body.span);
    body.basic_blocks_mut().raw.insert(
        0,
        BasicBlockData {
            statements: vec![assign],
            terminator: Some(Terminator { source_info, kind: switch }),
            is_cleanup: false,
        },
    );

    for b in body.basic_blocks_mut().iter_mut() {
        b.terminator_mut().successors_mut(|target| *target += 1);
    }
}

fn insert_term_block<'tcx>(body: &mut Body<'tcx>, kind: TerminatorKind<'tcx>) -> BasicBlock {
    let source_info = SourceInfo::outermost(body.span);
    body.basic_blocks_mut().push(BasicBlockData {
        statements: Vec::new(),
        terminator: Some(Terminator { source_info, kind }),
        is_cleanup: false,
    })
}

fn return_poll_ready_assign<'tcx>(tcx: TyCtxt<'tcx>, source_info: SourceInfo) -> Statement<'tcx> {
    // Poll::Ready(())
    let poll_def_id = tcx.require_lang_item(LangItem::Poll, source_info.span);
    let args = tcx.mk_args(&[tcx.types.unit.into()]);
    let val = Operand::Constant(Box::new(ConstOperand {
        span: source_info.span,
        user_ty: None,
        const_: Const::zero_sized(tcx.types.unit),
    }));
    let ready_val = Rvalue::Aggregate(
        Box::new(AggregateKind::Adt(poll_def_id, VariantIdx::from_usize(0), args, None, None)),
        IndexVec::from_raw(vec![val]),
    );
    Statement {
        kind: StatementKind::Assign(Box::new((Place::return_place(), ready_val))),
        source_info,
    }
}

fn insert_poll_ready_block<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) -> BasicBlock {
    let source_info = SourceInfo::outermost(body.span);
    body.basic_blocks_mut().push(BasicBlockData {
        statements: [return_poll_ready_assign(tcx, source_info)].to_vec(),
        terminator: Some(Terminator { source_info, kind: TerminatorKind::Return }),
        is_cleanup: false,
    })
}

fn insert_panic_block<'tcx>(
    tcx: TyCtxt<'tcx>,
    body: &mut Body<'tcx>,
    message: AssertMessage<'tcx>,
) -> BasicBlock {
    let assert_block = body.basic_blocks.next_index();
    let kind = TerminatorKind::Assert {
        cond: Operand::Constant(Box::new(ConstOperand {
            span: body.span,
            user_ty: None,
            const_: Const::from_bool(tcx, false),
        })),
        expected: true,
        msg: Box::new(message),
        target: assert_block,
        unwind: UnwindAction::Continue,
    };

    insert_term_block(body, kind)
}

fn can_return<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'tcx>, typing_env: ty::TypingEnv<'tcx>) -> bool {
    // Returning from a function with an uninhabited return type is undefined behavior.
    if body.return_ty().is_privately_uninhabited(tcx, typing_env) {
        return false;
    }

    // If there's a return terminator the function may return.
    body.basic_blocks.iter().any(|block| matches!(block.terminator().kind, TerminatorKind::Return))
    // Otherwise the function can't return.
}

fn can_unwind<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'tcx>) -> bool {
    // Nothing can unwind when landing pads are off.
    if tcx.sess.panic_strategy() == PanicStrategy::Abort {
        return false;
    }

    // Unwinds can only start at certain terminators.
    for block in body.basic_blocks.iter() {
        match block.terminator().kind {
            // These never unwind.
            TerminatorKind::Goto { .. }
            | TerminatorKind::SwitchInt { .. }
            | TerminatorKind::UnwindTerminate(_)
            | TerminatorKind::Return
            | TerminatorKind::Unreachable
            | TerminatorKind::CoroutineDrop
            | TerminatorKind::FalseEdge { .. }
            | TerminatorKind::FalseUnwind { .. } => {}

            // Resume will *continue* unwinding, but if there's no other unwinding terminator it
            // will never be reached.
            TerminatorKind::UnwindResume => {}

            TerminatorKind::Yield { .. } => {
                unreachable!("`can_unwind` called before coroutine transform")
            }

            // These may unwind.
            TerminatorKind::Drop { .. }
            | TerminatorKind::Call { .. }
            | TerminatorKind::InlineAsm { .. }
            | TerminatorKind::Assert { .. } => return true,

            TerminatorKind::TailCall { .. } => {
1193                unreachable!("tail calls can't be present in generators")
            }
        }
    }

    // If we didn't find an unwinding terminator, the function cannot unwind.
    false
}

// Poison the coroutine when it unwinds
fn generate_poison_block_and_redirect_unwinds_there<'tcx>(
    transform: &TransformVisitor<'tcx>,
    body: &mut Body<'tcx>,
) {
    let source_info = SourceInfo::outermost(body.span);
    let poison_block = body.basic_blocks_mut().push(BasicBlockData {
        statements: vec![
            transform.set_discr(VariantIdx::new(CoroutineArgs::POISONED), source_info),
        ],
        terminator: Some(Terminator { source_info, kind: TerminatorKind::UnwindResume }),
        is_cleanup: true,
    });

    for (idx, block) in body.basic_blocks_mut().iter_enumerated_mut() {
        let source_info = block.terminator().source_info;

        if let TerminatorKind::UnwindResume = block.terminator().kind {
            // An existing `Resume` terminator is redirected to jump to our dedicated
            // "poisoning block" above.
            if idx != poison_block {
                *block.terminator_mut() =
                    Terminator { source_info, kind: TerminatorKind::Goto { target: poison_block } };
            }
        } else if !block.is_cleanup
            // Any terminators that *can* unwind but don't have an unwind target set are also
            // pointed at our poisoning block (unless they're part of the cleanup path).
            && let Some(unwind @ UnwindAction::Continue) = block.terminator_mut().unwind_mut()
        {
            *unwind = UnwindAction::Cleanup(poison_block);
        }
    }
}

fn create_coroutine_resume_function<'tcx>(
    tcx: TyCtxt<'tcx>,
    transform: TransformVisitor<'tcx>,
    body: &mut Body<'tcx>,
    can_return: bool,
    can_unwind: bool,
) {
    // Poison the coroutine when it unwinds
    if can_unwind {
        generate_poison_block_and_redirect_unwinds_there(&transform, body);
    }

    let mut cases = create_cases(body, &transform, Operation::Resume);

    use rustc_middle::mir::AssertKind::{ResumedAfterPanic, ResumedAfterReturn};

    // Jump to the entry point on the unresumed state
    cases.insert(0, (CoroutineArgs::UNRESUMED, START_BLOCK));

    // Panic when resumed on the returned or poisoned state
    if can_unwind {
        cases.insert(
            1,
            (
                CoroutineArgs::POISONED,
                insert_panic_block(tcx, body, ResumedAfterPanic(transform.coroutine_kind)),
            ),
        );
    }

    if can_return {
        let block = match transform.coroutine_kind {
            CoroutineKind::Desugared(CoroutineDesugaring::Async, _)
            | CoroutineKind::Coroutine(_) => {
                // For `async_drop_in_place<T>::{closure}` we just keep returning `Poll::Ready`,
                // because the async drop of such a coroutine keeps polling the original coroutine
                if tcx.is_async_drop_in_place_coroutine(body.source.def_id()) {
                    insert_poll_ready_block(tcx, body)
                } else {
                    insert_panic_block(tcx, body, ResumedAfterReturn(transform.coroutine_kind))
                }
            }
            CoroutineKind::Desugared(CoroutineDesugaring::AsyncGen, _)
            | CoroutineKind::Desugared(CoroutineDesugaring::Gen, _) => {
                transform.insert_none_ret_block(body)
            }
        };
        cases.insert(1, (CoroutineArgs::RETURNED, block));
    }

    let default_block = insert_term_block(body, TerminatorKind::Unreachable);
    insert_switch(body, cases, &transform, default_block);

    make_coroutine_state_argument_indirect(tcx, body);

    match transform.coroutine_kind {
        CoroutineKind::Coroutine(_)
        | CoroutineKind::Desugared(CoroutineDesugaring::Async | CoroutineDesugaring::AsyncGen, _) =>
        {
            make_coroutine_state_argument_pinned(tcx, body);
        }
        // `Iterator::next` doesn't accept a pinned argument,
        // unlike all other coroutine kinds.
1299        CoroutineKind::Desugared(CoroutineDesugaring::Gen, _) => {}
1300    }
1301
1302    // Make sure we remove dead blocks to remove
1303    // unrelated code from the drop part of the function
1304    simplify::remove_dead_blocks(body);
1305
1306    pm::run_passes_no_validate(tcx, body, &[&abort_unwinding_calls::AbortUnwindingCalls], None);
1307
1308    dump_mir(tcx, false, "coroutine_resume", &0, body, |_, _| Ok(()));
1309}
1310
1311/// An operation that can be performed on a coroutine.
1312#[derive(PartialEq, Copy, Clone)]
1313enum Operation {
1314    Resume,
1315    Drop,
1316}
1317
1318impl Operation {
1319    fn target_block(self, point: &SuspensionPoint<'_>) -> Option<BasicBlock> {
1320        match self {
1321            Operation::Resume => Some(point.resume),
1322            Operation::Drop => point.drop,
1323        }
1324    }
1325}
1326
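/// For each suspension point that has a target for `operation`, creates a small
/// entry block which re-establishes storage for locals that were live at that
/// point (and are neither always live nor moved into the coroutine layout),
/// moves the resume argument into the `yield` destination when resuming, and
/// then jumps to the recorded target. Roughly, each generated block looks like
/// this (illustrative only; the local names are invented):
///
/// ```ignore (illustrative)
/// bbN: {
///     StorageLive(_live_local);   // one per re-established local
///     _yield_dest = move _2;      // `Operation::Resume` only
///     goto -> bb_target;
/// }
/// ```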
1327fn create_cases<'tcx>(
1328    body: &mut Body<'tcx>,
1329    transform: &TransformVisitor<'tcx>,
1330    operation: Operation,
1331) -> Vec<(usize, BasicBlock)> {
1332    let source_info = SourceInfo::outermost(body.span);
1333
1334    transform
1335        .suspension_points
1336        .iter()
1337        .filter_map(|point| {
1338            // Find the target for this suspension point, if applicable
1339            operation.target_block(point).map(|target| {
1340                let mut statements = Vec::new();
1341
1342                // Emit `StorageLive` statements for locals whose storage is live at this point
1343                for l in body.local_decls.indices() {
1344                    let needs_storage_live = point.storage_liveness.contains(l)
1345                        && !transform.remap.contains(l)
1346                        && !transform.always_live_locals.contains(l);
1347                    if needs_storage_live {
1348                        statements
1349                            .push(Statement { source_info, kind: StatementKind::StorageLive(l) });
1350                    }
1351                }
1352
1353                if operation == Operation::Resume {
1354                    // Move the resume argument to the destination place of the `Yield` terminator
1355                    let resume_arg = CTX_ARG;
1356                    statements.push(Statement {
1357                        source_info,
1358                        kind: StatementKind::Assign(Box::new((
1359                            point.resume_arg,
1360                            Rvalue::Use(Operand::Move(resume_arg.into())),
1361                        ))),
1362                    });
1363                }
1364
1365                // Then jump to the real target
1366                let block = body.basic_blocks_mut().push(BasicBlockData {
1367                    statements,
1368                    terminator: Some(Terminator {
1369                        source_info,
1370                        kind: TerminatorKind::Goto { target },
1371                    }),
1372                    is_cleanup: false,
1373                });
1374
1375                (point.state, block)
1376            })
1377        })
1378        .collect()
1379}
1380
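/// Computes the coroutine "witness" layout: the types of all locals that are
/// live across a suspension point, grouped by suspension point. This runs on
/// the `mir_promoted` body, i.e. before the state transform itself, and also
/// checks `#[must_not_suspend]` types and the `Sized`-ness of the saved fields.
/// Returns `None` if the coroutine type is an error type.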
1381#[instrument(level = "debug", skip(tcx), ret)]
1382pub(crate) fn mir_coroutine_witnesses<'tcx>(
1383    tcx: TyCtxt<'tcx>,
1384    def_id: LocalDefId,
1385) -> Option<CoroutineLayout<'tcx>> {
1386    let (body, _) = tcx.mir_promoted(def_id);
1387    let body = body.borrow();
1388    let body = &*body;
1389
1390    // The first argument is the coroutine type passed by value
1391    let coroutine_ty = body.local_decls[ty::CAPTURE_STRUCT_LOCAL].ty;
1392
1393    let movable = match *coroutine_ty.kind() {
1394        ty::Coroutine(def_id, _) => tcx.coroutine_movability(def_id) == hir::Movability::Movable,
1395        ty::Error(_) => return None,
1396        _ => span_bug!(body.span, "unexpected coroutine type {}", coroutine_ty),
1397    };
1398
1399    // The witness simply contains all locals live across suspend points.
1400
1401    let always_live_locals = always_storage_live_locals(body);
1402    let liveness_info = locals_live_across_suspend_points(tcx, body, &always_live_locals, movable);
1403
1404    // Extract locals which are live across suspension point into `layout`
1405    // `remap` gives a mapping from local indices onto coroutine struct indices
1406    // `storage_liveness` tells us which locals have live storage at suspension points
1407    let (_, coroutine_layout, _) = compute_layout(liveness_info, body);
1408
1409    check_suspend_tys(tcx, &coroutine_layout, body);
1410    check_field_tys_sized(tcx, &coroutine_layout, def_id);
1411
1412    Some(coroutine_layout)
1413}
1414
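/// Checks that every type stored in the coroutine layout is `Sized`. This only
/// matters when `unsized_fn_params` is enabled; without the feature, typeck has
/// already rejected unsized locals, so the check returns early.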
1415fn check_field_tys_sized<'tcx>(
1416    tcx: TyCtxt<'tcx>,
1417    coroutine_layout: &CoroutineLayout<'tcx>,
1418    def_id: LocalDefId,
1419) {
1420    // No need to check if unsized_fn_params is disabled,
1421    // since we will error during typeck.
1422    if !tcx.features().unsized_fn_params() {
1423        return;
1424    }
1425
1426    // FIXME(#132279): @lcnr believes that we may want to support coroutines
1427    // whose `Sized`-ness relies on the hidden types of opaques defined by the
1428    // parent function. In this case we'd have to be able to reveal only these
1429    // opaques here.
1430    let infcx = tcx.infer_ctxt().ignoring_regions().build(TypingMode::non_body_analysis());
1431    let param_env = tcx.param_env(def_id);
1432
1433    let ocx = ObligationCtxt::new_with_diagnostics(&infcx);
1434    for field_ty in &coroutine_layout.field_tys {
1435        ocx.register_bound(
1436            ObligationCause::new(
1437                field_ty.source_info.span,
1438                def_id,
1439                ObligationCauseCode::SizedCoroutineInterior(def_id),
1440            ),
1441            param_env,
1442            field_ty.ty,
1443            tcx.require_lang_item(hir::LangItem::Sized, field_ty.source_info.span),
1444        );
1445    }
1446
1447    let errors = ocx.select_all_or_error();
1448    debug!(?errors);
1449    if !errors.is_empty() {
1450        infcx.err_ctxt().report_fulfillment_errors(errors);
1451    }
1452}
1453
1454impl<'tcx> crate::MirPass<'tcx> for StateTransform {
1455    fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
1456        let Some(old_yield_ty) = body.yield_ty() else {
1457            // This only applies to coroutines
1458            return;
1459        };
1460        let old_ret_ty = body.return_ty();
1461
1462        assert!(body.coroutine_drop().is_none() && body.coroutine_drop_async().is_none());
1463
1464        dump_mir(tcx, false, "coroutine_before", &0, body, |_, _| Ok(()));
1465
1466        // The first argument is the coroutine type passed by value
1467        let coroutine_ty = body.local_decls.raw[1].ty;
1468        let coroutine_kind = body.coroutine_kind().unwrap();
1469
1470        // Get the discriminant type and args which typeck computed
1471        let ty::Coroutine(_, args) = coroutine_ty.kind() else {
1472            tcx.dcx().span_bug(body.span, format!("unexpected coroutine type {coroutine_ty}"));
1473        };
1474        let discr_ty = args.as_coroutine().discr_ty(tcx);
1475
1476        let new_ret_ty = match coroutine_kind {
1477            CoroutineKind::Desugared(CoroutineDesugaring::Async, _) => {
1478                // Compute Poll<return_ty>
1479                let poll_did = tcx.require_lang_item(LangItem::Poll, body.span);
1480                let poll_adt_ref = tcx.adt_def(poll_did);
1481                let poll_args = tcx.mk_args(&[old_ret_ty.into()]);
1482                Ty::new_adt(tcx, poll_adt_ref, poll_args)
1483            }
1484            CoroutineKind::Desugared(CoroutineDesugaring::Gen, _) => {
1485                // Compute Option<yield_ty>
1486                let option_did = tcx.require_lang_item(LangItem::Option, body.span);
1487                let option_adt_ref = tcx.adt_def(option_did);
1488                let option_args = tcx.mk_args(&[old_yield_ty.into()]);
1489                Ty::new_adt(tcx, option_adt_ref, option_args)
1490            }
1491            CoroutineKind::Desugared(CoroutineDesugaring::AsyncGen, _) => {
1492                // The yield ty is already `Poll<Option<yield_ty>>`
1493                old_yield_ty
1494            }
1495            CoroutineKind::Coroutine(_) => {
1496                // Compute CoroutineState<yield_ty, return_ty>
1497                let state_did = tcx.require_lang_item(LangItem::CoroutineState, body.span);
1498                let state_adt_ref = tcx.adt_def(state_did);
1499                let state_args = tcx.mk_args(&[old_yield_ty.into(), old_ret_ty.into()]);
1500                Ty::new_adt(tcx, state_adt_ref, state_args)
1501            }
1502        };
1503
1504        // Rename `RETURN_PLACE`, which has the coroutine's original return type, to `old_ret_local`;
1505        // `RETURN_PLACE` then becomes a fresh, unused local with type `new_ret_ty`.
1506        let old_ret_local = replace_local(RETURN_PLACE, new_ret_ty, body, tcx);
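        // For example (illustrative): in an `async` body returning `T`, the old
        // `_0: T` is renamed to a fresh local, while `_0` becomes an unused local
        // of type `Poll<T>` that the transform below assigns `Poll::Ready(..)` or
        // `Poll::Pending` to.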
1507
1508        // We need to insert a clean drop for the unresumed state and perform drop elaboration
1509        // (ultimately in `open_drop_for_tuple`) before async drop expansion.
1510        // The async drops produced by that drop elaboration will be expanded,
1511        // and the corresponding futures kept in the layout.
1512        let has_async_drops = matches!(
1513            coroutine_kind,
1514            CoroutineKind::Desugared(CoroutineDesugaring::Async | CoroutineDesugaring::AsyncGen, _)
1515        ) && has_expandable_async_drops(tcx, body, coroutine_ty);
1516
1517        // Replace all occurrences of `ResumeTy` with `&mut Context<'_>` within async bodies.
1518        if matches!(
1519            coroutine_kind,
1520            CoroutineKind::Desugared(CoroutineDesugaring::Async | CoroutineDesugaring::AsyncGen, _)
1521        ) {
1522            let context_mut_ref = transform_async_context(tcx, body);
1523            expand_async_drops(tcx, body, context_mut_ref, coroutine_kind, coroutine_ty);
1524            dump_mir(tcx, false, "coroutine_async_drop_expand", &0, body, |_, _| Ok(()));
1525        } else {
1526            cleanup_async_drops(body);
1527        }
1528
1529        // We also replace the resume argument and insert an `Assign`.
1530        // This is needed because the resume argument `_2` might be live across a `yield`, in which
1531        // case there is no `Assign` to it that the transform can turn into a store to the coroutine
1532        // state. After the yield the slot in the coroutine state would then be uninitialized.
1533        let resume_local = CTX_ARG;
1534        let resume_ty = body.local_decls[resume_local].ty;
1535        let old_resume_local = replace_local(resume_local, resume_ty, body, tcx);
1536
1537        // When first entering the coroutine, move the resume argument into its old local
1538        // (which is now part of the coroutine interior).
1539        let source_info = SourceInfo::outermost(body.span);
1540        let stmts = &mut body.basic_blocks_mut()[START_BLOCK].statements;
1541        stmts.insert(
1542            0,
1543            Statement {
1544                source_info,
1545                kind: StatementKind::Assign(Box::new((
1546                    old_resume_local.into(),
1547                    Rvalue::Use(Operand::Move(resume_local.into())),
1548                ))),
1549            },
1550        );
1551
1552        let always_live_locals = always_storage_live_locals(body);
1553
1554        let movable = coroutine_kind.movability() == hir::Movability::Movable;
1555        let liveness_info =
1556            locals_live_across_suspend_points(tcx, body, &always_live_locals, movable);
1557
1558        if tcx.sess.opts.unstable_opts.validate_mir {
1559            let mut vis = EnsureCoroutineFieldAssignmentsNeverAlias {
1560                assigned_local: None,
1561                saved_locals: &liveness_info.saved_locals,
1562                storage_conflicts: &liveness_info.storage_conflicts,
1563            };
1564
1565            vis.visit_body(body);
1566        }
1567
1568        // Extract locals which are live across suspension point into `layout`
1569        // `remap` gives a mapping from local indices onto coroutine struct indices
1570        // `storage_liveness` tells us which locals have live storage at suspension points
1571        let (remap, layout, storage_liveness) = compute_layout(liveness_info, body);
1572
1573        let can_return = can_return(tcx, body, body.typing_env(tcx));
1574
1575        // Run the transformation which converts Places from Local to coroutine struct
1576        // accesses for locals in `remap`.
1577        // It also rewrites `return x` and `yield y` as writing a new coroutine state and returning
1578        // either `CoroutineState::Complete(x)` and `CoroutineState::Yielded(y)`,
1579        // or `Poll::Ready(x)` and `Poll::Pending` respectively depending on the coroutine kind.
1580        let mut transform = TransformVisitor {
1581            tcx,
1582            coroutine_kind,
1583            remap,
1584            storage_liveness,
1585            always_live_locals,
1586            suspension_points: Vec::new(),
1587            old_ret_local,
1588            discr_ty,
1589            old_ret_ty,
1590            old_yield_ty,
1591        };
1592        transform.visit_body(body);
1593
1594        // Update our MIR struct to reflect the changes we've made
1595        body.arg_count = 2; // self, resume arg
1596        body.spread_arg = None;
1597
1598        // Remove the context argument within `gen` coroutine bodies.
1599        if matches!(coroutine_kind, CoroutineKind::Desugared(CoroutineDesugaring::Gen, _)) {
1600            transform_gen_context(body);
1601        }
1602
1603        // The original arguments to the function are no longer arguments; mark them as such.
1604        // Otherwise they'd conflict with our new arguments which, although they don't have
1605        // `argument_index` set, will get emitted as unnamed arguments.
1606        for var in &mut body.var_debug_info {
1607            var.argument_index = None;
1608        }
1609
1610        body.coroutine.as_mut().unwrap().yield_ty = None;
1611        body.coroutine.as_mut().unwrap().resume_ty = None;
1612        body.coroutine.as_mut().unwrap().coroutine_layout = Some(layout);
1613
1614        // FIXME: Drops, produced by insert_clean_drop + elaborate_coroutine_drops,
1615        // are currently sync only. To allow async for them, we need to move those calls
1616        // before expand_async_drops, and fix the related problems.
1617        //
1618        // Insert `drop(coroutine_struct)` which is used to drop upvars for coroutines in
1619        // the unresumed state.
1620        // This is expanded to a drop ladder in `elaborate_coroutine_drops`.
1621        let drop_clean = insert_clean_drop(tcx, body, has_async_drops);
1622
1623        dump_mir(tcx, false, "coroutine_pre-elab", &0, body, |_, _| Ok(()));
1624
1625        // Expand `drop(coroutine_struct)` to a drop ladder which destroys upvars.
1626        // If any upvars are moved out of, drop elaboration will handle upvar destruction.
1627        // However, we also need to elaborate the code generated by `insert_clean_drop`.
1628        elaborate_coroutine_drops(tcx, body);
1629
1630        dump_mir(tcx, false, "coroutine_post-transform", &0, body, |_, _| Ok(()));
1631
1632        let can_unwind = can_unwind(tcx, body);
1633
1634        // Create a copy of our MIR and use it to create the drop shim for the coroutine
1635        if has_async_drops {
1636            // If the coroutine has async drops, generate the async drop shim
1637            let mut drop_shim =
1638                create_coroutine_drop_shim_async(tcx, &transform, body, drop_clean, can_unwind);
1639            // Run the derefer to fix `Deref` projections that do not appear as the first projection
1640            deref_finder(tcx, &mut drop_shim);
1641            body.coroutine.as_mut().unwrap().coroutine_drop_async = Some(drop_shim);
1642        } else {
1643            // If the coroutine has no async drops, generate the sync drop shim
1644            let mut drop_shim =
1645                create_coroutine_drop_shim(tcx, &transform, coroutine_ty, body, drop_clean);
1646            // Run the derefer to fix `Deref` projections that do not appear as the first projection
1647            deref_finder(tcx, &mut drop_shim);
1648            body.coroutine.as_mut().unwrap().coroutine_drop = Some(drop_shim);
1649
1650            // For a coroutine with a sync drop, generate an async proxy for the `future_drop_poll` call
1651            let mut proxy_shim = create_coroutine_drop_shim_proxy_async(tcx, body);
1652            deref_finder(tcx, &mut proxy_shim);
1653            body.coroutine.as_mut().unwrap().coroutine_drop_proxy_async = Some(proxy_shim);
1654        }
1655
1656        // Create the Coroutine::resume / Future::poll function
1657        create_coroutine_resume_function(tcx, transform, body, can_return, can_unwind);
1658
1659        // Run the derefer to fix `Deref` projections that do not appear as the first projection
1660        deref_finder(tcx, body);
1661    }
1662
1663    fn is_required(&self) -> bool {
1664        true
1665    }
1666}
1667
1668/// Looks for any assignment between two locals (e.g., `_4 = _5`) where both locals will be converted
1669/// to fields in the coroutine state machine but whose storage is not marked as conflicting.
1670///
1671/// Validation needs to happen immediately *before* `TransformVisitor` is invoked, not after.
1672///
1673/// This condition would arise when the assignment is the last use of `_5` but the initial
1674/// definition of `_4` if we weren't extra careful to mark all locals used inside a statement as
1675/// conflicting. Non-conflicting coroutine saved locals may be stored at the same location within
1676/// the coroutine state machine, which would result in ill-formed MIR: the left-hand and right-hand
1677/// sides of an assignment may not alias. This caused a miscompilation in [#73137].
1678///
1679/// [#73137]: https://github.com/rust-lang/rust/issues/73137
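///
/// An illustrative fragment of the kind of assignment this visitor guards
/// against, assuming `_4` and `_5` are both saved into the coroutine:
///
/// ```ignore (illustrative)
/// _4 = _5; // ill-formed if `_4` and `_5` were assigned overlapping storage
/// ```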
1680struct EnsureCoroutineFieldAssignmentsNeverAlias<'a> {
1681    saved_locals: &'a CoroutineSavedLocals,
1682    storage_conflicts: &'a BitMatrix<CoroutineSavedLocal, CoroutineSavedLocal>,
1683    assigned_local: Option<CoroutineSavedLocal>,
1684}
1685
1686impl EnsureCoroutineFieldAssignmentsNeverAlias<'_> {
1687    fn saved_local_for_direct_place(&self, place: Place<'_>) -> Option<CoroutineSavedLocal> {
1688        if place.is_indirect() {
1689            return None;
1690        }
1691
1692        self.saved_locals.get(place.local)
1693    }
1694
1695    fn check_assigned_place(&mut self, place: Place<'_>, f: impl FnOnce(&mut Self)) {
1696        if let Some(assigned_local) = self.saved_local_for_direct_place(place) {
1697            assert!(self.assigned_local.is_none(), "`check_assigned_place` must not recurse");
1698
1699            self.assigned_local = Some(assigned_local);
1700            f(self);
1701            self.assigned_local = None;
1702        }
1703    }
1704}
1705
1706impl<'tcx> Visitor<'tcx> for EnsureCoroutineFieldAssignmentsNeverAlias<'_> {
1707    fn visit_place(&mut self, place: &Place<'tcx>, context: PlaceContext, location: Location) {
1708        let Some(lhs) = self.assigned_local else {
1709            // This visitor only invokes `visit_place` for the right-hand side of an assignment
1710            // and only after setting `self.assigned_local`. However, the default impl of
1711            // `Visitor::super_body` may call `visit_place` with a `NonUseContext` for places
1712            // with debuginfo. Ignore them here.
1713            assert!(!context.is_use());
1714            return;
1715        };
1716
1717        let Some(rhs) = self.saved_local_for_direct_place(*place) else { return };
1718
1719        if !self.storage_conflicts.contains(lhs, rhs) {
1720            bug!(
1721                "Assignment between coroutine saved locals whose storage is not \
1722                    marked as conflicting: {:?}: {:?} = {:?}",
1723                location,
1724                lhs,
1725                rhs,
1726            );
1727        }
1728    }
1729
1730    fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
1731        match &statement.kind {
1732            StatementKind::Assign(box (lhs, rhs)) => {
1733                self.check_assigned_place(*lhs, |this| this.visit_rvalue(rhs, location));
1734            }
1735
1736            StatementKind::FakeRead(..)
1737            | StatementKind::SetDiscriminant { .. }
1738            | StatementKind::Deinit(..)
1739            | StatementKind::StorageLive(_)
1740            | StatementKind::StorageDead(_)
1741            | StatementKind::Retag(..)
1742            | StatementKind::AscribeUserType(..)
1743            | StatementKind::PlaceMention(..)
1744            | StatementKind::Coverage(..)
1745            | StatementKind::Intrinsic(..)
1746            | StatementKind::ConstEvalCounter
1747            | StatementKind::BackwardIncompatibleDropHint { .. }
1748            | StatementKind::Nop => {}
1749        }
1750    }
1751
1752    fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) {
1753        // Checking for aliasing in terminators is probably overkill, but until we have actual
1754        // semantics, we should be conservative here.
1755        match &terminator.kind {
1756            TerminatorKind::Call {
1757                func,
1758                args,
1759                destination,
1760                target: Some(_),
1761                unwind: _,
1762                call_source: _,
1763                fn_span: _,
1764            } => {
1765                self.check_assigned_place(*destination, |this| {
1766                    this.visit_operand(func, location);
1767                    for arg in args {
1768                        this.visit_operand(&arg.node, location);
1769                    }
1770                });
1771            }
1772
1773            TerminatorKind::Yield { value, resume: _, resume_arg, drop: _ } => {
1774                self.check_assigned_place(*resume_arg, |this| this.visit_operand(value, location));
1775            }
1776
1777            // FIXME: Does `asm!` have any aliasing requirements?
1778            TerminatorKind::InlineAsm { .. } => {}
1779
1780            TerminatorKind::Call { .. }
1781            | TerminatorKind::Goto { .. }
1782            | TerminatorKind::SwitchInt { .. }
1783            | TerminatorKind::UnwindResume
1784            | TerminatorKind::UnwindTerminate(_)
1785            | TerminatorKind::Return
1786            | TerminatorKind::TailCall { .. }
1787            | TerminatorKind::Unreachable
1788            | TerminatorKind::Drop { .. }
1789            | TerminatorKind::Assert { .. }
1790            | TerminatorKind::CoroutineDrop
1791            | TerminatorKind::FalseEdge { .. }
1792            | TerminatorKind::FalseUnwind { .. } => {}
1793        }
1794    }
1795}
1796
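/// Walks every saved field of every variant in the coroutine layout and runs
/// the `must_not_suspend` check for each type that is held across the
/// corresponding suspension point. Each type is checked at most once per
/// coroutine.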
1797fn check_suspend_tys<'tcx>(tcx: TyCtxt<'tcx>, layout: &CoroutineLayout<'tcx>, body: &Body<'tcx>) {
1798    let mut linted_tys = FxHashSet::default();
1799
1800    for (variant, yield_source_info) in
1801        layout.variant_fields.iter().zip(&layout.variant_source_info)
1802    {
1803        debug!(?variant);
1804        for &local in variant {
1805            let decl = &layout.field_tys[local];
1806            debug!(?decl);
1807
1808            if !decl.ignore_for_traits && linted_tys.insert(decl.ty) {
1809                let Some(hir_id) = decl.source_info.scope.lint_root(&body.source_scopes) else {
1810                    continue;
1811                };
1812
1813                check_must_not_suspend_ty(
1814                    tcx,
1815                    decl.ty,
1816                    hir_id,
1817                    SuspendCheckData {
1818                        source_span: decl.source_info.span,
1819                        yield_span: yield_source_info.span,
1820                        plural_len: 1,
1821                        ..Default::default()
1822                    },
1823                );
1824            }
1825        }
1826    }
1827}
1828
1829#[derive(Default)]
1830struct SuspendCheckData<'a> {
1831    source_span: Span,
1832    yield_span: Span,
1833    descr_pre: &'a str,
1834    descr_post: &'a str,
1835    plural_len: usize,
1836}
1837
1838// Returns whether it emitted a diagnostic or not.
1839// Note that this fn and the following one are based on the code
1840// for creating `must_use` diagnostics.
1841//
1842// Note that this technique was chosen over something like a `Suspend` marker trait
1843// because it is simpler and has precedent in the compiler.
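//
// An illustrative example of what gets reported here (all names invented):
// a value of a `#[must_not_suspend]` type that is live across an `.await` is
// linted against the span of that suspension point.
//
// ```ignore (illustrative)
// #[must_not_suspend = "the guard should be dropped before awaiting"]
// struct Guard;
//
// async fn f() {
//     let g = Guard;
//     some_future().await; // `g` is live here, so `must_not_suspend` is emitted
//     drop(g);
// }
// ```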
1844fn check_must_not_suspend_ty<'tcx>(
1845    tcx: TyCtxt<'tcx>,
1846    ty: Ty<'tcx>,
1847    hir_id: hir::HirId,
1848    data: SuspendCheckData<'_>,
1849) -> bool {
1850    if ty.is_unit() {
1851        return false;
1852    }
1853
1854    let plural_suffix = pluralize!(data.plural_len);
1855
1856    debug!("Checking must_not_suspend for {}", ty);
1857
1858    match *ty.kind() {
1859        ty::Adt(_, args) if ty.is_box() => {
1860            let boxed_ty = args.type_at(0);
1861            let allocator_ty = args.type_at(1);
1862            check_must_not_suspend_ty(
1863                tcx,
1864                boxed_ty,
1865                hir_id,
1866                SuspendCheckData { descr_pre: &format!("{}boxed ", data.descr_pre), ..data },
1867            ) || check_must_not_suspend_ty(
1868                tcx,
1869                allocator_ty,
1870                hir_id,
1871                SuspendCheckData { descr_pre: &format!("{}allocator ", data.descr_pre), ..data },
1872            )
1873        }
1874        ty::Adt(def, _) => check_must_not_suspend_def(tcx, def.did(), hir_id, data),
1875        // FIXME: support adding the attribute to TAITs
1876        ty::Alias(ty::Opaque, ty::AliasTy { def_id: def, .. }) => {
1877            let mut has_emitted = false;
1878            for &(predicate, _) in tcx.explicit_item_bounds(def).skip_binder() {
1879                // We only look at the `DefId`, so it is safe to skip the binder here.
1880                if let ty::ClauseKind::Trait(ref poly_trait_predicate) =
1881                    predicate.kind().skip_binder()
1882                {
1883                    let def_id = poly_trait_predicate.trait_ref.def_id;
1884                    let descr_pre = &format!("{}implementer{} of ", data.descr_pre, plural_suffix);
1885                    if check_must_not_suspend_def(
1886                        tcx,
1887                        def_id,
1888                        hir_id,
1889                        SuspendCheckData { descr_pre, ..data },
1890                    ) {
1891                        has_emitted = true;
1892                        break;
1893                    }
1894                }
1895            }
1896            has_emitted
1897        }
1898        ty::Dynamic(binder, _, _) => {
1899            let mut has_emitted = false;
1900            for predicate in binder.iter() {
1901                if let ty::ExistentialPredicate::Trait(ref trait_ref) = predicate.skip_binder() {
1902                    let def_id = trait_ref.def_id;
1903                    let descr_post = &format!(" trait object{}{}", plural_suffix, data.descr_post);
1904                    if check_must_not_suspend_def(
1905                        tcx,
1906                        def_id,
1907                        hir_id,
1908                        SuspendCheckData { descr_post, ..data },
1909                    ) {
1910                        has_emitted = true;
1911                        break;
1912                    }
1913                }
1914            }
1915            has_emitted
1916        }
1917        ty::Tuple(fields) => {
1918            let mut has_emitted = false;
1919            for (i, ty) in fields.iter().enumerate() {
1920                let descr_post = &format!(" in tuple element {i}");
1921                if check_must_not_suspend_ty(
1922                    tcx,
1923                    ty,
1924                    hir_id,
1925                    SuspendCheckData { descr_post, ..data },
1926                ) {
1927                    has_emitted = true;
1928                }
1929            }
1930            has_emitted
1931        }
1932        ty::Array(ty, len) => {
1933            let descr_pre = &format!("{}array{} of ", data.descr_pre, plural_suffix);
1934            check_must_not_suspend_ty(
1935                tcx,
1936                ty,
1937                hir_id,
1938                SuspendCheckData {
1939                    descr_pre,
1940                    // FIXME(must_not_suspend): This is wrong. We should handle printing unevaluated consts.
1941                    plural_len: len.try_to_target_usize(tcx).unwrap_or(0) as usize + 1,
1942                    ..data
1943                },
1944            )
1945        }
1946        // If drop tracking is enabled, we want to look through references, since the referent
1947        // may not be considered live across the await point.
1948        ty::Ref(_region, ty, _mutability) => {
1949            let descr_pre = &format!("{}reference{} to ", data.descr_pre, plural_suffix);
1950            check_must_not_suspend_ty(tcx, ty, hir_id, SuspendCheckData { descr_pre, ..data })
1951        }
1952        _ => false,
1953    }
1954}
1955
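/// Emits the `must_not_suspend` lint for `def_id` if it is annotated with
/// `#[must_not_suspend]`, using the attribute's optional string as the reason.
/// Returns whether a diagnostic was emitted.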
1956fn check_must_not_suspend_def(
1957    tcx: TyCtxt<'_>,
1958    def_id: DefId,
1959    hir_id: hir::HirId,
1960    data: SuspendCheckData<'_>,
1961) -> bool {
1962    if let Some(attr) = tcx.get_attr(def_id, sym::must_not_suspend) {
1963        let reason = attr.value_str().map(|s| errors::MustNotSuspendReason {
1964            span: data.source_span,
1965            reason: s.as_str().to_string(),
1966        });
1967        tcx.emit_node_span_lint(
1968            rustc_session::lint::builtin::MUST_NOT_SUSPEND,
1969            hir_id,
1970            data.source_span,
1971            errors::MustNotSupend {
1972                tcx,
1973                yield_sp: data.yield_span,
1974                reason,
1975                src_sp: data.source_span,
1976                pre: data.descr_pre,
1977                def_id,
1978                post: data.descr_post,
1979            },
1980        );
1981
1982        true
1983    } else {
1984        false
1985    }
1986}