rustc_const_eval/check_consts/check.rs

//! The `Visitor` responsible for actually checking a `mir::Body` for invalid operations.

use std::assert_matches::assert_matches;
use std::borrow::Cow;
use std::mem;
use std::num::NonZero;
use std::ops::Deref;

use rustc_attr_data_structures as attrs;
use rustc_errors::{Diag, ErrorGuaranteed};
use rustc_hir::def::DefKind;
use rustc_hir::def_id::DefId;
use rustc_hir::{self as hir, LangItem};
use rustc_index::bit_set::DenseBitSet;
use rustc_infer::infer::TyCtxtInferExt;
use rustc_middle::mir::visit::Visitor;
use rustc_middle::mir::*;
use rustc_middle::span_bug;
use rustc_middle::ty::adjustment::PointerCoercion;
use rustc_middle::ty::{self, Ty, TypeVisitableExt};
use rustc_mir_dataflow::Analysis;
use rustc_mir_dataflow::impls::{MaybeStorageLive, always_storage_live_locals};
use rustc_span::{Span, Symbol, sym};
use rustc_trait_selection::traits::{
    Obligation, ObligationCause, ObligationCauseCode, ObligationCtxt,
};
use tracing::{instrument, trace};

use super::ops::{self, NonConstOp, Status};
use super::qualifs::{self, HasMutInterior, NeedsDrop, NeedsNonConstDrop};
use super::resolver::FlowSensitiveAnalysis;
use super::{ConstCx, Qualif};
use crate::check_consts::is_fn_or_trait_safe_to_expose_on_stable;
use crate::errors;

type QualifResults<'mir, 'tcx, Q> =
    rustc_mir_dataflow::ResultsCursor<'mir, 'tcx, FlowSensitiveAnalysis<'mir, 'tcx, Q>>;

#[derive(Copy, Clone, PartialEq, Eq, Debug)]
enum ConstConditionsHold {
    Yes,
    No,
}

#[derive(Default)]
pub(crate) struct Qualifs<'mir, 'tcx> {
    has_mut_interior: Option<QualifResults<'mir, 'tcx, HasMutInterior>>,
    needs_drop: Option<QualifResults<'mir, 'tcx, NeedsDrop>>,
    needs_non_const_drop: Option<QualifResults<'mir, 'tcx, NeedsNonConstDrop>>,
}

impl<'mir, 'tcx> Qualifs<'mir, 'tcx> {
    /// Returns `true` if `local` is `NeedsDrop` at the given `Location`.
    ///
    /// Only updates the cursor if absolutely necessary.
    pub(crate) fn needs_drop(
        &mut self,
        ccx: &'mir ConstCx<'mir, 'tcx>,
        local: Local,
        location: Location,
    ) -> bool {
        let ty = ccx.body.local_decls[local].ty;
        // Peeking into opaque types causes cycles if the current function declares said opaque
        // type. Thus we avoid short circuiting on the type and instead run the more expensive
        // analysis that looks at the actual usage within this function
        if !ty.has_opaque_types() && !NeedsDrop::in_any_value_of_ty(ccx, ty) {
            return false;
        }

        let needs_drop = self.needs_drop.get_or_insert_with(|| {
            let ConstCx { tcx, body, .. } = *ccx;

            FlowSensitiveAnalysis::new(NeedsDrop, ccx)
                .iterate_to_fixpoint(tcx, body, None)
                .into_results_cursor(body)
        });

        needs_drop.seek_before_primary_effect(location);
        needs_drop.get().contains(local)
    }

    /// Returns `true` if `local` is `NeedsNonConstDrop` at the given `Location`.
    ///
    /// Only updates the cursor if absolutely necessary.
    pub(crate) fn needs_non_const_drop(
        &mut self,
        ccx: &'mir ConstCx<'mir, 'tcx>,
        local: Local,
        location: Location,
    ) -> bool {
        let ty = ccx.body.local_decls[local].ty;
        // Peeking into opaque types causes cycles if the current function declares said opaque
        // type. Thus we avoid short circuiting on the type and instead run the more expensive
        // analysis that looks at the actual usage within this function
        if !ty.has_opaque_types() && !NeedsNonConstDrop::in_any_value_of_ty(ccx, ty) {
            return false;
        }

        let needs_non_const_drop = self.needs_non_const_drop.get_or_insert_with(|| {
            let ConstCx { tcx, body, .. } = *ccx;

            FlowSensitiveAnalysis::new(NeedsNonConstDrop, ccx)
                .iterate_to_fixpoint(tcx, body, None)
                .into_results_cursor(body)
        });

        needs_non_const_drop.seek_before_primary_effect(location);
        needs_non_const_drop.get().contains(local)
    }

    /// Returns `true` if `local` is `HasMutInterior` at the given `Location`.
    ///
    /// Only updates the cursor if absolutely necessary.
    fn has_mut_interior(
        &mut self,
        ccx: &'mir ConstCx<'mir, 'tcx>,
        local: Local,
        location: Location,
    ) -> bool {
        let ty = ccx.body.local_decls[local].ty;
        // Peeking into opaque types causes cycles if the current function declares said opaque
        // type. Thus we avoid short circuiting on the type and instead run the more expensive
        // analysis that looks at the actual usage within this function
        if !ty.has_opaque_types() && !HasMutInterior::in_any_value_of_ty(ccx, ty) {
            return false;
        }

        let has_mut_interior = self.has_mut_interior.get_or_insert_with(|| {
            let ConstCx { tcx, body, .. } = *ccx;

            FlowSensitiveAnalysis::new(HasMutInterior, ccx)
                .iterate_to_fixpoint(tcx, body, None)
                .into_results_cursor(body)
        });

        has_mut_interior.seek_before_primary_effect(location);
        has_mut_interior.get().contains(local)
    }

    fn in_return_place(
        &mut self,
        ccx: &'mir ConstCx<'mir, 'tcx>,
        tainted_by_errors: Option<ErrorGuaranteed>,
    ) -> ConstQualifs {
        // FIXME(explicit_tail_calls): uhhhh I think we can return without return now, does it change anything

        // Find the `Return` terminator if one exists.
        //
        // If no `Return` terminator exists, this MIR is divergent. Just return the conservative
        // qualifs for the return type.
        let return_block = ccx
            .body
            .basic_blocks
            .iter_enumerated()
            .find(|(_, block)| matches!(block.terminator().kind, TerminatorKind::Return))
            .map(|(bb, _)| bb);

        let Some(return_block) = return_block else {
            return qualifs::in_any_value_of_ty(ccx, ccx.body.return_ty(), tainted_by_errors);
        };

        let return_loc = ccx.body.terminator_loc(return_block);

        ConstQualifs {
            needs_drop: self.needs_drop(ccx, RETURN_PLACE, return_loc),
            needs_non_const_drop: self.needs_non_const_drop(ccx, RETURN_PLACE, return_loc),
            has_mut_interior: self.has_mut_interior(ccx, RETURN_PLACE, return_loc),
            tainted_by_errors,
        }
    }
}

pub struct Checker<'mir, 'tcx> {
    ccx: &'mir ConstCx<'mir, 'tcx>,
    qualifs: Qualifs<'mir, 'tcx>,

    /// The span of the current statement.
    span: Span,

    /// A set that stores for each local whether it is "transient", i.e. guaranteed to be dead
    /// when this MIR body returns.
    transient_locals: Option<DenseBitSet<Local>>,

    error_emitted: Option<ErrorGuaranteed>,
    secondary_errors: Vec<Diag<'tcx>>,
}

impl<'mir, 'tcx> Deref for Checker<'mir, 'tcx> {
    type Target = ConstCx<'mir, 'tcx>;

    fn deref(&self) -> &Self::Target {
        self.ccx
    }
}

impl<'mir, 'tcx> Checker<'mir, 'tcx> {
    pub fn new(ccx: &'mir ConstCx<'mir, 'tcx>) -> Self {
        Checker {
            span: ccx.body.span,
            ccx,
            qualifs: Default::default(),
            transient_locals: None,
            error_emitted: None,
            secondary_errors: Vec::new(),
        }
    }

    pub fn check_body(&mut self) {
        let ConstCx { tcx, body, .. } = *self.ccx;
        let def_id = self.ccx.def_id();

        // `async` functions cannot be `const fn`. This is checked during AST lowering, so there's
        // no need to emit duplicate errors here.
        if self.ccx.is_async() || body.coroutine.is_some() {
            tcx.dcx().span_delayed_bug(body.span, "`async` functions cannot be `const fn`");
            return;
        }

        if !tcx.has_attr(def_id, sym::rustc_do_not_const_check) {
            self.visit_body(body);
        }

        // If we got through const-checking without emitting any "primary" errors, emit any
        // "secondary" errors if they occurred. Otherwise, cancel the "secondary" errors.
        let secondary_errors = mem::take(&mut self.secondary_errors);
        if self.error_emitted.is_none() {
            for error in secondary_errors {
                self.error_emitted = Some(error.emit());
            }
        } else {
            assert!(self.tcx.dcx().has_errors().is_some());
            for error in secondary_errors {
                error.cancel();
            }
        }
    }

    fn local_is_transient(&mut self, local: Local) -> bool {
        let ccx = self.ccx;
        self.transient_locals
            .get_or_insert_with(|| {
                // A local is "transient" if it is guaranteed dead at all `Return`.
                // So first compute the set of "maybe live" locals at each program point.
                let always_live_locals = &always_storage_live_locals(&ccx.body);
                let mut maybe_storage_live =
                    MaybeStorageLive::new(Cow::Borrowed(always_live_locals))
                        .iterate_to_fixpoint(ccx.tcx, &ccx.body, None)
                        .into_results_cursor(&ccx.body);

                // And then check all `Return` in the MIR, and if a local is "maybe live" at a
                // `Return` then it is definitely not transient.
                let mut transient = DenseBitSet::new_filled(ccx.body.local_decls.len());
                // Make sure to only visit reachable blocks, the dataflow engine can ICE otherwise.
                for (bb, data) in traversal::reachable(&ccx.body) {
                    if matches!(data.terminator().kind, TerminatorKind::Return) {
                        let location = ccx.body.terminator_loc(bb);
                        maybe_storage_live.seek_after_primary_effect(location);
                        // If a local may be live here, it is definitely not transient.
                        transient.subtract(maybe_storage_live.get());
                    }
                }

                transient
            })
            .contains(local)
    }
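
    // Illustrative example (hypothetical user code, not part of this module): in
    //
    //     const fn f() -> i32 { let mut x = 0; let r = &mut x; *r = 1; x }
    //
    // `x` is storage-dead on every control-flow path to `Return`, so it counts as
    // "transient" here: borrows of it cannot possibly end up in a final value.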

    pub fn qualifs_in_return_place(&mut self) -> ConstQualifs {
        self.qualifs.in_return_place(self.ccx, self.error_emitted)
    }

    /// Emits an error if an expression cannot be evaluated in the current context.
    pub fn check_op(&mut self, op: impl NonConstOp<'tcx>) {
        self.check_op_spanned(op, self.span);
    }

    /// Emits an error at the given `span` if an expression cannot be evaluated in the current
    /// context.
    pub fn check_op_spanned<O: NonConstOp<'tcx>>(&mut self, op: O, span: Span) {
        let gate = match op.status_in_item(self.ccx) {
            Status::Unstable {
                gate,
                safe_to_expose_on_stable,
                is_function_call,
                gate_already_checked,
            } if gate_already_checked || self.tcx.features().enabled(gate) => {
                if gate_already_checked {
                    assert!(
                        !safe_to_expose_on_stable,
                        "setting `gate_already_checked` without `safe_to_expose_on_stable` makes no sense"
                    );
                }
                // Generally this is allowed since the feature gate is enabled -- except
                // if this function wants to be safe-to-expose-on-stable.
                if !safe_to_expose_on_stable
                    && self.enforce_recursive_const_stability()
                    && !super::rustc_allow_const_fn_unstable(self.tcx, self.def_id(), gate)
                {
                    emit_unstable_in_stable_exposed_error(self.ccx, span, gate, is_function_call);
                }

                return;
            }

            Status::Unstable { gate, .. } => Some(gate),
            Status::Forbidden => None,
        };

        if self.tcx.sess.opts.unstable_opts.unleash_the_miri_inside_of_you {
            self.tcx.sess.miri_unleashed_feature(span, gate);
            return;
        }

        let err = op.build_error(self.ccx, span);
        assert!(err.is_error());

        match op.importance() {
            ops::DiagImportance::Primary => {
                let reported = err.emit();
                self.error_emitted = Some(reported);
            }

            ops::DiagImportance::Secondary => {
                self.secondary_errors.push(err);
                self.tcx.dcx().span_delayed_bug(
                    span,
                    "compilation must fail when there is a secondary const checker error",
                );
            }
        }
    }

    fn check_static(&mut self, def_id: DefId, span: Span) {
        if self.tcx.is_thread_local_static(def_id) {
            self.tcx.dcx().span_bug(span, "tls access is checked in `Rvalue::ThreadLocalRef`");
        }
        if let Some(def_id) = def_id.as_local()
            && let Err(guar) = self.tcx.ensure_ok().check_well_formed(hir::OwnerId { def_id })
        {
            self.error_emitted = Some(guar);
        }
    }

    /// Returns whether this place can possibly escape the evaluation of the current const/static
    /// initializer. The check assumes that all already existing pointers and references point to
    /// non-escaping places.
    fn place_may_escape(&mut self, place: &Place<'_>) -> bool {
        let is_transient = match self.const_kind() {
            // In a const fn all borrows are transient or point to the places given via
            // references in the arguments (so we already checked them with
            // TransientMutBorrow/MutBorrow as appropriate).
            // The borrow checker guarantees that no new non-transient borrows are created.
            // NOTE: Once we have heap allocations during CTFE we need to figure out
            // how to prevent `const fn` from creating long-lived allocations that point
            // to mutable memory.
            hir::ConstContext::ConstFn => true,
            _ => {
                // For indirect places, we are not creating a new permanent borrow, it's just as
                // transient as the already existing one.
                // Locals with StorageDead do not live beyond the evaluation and can
                // thus safely be borrowed without being able to be leaked to the final
                // value of the constant.
                // Note: This is only sound if every local that has a `StorageDead` has a
                // `StorageDead` in every control flow path leading to a `return` terminator.
                // If anything slips through, there's no safety net -- safe code can create
                // references to variants of `!Freeze` enums as long as that variant is `Freeze`, so
                // interning can't protect us here. (There *is* a safety net for mutable references
                // though, interning will ICE if we miss something here.)
                place.is_indirect() || self.local_is_transient(place.local)
            }
        };
        // Transient places cannot possibly escape because the place doesn't exist any more at the
        // end of evaluation.
        !is_transient
    }
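
    // A sketch of what this distinction means in user code (hedged, illustrative
    // only; the precise diagnostics live in `ops`):
    //
    //     const OK: i32 = { let mut x = 0; let r = &mut x; *r += 1; x };  // transient borrow: fine
    //     // const BAD: &'static mut i32 = &mut 0;                        // would escape into the value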

    /// Returns whether the callee has const-conditions and, if it does, whether those
    /// conditions hold at this call site.
    fn revalidate_conditional_constness(
        &mut self,
        callee: DefId,
        callee_args: ty::GenericArgsRef<'tcx>,
        call_span: Span,
    ) -> Option<ConstConditionsHold> {
        let tcx = self.tcx;
        if !tcx.is_conditionally_const(callee) {
            return None;
        }

        let const_conditions = tcx.const_conditions(callee).instantiate(tcx, callee_args);
        if const_conditions.is_empty() {
            return None;
        }

        let (infcx, param_env) = tcx.infer_ctxt().build_with_typing_env(self.body.typing_env(tcx));
        let ocx = ObligationCtxt::new(&infcx);

        let body_id = self.body.source.def_id().expect_local();
        let host_polarity = match self.const_kind() {
            hir::ConstContext::ConstFn => ty::BoundConstness::Maybe,
            hir::ConstContext::Static(_) | hir::ConstContext::Const { .. } => {
                ty::BoundConstness::Const
            }
        };
        let const_conditions =
            ocx.normalize(&ObligationCause::misc(call_span, body_id), param_env, const_conditions);
        ocx.register_obligations(const_conditions.into_iter().map(|(trait_ref, span)| {
            Obligation::new(
                tcx,
                ObligationCause::new(
                    call_span,
                    body_id,
                    ObligationCauseCode::WhereClause(callee, span),
                ),
                param_env,
                trait_ref.to_host_effect_clause(tcx, host_polarity),
            )
        }));

        let errors = ocx.select_all_or_error();
        if errors.is_empty() {
            Some(ConstConditionsHold::Yes)
        } else {
            tcx.dcx()
                .span_delayed_bug(call_span, "this should have reported a [const] error in HIR");
            Some(ConstConditionsHold::No)
        }
    }
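
    // Rough intuition (hedged; the surface syntax for const trait bounds is
    // unstable and may differ): a call like `T::default()` inside
    //
    //     const fn make<T: [const] Default>() -> T { T::default() }
    //
    // carries the const-condition `T: [const] Default`. In a `const`/`static`
    // initializer the condition must hold outright (`BoundConstness::Const`);
    // inside another `const fn` it may be passed along conditionally
    // (`BoundConstness::Maybe`).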

    pub fn check_drop_terminator(
        &mut self,
        dropped_place: Place<'tcx>,
        location: Location,
        terminator_span: Span,
    ) {
        let ty_of_dropped_place = dropped_place.ty(self.body, self.tcx).ty;

        let needs_drop = if let Some(local) = dropped_place.as_local() {
            self.qualifs.needs_drop(self.ccx, local, location)
        } else {
            qualifs::NeedsDrop::in_any_value_of_ty(self.ccx, ty_of_dropped_place)
        };
        // If this type doesn't need a drop at all, then there's nothing to enforce.
        if !needs_drop {
            return;
        }

        let mut err_span = self.span;
        let needs_non_const_drop = if let Some(local) = dropped_place.as_local() {
            // Use the span where the local was declared as the span of the drop error.
            err_span = self.body.local_decls[local].source_info.span;
            self.qualifs.needs_non_const_drop(self.ccx, local, location)
        } else {
            qualifs::NeedsNonConstDrop::in_any_value_of_ty(self.ccx, ty_of_dropped_place)
        };

        self.check_op_spanned(
            ops::LiveDrop {
                dropped_at: terminator_span,
                dropped_ty: ty_of_dropped_place,
                needs_non_const_drop,
            },
            err_span,
        );
    }
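
    // Illustrative (hypothetical user code): a live drop of a type whose
    // destructor cannot run at compile time is what gets rejected here, e.g.
    //
    //     const fn takes_vec(v: Vec<u8>) {} // `v` is dropped here -> live-drop error
    //
    // while a type with no destructor at all (`i32`, `&str`, ...) takes the early
    // return above and is always fine.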

    /// Check the const stability of the given item (fn or trait).
    fn check_callee_stability(&mut self, def_id: DefId) {
        match self.tcx.lookup_const_stability(def_id) {
            Some(attrs::ConstStability { level: attrs::StabilityLevel::Stable { .. }, .. }) => {
                // All good.
            }
            None => {
                // This doesn't need a separate const-stability check -- const-stability equals
                // regular stability, and regular stability is checked separately.
                // However, we *do* have to worry about *recursive* const stability.
                if self.enforce_recursive_const_stability()
                    && !is_fn_or_trait_safe_to_expose_on_stable(self.tcx, def_id)
                {
                    self.dcx().emit_err(errors::UnmarkedConstItemExposed {
                        span: self.span,
                        def_path: self.tcx.def_path_str(def_id),
                    });
                }
            }
            Some(attrs::ConstStability {
                level: attrs::StabilityLevel::Unstable { implied_by: implied_feature, issue, .. },
                feature,
                ..
            }) => {
                // An unstable const fn/trait with a feature gate.
                let callee_safe_to_expose_on_stable =
                    is_fn_or_trait_safe_to_expose_on_stable(self.tcx, def_id);

                // We only honor `span.allows_unstable` aka `#[allow_internal_unstable]` if
                // the callee is safe to expose, to avoid bypassing recursive stability.
                // This is not ideal since it means the user sees an error, not the macro
                // author, but that's also the case if one forgets to set
                // `#[allow_internal_unstable]` in the first place. Note that this cannot be
                // integrated in the check below since we want to enforce
                // `callee_safe_to_expose_on_stable` even if
                // `!self.enforce_recursive_const_stability()`.
                if (self.span.allows_unstable(feature)
                    || implied_feature.is_some_and(|f| self.span.allows_unstable(f)))
                    && callee_safe_to_expose_on_stable
                {
                    return;
                }

                // We can't use `check_op` to check whether the feature is enabled because
                // the logic is a bit different than elsewhere: local functions don't need
                // the feature gate, and there might be an "implied" gate that also suffices
                // to allow this.
                let feature_enabled = def_id.is_local()
                    || self.tcx.features().enabled(feature)
                    || implied_feature.is_some_and(|f| self.tcx.features().enabled(f))
                    || {
                        // When we're compiling the compiler itself we may pull in
                        // crates from crates.io, but those crates may depend on other
                        // crates also pulled in from crates.io. We want to ideally be
                        // able to compile everything without requiring upstream
                        // modifications, so in the case that this looks like a
                        // `rustc_private` crate (e.g., a compiler crate) and we also have
                        // the `-Z force-unstable-if-unmarked` flag present (we're
                        // compiling a compiler crate), then let this missing feature
                        // annotation slide.
                        // This matches what we do in `eval_stability_allow_unstable` for
                        // regular stability.
                        feature == sym::rustc_private
                            && issue == NonZero::new(27812)
                            && self.tcx.sess.opts.unstable_opts.force_unstable_if_unmarked
                    };
                // Even if the feature is enabled, we still need check_op to double-check
                // this if the callee is not safe to expose on stable.
                if !feature_enabled || !callee_safe_to_expose_on_stable {
                    self.check_op(ops::CallUnstable {
                        def_id,
                        feature,
                        feature_enabled,
                        safe_to_expose_on_stable: callee_safe_to_expose_on_stable,
                        is_function_call: self.tcx.def_kind(def_id) != DefKind::Trait,
                    });
                }
            }
        }
    }
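
    // Sketch of the resulting behavior (hedged; these attributes are internal):
    // calling a `#[rustc_const_unstable(feature = "foo", ...)]` function from a
    // recursively-const-stable function is rejected unless the caller opts in via
    // `#[rustc_allow_const_fn_unstable(foo)]`; in an ordinary crate, enabling
    // `#![feature(foo)]` suffices.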
}

impl<'tcx> Visitor<'tcx> for Checker<'_, 'tcx> {
    fn visit_basic_block_data(&mut self, bb: BasicBlock, block: &BasicBlockData<'tcx>) {
        trace!("visit_basic_block_data: bb={:?} is_cleanup={:?}", bb, block.is_cleanup);

        // We don't const-check basic blocks on the cleanup path since we never unwind during
        // const-eval: a panic causes an immediate compile error. In other words, cleanup blocks
        // are unreachable during const-eval.
        //
        // We can't be more conservative (e.g., by const-checking cleanup blocks anyways) because
        // locals that would never be dropped during normal execution are sometimes dropped during
        // unwinding, which means backwards-incompatible live-drop errors.
        if block.is_cleanup {
            return;
        }

        self.super_basic_block_data(bb, block);
    }

    fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
        trace!("visit_rvalue: rvalue={:?} location={:?}", rvalue, location);

        self.super_rvalue(rvalue, location);

        match rvalue {
            Rvalue::ThreadLocalRef(_) => self.check_op(ops::ThreadLocalAccess),

            Rvalue::Use(_)
            | Rvalue::CopyForDeref(..)
            | Rvalue::Repeat(..)
            | Rvalue::Discriminant(..)
            | Rvalue::Len(_) => {}

            Rvalue::Aggregate(kind, ..) => {
                if let AggregateKind::Coroutine(def_id, ..) = kind.as_ref()
                    && let Some(coroutine_kind) = self.tcx.coroutine_kind(def_id)
                {
                    self.check_op(ops::Coroutine(coroutine_kind));
                }
            }

            Rvalue::Ref(_, BorrowKind::Mut { .. }, place)
            | Rvalue::RawPtr(RawPtrKind::Mut, place) => {
                // Inside mutable statics, we allow arbitrary mutable references.
                // We've allowed `static mut FOO = &mut [elements];` for a long time (the exact
                // reasons why are lost to history), and there is no reason to restrict that to
                // arrays and slices.
                let is_allowed =
                    self.const_kind() == hir::ConstContext::Static(hir::Mutability::Mut);

                if !is_allowed && self.place_may_escape(place) {
                    self.check_op(ops::EscapingMutBorrow);
                }
            }
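
            // Illustrative (hypothetical user code): the arm above is what rejects
            //
            //     const BAD: &mut i32 = &mut 0;                 // escaping mutable borrow
            //
            // while still accepting the long-standing exception
            //
            //     static mut FOO: &mut [i32] = &mut [1, 2, 3];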

            Rvalue::Ref(_, BorrowKind::Shared | BorrowKind::Fake(_), place)
            | Rvalue::RawPtr(RawPtrKind::Const, place) => {
                let borrowed_place_has_mut_interior = qualifs::in_place::<HasMutInterior, _>(
                    self.ccx,
                    &mut |local| self.qualifs.has_mut_interior(self.ccx, local, location),
                    place.as_ref(),
                );

                if borrowed_place_has_mut_interior && self.place_may_escape(place) {
                    self.check_op(ops::EscapingCellBorrow);
                }
            }
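
            // Illustrative (hypothetical user code): shared borrows only error when
            // the borrowed data has interior mutability *and* may escape, e.g.
            //
            //     // const C: &core::cell::Cell<u8> = &core::cell::Cell::new(0);  // rejected
            //     const D: &u8 = &0;                                              // fine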

            Rvalue::RawPtr(RawPtrKind::FakeForPtrMetadata, place) => {
                // These are only inserted for slice length, so the place must already be indirect.
                // This implies we do not have to worry about whether the borrow escapes.
                if !place.is_indirect() {
                    self.tcx.dcx().span_delayed_bug(
                        self.body.source_info(location).span,
                        "fake borrows are always indirect",
                    );
                }
            }

            Rvalue::Cast(
                CastKind::PointerCoercion(
                    PointerCoercion::MutToConstPointer
                    | PointerCoercion::ArrayToPointer
                    | PointerCoercion::UnsafeFnPointer
                    | PointerCoercion::ClosureFnPointer(_)
                    | PointerCoercion::ReifyFnPointer,
                    _,
                ),
                _,
                _,
            ) => {
                // These are all okay; they only change the type, not the data.
            }

            Rvalue::Cast(CastKind::PointerExposeProvenance, _, _) => {
                self.check_op(ops::RawPtrToIntCast);
            }
            Rvalue::Cast(CastKind::PointerWithExposedProvenance, _, _) => {
                // Since no pointer can ever get exposed (rejected above), this is easy to support.
            }
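
            // Illustrative (hypothetical user code): the asymmetry above means
            //
            //     // const A: usize = &0 as *const i32 as usize;  // ptr-to-int: rejected
            //     const B: *const i32 = 1 as *const i32;          // int-to-ptr: fine
            //
            // int-to-ptr is easy to support precisely because no provenance can have
            // been exposed during const evaluation.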

            Rvalue::Cast(_, _, _) => {}

            Rvalue::NullaryOp(
                NullOp::SizeOf
                | NullOp::AlignOf
                | NullOp::OffsetOf(_)
                | NullOp::UbChecks
                | NullOp::ContractChecks,
                _,
            ) => {}
            Rvalue::ShallowInitBox(_, _) => {}

            Rvalue::UnaryOp(op, operand) => {
                let ty = operand.ty(self.body, self.tcx);
                match op {
                    UnOp::Not | UnOp::Neg => {
                        if is_int_bool_float_or_char(ty) {
                            // Int, bool, float, and char operations are fine.
                        } else {
                            span_bug!(
                                self.span,
                                "non-primitive type in `Rvalue::UnaryOp{op:?}`: {ty:?}",
                            );
                        }
                    }
                    UnOp::PtrMetadata => {
                        // Getting the metadata from a pointer is always const.
                        // We already validated the type is valid in the validator.
                    }
                }
            }

            Rvalue::BinaryOp(op, box (lhs, rhs)) => {
                let lhs_ty = lhs.ty(self.body, self.tcx);
                let rhs_ty = rhs.ty(self.body, self.tcx);

                if is_int_bool_float_or_char(lhs_ty) && is_int_bool_float_or_char(rhs_ty) {
                    // Int, bool, float, and char operations are fine.
                } else if lhs_ty.is_fn_ptr() || lhs_ty.is_raw_ptr() {
                    assert_matches!(
                        op,
                        BinOp::Eq
                            | BinOp::Ne
                            | BinOp::Le
                            | BinOp::Lt
                            | BinOp::Ge
                            | BinOp::Gt
                            | BinOp::Offset
                    );

                    self.check_op(ops::RawPtrComparison);
                } else {
                    span_bug!(
                        self.span,
                        "non-primitive type in `Rvalue::BinaryOp`: {:?} ⚬ {:?}",
                        lhs_ty,
                        rhs_ty
                    );
                }
            }
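
            // Note on the arm above (an observation, not new policy): the assert
            // documents that pointer operands only ever reach `Rvalue::BinaryOp` as
            // comparisons or `Offset`; whether `ops::RawPtrComparison` is then
            // allowed, gated, or rejected is decided by its `NonConstOp` status.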

            Rvalue::WrapUnsafeBinder(..) => {
                // Unsafe binders are always trivial to create.
            }
        }
    }

    fn visit_operand(&mut self, op: &Operand<'tcx>, location: Location) {
        self.super_operand(op, location);
        if let Operand::Constant(c) = op {
            if let Some(def_id) = c.check_static_ptr(self.tcx) {
                self.check_static(def_id, self.span);
            }
        }
    }

    fn visit_source_info(&mut self, source_info: &SourceInfo) {
        trace!("visit_source_info: source_info={:?}", source_info);
        self.span = source_info.span;
    }

    fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
        trace!("visit_statement: statement={:?} location={:?}", statement, location);

        self.super_statement(statement, location);

        match statement.kind {
            StatementKind::Assign(..)
            | StatementKind::SetDiscriminant { .. }
            | StatementKind::Deinit(..)
            | StatementKind::FakeRead(..)
            | StatementKind::StorageLive(_)
            | StatementKind::StorageDead(_)
            | StatementKind::Retag { .. }
            | StatementKind::PlaceMention(..)
            | StatementKind::AscribeUserType(..)
            | StatementKind::Coverage(..)
            | StatementKind::Intrinsic(..)
            | StatementKind::ConstEvalCounter
            | StatementKind::BackwardIncompatibleDropHint { .. }
            | StatementKind::Nop => {}
        }
    }

    #[instrument(level = "debug", skip(self))]
    fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) {
        self.super_terminator(terminator, location);

        match &terminator.kind {
            TerminatorKind::Call { func, args, fn_span, .. }
            | TerminatorKind::TailCall { func, args, fn_span, .. } => {
                let call_source = match terminator.kind {
                    TerminatorKind::Call { call_source, .. } => call_source,
                    TerminatorKind::TailCall { .. } => CallSource::Normal,
                    _ => unreachable!(),
                };

                let ConstCx { tcx, body, .. } = *self.ccx;

                let fn_ty = func.ty(body, tcx);

                let (callee, fn_args) = match *fn_ty.kind() {
                    ty::FnDef(def_id, fn_args) => (def_id, fn_args),

                    ty::FnPtr(..) => {
                        self.check_op(ops::FnCallIndirect);
                        // We can get here without an error in miri-unleashed mode... might as well
                        // skip the rest of the checks as well then.
                        return;
                    }
                    _ => {
                        span_bug!(terminator.source_info.span, "invalid callee of type {:?}", fn_ty)
                    }
                };

                let has_const_conditions =
                    self.revalidate_conditional_constness(callee, fn_args, *fn_span);

                // Attempting to call a trait method?
                if let Some(trait_did) = tcx.trait_of_item(callee) {
                    // We can't determine the actual callee here, so we have to do different checks
                    // than usual.

                    trace!("attempting to call a trait method");
                    let trait_is_const = tcx.is_const_trait(trait_did);

                    // Only consider a trait to be const if the const conditions hold.
                    // Otherwise, it's really misleading to call something "conditionally"
                    // const when it's very obviously not conditionally const.
                    if trait_is_const && has_const_conditions == Some(ConstConditionsHold::Yes) {
                        // Trait calls are always conditionally-const.
                        self.check_op(ops::ConditionallyConstCall {
                            callee,
                            args: fn_args,
                            span: *fn_span,
                            call_source,
                        });
                        self.check_callee_stability(trait_did);
                    } else {
                        // Not even a const trait.
                        self.check_op(ops::FnCallNonConst {
                            callee,
                            args: fn_args,
                            span: *fn_span,
                            call_source,
                        });
                    }
                    // That's all we can check here.
                    return;
                }

                // Even if we know the callee, ensure we can use conditionally-const calls.
                if has_const_conditions.is_some() {
                    self.check_op(ops::ConditionallyConstCall {
                        callee,
                        args: fn_args,
                        span: *fn_span,
                        call_source,
                    });
                }

                // At this point, we are calling a function, `callee`, whose `DefId` is known...

                // `begin_panic` and `#[rustc_const_panic_str]` functions accept generic
                // types other than str. Check to enforce that only str can be used in
                // const-eval.

                // const-eval of the `begin_panic` fn assumes the argument is `&str`
                if tcx.is_lang_item(callee, LangItem::BeginPanic) {
                    match args[0].node.ty(&self.ccx.body.local_decls, tcx).kind() {
                        ty::Ref(_, ty, _) if ty.is_str() => {}
                        _ => self.check_op(ops::PanicNonStr),
                    }
                    // Allow this call, skip all the checks below.
                    return;
                }
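
                // Illustrative (hedged): `begin_panic` accepts an arbitrary payload,
                // e.g. `std::panic!(42)` on older editions, but const-eval only
                // supports the `&str` case, so any other payload type is reported
                // as `PanicNonStr` here.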

                // const-eval of `#[rustc_const_panic_str]` functions assumes the argument is `&&str`
                if tcx.has_attr(callee, sym::rustc_const_panic_str) {
                    match args[0].node.ty(&self.ccx.body.local_decls, tcx).kind() {
                        ty::Ref(_, ty, _) if matches!(ty.kind(), ty::Ref(_, ty, _) if ty.is_str()) =>
                            {}
                        _ => {
                            self.check_op(ops::PanicNonStr);
                        }
                    }
                    // Allow this call, skip all the checks below.
                    return;
                }

                // This can be called on stable via the `vec!` macro.
                if tcx.is_lang_item(callee, LangItem::ExchangeMalloc) {
                    self.check_op(ops::HeapAllocation);
                    // Allow this call, skip all the checks below.
                    return;
                }
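
                // Illustrative (hypothetical user code): this is how
                //
                //     // const V: Vec<u8> = vec![1, 2, 3];  // heap allocation: rejected
                //
                // gets caught, even though `vec!` itself is a stable macro.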

                // Intrinsics are language primitives, not regular calls, so treat them separately.
                if let Some(intrinsic) = tcx.intrinsic(callee) {
                    if !tcx.is_const_fn(callee) {
                        // Non-const intrinsic.
                        self.check_op(ops::IntrinsicNonConst { name: intrinsic.name });
                        // If we allowed this, we're in miri-unleashed mode, so we might
                        // as well skip the remaining checks.
                        return;
                    }
                    // We use `intrinsic.const_stable` to determine if this can be safely exposed to
                    // stable code, rather than `const_stable_indirect`. This is to make
                    // `#[rustc_const_stable_indirect]` an attribute that is always safe to add.
                    // We also ask `is_fn_or_trait_safe_to_expose_on_stable`; this determines
                    // whether the intrinsic fallback body is safe to expose on stable.
                    let is_const_stable = intrinsic.const_stable
                        || (!intrinsic.must_be_overridden
                            && is_fn_or_trait_safe_to_expose_on_stable(tcx, callee));
                    match tcx.lookup_const_stability(callee) {
                        None => {
                            // This doesn't need a separate const-stability check -- const-stability equals
                            // regular stability, and regular stability is checked separately.
                            // However, we *do* have to worry about *recursive* const stability.
                            if !is_const_stable && self.enforce_recursive_const_stability() {
                                self.dcx().emit_err(errors::UnmarkedIntrinsicExposed {
                                    span: self.span,
                                    def_path: self.tcx.def_path_str(callee),
                                });
                            }
                        }
                        Some(attrs::ConstStability {
                            level: attrs::StabilityLevel::Unstable { .. },
                            feature,
                            ..
                        }) => {
                            self.check_op(ops::IntrinsicUnstable {
                                name: intrinsic.name,
                                feature,
                                const_stable_indirect: is_const_stable,
                            });
                        }
                        Some(attrs::ConstStability {
                            level: attrs::StabilityLevel::Stable { .. },
                            ..
                        }) => {
                            // All good. Note that a `#[rustc_const_stable]` intrinsic (meaning it
                            // can be *directly* invoked from stable const code) does not always
                            // have the `#[rustc_intrinsic_const_stable_indirect]` attribute (which controls
                            // exposing an intrinsic indirectly); we accept this call anyway.
                        }
                    }
                    // This completes the checks for intrinsics.
                    return;
                }

                if !tcx.is_const_fn(callee) {
                    self.check_op(ops::FnCallNonConst {
                        callee,
                        args: fn_args,
                        span: *fn_span,
                        call_source,
                    });
                    // If we allowed this, we're in miri-unleashed mode, so we might
                    // as well skip the remaining checks.
                    return;
                }

                // Finally, stability for regular function calls -- this is the big one.
                self.check_callee_stability(callee);
            }

            // Forbid all `Drop` terminators unless the place being dropped is a local with no
            // projections that cannot be `NeedsNonConstDrop`.
            TerminatorKind::Drop { place: dropped_place, .. } => {
                // If we are checking live drops after drop-elaboration, don't emit duplicate
                // errors here.
                if super::post_drop_elaboration::checking_enabled(self.ccx) {
                    return;
                }

                self.check_drop_terminator(*dropped_place, location, terminator.source_info.span);
            }

            TerminatorKind::InlineAsm { .. } => self.check_op(ops::InlineAsm),

            TerminatorKind::Yield { .. } => {
                self.check_op(ops::Coroutine(
                    self.tcx
                        .coroutine_kind(self.body.source.def_id())
                        .expect("Only expected to have a yield in a coroutine"),
                ));
            }

            TerminatorKind::CoroutineDrop => {
                span_bug!(
                    self.body.source_info(location).span,
                    "We should not encounter TerminatorKind::CoroutineDrop after coroutine transform"
                );
            }

            TerminatorKind::UnwindTerminate(_) => {
                // Cleanup blocks are skipped for const checking (see `visit_basic_block_data`).
                span_bug!(self.span, "`Terminate` terminator outside of cleanup block")
            }

            TerminatorKind::Assert { .. }
            | TerminatorKind::FalseEdge { .. }
            | TerminatorKind::FalseUnwind { .. }
            | TerminatorKind::Goto { .. }
            | TerminatorKind::UnwindResume
            | TerminatorKind::Return
            | TerminatorKind::SwitchInt { .. }
            | TerminatorKind::Unreachable => {}
        }
    }
}

fn is_int_bool_float_or_char(ty: Ty<'_>) -> bool {
    ty.is_bool() || ty.is_integral() || ty.is_char() || ty.is_floating_point()
}

fn emit_unstable_in_stable_exposed_error(
    ccx: &ConstCx<'_, '_>,
    span: Span,
    gate: Symbol,
    is_function_call: bool,
) -> ErrorGuaranteed {
    let attr_span = ccx.tcx.def_span(ccx.def_id()).shrink_to_lo();

    ccx.dcx().emit_err(errors::UnstableInStableExposed {
        gate: gate.to_string(),
        span,
        attr_span,
        is_function_call,
        is_function_call2: is_function_call,
    })
}