1use std::assert_matches::assert_matches;
4use std::borrow::Cow;
5use std::mem;
6use std::num::NonZero;
7use std::ops::Deref;
8
9use rustc_attr_data_structures as attrs;
10use rustc_errors::{Diag, ErrorGuaranteed};
11use rustc_hir::def::DefKind;
12use rustc_hir::def_id::DefId;
13use rustc_hir::{self as hir, LangItem};
14use rustc_index::bit_set::DenseBitSet;
15use rustc_infer::infer::TyCtxtInferExt;
16use rustc_middle::mir::visit::Visitor;
17use rustc_middle::mir::*;
18use rustc_middle::span_bug;
19use rustc_middle::ty::adjustment::PointerCoercion;
20use rustc_middle::ty::{self, Ty, TypeVisitableExt};
21use rustc_mir_dataflow::Analysis;
22use rustc_mir_dataflow::impls::{MaybeStorageLive, always_storage_live_locals};
23use rustc_span::{Span, Symbol, sym};
24use rustc_trait_selection::traits::{
25 Obligation, ObligationCause, ObligationCauseCode, ObligationCtxt,
26};
27use tracing::{instrument, trace};
28
29use super::ops::{self, NonConstOp, Status};
30use super::qualifs::{self, HasMutInterior, NeedsDrop, NeedsNonConstDrop};
31use super::resolver::FlowSensitiveAnalysis;
32use super::{ConstCx, Qualif};
33use crate::check_consts::is_fn_or_trait_safe_to_expose_on_stable;
34use crate::errors;
35
/// A results cursor over a flow-sensitive qualif analysis `Q`, used to query
/// whether a given `Local` carries the qualif at a particular `Location`.
type QualifResults<'mir, 'tcx, Q> =
    rustc_mir_dataflow::ResultsCursor<'mir, 'tcx, FlowSensitiveAnalysis<'mir, 'tcx, Q>>;
38
/// Outcome of re-checking a conditionally-const callee's const conditions;
/// produced by `Checker::revalidate_conditional_constness`.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
enum ConstConditionsHold {
    Yes,
    No,
}
44
/// Lazily-computed dataflow results for the three qualif analyses. Each field
/// stays `None` until the corresponding accessor is first called, at which
/// point the fixpoint is computed and the cursor cached for later queries.
#[derive(Default)]
pub(crate) struct Qualifs<'mir, 'tcx> {
    has_mut_interior: Option<QualifResults<'mir, 'tcx, HasMutInterior>>,
    needs_drop: Option<QualifResults<'mir, 'tcx, NeedsDrop>>,
    needs_non_const_drop: Option<QualifResults<'mir, 'tcx, NeedsNonConstDrop>>,
}
51
impl<'mir, 'tcx> Qualifs<'mir, 'tcx> {
    /// Returns `true` if `local` may carry the `NeedsDrop` qualif at
    /// `location`. The flow-sensitive analysis is computed on first use and
    /// cached for subsequent queries.
    pub(crate) fn needs_drop(
        &mut self,
        ccx: &'mir ConstCx<'mir, 'tcx>,
        local: Local,
        location: Location,
    ) -> bool {
        let ty = ccx.body.local_decls[local].ty;
        // Fast path: if no value of this type could have the qualif, skip the
        // dataflow analysis entirely. Opaque types are excluded from the fast
        // path — presumably because the type-based check can't see through
        // them, so we fall back to the flow-sensitive analysis (TODO confirm).
        if !ty.has_opaque_types() && !NeedsDrop::in_any_value_of_ty(ccx, ty) {
            return false;
        }

        // Lazily run the analysis to fixpoint and cache the cursor.
        let needs_drop = self.needs_drop.get_or_insert_with(|| {
            let ConstCx { tcx, body, .. } = *ccx;

            FlowSensitiveAnalysis::new(NeedsDrop, ccx)
                .iterate_to_fixpoint(tcx, body, None)
                .into_results_cursor(body)
        });

        // Query the state just *before* the primary effect at `location`.
        needs_drop.seek_before_primary_effect(location);
        needs_drop.get().contains(local)
    }

    /// Returns `true` if `local` may carry the `NeedsNonConstDrop` qualif at
    /// `location`. Same lazy-computation scheme as `needs_drop`.
    pub(crate) fn needs_non_const_drop(
        &mut self,
        ccx: &'mir ConstCx<'mir, 'tcx>,
        local: Local,
        location: Location,
    ) -> bool {
        let ty = ccx.body.local_decls[local].ty;
        // Fast path mirroring `needs_drop`; see the comment there.
        if !ty.has_opaque_types() && !NeedsNonConstDrop::in_any_value_of_ty(ccx, ty) {
            return false;
        }

        let needs_non_const_drop = self.needs_non_const_drop.get_or_insert_with(|| {
            let ConstCx { tcx, body, .. } = *ccx;

            FlowSensitiveAnalysis::new(NeedsNonConstDrop, ccx)
                .iterate_to_fixpoint(tcx, body, None)
                .into_results_cursor(body)
        });

        needs_non_const_drop.seek_before_primary_effect(location);
        needs_non_const_drop.get().contains(local)
    }

    /// Returns `true` if `local` may carry the `HasMutInterior` qualif (e.g.
    /// contains an `UnsafeCell`) at `location`. Same lazy scheme as above.
    fn has_mut_interior(
        &mut self,
        ccx: &'mir ConstCx<'mir, 'tcx>,
        local: Local,
        location: Location,
    ) -> bool {
        let ty = ccx.body.local_decls[local].ty;
        // Fast path mirroring `needs_drop`; see the comment there.
        if !ty.has_opaque_types() && !HasMutInterior::in_any_value_of_ty(ccx, ty) {
            return false;
        }

        let has_mut_interior = self.has_mut_interior.get_or_insert_with(|| {
            let ConstCx { tcx, body, .. } = *ccx;

            FlowSensitiveAnalysis::new(HasMutInterior, ccx)
                .iterate_to_fixpoint(tcx, body, None)
                .into_results_cursor(body)
        });

        has_mut_interior.seek_before_primary_effect(location);
        has_mut_interior.get().contains(local)
    }

    /// Computes the `ConstQualifs` of the value in the return place at the
    /// body's `Return` terminator. If no `Return` terminator exists (e.g. the
    /// body diverges), conservatively falls back to the qualifs implied by the
    /// return *type* alone.
    fn in_return_place(
        &mut self,
        ccx: &'mir ConstCx<'mir, 'tcx>,
        tainted_by_errors: Option<ErrorGuaranteed>,
    ) -> ConstQualifs {
        // Find a block that ends in `Return`. NOTE(review): only the first such
        // block is used — this assumes all `Return` terminators agree on the
        // qualifs of `RETURN_PLACE`.
        let return_block = ccx
            .body
            .basic_blocks
            .iter_enumerated()
            .find(|(_, block)| matches!(block.terminator().kind, TerminatorKind::Return))
            .map(|(bb, _)| bb);

        let Some(return_block) = return_block else {
            // No `Return` terminator: use the type-based approximation.
            return qualifs::in_any_value_of_ty(ccx, ccx.body.return_ty(), tainted_by_errors);
        };

        let return_loc = ccx.body.terminator_loc(return_block);

        ConstQualifs {
            needs_drop: self.needs_drop(ccx, RETURN_PLACE, return_loc),
            needs_non_const_drop: self.needs_non_const_drop(ccx, RETURN_PLACE, return_loc),
            has_mut_interior: self.has_mut_interior(ccx, RETURN_PLACE, return_loc),
            tainted_by_errors,
        }
    }
}
172
/// The MIR visitor that enforces const-checking rules on a single body.
pub struct Checker<'mir, 'tcx> {
    ccx: &'mir ConstCx<'mir, 'tcx>,
    // Lazily-computed qualif results for this body.
    qualifs: Qualifs<'mir, 'tcx>,

    // The span of the statement/terminator currently being visited; kept up to
    // date by `visit_source_info` and used when reporting errors.
    span: Span,

    // Locals guaranteed to be storage-dead at every `Return`; computed lazily
    // by `local_is_transient`.
    transient_locals: Option<DenseBitSet<Local>>,

    // The guarantee from the most recent primary error, if any was emitted.
    error_emitted: Option<ErrorGuaranteed>,
    // Lower-priority diagnostics, held back until `check_body` decides whether
    // to emit them (no primary error occurred) or cancel them.
    secondary_errors: Vec<Diag<'tcx>>,
}
187
188impl<'mir, 'tcx> Deref for Checker<'mir, 'tcx> {
189 type Target = ConstCx<'mir, 'tcx>;
190
191 fn deref(&self) -> &Self::Target {
192 self.ccx
193 }
194}
195
impl<'mir, 'tcx> Checker<'mir, 'tcx> {
    /// Creates a checker for the body in `ccx`; the current `span` starts out
    /// as the whole body's span until the first `visit_source_info`.
    pub fn new(ccx: &'mir ConstCx<'mir, 'tcx>) -> Self {
        Checker {
            span: ccx.body.span,
            ccx,
            qualifs: Default::default(),
            transient_locals: None,
            error_emitted: None,
            secondary_errors: Vec::new(),
        }
    }

    /// Runs the const checker over the whole body and then flushes or cancels
    /// any accumulated secondary errors.
    pub fn check_body(&mut self) {
        let ConstCx { tcx, body, .. } = *self.ccx;
        let def_id = self.ccx.def_id();

        // `async` fns and coroutines cannot be const; this should have been
        // rejected earlier, so only delay a bug rather than emitting here.
        if self.ccx.is_async() || body.coroutine.is_some() {
            tcx.dcx().span_delayed_bug(body.span, "`async` functions cannot be `const fn`");
            return;
        }

        // `#[rustc_do_not_const_check]` skips the MIR pass entirely.
        if !tcx.has_attr(def_id, sym::rustc_do_not_const_check) {
            self.visit_body(body);
        }

        // Secondary errors are only emitted if no primary error fired; if one
        // did, they would be redundant noise and are cancelled instead.
        let secondary_errors = mem::take(&mut self.secondary_errors);
        if self.error_emitted.is_none() {
            for error in secondary_errors {
                self.error_emitted = Some(error.emit());
            }
        } else {
            // A primary error implies the diagnostics context already has one.
            assert!(self.tcx.dcx().has_errors().is_some());
            for error in secondary_errors {
                error.cancel();
            }
        }
    }

    /// Returns `true` if `local` is "transient", i.e. guaranteed to be
    /// storage-dead at every reachable `Return` terminator, so a borrow of it
    /// cannot escape into the final value. Computed once and cached.
    fn local_is_transient(&mut self, local: Local) -> bool {
        let ccx = self.ccx;
        self.transient_locals
            .get_or_insert_with(|| {
                // Run storage-liveness dataflow to fixpoint once.
                let always_live_locals = &always_storage_live_locals(&ccx.body);
                let mut maybe_storage_live =
                    MaybeStorageLive::new(Cow::Borrowed(always_live_locals))
                        .iterate_to_fixpoint(ccx.tcx, &ccx.body, None)
                        .into_results_cursor(&ccx.body);

                // Start from "every local is transient" and remove each local
                // that may still be storage-live after some reachable `Return`.
                let mut transient = DenseBitSet::new_filled(ccx.body.local_decls.len());
                for (bb, data) in traversal::reachable(&ccx.body) {
                    if matches!(data.terminator().kind, TerminatorKind::Return) {
                        let location = ccx.body.terminator_loc(bb);
                        maybe_storage_live.seek_after_primary_effect(location);
                        transient.subtract(maybe_storage_live.get());
                    }
                }

                transient
            })
            .contains(local)
    }

    /// Returns the qualifs of the value in the return place; `error_emitted`
    /// taints the result if a primary error already occurred.
    pub fn qualifs_in_return_place(&mut self) -> ConstQualifs {
        self.qualifs.in_return_place(self.ccx, self.error_emitted)
    }

    /// Checks `op` using the span of the currently-visited statement.
    pub fn check_op(&mut self, op: impl NonConstOp<'tcx>) {
        self.check_op_spanned(op, self.span);
    }

    /// Checks whether the non-const operation `op` is allowed here, emitting
    /// the appropriate diagnostic (or feature-gate / recursive-const-stability
    /// error) if it is not.
    pub fn check_op_spanned<O: NonConstOp<'tcx>>(&mut self, op: O, span: Span) {
        let gate = match op.status_in_item(self.ccx) {
            // Unstable op whose gate is enabled (or was already checked): the
            // op itself is allowed, but it may still violate recursive const
            // stability when exposed from a stable const context.
            Status::Unstable {
                gate,
                safe_to_expose_on_stable,
                is_function_call,
                gate_already_checked,
            } if gate_already_checked || self.tcx.features().enabled(gate) => {
                if gate_already_checked {
                    assert!(
                        !safe_to_expose_on_stable,
                        "setting `gate_already_checked` without `safe_to_expose_on_stable` makes no sense"
                    );
                }
                // An unstable-and-unsafe-to-expose op in a recursively-const-
                // stable context needs `#[rustc_allow_const_fn_unstable]`.
                if !safe_to_expose_on_stable
                    && self.enforce_recursive_const_stability()
                    && !super::rustc_allow_const_fn_unstable(self.tcx, self.def_id(), gate)
                {
                    emit_unstable_in_stable_exposed_error(self.ccx, span, gate, is_function_call);
                }

                return;
            }

            // Gate not enabled, or op is outright forbidden: fall through to
            // error reporting with the (optional) gate for the diagnostic.
            Status::Unstable { gate, .. } => Some(gate),
            Status::Forbidden => None,
        };

        // `-Zunleash-the-miri-inside-of-you` downgrades everything to a lint.
        if self.tcx.sess.opts.unstable_opts.unleash_the_miri_inside_of_you {
            self.tcx.sess.miri_unleashed_feature(span, gate);
            return;
        }

        let err = op.build_error(self.ccx, span);
        assert!(err.is_error());

        match op.importance() {
            ops::DiagImportance::Primary => {
                let reported = err.emit();
                self.error_emitted = Some(reported);
            }

            // Secondary errors are buffered; `check_body` emits or cancels
            // them depending on whether a primary error occurred.
            ops::DiagImportance::Secondary => {
                self.secondary_errors.push(err);
                self.tcx.dcx().span_delayed_bug(
                    span,
                    "compilation must fail when there is a secondary const checker error",
                );
            }
        }
    }

    /// Validates an access to the static `def_id`; thread-local statics are
    /// handled elsewhere and well-formedness errors taint this body.
    fn check_static(&mut self, def_id: DefId, span: Span) {
        if self.tcx.is_thread_local_static(def_id) {
            self.tcx.dcx().span_bug(span, "tls access is checked in `Rvalue::ThreadLocalRef`");
        }
        // For local statics, surface any well-formedness error as ours.
        if let Some(def_id) = def_id.as_local()
            && let Err(guar) = self.tcx.ensure_ok().check_well_formed(hir::OwnerId { def_id })
        {
            self.error_emitted = Some(guar);
        }
    }

    /// Returns `true` if a borrow/pointer created from `place` may escape into
    /// the final value of this const context.
    fn place_may_escape(&mut self, place: &Place<'_>) -> bool {
        let is_transient = match self.const_kind() {
            // In a `const fn`, every borrow is treated as transient here;
            // escaping values are checked at the function's use sites instead.
            hir::ConstContext::ConstFn => true,
            _ => {
                // Indirect places and borrows of transient locals cannot end
                // up in the final value; anything else may.
                place.is_indirect() || self.local_is_transient(place.local)
            }
        };
        !is_transient
    }

    /// For a conditionally-const `callee`, re-proves its const conditions with
    /// the given `callee_args`. Returns `None` if the callee is not
    /// conditionally const (or has no conditions), otherwise whether the
    /// conditions hold. A failure here is only a delayed bug, since HIR
    /// checking should already have reported it.
    fn revalidate_conditional_constness(
        &mut self,
        callee: DefId,
        callee_args: ty::GenericArgsRef<'tcx>,
        call_span: Span,
    ) -> Option<ConstConditionsHold> {
        let tcx = self.tcx;
        if !tcx.is_conditionally_const(callee) {
            return None;
        }

        let const_conditions = tcx.const_conditions(callee).instantiate(tcx, callee_args);
        if const_conditions.is_empty() {
            return None;
        }

        let (infcx, param_env) = tcx.infer_ctxt().build_with_typing_env(self.body.typing_env(tcx));
        let ocx = ObligationCtxt::new(&infcx);

        let body_id = self.body.source.def_id().expect_local();
        // In a `const fn` the conditions only need to hold "maybe-const";
        // statics and consts always evaluate, so they must hold as `Const`.
        let host_polarity = match self.const_kind() {
            hir::ConstContext::ConstFn => ty::BoundConstness::Maybe,
            hir::ConstContext::Static(_) | hir::ConstContext::Const { .. } => {
                ty::BoundConstness::Const
            }
        };
        let const_conditions =
            ocx.normalize(&ObligationCause::misc(call_span, body_id), param_env, const_conditions);
        ocx.register_obligations(const_conditions.into_iter().map(|(trait_ref, span)| {
            Obligation::new(
                tcx,
                ObligationCause::new(
                    call_span,
                    body_id,
                    ObligationCauseCode::WhereClause(callee, span),
                ),
                param_env,
                trait_ref.to_host_effect_clause(tcx, host_polarity),
            )
        }));

        let errors = ocx.select_all_or_error();
        if errors.is_empty() {
            Some(ConstConditionsHold::Yes)
        } else {
            tcx.dcx()
                .span_delayed_bug(call_span, "this should have reported a [const] error in HIR");
            Some(ConstConditionsHold::No)
        }
    }

    /// Checks a `Drop` terminator: dropping a value whose type needs drop is a
    /// "live drop" error, with the non-const-drop flavor refined when the
    /// dropped place is a plain local.
    pub fn check_drop_terminator(
        &mut self,
        dropped_place: Place<'tcx>,
        location: Location,
        terminator_span: Span,
    ) {
        let ty_of_dropped_place = dropped_place.ty(self.body, self.tcx).ty;

        // Prefer the precise flow-sensitive result for plain locals; fall back
        // to the type-based approximation for projected places.
        let needs_drop = if let Some(local) = dropped_place.as_local() {
            self.qualifs.needs_drop(self.ccx, local, location)
        } else {
            qualifs::NeedsDrop::in_any_value_of_ty(self.ccx, ty_of_dropped_place)
        };
        if !needs_drop {
            return;
        }

        let mut err_span = self.span;
        let needs_non_const_drop = if let Some(local) = dropped_place.as_local() {
            // Point the error at the local's declaration for better context.
            err_span = self.body.local_decls[local].source_info.span;
            self.qualifs.needs_non_const_drop(self.ccx, local, location)
        } else {
            qualifs::NeedsNonConstDrop::in_any_value_of_ty(self.ccx, ty_of_dropped_place)
        };

        self.check_op_spanned(
            ops::LiveDrop {
                dropped_at: terminator_span,
                dropped_ty: ty_of_dropped_place,
                needs_non_const_drop,
            },
            err_span,
        );
    }

    /// Enforces `#[rustc_const_stable]`/`#[rustc_const_unstable]` rules for a
    /// call to `def_id` from the current const context.
    fn check_callee_stability(&mut self, def_id: DefId) {
        match self.tcx.lookup_const_stability(def_id) {
            Some(attrs::ConstStability { level: attrs::StabilityLevel::Stable { .. }, .. }) => {
                // Const-stable callee: nothing further to check.
            }
            None => {
                // No const-stability attribute: exposing such an item from a
                // recursively-const-stable context is an error unless the
                // callee is itself safe to expose.
                if self.enforce_recursive_const_stability()
                    && !is_fn_or_trait_safe_to_expose_on_stable(self.tcx, def_id)
                {
                    self.dcx().emit_err(errors::UnmarkedConstItemExposed {
                        span: self.span,
                        def_path: self.tcx.def_path_str(def_id),
                    });
                }
            }
            Some(attrs::ConstStability {
                level: attrs::StabilityLevel::Unstable { implied_by: implied_feature, issue, .. },
                feature,
                ..
            }) => {
                let callee_safe_to_expose_on_stable =
                    is_fn_or_trait_safe_to_expose_on_stable(self.tcx, def_id);

                // Spans from macro expansions can pre-authorize a feature; if
                // so (and the callee is exposable), the call is fine.
                if (self.span.allows_unstable(feature)
                    || implied_feature.is_some_and(|f| self.span.allows_unstable(f)))
                    && callee_safe_to_expose_on_stable
                {
                    return;
                }

                // The feature counts as enabled for local items, explicitly
                // enabled features, features implied by an enabled one, or the
                // special `rustc_private` + `force_unstable_if_unmarked` case.
                let feature_enabled = def_id.is_local()
                    || self.tcx.features().enabled(feature)
                    || implied_feature.is_some_and(|f| self.tcx.features().enabled(f))
                    || {
                        feature == sym::rustc_private
                            && issue == NonZero::new(27812)
                            && self.tcx.sess.opts.unstable_opts.force_unstable_if_unmarked
                    };
                if !feature_enabled || !callee_safe_to_expose_on_stable {
                    self.check_op(ops::CallUnstable {
                        def_id,
                        feature,
                        feature_enabled,
                        safe_to_expose_on_stable: callee_safe_to_expose_on_stable,
                        is_function_call: self.tcx.def_kind(def_id) != DefKind::Trait,
                    });
                }
            }
        }
    }
}
547
impl<'tcx> Visitor<'tcx> for Checker<'_, 'tcx> {
    fn visit_basic_block_data(&mut self, bb: BasicBlock, block: &BasicBlockData<'tcx>) {
        trace!("visit_basic_block_data: bb={:?} is_cleanup={:?}", bb, block.is_cleanup);

        // Cleanup (unwind) blocks are not const-checked.
        if block.is_cleanup {
            return;
        }

        self.super_basic_block_data(bb, block);
    }

    fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
        trace!("visit_rvalue: rvalue={:?} location={:?}", rvalue, location);

        self.super_rvalue(rvalue, location);

        match rvalue {
            // Accessing a thread-local is never allowed in a const context.
            Rvalue::ThreadLocalRef(_) => self.check_op(ops::ThreadLocalAccess),

            Rvalue::Use(_)
            | Rvalue::CopyForDeref(..)
            | Rvalue::Repeat(..)
            | Rvalue::Discriminant(..)
            | Rvalue::Len(_) => {}

            Rvalue::Aggregate(kind, ..) => {
                // Constructing a coroutine value is flagged as a coroutine op.
                if let AggregateKind::Coroutine(def_id, ..) = kind.as_ref()
                    && let Some(coroutine_kind) = self.tcx.coroutine_kind(def_id)
                {
                    self.check_op(ops::Coroutine(coroutine_kind));
                }
            }

            Rvalue::Ref(_, BorrowKind::Mut { .. }, place)
            | Rvalue::RawPtr(RawPtrKind::Mut, place) => {
                // Mutable borrows escaping the body are only allowed in
                // `static mut` initializers.
                let is_allowed =
                    self.const_kind() == hir::ConstContext::Static(hir::Mutability::Mut);

                if !is_allowed && self.place_may_escape(place) {
                    self.check_op(ops::EscapingMutBorrow);
                }
            }

            Rvalue::Ref(_, BorrowKind::Shared | BorrowKind::Fake(_), place)
            | Rvalue::RawPtr(RawPtrKind::Const, place) => {
                // Shared borrows are fine unless they expose interior
                // mutability in a value that escapes the body.
                let borrowed_place_has_mut_interior = qualifs::in_place::<HasMutInterior, _>(
                    self.ccx,
                    &mut |local| self.qualifs.has_mut_interior(self.ccx, local, location),
                    place.as_ref(),
                );

                if borrowed_place_has_mut_interior && self.place_may_escape(place) {
                    self.check_op(ops::EscapingCellBorrow);
                }
            }

            Rvalue::RawPtr(RawPtrKind::FakeForPtrMetadata, place) => {
                // Fake pointers for metadata should always be indirect; a
                // direct one indicates a MIR-building bug.
                if !place.is_indirect() {
                    self.tcx.dcx().span_delayed_bug(
                        self.body.source_info(location).span,
                        "fake borrows are always indirect",
                    );
                }
            }

            Rvalue::Cast(
                CastKind::PointerCoercion(
                    PointerCoercion::MutToConstPointer
                    | PointerCoercion::ArrayToPointer
                    | PointerCoercion::UnsafeFnPointer
                    | PointerCoercion::ClosureFnPointer(_)
                    | PointerCoercion::ReifyFnPointer,
                    _,
                ),
                _,
                _,
            ) => {
                // These pointer coercions are allowed in const contexts.
            }

            // Exposing pointer provenance (ptr -> int) is not const.
            Rvalue::Cast(CastKind::PointerExposeProvenance, _, _) => {
                self.check_op(ops::RawPtrToIntCast);
            }
            Rvalue::Cast(CastKind::PointerWithExposedProvenance, _, _) => {
                // int -> ptr with exposed provenance is permitted here.
            }

            Rvalue::Cast(_, _, _) => {}

            Rvalue::NullaryOp(
                NullOp::SizeOf
                | NullOp::AlignOf
                | NullOp::OffsetOf(_)
                | NullOp::UbChecks
                | NullOp::ContractChecks,
                _,
            ) => {}
            Rvalue::ShallowInitBox(_, _) => {}

            Rvalue::UnaryOp(op, operand) => {
                let ty = operand.ty(self.body, self.tcx);
                match op {
                    UnOp::Not | UnOp::Neg => {
                        if is_int_bool_float_or_char(ty) {
                            // Primitive unary ops are always const.
                        } else {
                            // Overloaded operators should have been lowered to
                            // method calls by this point.
                            span_bug!(
                                self.span,
                                "non-primitive type in `Rvalue::UnaryOp{op:?}`: {ty:?}",
                            );
                        }
                    }
                    UnOp::PtrMetadata => {
                        // Extracting pointer metadata is always allowed.
                    }
                }
            }

            Rvalue::BinaryOp(op, box (lhs, rhs)) => {
                let lhs_ty = lhs.ty(self.body, self.tcx);
                let rhs_ty = rhs.ty(self.body, self.tcx);

                if is_int_bool_float_or_char(lhs_ty) && is_int_bool_float_or_char(rhs_ty) {
                    // Primitive binary ops are always const.
                } else if lhs_ty.is_fn_ptr() || lhs_ty.is_raw_ptr() {
                    // Only comparisons and `Offset` are valid on pointers.
                    assert_matches!(
                        op,
                        BinOp::Eq
                            | BinOp::Ne
                            | BinOp::Le
                            | BinOp::Lt
                            | BinOp::Ge
                            | BinOp::Gt
                            | BinOp::Offset
                    );

                    self.check_op(ops::RawPtrComparison);
                } else {
                    span_bug!(
                        self.span,
                        "non-primitive type in `Rvalue::BinaryOp`: {:?} ⚬ {:?}",
                        lhs_ty,
                        rhs_ty
                    );
                }
            }

            Rvalue::WrapUnsafeBinder(..) => {
                // Wrapping into an unsafe binder needs no const checks.
            }
        }
    }

    fn visit_operand(&mut self, op: &Operand<'tcx>, location: Location) {
        self.super_operand(op, location);
        // Constants that are pointers to statics get the static checked.
        if let Operand::Constant(c) = op {
            if let Some(def_id) = c.check_static_ptr(self.tcx) {
                self.check_static(def_id, self.span);
            }
        }
    }

    fn visit_source_info(&mut self, source_info: &SourceInfo) {
        trace!("visit_source_info: source_info={:?}", source_info);
        // Track the current span so later `check_op` calls point at the right
        // statement.
        self.span = source_info.span;
    }

    fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
        trace!("visit_statement: statement={:?} location={:?}", statement, location);

        self.super_statement(statement, location);

        // All statement kinds are currently permitted; the exhaustive match
        // forces a review here whenever a new kind is added.
        match statement.kind {
            StatementKind::Assign(..)
            | StatementKind::SetDiscriminant { .. }
            | StatementKind::Deinit(..)
            | StatementKind::FakeRead(..)
            | StatementKind::StorageLive(_)
            | StatementKind::StorageDead(_)
            | StatementKind::Retag { .. }
            | StatementKind::PlaceMention(..)
            | StatementKind::AscribeUserType(..)
            | StatementKind::Coverage(..)
            | StatementKind::Intrinsic(..)
            | StatementKind::ConstEvalCounter
            | StatementKind::BackwardIncompatibleDropHint { .. }
            | StatementKind::Nop => {}
        }
    }

    #[instrument(level = "debug", skip(self))]
    fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) {
        self.super_terminator(terminator, location);

        match &terminator.kind {
            TerminatorKind::Call { func, args, fn_span, .. }
            | TerminatorKind::TailCall { func, args, fn_span, .. } => {
                // Tail calls have no `call_source`; treat them as normal.
                let call_source = match terminator.kind {
                    TerminatorKind::Call { call_source, .. } => call_source,
                    TerminatorKind::TailCall { .. } => CallSource::Normal,
                    _ => unreachable!(),
                };

                let ConstCx { tcx, body, .. } = *self.ccx;

                let fn_ty = func.ty(body, tcx);

                let (callee, fn_args) = match *fn_ty.kind() {
                    ty::FnDef(def_id, fn_args) => (def_id, fn_args),

                    // Indirect calls through fn pointers are not const.
                    ty::FnPtr(..) => {
                        self.check_op(ops::FnCallIndirect);
                        return;
                    }
                    _ => {
                        span_bug!(terminator.source_info.span, "invalid callee of type {:?}", fn_ty)
                    }
                };

                // Re-prove `[const]` conditions for conditionally-const callees.
                let has_const_conditions =
                    self.revalidate_conditional_constness(callee, fn_args, *fn_span);

                // Trait methods: only const-trait methods whose conditions
                // hold are callable.
                if let Some(trait_did) = tcx.trait_of_item(callee) {
                    trace!("attempting to call a trait method");
                    let trait_is_const = tcx.is_const_trait(trait_did);

                    if trait_is_const && has_const_conditions == Some(ConstConditionsHold::Yes) {
                        self.check_op(ops::ConditionallyConstCall {
                            callee,
                            args: fn_args,
                            span: *fn_span,
                            call_source,
                        });
                        self.check_callee_stability(trait_did);
                    } else {
                        self.check_op(ops::FnCallNonConst {
                            callee,
                            args: fn_args,
                            span: *fn_span,
                            call_source,
                        });
                    }
                    return;
                }

                // Non-trait conditionally-const calls still need the op check.
                if has_const_conditions.is_some() {
                    self.check_op(ops::ConditionallyConstCall {
                        callee,
                        args: fn_args,
                        span: *fn_span,
                        call_source,
                    });
                }

                // `begin_panic` is allowed only with a `&str` argument.
                if tcx.is_lang_item(callee, LangItem::BeginPanic) {
                    match args[0].node.ty(&self.ccx.body.local_decls, tcx).kind() {
                        ty::Ref(_, ty, _) if ty.is_str() => {}
                        _ => self.check_op(ops::PanicNonStr),
                    }
                    return;
                }

                // `#[rustc_const_panic_str]` functions take a `&&str`.
                if tcx.has_attr(callee, sym::rustc_const_panic_str) {
                    match args[0].node.ty(&self.ccx.body.local_decls, tcx).kind() {
                        ty::Ref(_, ty, _) if matches!(ty.kind(), ty::Ref(_, ty, _) if ty.is_str()) =>
                        {}
                        _ => {
                            self.check_op(ops::PanicNonStr);
                        }
                    }
                    return;
                }

                // Heap allocation is never const.
                if tcx.is_lang_item(callee, LangItem::ExchangeMalloc) {
                    self.check_op(ops::HeapAllocation);
                    return;
                }

                // Intrinsics get their own stability handling.
                if let Some(intrinsic) = tcx.intrinsic(callee) {
                    if !tcx.is_const_fn(callee) {
                        self.check_op(ops::IntrinsicNonConst { name: intrinsic.name });
                        return;
                    }
                    let is_const_stable = intrinsic.const_stable
                        || (!intrinsic.must_be_overridden
                            && is_fn_or_trait_safe_to_expose_on_stable(tcx, callee));
                    match tcx.lookup_const_stability(callee) {
                        None => {
                            // Unmarked intrinsics may not be exposed from
                            // recursively-const-stable contexts.
                            if !is_const_stable && self.enforce_recursive_const_stability() {
                                self.dcx().emit_err(errors::UnmarkedIntrinsicExposed {
                                    span: self.span,
                                    def_path: self.tcx.def_path_str(callee),
                                });
                            }
                        }
                        Some(attrs::ConstStability {
                            level: attrs::StabilityLevel::Unstable { .. },
                            feature,
                            ..
                        }) => {
                            self.check_op(ops::IntrinsicUnstable {
                                name: intrinsic.name,
                                feature,
                                const_stable_indirect: is_const_stable,
                            });
                        }
                        Some(attrs::ConstStability {
                            level: attrs::StabilityLevel::Stable { .. },
                            ..
                        }) => {
                            // Const-stable intrinsic: nothing further to do.
                        }
                    }
                    return;
                }

                // Ordinary functions must be `const fn`.
                if !tcx.is_const_fn(callee) {
                    self.check_op(ops::FnCallNonConst {
                        callee,
                        args: fn_args,
                        span: *fn_span,
                        call_source,
                    });
                    return;
                }

                // Finally, enforce const-stability of the callee itself.
                self.check_callee_stability(callee);
            }

            TerminatorKind::Drop { place: dropped_place, .. } => {
                // Before drop elaboration, drops of maybe-uninitialized values
                // are over-approximated; the post-elaboration pass re-checks.
                if super::post_drop_elaboration::checking_enabled(self.ccx) {
                    return;
                }

                self.check_drop_terminator(*dropped_place, location, terminator.source_info.span);
            }

            TerminatorKind::InlineAsm { .. } => self.check_op(ops::InlineAsm),

            TerminatorKind::Yield { .. } => {
                self.check_op(ops::Coroutine(
                    self.tcx
                        .coroutine_kind(self.body.source.def_id())
                        .expect("Only expected to have a yield in a coroutine"),
                ));
            }

            TerminatorKind::CoroutineDrop => {
                span_bug!(
                    self.body.source_info(location).span,
                    "We should not encounter TerminatorKind::CoroutineDrop after coroutine transform"
                );
            }

            TerminatorKind::UnwindTerminate(_) => {
                // Cleanup blocks are skipped in `visit_basic_block_data`.
                span_bug!(self.span, "`Terminate` terminator outside of cleanup block")
            }

            TerminatorKind::Assert { .. }
            | TerminatorKind::FalseEdge { .. }
            | TerminatorKind::FalseUnwind { .. }
            | TerminatorKind::Goto { .. }
            | TerminatorKind::UnwindResume
            | TerminatorKind::Return
            | TerminatorKind::SwitchInt { .. }
            | TerminatorKind::Unreachable => {}
        }
    }
}
980
981fn is_int_bool_float_or_char(ty: Ty<'_>) -> bool {
982 ty.is_bool() || ty.is_integral() || ty.is_char() || ty.is_floating_point()
983}
984
985fn emit_unstable_in_stable_exposed_error(
986 ccx: &ConstCx<'_, '_>,
987 span: Span,
988 gate: Symbol,
989 is_function_call: bool,
990) -> ErrorGuaranteed {
991 let attr_span = ccx.tcx.def_span(ccx.def_id()).shrink_to_lo();
992
993 ccx.dcx().emit_err(errors::UnstableInStableExposed {
994 gate: gate.to_string(),
995 span,
996 attr_span,
997 is_function_call,
998 is_function_call2: is_function_call,
999 })
1000}