1use rustc_abi::{ExternAbi, FIRST_VARIANT, Size};
4use rustc_attr_parsing::InlineAttr;
5use rustc_data_structures::fx::{FxHashMap, FxHashSet};
6use rustc_hir::LangItem;
7use rustc_index::IndexVec;
8use rustc_index::bit_set::DenseBitSet;
9use rustc_infer::infer::TyCtxtInferExt;
10use rustc_infer::traits::{Obligation, ObligationCause};
11use rustc_middle::mir::coverage::CoverageKind;
12use rustc_middle::mir::visit::{NonUseContext, PlaceContext, Visitor};
13use rustc_middle::mir::*;
14use rustc_middle::ty::adjustment::PointerCoercion;
15use rustc_middle::ty::print::with_no_trimmed_paths;
16use rustc_middle::ty::{
17 self, CoroutineArgsExt, InstanceKind, ScalarInt, Ty, TyCtxt, TypeVisitableExt, Upcast, Variance,
18};
19use rustc_middle::{bug, span_bug};
20use rustc_trait_selection::traits::ObligationCtxt;
21
22use crate::util::{self, is_within_packed};
23
/// Classification of a CFG edge for the unwind-invariant checks in
/// `CfgChecker::check_edge`.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
enum EdgeKind {
    /// An edge taken only while unwinding (e.g. from `UnwindAction::Cleanup`).
    Unwind,
    /// An ordinary control-flow edge.
    Normal,
}
29
/// MIR pass that checks structural and type invariants of a `Body`,
/// reporting "broken MIR" failures when an invariant is violated.
pub(super) struct Validator {
    /// Description of the point in the pass pipeline at which this
    /// validation runs; interpolated into every failure message.
    pub when: String,
}
34
impl<'tcx> crate::MirPass<'tcx> for Validator {
    fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
        // Generated shim bodies for intrinsics and virtual calls are skipped:
        // they are not expected to uphold the invariants checked here.
        if matches!(body.source.instance, InstanceKind::Intrinsic(..) | InstanceKind::Virtual(..)) {
            return;
        }
        let def_id = body.source.def_id();
        let typing_env = body.typing_env(tcx);
        // Decide whether this body is allowed to unwind. Before the later
        // runtime phases (or for non-fn-like items) we conservatively assume
        // it can; otherwise we derive the answer from the body's ABI.
        let can_unwind = if body.phase <= MirPhase::Runtime(RuntimePhase::Initial) {
            true
        } else if !tcx.def_kind(def_id).is_fn_like() {
            true
        } else {
            let body_ty = tcx.type_of(def_id).skip_binder();
            let body_abi = match body_ty.kind() {
                ty::FnDef(..) => body_ty.fn_sig(tcx).abi(),
                ty::Closure(..) => ExternAbi::RustCall,
                ty::CoroutineClosure(..) => ExternAbi::RustCall,
                ty::Coroutine(..) => ExternAbi::Rust,
                // A body with a type error may be in an arbitrary state;
                // don't validate it at all.
                ty::Error(_) => return,
                _ => span_bug!(body.span, "unexpected body ty: {body_ty}"),
            };

            ty::layout::fn_can_unwind(tcx, Some(def_id), body_abi)
        };

        // First pass: control-flow-graph invariants (edges, cleanup
        // structure, unwind actions).
        let mut cfg_checker = CfgChecker {
            when: &self.when,
            body,
            tcx,
            unwind_edge_count: 0,
            reachable_blocks: traversal::reachable_as_bitset(body),
            value_cache: FxHashSet::default(),
            can_unwind,
        };
        cfg_checker.visit_body(body);
        cfg_checker.check_cleanup_control_flow();

        // Second pass: type-level invariants, reported through the same
        // failure mechanism as the CFG checks.
        for (location, msg) in validate_types(tcx, typing_env, body, body) {
            cfg_checker.fail(location, msg);
        }

        // Runtime MIR of ordinary items must no longer mention free regions.
        if let MirPhase::Runtime(_) = body.phase {
            if let ty::InstanceKind::Item(_) = body.source.instance {
                if body.has_free_regions() {
                    cfg_checker.fail(
                        Location::START,
                        format!("Free regions in optimized {} MIR", body.phase.name()),
                    );
                }
            }
        }
    }

    fn is_required(&self) -> bool {
        // Validation is never elided, regardless of optimization level.
        true
    }
}
99
/// Checks control-flow-graph invariants of a MIR body: edge validity,
/// cleanup-block structure, and unwind-action consistency.
struct CfgChecker<'a, 'tcx> {
    /// Pipeline-position label, used in failure messages.
    when: &'a str,
    body: &'a Body<'tcx>,
    tcx: TyCtxt<'tcx>,
    /// Number of unwind (non-cleanup -> cleanup) edges seen so far; used to
    /// short-circuit `check_cleanup_control_flow` when there is at most one.
    unwind_edge_count: usize,
    /// Blocks reachable from the start block; unreachable cleanup blocks are
    /// exempt from the cleanup-forest checks.
    reachable_blocks: DenseBitSet<BasicBlock>,
    /// Scratch set for detecting duplicate `SwitchInt` values.
    value_cache: FxHashSet<u128>,
    /// Whether this body may unwind (see `Validator::run_pass`).
    can_unwind: bool,
}
117
impl<'a, 'tcx> CfgChecker<'a, 'tcx> {
    /// Reports broken MIR. If no compile error has been emitted yet, the
    /// broken MIR is a compiler bug and this ICEs; if errors were already
    /// reported, broken MIR is tolerated silently.
    #[track_caller]
    fn fail(&self, location: Location, msg: impl AsRef<str>) {
        assert!(
            self.tcx.dcx().has_errors().is_some(),
            "broken MIR in {:?} ({}) at {:?}:\n{}",
            self.body.source.instance,
            self.when,
            location,
            msg.as_ref(),
        );
    }

    /// Checks that the edge from `location.block` to `bb` is structurally
    /// valid: the target exists, is not the start block, and the
    /// cleanup-ness of source/target is consistent with `edge_kind`.
    fn check_edge(&mut self, location: Location, bb: BasicBlock, edge_kind: EdgeKind) {
        if bb == START_BLOCK {
            self.fail(location, "start block must not have predecessors")
        }
        if let Some(bb) = self.body.basic_blocks.get(bb) {
            let src = self.body.basic_blocks.get(location.block).unwrap();
            match (src.is_cleanup, bb.is_cleanup, edge_kind) {
                // Normal edges must stay on the same "side": non-cleanup to
                // non-cleanup, or cleanup to cleanup.
                (false, false, EdgeKind::Normal)
                | (true, true, EdgeKind::Normal) => {}
                // An unwind edge is the only legal way to enter cleanup code,
                // and is counted for the later cleanup-forest check.
                (false, true, EdgeKind::Unwind) => {
                    self.unwind_edge_count += 1;
                }
                _ => {
                    self.fail(
                        location,
                        format!(
                            "{:?} edge to {:?} violates unwind invariants (cleanup {:?} -> {:?})",
                            edge_kind,
                            bb,
                            src.is_cleanup,
                            bb.is_cleanup,
                        )
                    )
                }
            }
        } else {
            self.fail(location, format!("encountered jump to invalid basic block {bb:?}"))
        }
    }

    /// Checks structural constraints on cleanup control flow: after
    /// contracting each chain of dominator-connected cleanup blocks into a
    /// single node, every contracted node may have at most one successor
    /// node, and the resulting graph must be acyclic.
    fn check_cleanup_control_flow(&self) {
        // With at most one unwind edge there is nothing to contract-check.
        if self.unwind_edge_count <= 1 {
            return;
        }
        let doms = self.body.basic_blocks.dominators();
        let mut post_contract_node = FxHashMap::default();
        // Scratch buffer reused across calls of the closure below.
        let mut dom_path = vec![];
        // Maps a cleanup block to its "contracted" representative: the
        // highest block on its immediate-dominator chain whose parent is not
        // a cleanup block. Results are memoized in `post_contract_node`.
        let mut get_post_contract_node = |mut bb| {
            let root = loop {
                if let Some(root) = post_contract_node.get(&bb) {
                    break *root;
                }
                let parent = doms.immediate_dominator(bb).unwrap();
                dom_path.push(bb);
                if !self.body.basic_blocks[parent].is_cleanup {
                    break bb;
                }
                bb = parent;
            };
            // Memoize the representative for every block on the walked path.
            for bb in dom_path.drain(..) {
                post_contract_node.insert(bb, root);
            }
            root
        };

        // `parent[n]` is the unique successor (if any) of contracted node `n`.
        // A second, distinct successor is a violation.
        let mut parent = IndexVec::from_elem(None, &self.body.basic_blocks);
        for (bb, bb_data) in self.body.basic_blocks.iter_enumerated() {
            if !bb_data.is_cleanup || !self.reachable_blocks.contains(bb) {
                continue;
            }
            let bb = get_post_contract_node(bb);
            for s in bb_data.terminator().successors() {
                let s = get_post_contract_node(s);
                // Self-edges within a contracted node are fine.
                if s == bb {
                    continue;
                }
                let parent = &mut parent[bb];
                match parent {
                    None => {
                        *parent = Some(s);
                    }
                    Some(e) if *e == s => (),
                    Some(e) => self.fail(
                        Location { block: bb, statement_index: 0 },
                        format!(
                            "Cleanup control flow violation: The blocks dominated by {:?} have edges to both {:?} and {:?}",
                            bb,
                            s,
                            *e
                        )
                    ),
                }
            }
        }

        // Each contracted node has at most one successor, so the graph is a
        // functional graph; walk the parent chains and reject any cycle.
        // `take()` marks visited edges so each chain is traversed only once.
        let mut stack = FxHashSet::default();
        for i in 0..parent.len() {
            let mut bb = BasicBlock::from_usize(i);
            stack.clear();
            stack.insert(bb);
            loop {
                let Some(parent) = parent[bb].take() else { break };
                let no_cycle = stack.insert(parent);
                if !no_cycle {
                    self.fail(
                        Location { block: bb, statement_index: 0 },
                        format!(
                            "Cleanup control flow violation: Cycle involving edge {bb:?} -> {parent:?}",
                        ),
                    );
                    break;
                }
                bb = parent;
            }
        }
    }

    /// Checks that `unwind` is a legal unwind action for the block containing
    /// `location`, and validates the unwind edge itself if there is one.
    fn check_unwind_edge(&mut self, location: Location, unwind: UnwindAction) {
        let is_cleanup = self.body.basic_blocks[location.block].is_cleanup;
        match unwind {
            UnwindAction::Cleanup(unwind) => {
                // Cleanup blocks may not themselves unwind into cleanup.
                if is_cleanup {
                    self.fail(location, "`UnwindAction::Cleanup` in cleanup block");
                }
                self.check_edge(location, unwind, EdgeKind::Unwind);
            }
            UnwindAction::Continue => {
                if is_cleanup {
                    self.fail(location, "`UnwindAction::Continue` in cleanup block");
                }

                // Continuing to unwind is only meaningful if the enclosing
                // function can unwind at all.
                if !self.can_unwind {
                    self.fail(location, "`UnwindAction::Continue` in no-unwind function");
                }
            }
            UnwindAction::Terminate(UnwindTerminateReason::InCleanup) => {
                // "Panicked while panicking" termination only makes sense
                // inside cleanup code.
                if !is_cleanup {
                    self.fail(
                        location,
                        "`UnwindAction::Terminate(InCleanup)` in a non-cleanup block",
                    );
                }
            }
            UnwindAction::Unreachable | UnwindAction::Terminate(UnwindTerminateReason::Abi) => (),
        }
    }

    /// Returns whether a `Call` with the given `target`/`unwind` forms a
    /// critical edge: the terminator has two successors and the return
    /// target has more than one predecessor.
    fn is_critical_call_edge(&self, target: Option<BasicBlock>, unwind: UnwindAction) -> bool {
        let Some(target) = target else { return false };
        matches!(unwind, UnwindAction::Cleanup(_) | UnwindAction::Terminate(_))
            && self.body.basic_blocks.predecessors()[target].len() > 1
    }
}
282
283impl<'a, 'tcx> Visitor<'tcx> for CfgChecker<'a, 'tcx> {
284 fn visit_local(&mut self, local: Local, _context: PlaceContext, location: Location) {
285 if self.body.local_decls.get(local).is_none() {
286 self.fail(
287 location,
288 format!("local {local:?} has no corresponding declaration in `body.local_decls`"),
289 );
290 }
291 }
292
293 fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
294 match &statement.kind {
295 StatementKind::AscribeUserType(..) => {
296 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
297 self.fail(
298 location,
299 "`AscribeUserType` should have been removed after drop lowering phase",
300 );
301 }
302 }
303 StatementKind::FakeRead(..) => {
304 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
305 self.fail(
306 location,
307 "`FakeRead` should have been removed after drop lowering phase",
308 );
309 }
310 }
311 StatementKind::SetDiscriminant { .. } => {
312 if self.body.phase < MirPhase::Runtime(RuntimePhase::Initial) {
313 self.fail(location, "`SetDiscriminant`is not allowed until deaggregation");
314 }
315 }
316 StatementKind::Deinit(..) => {
317 if self.body.phase < MirPhase::Runtime(RuntimePhase::Initial) {
318 self.fail(location, "`Deinit`is not allowed until deaggregation");
319 }
320 }
321 StatementKind::Retag(kind, _) => {
322 if matches!(kind, RetagKind::TwoPhase) {
326 self.fail(location, format!("explicit `{kind:?}` is forbidden"));
327 }
328 }
329 StatementKind::Coverage(kind) => {
330 if self.body.phase >= MirPhase::Analysis(AnalysisPhase::PostCleanup)
331 && let CoverageKind::BlockMarker { .. } | CoverageKind::SpanMarker { .. } = kind
332 {
333 self.fail(
334 location,
335 format!("{kind:?} should have been removed after analysis"),
336 );
337 }
338 }
339 StatementKind::Assign(..)
340 | StatementKind::StorageLive(_)
341 | StatementKind::StorageDead(_)
342 | StatementKind::Intrinsic(_)
343 | StatementKind::ConstEvalCounter
344 | StatementKind::PlaceMention(..)
345 | StatementKind::BackwardIncompatibleDropHint { .. }
346 | StatementKind::Nop => {}
347 }
348
349 self.super_statement(statement, location);
350 }
351
352 fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) {
353 match &terminator.kind {
354 TerminatorKind::Goto { target } => {
355 self.check_edge(location, *target, EdgeKind::Normal);
356 }
357 TerminatorKind::SwitchInt { targets, discr: _ } => {
358 for (_, target) in targets.iter() {
359 self.check_edge(location, target, EdgeKind::Normal);
360 }
361 self.check_edge(location, targets.otherwise(), EdgeKind::Normal);
362
363 self.value_cache.clear();
364 self.value_cache.extend(targets.iter().map(|(value, _)| value));
365 let has_duplicates = targets.iter().len() != self.value_cache.len();
366 if has_duplicates {
367 self.fail(
368 location,
369 format!(
370 "duplicated values in `SwitchInt` terminator: {:?}",
371 terminator.kind,
372 ),
373 );
374 }
375 }
376 TerminatorKind::Drop { target, unwind, .. } => {
377 self.check_edge(location, *target, EdgeKind::Normal);
378 self.check_unwind_edge(location, *unwind);
379 }
380 TerminatorKind::Call { func, args, .. }
381 | TerminatorKind::TailCall { func, args, .. } => {
382 if let TerminatorKind::Call { target, unwind, destination, .. } = terminator.kind {
384 if let Some(target) = target {
385 self.check_edge(location, target, EdgeKind::Normal);
386 }
387 self.check_unwind_edge(location, unwind);
388
389 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Optimized)
395 && self.is_critical_call_edge(target, unwind)
396 {
397 self.fail(
398 location,
399 format!(
400 "encountered critical edge in `Call` terminator {:?}",
401 terminator.kind,
402 ),
403 );
404 }
405
406 if is_within_packed(self.tcx, &self.body.local_decls, destination).is_some() {
409 self.fail(
411 location,
412 format!(
413 "encountered packed place in `Call` terminator destination: {:?}",
414 terminator.kind,
415 ),
416 );
417 }
418 }
419
420 for arg in args {
421 if let Operand::Move(place) = &arg.node {
422 if is_within_packed(self.tcx, &self.body.local_decls, *place).is_some() {
423 self.fail(
425 location,
426 format!(
427 "encountered `Move` of a packed place in `Call` terminator: {:?}",
428 terminator.kind,
429 ),
430 );
431 }
432 }
433 }
434
435 if let ty::FnDef(did, ..) = func.ty(&self.body.local_decls, self.tcx).kind()
436 && self.body.phase >= MirPhase::Runtime(RuntimePhase::Optimized)
437 && matches!(self.tcx.codegen_fn_attrs(did).inline, InlineAttr::Force { .. })
438 {
439 self.fail(location, "`#[rustc_force_inline]`-annotated function not inlined");
440 }
441 }
442 TerminatorKind::Assert { target, unwind, .. } => {
443 self.check_edge(location, *target, EdgeKind::Normal);
444 self.check_unwind_edge(location, *unwind);
445 }
446 TerminatorKind::Yield { resume, drop, .. } => {
447 if self.body.coroutine.is_none() {
448 self.fail(location, "`Yield` cannot appear outside coroutine bodies");
449 }
450 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
451 self.fail(location, "`Yield` should have been replaced by coroutine lowering");
452 }
453 self.check_edge(location, *resume, EdgeKind::Normal);
454 if let Some(drop) = drop {
455 self.check_edge(location, *drop, EdgeKind::Normal);
456 }
457 }
458 TerminatorKind::FalseEdge { real_target, imaginary_target } => {
459 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
460 self.fail(
461 location,
462 "`FalseEdge` should have been removed after drop elaboration",
463 );
464 }
465 self.check_edge(location, *real_target, EdgeKind::Normal);
466 self.check_edge(location, *imaginary_target, EdgeKind::Normal);
467 }
468 TerminatorKind::FalseUnwind { real_target, unwind } => {
469 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
470 self.fail(
471 location,
472 "`FalseUnwind` should have been removed after drop elaboration",
473 );
474 }
475 self.check_edge(location, *real_target, EdgeKind::Normal);
476 self.check_unwind_edge(location, *unwind);
477 }
478 TerminatorKind::InlineAsm { targets, unwind, .. } => {
479 for &target in targets {
480 self.check_edge(location, target, EdgeKind::Normal);
481 }
482 self.check_unwind_edge(location, *unwind);
483 }
484 TerminatorKind::CoroutineDrop => {
485 if self.body.coroutine.is_none() {
486 self.fail(location, "`CoroutineDrop` cannot appear outside coroutine bodies");
487 }
488 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
489 self.fail(
490 location,
491 "`CoroutineDrop` should have been replaced by coroutine lowering",
492 );
493 }
494 }
495 TerminatorKind::UnwindResume => {
496 let bb = location.block;
497 if !self.body.basic_blocks[bb].is_cleanup {
498 self.fail(location, "Cannot `UnwindResume` from non-cleanup basic block")
499 }
500 if !self.can_unwind {
501 self.fail(location, "Cannot `UnwindResume` in a function that cannot unwind")
502 }
503 }
504 TerminatorKind::UnwindTerminate(_) => {
505 let bb = location.block;
506 if !self.body.basic_blocks[bb].is_cleanup {
507 self.fail(location, "Cannot `UnwindTerminate` from non-cleanup basic block")
508 }
509 }
510 TerminatorKind::Return => {
511 let bb = location.block;
512 if self.body.basic_blocks[bb].is_cleanup {
513 self.fail(location, "Cannot `Return` from cleanup basic block")
514 }
515 }
516 TerminatorKind::Unreachable => {}
517 }
518
519 self.super_terminator(terminator, location);
520 }
521
522 fn visit_source_scope(&mut self, scope: SourceScope) {
523 if self.body.source_scopes.get(scope).is_none() {
524 self.tcx.dcx().span_bug(
525 self.body.span,
526 format!(
527 "broken MIR in {:?} ({}):\ninvalid source scope {:?}",
528 self.body.source.instance, self.when, scope,
529 ),
530 );
531 }
532 }
533}
534
535pub(super) fn validate_types<'tcx>(
541 tcx: TyCtxt<'tcx>,
542 typing_env: ty::TypingEnv<'tcx>,
543 body: &Body<'tcx>,
544 caller_body: &Body<'tcx>,
545) -> Vec<(Location, String)> {
546 let mut type_checker = TypeChecker { body, caller_body, tcx, typing_env, failures: Vec::new() };
547 with_no_trimmed_paths!({
552 type_checker.visit_body(body);
553 });
554 type_checker.failures
555}
556
/// Checks type-level invariants of a MIR body (projection types, operand
/// types, rvalue operand kinds, …), accumulating failures instead of
/// reporting them directly.
struct TypeChecker<'a, 'tcx> {
    body: &'a Body<'tcx>,
    /// Body used to resolve coroutine layouts for field projections; may be
    /// the same as `body`.
    caller_body: &'a Body<'tcx>,
    tcx: TyCtxt<'tcx>,
    typing_env: ty::TypingEnv<'tcx>,
    /// Collected `(location, message)` failures, returned by
    /// `validate_types`.
    failures: Vec<(Location, String)>,
}
564
impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
    /// Records a validation failure at `location`.
    fn fail(&mut self, location: Location, msg: impl Into<String>) {
        self.failures.push((location, msg.into()));
    }

    /// Returns whether assigning a value of type `src` to a place of type
    /// `dest` is type-correct. Before the runtime phases subtyping
    /// (covariance) is allowed; from runtime MIR on the types must be
    /// invariant-equal. Pairs involving opaque types are accepted without
    /// checking.
    fn mir_assign_valid_types(&self, src: Ty<'tcx>, dest: Ty<'tcx>) -> bool {
        // Fast path for the common identical-types case.
        if src == dest {
            return true;
        }

        if (src, dest).has_opaque_types() {
            return true;
        }

        let variance = if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
            Variance::Invariant
        } else {
            Variance::Covariant
        };

        crate::util::relate_types(self.tcx, self.typing_env, variance, src, dest)
    }

    /// Returns whether `pred` holds in this body's typing environment,
    /// ignoring region constraints. Predicates mentioning opaque types are
    /// accepted without checking.
    fn predicate_must_hold_modulo_regions(
        &self,
        pred: impl Upcast<TyCtxt<'tcx>, ty::Predicate<'tcx>>,
    ) -> bool {
        let pred: ty::Predicate<'tcx> = pred.upcast(self.tcx);

        if pred.has_opaque_types() {
            return true;
        }

        // Set up a fresh inference context and fully evaluate the
        // obligation; success means no selection errors remained.
        let (infcx, param_env) = self.tcx.infer_ctxt().build_with_typing_env(self.typing_env);
        let ocx = ObligationCtxt::new(&infcx);
        ocx.register_obligation(Obligation::new(
            self.tcx,
            ObligationCause::dummy(),
            param_env,
            pred,
        ));
        ocx.select_all_or_error().is_empty()
    }
}
626
627impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> {
628 fn visit_operand(&mut self, operand: &Operand<'tcx>, location: Location) {
629 if self.tcx.sess.opts.unstable_opts.validate_mir
631 && self.body.phase < MirPhase::Runtime(RuntimePhase::Initial)
632 {
633 if let Operand::Copy(place) = operand {
635 let ty = place.ty(&self.body.local_decls, self.tcx).ty;
636
637 if !self.tcx.type_is_copy_modulo_regions(self.typing_env, ty) {
638 self.fail(location, format!("`Operand::Copy` with non-`Copy` type {ty}"));
639 }
640 }
641 }
642
643 self.super_operand(operand, location);
644 }
645
646 fn visit_projection_elem(
647 &mut self,
648 place_ref: PlaceRef<'tcx>,
649 elem: PlaceElem<'tcx>,
650 context: PlaceContext,
651 location: Location,
652 ) {
653 match elem {
654 ProjectionElem::OpaqueCast(ty)
655 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) =>
656 {
657 self.fail(
658 location,
659 format!("explicit opaque type cast to `{ty}` after `PostAnalysisNormalize`"),
660 )
661 }
662 ProjectionElem::Index(index) => {
663 let index_ty = self.body.local_decls[index].ty;
664 if index_ty != self.tcx.types.usize {
665 self.fail(location, format!("bad index ({index_ty} != usize)"))
666 }
667 }
668 ProjectionElem::Deref
669 if self.body.phase >= MirPhase::Runtime(RuntimePhase::PostCleanup) =>
670 {
671 let base_ty = place_ref.ty(&self.body.local_decls, self.tcx).ty;
672
673 if base_ty.is_box() {
674 self.fail(location, format!("{base_ty} dereferenced after ElaborateBoxDerefs"))
675 }
676 }
677 ProjectionElem::Field(f, ty) => {
678 let parent_ty = place_ref.ty(&self.body.local_decls, self.tcx);
679 let fail_out_of_bounds = |this: &mut Self, location| {
680 this.fail(location, format!("Out of bounds field {f:?} for {parent_ty:?}"));
681 };
682 let check_equal = |this: &mut Self, location, f_ty| {
683 if !this.mir_assign_valid_types(ty, f_ty) {
684 this.fail(
685 location,
686 format!(
687 "Field projection `{place_ref:?}.{f:?}` specified type `{ty}`, but actual type is `{f_ty}`"
688 )
689 )
690 }
691 };
692
693 let kind = match parent_ty.ty.kind() {
694 &ty::Alias(ty::Opaque, ty::AliasTy { def_id, args, .. }) => {
695 self.tcx.type_of(def_id).instantiate(self.tcx, args).kind()
696 }
697 kind => kind,
698 };
699
700 match kind {
701 ty::Tuple(fields) => {
702 let Some(f_ty) = fields.get(f.as_usize()) else {
703 fail_out_of_bounds(self, location);
704 return;
705 };
706 check_equal(self, location, *f_ty);
707 }
708 ty::Adt(adt_def, args) => {
709 if self.tcx.is_lang_item(adt_def.did(), LangItem::DynMetadata) {
711 self.fail(
712 location,
713 format!(
714 "You can't project to field {f:?} of `DynMetadata` because \
715 layout is weird and thinks it doesn't have fields."
716 ),
717 );
718 }
719
720 let var = parent_ty.variant_index.unwrap_or(FIRST_VARIANT);
721 let Some(field) = adt_def.variant(var).fields.get(f) else {
722 fail_out_of_bounds(self, location);
723 return;
724 };
725 check_equal(self, location, field.ty(self.tcx, args));
726 }
727 ty::Closure(_, args) => {
728 let args = args.as_closure();
729 let Some(&f_ty) = args.upvar_tys().get(f.as_usize()) else {
730 fail_out_of_bounds(self, location);
731 return;
732 };
733 check_equal(self, location, f_ty);
734 }
735 ty::CoroutineClosure(_, args) => {
736 let args = args.as_coroutine_closure();
737 let Some(&f_ty) = args.upvar_tys().get(f.as_usize()) else {
738 fail_out_of_bounds(self, location);
739 return;
740 };
741 check_equal(self, location, f_ty);
742 }
743 &ty::Coroutine(def_id, args) => {
744 let f_ty = if let Some(var) = parent_ty.variant_index {
745 let layout = if def_id == self.caller_body.source.def_id() {
751 self.caller_body.coroutine_layout_raw()
752 } else if self.tcx.needs_coroutine_by_move_body_def_id(def_id)
753 && let ty::ClosureKind::FnOnce =
754 args.as_coroutine().kind_ty().to_opt_closure_kind().unwrap()
755 && self.caller_body.source.def_id()
756 == self.tcx.coroutine_by_move_body_def_id(def_id)
757 {
758 self.caller_body.coroutine_layout_raw()
760 } else {
761 self.tcx.coroutine_layout(def_id, args.as_coroutine().kind_ty())
762 };
763
764 let Some(layout) = layout else {
765 self.fail(
766 location,
767 format!("No coroutine layout for {parent_ty:?}"),
768 );
769 return;
770 };
771
772 let Some(&local) = layout.variant_fields[var].get(f) else {
773 fail_out_of_bounds(self, location);
774 return;
775 };
776
777 let Some(f_ty) = layout.field_tys.get(local) else {
778 self.fail(
779 location,
780 format!("Out of bounds local {local:?} for {parent_ty:?}"),
781 );
782 return;
783 };
784
785 ty::EarlyBinder::bind(f_ty.ty).instantiate(self.tcx, args)
786 } else {
787 let Some(&f_ty) = args.as_coroutine().prefix_tys().get(f.index())
788 else {
789 fail_out_of_bounds(self, location);
790 return;
791 };
792
793 f_ty
794 };
795
796 check_equal(self, location, f_ty);
797 }
798 _ => {
799 self.fail(location, format!("{:?} does not have fields", parent_ty.ty));
800 }
801 }
802 }
803 ProjectionElem::Subtype(ty) => {
804 if !util::sub_types(
805 self.tcx,
806 self.typing_env,
807 ty,
808 place_ref.ty(&self.body.local_decls, self.tcx).ty,
809 ) {
810 self.fail(
811 location,
812 format!(
813 "Failed subtyping {ty} and {}",
814 place_ref.ty(&self.body.local_decls, self.tcx).ty
815 ),
816 )
817 }
818 }
819 ProjectionElem::UnwrapUnsafeBinder(unwrapped_ty) => {
820 let binder_ty = place_ref.ty(&self.body.local_decls, self.tcx);
821 let ty::UnsafeBinder(binder_ty) = *binder_ty.ty.kind() else {
822 self.fail(
823 location,
824 format!("WrapUnsafeBinder does not produce a ty::UnsafeBinder"),
825 );
826 return;
827 };
828 let binder_inner_ty = self.tcx.instantiate_bound_regions_with_erased(*binder_ty);
829 if !self.mir_assign_valid_types(unwrapped_ty, binder_inner_ty) {
830 self.fail(
831 location,
832 format!(
833 "Cannot unwrap unsafe binder {binder_ty:?} into type {unwrapped_ty}"
834 ),
835 );
836 }
837 }
838 _ => {}
839 }
840 self.super_projection_elem(place_ref, elem, context, location);
841 }
842
    /// Checks debuginfo invariants: composite fragments may only describe
    /// non-union, non-enum types via non-empty chains of `Field`
    /// projections, and debuginfo places may only use projections marked as
    /// usable in debuginfo.
    fn visit_var_debug_info(&mut self, debuginfo: &VarDebugInfo<'tcx>) {
        if let Some(box VarDebugInfoFragment { ty, ref projection }) = debuginfo.composite {
            if ty.is_union() || ty.is_enum() {
                self.fail(
                    START_BLOCK.start_location(),
                    format!("invalid type {ty} in debuginfo for {:?}", debuginfo.name),
                );
            }
            if projection.is_empty() {
                self.fail(
                    START_BLOCK.start_location(),
                    format!("invalid empty projection in debuginfo for {:?}", debuginfo.name),
                );
            }
            if projection.iter().any(|p| !matches!(p, PlaceElem::Field(..))) {
                self.fail(
                    START_BLOCK.start_location(),
                    format!(
                        "illegal projection {:?} in debuginfo for {:?}",
                        projection, debuginfo.name
                    ),
                );
            }
        }
        match debuginfo.value {
            VarDebugInfoContents::Const(_) => {}
            VarDebugInfoContents::Place(place) => {
                if place.projection.iter().any(|p| !p.can_use_in_debuginfo()) {
                    self.fail(
                        START_BLOCK.start_location(),
                        format!("illegal place {:?} in debuginfo for {:?}", place, debuginfo.name),
                    );
                }
            }
        }
        self.super_var_debug_info(debuginfo);
    }
880
881 fn visit_place(&mut self, place: &Place<'tcx>, cntxt: PlaceContext, location: Location) {
882 let _ = place.ty(&self.body.local_decls, self.tcx);
884
885 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial)
886 && place.projection.len() > 1
887 && cntxt != PlaceContext::NonUse(NonUseContext::VarDebugInfo)
888 && place.projection[1..].contains(&ProjectionElem::Deref)
889 {
890 self.fail(
891 location,
892 format!("place {place:?} has deref as a later projection (it is only permitted as the first projection)"),
893 );
894 }
895
896 let mut projections_iter = place.projection.iter();
898 while let Some(proj) = projections_iter.next() {
899 if matches!(proj, ProjectionElem::Downcast(..)) {
900 if !matches!(projections_iter.next(), Some(ProjectionElem::Field(..))) {
901 self.fail(
902 location,
903 format!(
904 "place {place:?} has `Downcast` projection not followed by `Field`"
905 ),
906 );
907 }
908 }
909 }
910
911 self.super_place(place, cntxt, location);
912 }
913
914 fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
915 macro_rules! check_kinds {
916 ($t:expr, $text:literal, $typat:pat) => {
917 if !matches!(($t).kind(), $typat) {
918 self.fail(location, format!($text, $t));
919 }
920 };
921 }
922 match rvalue {
923 Rvalue::Use(_) | Rvalue::CopyForDeref(_) => {}
924 Rvalue::Aggregate(kind, fields) => match **kind {
925 AggregateKind::Tuple => {}
926 AggregateKind::Array(dest) => {
927 for src in fields {
928 if !self.mir_assign_valid_types(src.ty(self.body, self.tcx), dest) {
929 self.fail(location, "array field has the wrong type");
930 }
931 }
932 }
933 AggregateKind::Adt(def_id, idx, args, _, Some(field)) => {
934 let adt_def = self.tcx.adt_def(def_id);
935 assert!(adt_def.is_union());
936 assert_eq!(idx, FIRST_VARIANT);
937 let dest_ty = self.tcx.normalize_erasing_regions(
938 self.typing_env,
939 adt_def.non_enum_variant().fields[field].ty(self.tcx, args),
940 );
941 if let [field] = fields.raw.as_slice() {
942 let src_ty = field.ty(self.body, self.tcx);
943 if !self.mir_assign_valid_types(src_ty, dest_ty) {
944 self.fail(location, "union field has the wrong type");
945 }
946 } else {
947 self.fail(location, "unions should have one initialized field");
948 }
949 }
950 AggregateKind::Adt(def_id, idx, args, _, None) => {
951 let adt_def = self.tcx.adt_def(def_id);
952 assert!(!adt_def.is_union());
953 let variant = &adt_def.variants()[idx];
954 if variant.fields.len() != fields.len() {
955 self.fail(location, "adt has the wrong number of initialized fields");
956 }
957 for (src, dest) in std::iter::zip(fields, &variant.fields) {
958 let dest_ty = self
959 .tcx
960 .normalize_erasing_regions(self.typing_env, dest.ty(self.tcx, args));
961 if !self.mir_assign_valid_types(src.ty(self.body, self.tcx), dest_ty) {
962 self.fail(location, "adt field has the wrong type");
963 }
964 }
965 }
966 AggregateKind::Closure(_, args) => {
967 let upvars = args.as_closure().upvar_tys();
968 if upvars.len() != fields.len() {
969 self.fail(location, "closure has the wrong number of initialized fields");
970 }
971 for (src, dest) in std::iter::zip(fields, upvars) {
972 if !self.mir_assign_valid_types(src.ty(self.body, self.tcx), dest) {
973 self.fail(location, "closure field has the wrong type");
974 }
975 }
976 }
977 AggregateKind::Coroutine(_, args) => {
978 let upvars = args.as_coroutine().upvar_tys();
979 if upvars.len() != fields.len() {
980 self.fail(location, "coroutine has the wrong number of initialized fields");
981 }
982 for (src, dest) in std::iter::zip(fields, upvars) {
983 if !self.mir_assign_valid_types(src.ty(self.body, self.tcx), dest) {
984 self.fail(location, "coroutine field has the wrong type");
985 }
986 }
987 }
988 AggregateKind::CoroutineClosure(_, args) => {
989 let upvars = args.as_coroutine_closure().upvar_tys();
990 if upvars.len() != fields.len() {
991 self.fail(
992 location,
993 "coroutine-closure has the wrong number of initialized fields",
994 );
995 }
996 for (src, dest) in std::iter::zip(fields, upvars) {
997 if !self.mir_assign_valid_types(src.ty(self.body, self.tcx), dest) {
998 self.fail(location, "coroutine-closure field has the wrong type");
999 }
1000 }
1001 }
1002 AggregateKind::RawPtr(pointee_ty, mutability) => {
1003 if !matches!(self.body.phase, MirPhase::Runtime(_)) {
1004 self.fail(location, "RawPtr should be in runtime MIR only");
1008 }
1009
1010 if let [data_ptr, metadata] = fields.raw.as_slice() {
1011 let data_ptr_ty = data_ptr.ty(self.body, self.tcx);
1012 let metadata_ty = metadata.ty(self.body, self.tcx);
1013 if let ty::RawPtr(in_pointee, in_mut) = data_ptr_ty.kind() {
1014 if *in_mut != mutability {
1015 self.fail(location, "input and output mutability must match");
1016 }
1017
1018 if !in_pointee.is_sized(self.tcx, self.typing_env) {
1020 self.fail(location, "input pointer must be thin");
1021 }
1022 } else {
1023 self.fail(
1024 location,
1025 "first operand to raw pointer aggregate must be a raw pointer",
1026 );
1027 }
1028
1029 if pointee_ty.is_slice() {
1031 if !self.mir_assign_valid_types(metadata_ty, self.tcx.types.usize) {
1032 self.fail(location, "slice metadata must be usize");
1033 }
1034 } else if pointee_ty.is_sized(self.tcx, self.typing_env) {
1035 if metadata_ty != self.tcx.types.unit {
1036 self.fail(location, "metadata for pointer-to-thin must be unit");
1037 }
1038 }
1039 } else {
1040 self.fail(location, "raw pointer aggregate must have 2 fields");
1041 }
1042 }
1043 },
1044 Rvalue::Ref(_, BorrowKind::Fake(_), _) => {
1045 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
1046 self.fail(
1047 location,
1048 "`Assign` statement with a `Fake` borrow should have been removed in runtime MIR",
1049 );
1050 }
1051 }
1052 Rvalue::Ref(..) => {}
1053 Rvalue::Len(p) => {
1054 let pty = p.ty(&self.body.local_decls, self.tcx).ty;
1055 check_kinds!(
1056 pty,
1057 "Cannot compute length of non-array type {:?}",
1058 ty::Array(..) | ty::Slice(..)
1059 );
1060 }
1061 Rvalue::BinaryOp(op, vals) => {
1062 use BinOp::*;
1063 let a = vals.0.ty(&self.body.local_decls, self.tcx);
1064 let b = vals.1.ty(&self.body.local_decls, self.tcx);
1065 if crate::util::binop_right_homogeneous(*op) {
1066 if let Eq | Lt | Le | Ne | Ge | Gt = op {
1067 if !self.mir_assign_valid_types(a, b) {
1069 self.fail(
1070 location,
1071 format!("Cannot {op:?} compare incompatible types {a} and {b}"),
1072 );
1073 }
1074 } else if a != b {
1075 self.fail(
1076 location,
1077 format!("Cannot perform binary op {op:?} on unequal types {a} and {b}"),
1078 );
1079 }
1080 }
1081
1082 match op {
1083 Offset => {
1084 check_kinds!(a, "Cannot offset non-pointer type {:?}", ty::RawPtr(..));
1085 if b != self.tcx.types.isize && b != self.tcx.types.usize {
1086 self.fail(location, format!("Cannot offset by non-isize type {b}"));
1087 }
1088 }
1089 Eq | Lt | Le | Ne | Ge | Gt => {
1090 for x in [a, b] {
1091 check_kinds!(
1092 x,
1093 "Cannot {op:?} compare type {:?}",
1094 ty::Bool
1095 | ty::Char
1096 | ty::Int(..)
1097 | ty::Uint(..)
1098 | ty::Float(..)
1099 | ty::RawPtr(..)
1100 | ty::FnPtr(..)
1101 )
1102 }
1103 }
1104 Cmp => {
1105 for x in [a, b] {
1106 check_kinds!(
1107 x,
1108 "Cannot three-way compare non-integer type {:?}",
1109 ty::Char | ty::Uint(..) | ty::Int(..)
1110 )
1111 }
1112 }
1113 AddUnchecked | AddWithOverflow | SubUnchecked | SubWithOverflow
1114 | MulUnchecked | MulWithOverflow | Shl | ShlUnchecked | Shr | ShrUnchecked => {
1115 for x in [a, b] {
1116 check_kinds!(
1117 x,
1118 "Cannot {op:?} non-integer type {:?}",
1119 ty::Uint(..) | ty::Int(..)
1120 )
1121 }
1122 }
1123 BitAnd | BitOr | BitXor => {
1124 for x in [a, b] {
1125 check_kinds!(
1126 x,
1127 "Cannot perform bitwise op {op:?} on type {:?}",
1128 ty::Uint(..) | ty::Int(..) | ty::Bool
1129 )
1130 }
1131 }
1132 Add | Sub | Mul | Div | Rem => {
1133 for x in [a, b] {
1134 check_kinds!(
1135 x,
1136 "Cannot perform arithmetic {op:?} on type {:?}",
1137 ty::Uint(..) | ty::Int(..) | ty::Float(..)
1138 )
1139 }
1140 }
1141 }
1142 }
1143 Rvalue::UnaryOp(op, operand) => {
1144 let a = operand.ty(&self.body.local_decls, self.tcx);
1145 match op {
1146 UnOp::Neg => {
1147 check_kinds!(a, "Cannot negate type {:?}", ty::Int(..) | ty::Float(..))
1148 }
1149 UnOp::Not => {
1150 check_kinds!(
1151 a,
1152 "Cannot binary not type {:?}",
1153 ty::Int(..) | ty::Uint(..) | ty::Bool
1154 );
1155 }
1156 UnOp::PtrMetadata => {
1157 check_kinds!(
1158 a,
1159 "Cannot PtrMetadata non-pointer non-reference type {:?}",
1160 ty::RawPtr(..) | ty::Ref(..)
1161 );
1162 }
1163 }
1164 }
1165 Rvalue::ShallowInitBox(operand, _) => {
1166 let a = operand.ty(&self.body.local_decls, self.tcx);
1167 check_kinds!(a, "Cannot shallow init type {:?}", ty::RawPtr(..));
1168 }
1169 Rvalue::Cast(kind, operand, target_type) => {
1170 let op_ty = operand.ty(self.body, self.tcx);
1171 match kind {
1172 CastKind::PointerWithExposedProvenance | CastKind::PointerExposeProvenance => {}
1174 CastKind::PointerCoercion(PointerCoercion::ReifyFnPointer, _) => {
1175 check_kinds!(
1177 op_ty,
1178 "CastKind::{kind:?} input must be a fn item, not {:?}",
1179 ty::FnDef(..)
1180 );
1181 check_kinds!(
1182 target_type,
1183 "CastKind::{kind:?} output must be a fn pointer, not {:?}",
1184 ty::FnPtr(..)
1185 );
1186 }
1187 CastKind::PointerCoercion(PointerCoercion::UnsafeFnPointer, _) => {
1188 check_kinds!(
1190 op_ty,
1191 "CastKind::{kind:?} input must be a fn pointer, not {:?}",
1192 ty::FnPtr(..)
1193 );
1194 check_kinds!(
1195 target_type,
1196 "CastKind::{kind:?} output must be a fn pointer, not {:?}",
1197 ty::FnPtr(..)
1198 );
1199 }
1200 CastKind::PointerCoercion(PointerCoercion::ClosureFnPointer(..), _) => {
1201 check_kinds!(
1203 op_ty,
1204 "CastKind::{kind:?} input must be a closure, not {:?}",
1205 ty::Closure(..)
1206 );
1207 check_kinds!(
1208 target_type,
1209 "CastKind::{kind:?} output must be a fn pointer, not {:?}",
1210 ty::FnPtr(..)
1211 );
1212 }
1213 CastKind::PointerCoercion(PointerCoercion::MutToConstPointer, _) => {
1214 check_kinds!(
1216 op_ty,
1217 "CastKind::{kind:?} input must be a raw mut pointer, not {:?}",
1218 ty::RawPtr(_, Mutability::Mut)
1219 );
1220 check_kinds!(
1221 target_type,
1222 "CastKind::{kind:?} output must be a raw const pointer, not {:?}",
1223 ty::RawPtr(_, Mutability::Not)
1224 );
1225 if self.body.phase >= MirPhase::Analysis(AnalysisPhase::PostCleanup) {
1226 self.fail(location, format!("After borrowck, MIR disallows {kind:?}"));
1227 }
1228 }
1229 CastKind::PointerCoercion(PointerCoercion::ArrayToPointer, _) => {
1230 check_kinds!(
1232 op_ty,
1233 "CastKind::{kind:?} input must be a raw pointer, not {:?}",
1234 ty::RawPtr(..)
1235 );
1236 check_kinds!(
1237 target_type,
1238 "CastKind::{kind:?} output must be a raw pointer, not {:?}",
1239 ty::RawPtr(..)
1240 );
1241 if self.body.phase >= MirPhase::Analysis(AnalysisPhase::PostCleanup) {
1242 self.fail(location, format!("After borrowck, MIR disallows {kind:?}"));
1243 }
1244 }
1245 CastKind::PointerCoercion(PointerCoercion::Unsize, _) => {
1246 if !self.predicate_must_hold_modulo_regions(ty::TraitRef::new(
1249 self.tcx,
1250 self.tcx.require_lang_item(
1251 LangItem::CoerceUnsized,
1252 Some(self.body.source_info(location).span),
1253 ),
1254 [op_ty, *target_type],
1255 )) {
1256 self.fail(location, format!("Unsize coercion, but `{op_ty}` isn't coercible to `{target_type}`"));
1257 }
1258 }
1259 CastKind::PointerCoercion(PointerCoercion::DynStar, _) => {
1260 }
1262 CastKind::IntToInt | CastKind::IntToFloat => {
1263 let input_valid = op_ty.is_integral() || op_ty.is_char() || op_ty.is_bool();
1264 let target_valid = target_type.is_numeric() || target_type.is_char();
1265 if !input_valid || !target_valid {
1266 self.fail(
1267 location,
1268 format!("Wrong cast kind {kind:?} for the type {op_ty}"),
1269 );
1270 }
1271 }
1272 CastKind::FnPtrToPtr => {
1273 check_kinds!(
1274 op_ty,
1275 "CastKind::{kind:?} input must be a fn pointer, not {:?}",
1276 ty::FnPtr(..)
1277 );
1278 check_kinds!(
1279 target_type,
1280 "CastKind::{kind:?} output must be a raw pointer, not {:?}",
1281 ty::RawPtr(..)
1282 );
1283 }
1284 CastKind::PtrToPtr => {
1285 check_kinds!(
1286 op_ty,
1287 "CastKind::{kind:?} input must be a raw pointer, not {:?}",
1288 ty::RawPtr(..)
1289 );
1290 check_kinds!(
1291 target_type,
1292 "CastKind::{kind:?} output must be a raw pointer, not {:?}",
1293 ty::RawPtr(..)
1294 );
1295 }
1296 CastKind::FloatToFloat | CastKind::FloatToInt => {
1297 if !op_ty.is_floating_point() || !target_type.is_numeric() {
1298 self.fail(
1299 location,
1300 format!(
1301 "Trying to cast non 'Float' as {kind:?} into {target_type:?}"
1302 ),
1303 );
1304 }
1305 }
1306 CastKind::Transmute => {
1307 if let MirPhase::Runtime(..) = self.body.phase {
1308 if !self
1312 .tcx
1313 .normalize_erasing_regions(self.typing_env, op_ty)
1314 .is_sized(self.tcx, self.typing_env)
1315 {
1316 self.fail(
1317 location,
1318 format!("Cannot transmute from non-`Sized` type {op_ty}"),
1319 );
1320 }
1321 if !self
1322 .tcx
1323 .normalize_erasing_regions(self.typing_env, *target_type)
1324 .is_sized(self.tcx, self.typing_env)
1325 {
1326 self.fail(
1327 location,
1328 format!("Cannot transmute to non-`Sized` type {target_type:?}"),
1329 );
1330 }
1331 } else {
1332 self.fail(
1333 location,
1334 format!(
1335 "Transmute is not supported in non-runtime phase {:?}.",
1336 self.body.phase
1337 ),
1338 );
1339 }
1340 }
1341 }
1342 }
1343 Rvalue::NullaryOp(NullOp::OffsetOf(indices), container) => {
1344 let fail_out_of_bounds = |this: &mut Self, location, field, ty| {
1345 this.fail(location, format!("Out of bounds field {field:?} for {ty}"));
1346 };
1347
1348 let mut current_ty = *container;
1349
1350 for (variant, field) in indices.iter() {
1351 match current_ty.kind() {
1352 ty::Tuple(fields) => {
1353 if variant != FIRST_VARIANT {
1354 self.fail(
1355 location,
1356 format!("tried to get variant {variant:?} of tuple"),
1357 );
1358 return;
1359 }
1360 let Some(&f_ty) = fields.get(field.as_usize()) else {
1361 fail_out_of_bounds(self, location, field, current_ty);
1362 return;
1363 };
1364
1365 current_ty = self.tcx.normalize_erasing_regions(self.typing_env, f_ty);
1366 }
1367 ty::Adt(adt_def, args) => {
1368 let Some(field) = adt_def.variant(variant).fields.get(field) else {
1369 fail_out_of_bounds(self, location, field, current_ty);
1370 return;
1371 };
1372
1373 let f_ty = field.ty(self.tcx, args);
1374 current_ty = self.tcx.normalize_erasing_regions(self.typing_env, f_ty);
1375 }
1376 _ => {
1377 self.fail(
1378 location,
1379 format!("Cannot get offset ({variant:?}, {field:?}) from type {current_ty}"),
1380 );
1381 return;
1382 }
1383 }
1384 }
1385 }
1386 Rvalue::Repeat(_, _)
1387 | Rvalue::ThreadLocalRef(_)
1388 | Rvalue::RawPtr(_, _)
1389 | Rvalue::NullaryOp(
1390 NullOp::SizeOf | NullOp::AlignOf | NullOp::UbChecks | NullOp::ContractChecks,
1391 _,
1392 )
1393 | Rvalue::Discriminant(_) => {}
1394
1395 Rvalue::WrapUnsafeBinder(op, ty) => {
1396 let unwrapped_ty = op.ty(self.body, self.tcx);
1397 let ty::UnsafeBinder(binder_ty) = *ty.kind() else {
1398 self.fail(
1399 location,
1400 format!("WrapUnsafeBinder does not produce a ty::UnsafeBinder"),
1401 );
1402 return;
1403 };
1404 let binder_inner_ty = self.tcx.instantiate_bound_regions_with_erased(*binder_ty);
1405 if !self.mir_assign_valid_types(unwrapped_ty, binder_inner_ty) {
1406 self.fail(
1407 location,
1408 format!("Cannot wrap {unwrapped_ty} into unsafe binder {binder_ty:?}"),
1409 );
1410 }
1411 }
1412 }
1413 self.super_rvalue(rvalue, location);
1414 }
1415
1416 fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
1417 match &statement.kind {
1418 StatementKind::Assign(box (dest, rvalue)) => {
1419 let left_ty = dest.ty(&self.body.local_decls, self.tcx).ty;
1421 let right_ty = rvalue.ty(&self.body.local_decls, self.tcx);
1422
1423 if !self.mir_assign_valid_types(right_ty, left_ty) {
1424 self.fail(
1425 location,
1426 format!(
1427 "encountered `{:?}` with incompatible types:\n\
1428 left-hand side has type: {}\n\
1429 right-hand side has type: {}",
1430 statement.kind, left_ty, right_ty,
1431 ),
1432 );
1433 }
1434 if let Rvalue::CopyForDeref(place) = rvalue {
1435 if place.ty(&self.body.local_decls, self.tcx).ty.builtin_deref(true).is_none() {
1436 self.fail(
1437 location,
1438 "`CopyForDeref` should only be used for dereferenceable types",
1439 )
1440 }
1441 }
1442 }
1443 StatementKind::AscribeUserType(..) => {
1444 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
1445 self.fail(
1446 location,
1447 "`AscribeUserType` should have been removed after drop lowering phase",
1448 );
1449 }
1450 }
1451 StatementKind::FakeRead(..) => {
1452 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
1453 self.fail(
1454 location,
1455 "`FakeRead` should have been removed after drop lowering phase",
1456 );
1457 }
1458 }
1459 StatementKind::Intrinsic(box NonDivergingIntrinsic::Assume(op)) => {
1460 let ty = op.ty(&self.body.local_decls, self.tcx);
1461 if !ty.is_bool() {
1462 self.fail(
1463 location,
1464 format!("`assume` argument must be `bool`, but got: `{ty}`"),
1465 );
1466 }
1467 }
1468 StatementKind::Intrinsic(box NonDivergingIntrinsic::CopyNonOverlapping(
1469 CopyNonOverlapping { src, dst, count },
1470 )) => {
1471 let src_ty = src.ty(&self.body.local_decls, self.tcx);
1472 let op_src_ty = if let Some(src_deref) = src_ty.builtin_deref(true) {
1473 src_deref
1474 } else {
1475 self.fail(
1476 location,
1477 format!("Expected src to be ptr in copy_nonoverlapping, got: {src_ty}"),
1478 );
1479 return;
1480 };
1481 let dst_ty = dst.ty(&self.body.local_decls, self.tcx);
1482 let op_dst_ty = if let Some(dst_deref) = dst_ty.builtin_deref(true) {
1483 dst_deref
1484 } else {
1485 self.fail(
1486 location,
1487 format!("Expected dst to be ptr in copy_nonoverlapping, got: {dst_ty}"),
1488 );
1489 return;
1490 };
1491 if !self.mir_assign_valid_types(op_src_ty, op_dst_ty) {
1494 self.fail(location, format!("bad arg ({op_src_ty} != {op_dst_ty})"));
1495 }
1496
1497 let op_cnt_ty = count.ty(&self.body.local_decls, self.tcx);
1498 if op_cnt_ty != self.tcx.types.usize {
1499 self.fail(location, format!("bad arg ({op_cnt_ty} != usize)"))
1500 }
1501 }
1502 StatementKind::SetDiscriminant { place, .. } => {
1503 if self.body.phase < MirPhase::Runtime(RuntimePhase::Initial) {
1504 self.fail(location, "`SetDiscriminant`is not allowed until deaggregation");
1505 }
1506 let pty = place.ty(&self.body.local_decls, self.tcx).ty;
1507 if !matches!(
1508 pty.kind(),
1509 ty::Adt(..) | ty::Coroutine(..) | ty::Alias(ty::Opaque, ..)
1510 ) {
1511 self.fail(
1512 location,
1513 format!(
1514 "`SetDiscriminant` is only allowed on ADTs and coroutines, not {pty}"
1515 ),
1516 );
1517 }
1518 }
1519 StatementKind::Deinit(..) => {
1520 if self.body.phase < MirPhase::Runtime(RuntimePhase::Initial) {
1521 self.fail(location, "`Deinit`is not allowed until deaggregation");
1522 }
1523 }
1524 StatementKind::Retag(kind, _) => {
1525 if matches!(kind, RetagKind::TwoPhase) {
1529 self.fail(location, format!("explicit `{kind:?}` is forbidden"));
1530 }
1531 }
1532 StatementKind::StorageLive(_)
1533 | StatementKind::StorageDead(_)
1534 | StatementKind::Coverage(_)
1535 | StatementKind::ConstEvalCounter
1536 | StatementKind::PlaceMention(..)
1537 | StatementKind::BackwardIncompatibleDropHint { .. }
1538 | StatementKind::Nop => {}
1539 }
1540
1541 self.super_statement(statement, location);
1542 }
1543
1544 fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) {
1545 match &terminator.kind {
1546 TerminatorKind::SwitchInt { targets, discr } => {
1547 let switch_ty = discr.ty(&self.body.local_decls, self.tcx);
1548
1549 let target_width = self.tcx.sess.target.pointer_width;
1550
1551 let size = Size::from_bits(match switch_ty.kind() {
1552 ty::Uint(uint) => uint.normalize(target_width).bit_width().unwrap(),
1553 ty::Int(int) => int.normalize(target_width).bit_width().unwrap(),
1554 ty::Char => 32,
1555 ty::Bool => 1,
1556 other => bug!("unhandled type: {:?}", other),
1557 });
1558
1559 for (value, _) in targets.iter() {
1560 if ScalarInt::try_from_uint(value, size).is_none() {
1561 self.fail(
1562 location,
1563 format!("the value {value:#x} is not a proper {switch_ty}"),
1564 )
1565 }
1566 }
1567 }
1568 TerminatorKind::Call { func, .. } | TerminatorKind::TailCall { func, .. } => {
1569 let func_ty = func.ty(&self.body.local_decls, self.tcx);
1570 match func_ty.kind() {
1571 ty::FnPtr(..) | ty::FnDef(..) => {}
1572 _ => self.fail(
1573 location,
1574 format!(
1575 "encountered non-callable type {func_ty} in `{}` terminator",
1576 terminator.kind.name()
1577 ),
1578 ),
1579 }
1580
1581 if let TerminatorKind::TailCall { .. } = terminator.kind {
1582 }
1585 }
1586 TerminatorKind::Assert { cond, .. } => {
1587 let cond_ty = cond.ty(&self.body.local_decls, self.tcx);
1588 if cond_ty != self.tcx.types.bool {
1589 self.fail(
1590 location,
1591 format!(
1592 "encountered non-boolean condition of type {cond_ty} in `Assert` terminator"
1593 ),
1594 );
1595 }
1596 }
1597 TerminatorKind::Goto { .. }
1598 | TerminatorKind::Drop { .. }
1599 | TerminatorKind::Yield { .. }
1600 | TerminatorKind::FalseEdge { .. }
1601 | TerminatorKind::FalseUnwind { .. }
1602 | TerminatorKind::InlineAsm { .. }
1603 | TerminatorKind::CoroutineDrop
1604 | TerminatorKind::UnwindResume
1605 | TerminatorKind::UnwindTerminate(_)
1606 | TerminatorKind::Return
1607 | TerminatorKind::Unreachable => {}
1608 }
1609
1610 self.super_terminator(terminator, location);
1611 }
1612}