use rustc_abi::{ExternAbi, FIRST_VARIANT, Size};
use rustc_attr_data_structures::InlineAttr;
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_hir::LangItem;
use rustc_index::IndexVec;
use rustc_index::bit_set::DenseBitSet;
use rustc_infer::infer::TyCtxtInferExt;
use rustc_infer::traits::{Obligation, ObligationCause};
use rustc_middle::mir::coverage::CoverageKind;
use rustc_middle::mir::visit::{NonUseContext, PlaceContext, Visitor};
use rustc_middle::mir::*;
use rustc_middle::ty::adjustment::PointerCoercion;
use rustc_middle::ty::print::with_no_trimmed_paths;
use rustc_middle::ty::{
    self, CoroutineArgsExt, InstanceKind, ScalarInt, Ty, TyCtxt, TypeVisitableExt, Upcast, Variance,
};
use rustc_middle::{bug, span_bug};
use rustc_trait_selection::traits::ObligationCtxt;

use crate::util::{self, is_within_packed};

#[derive(Copy, Clone, Debug, PartialEq, Eq)]
enum EdgeKind {
    Unwind,
    Normal,
}

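/// The MIR validation pass: checks that the control-flow graph and the statements and
/// terminators of the body are well-formed for its current `MirPhase`, and reports the
/// type-level problems found by `validate_types`. `when` describes at which point in the
/// compilation pipeline this validation is happening.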
30pub(super) struct Validator {
31 pub when: String,
33}
34
35impl<'tcx> crate::MirPass<'tcx> for Validator {
36 fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
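// Intrinsic and virtual-call instances are not ordinary MIR bodies; skip validating them.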
37 if matches!(body.source.instance, InstanceKind::Intrinsic(..) | InstanceKind::Virtual(..)) {
42 return;
43 }
44 let def_id = body.source.def_id();
45 let typing_env = body.typing_env(tcx);
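// Decide whether this body may unwind. Up to `RuntimePhase::Initial` (and for non-fn-like
// items) unwinding is always considered possible; afterwards it is derived from the
// function's ABI.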
46 let can_unwind = if body.phase <= MirPhase::Runtime(RuntimePhase::Initial) {
47 true
49 } else if !tcx.def_kind(def_id).is_fn_like() {
50 true
51 } else {
52 let body_ty = tcx.type_of(def_id).skip_binder();
53 let body_abi = match body_ty.kind() {
54 ty::FnDef(..) => body_ty.fn_sig(tcx).abi(),
55 ty::Closure(..) => ExternAbi::RustCall,
56 ty::CoroutineClosure(..) => ExternAbi::RustCall,
57 ty::Coroutine(..) => ExternAbi::Rust,
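// If the body's type is already an error, there is nothing meaningful to validate.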
58 ty::Error(_) => return,
60 _ => span_bug!(body.span, "unexpected body ty: {body_ty}"),
61 };
62
63 ty::layout::fn_can_unwind(tcx, Some(def_id), body_abi)
64 };
65
66 let mut cfg_checker = CfgChecker {
67 when: &self.when,
68 body,
69 tcx,
70 unwind_edge_count: 0,
71 reachable_blocks: traversal::reachable_as_bitset(body),
72 value_cache: FxHashSet::default(),
73 can_unwind,
74 };
75 cfg_checker.visit_body(body);
76 cfg_checker.check_cleanup_control_flow();
77
78 for (location, msg) in validate_types(tcx, typing_env, body, body) {
80 cfg_checker.fail(location, msg);
81 }
82
83 if let MirPhase::Runtime(_) = body.phase {
84 if let ty::InstanceKind::Item(_) = body.source.instance {
85 if body.has_free_regions() {
86 cfg_checker.fail(
87 Location::START,
88 format!("Free regions in optimized {} MIR", body.phase.name()),
89 );
90 }
91 }
92 }
93 }
94
95 fn is_required(&self) -> bool {
96 true
97 }
98}
99
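/// Checks the control-flow graph of the body: that edges point at existing blocks, that
/// cleanup and unwind invariants hold, and that statements and terminators are permitted in
/// the body's current `MirPhase`.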
100struct CfgChecker<'a, 'tcx> {
107 when: &'a str,
108 body: &'a Body<'tcx>,
109 tcx: TyCtxt<'tcx>,
110 unwind_edge_count: usize,
111 reachable_blocks: DenseBitSet<BasicBlock>,
112 value_cache: FxHashSet<u128>,
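/// If `false`, the body must not contain `UnwindAction::Continue` or `TerminatorKind::UnwindResume`.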
113 can_unwind: bool,
116}
117
118impl<'a, 'tcx> CfgChecker<'a, 'tcx> {
119 #[track_caller]
120 fn fail(&self, location: Location, msg: impl AsRef<str>) {
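// Broken MIR is only tolerated when an error has already been emitted; otherwise this
// assertion fails with the message below.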
121 assert!(
123 self.tcx.dcx().has_errors().is_some(),
124 "broken MIR in {:?} ({}) at {:?}:\n{}",
125 self.body.source.instance,
126 self.when,
127 location,
128 msg.as_ref(),
129 );
130 }
131
132 fn check_edge(&mut self, location: Location, bb: BasicBlock, edge_kind: EdgeKind) {
133 if bb == START_BLOCK {
134 self.fail(location, "start block must not have predecessors")
135 }
136 if let Some(bb) = self.body.basic_blocks.get(bb) {
137 let src = self.body.basic_blocks.get(location.block).unwrap();
138 match (src.is_cleanup, bb.is_cleanup, edge_kind) {
139 (false, false, EdgeKind::Normal)
141 | (true, true, EdgeKind::Normal) => {}
143 (false, true, EdgeKind::Unwind) => {
145 self.unwind_edge_count += 1;
146 }
147 _ => {
149 self.fail(
150 location,
151 format!(
152 "{:?} edge to {:?} violates unwind invariants (cleanup {:?} -> {:?})",
153 edge_kind,
154 bb,
155 src.is_cleanup,
156 bb.is_cleanup,
157 )
158 )
159 }
160 }
161 } else {
162 self.fail(location, format!("encountered jump to invalid basic block {bb:?}"))
163 }
164 }
165
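/// Verifies that cleanup control flow is well-formed: after contracting chains of cleanup
/// blocks that dominate one another into single nodes, each node may branch to at most one
/// other node, and the resulting parent relation must be acyclic.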
166 fn check_cleanup_control_flow(&self) {
167 if self.unwind_edge_count <= 1 {
168 return;
169 }
170 let doms = self.body.basic_blocks.dominators();
171 let mut post_contract_node = FxHashMap::default();
172 let mut dom_path = vec![];
174 let mut get_post_contract_node = |mut bb| {
175 let root = loop {
176 if let Some(root) = post_contract_node.get(&bb) {
177 break *root;
178 }
179 let parent = doms.immediate_dominator(bb).unwrap();
180 dom_path.push(bb);
181 if !self.body.basic_blocks[parent].is_cleanup {
182 break bb;
183 }
184 bb = parent;
185 };
186 for bb in dom_path.drain(..) {
187 post_contract_node.insert(bb, root);
188 }
189 root
190 };
191
192 let mut parent = IndexVec::from_elem(None, &self.body.basic_blocks);
193 for (bb, bb_data) in self.body.basic_blocks.iter_enumerated() {
194 if !bb_data.is_cleanup || !self.reachable_blocks.contains(bb) {
195 continue;
196 }
197 let bb = get_post_contract_node(bb);
198 for s in bb_data.terminator().successors() {
199 let s = get_post_contract_node(s);
200 if s == bb {
201 continue;
202 }
203 let parent = &mut parent[bb];
204 match parent {
205 None => {
206 *parent = Some(s);
207 }
208 Some(e) if *e == s => (),
209 Some(e) => self.fail(
210 Location { block: bb, statement_index: 0 },
211 format!(
212 "Cleanup control flow violation: The blocks dominated by {:?} have edges to both {:?} and {:?}",
213 bb,
214 s,
215 *e
216 )
217 ),
218 }
219 }
220 }
221
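// Finally, verify that the parent relation contains no cycles.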
222 let mut stack = FxHashSet::default();
224 for (mut bb, parent) in parent.iter_enumerated_mut() {
225 stack.clear();
226 stack.insert(bb);
227 loop {
228 let Some(parent) = parent.take() else { break };
229 let no_cycle = stack.insert(parent);
230 if !no_cycle {
231 self.fail(
232 Location { block: bb, statement_index: 0 },
233 format!(
234 "Cleanup control flow violation: Cycle involving edge {bb:?} -> {parent:?}",
235 ),
236 );
237 break;
238 }
239 bb = parent;
240 }
241 }
242 }
243
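/// Checks that the given `UnwindAction` is legal for the block containing `location` and,
/// if it names a target block, that the unwind edge to it is valid.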
244 fn check_unwind_edge(&mut self, location: Location, unwind: UnwindAction) {
245 let is_cleanup = self.body.basic_blocks[location.block].is_cleanup;
246 match unwind {
247 UnwindAction::Cleanup(unwind) => {
248 if is_cleanup {
249 self.fail(location, "`UnwindAction::Cleanup` in cleanup block");
250 }
251 self.check_edge(location, unwind, EdgeKind::Unwind);
252 }
253 UnwindAction::Continue => {
254 if is_cleanup {
255 self.fail(location, "`UnwindAction::Continue` in cleanup block");
256 }
257
258 if !self.can_unwind {
259 self.fail(location, "`UnwindAction::Continue` in no-unwind function");
260 }
261 }
262 UnwindAction::Terminate(UnwindTerminateReason::InCleanup) => {
263 if !is_cleanup {
264 self.fail(
265 location,
266 "`UnwindAction::Terminate(InCleanup)` in a non-cleanup block",
267 );
268 }
269 }
270 UnwindAction::Unreachable | UnwindAction::Terminate(UnwindTerminateReason::Abi) => (),
272 }
273 }
274
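/// A `Call` return edge is critical if the call can also unwind or terminate and the return
/// target has more than one predecessor.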
275 fn is_critical_call_edge(&self, target: Option<BasicBlock>, unwind: UnwindAction) -> bool {
276 let Some(target) = target else { return false };
277 matches!(unwind, UnwindAction::Cleanup(_) | UnwindAction::Terminate(_))
278 && self.body.basic_blocks.predecessors()[target].len() > 1
279 }
280}
281
282impl<'a, 'tcx> Visitor<'tcx> for CfgChecker<'a, 'tcx> {
283 fn visit_local(&mut self, local: Local, _context: PlaceContext, location: Location) {
284 if self.body.local_decls.get(local).is_none() {
285 self.fail(
286 location,
287 format!("local {local:?} has no corresponding declaration in `body.local_decls`"),
288 );
289 }
290 }
291
292 fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
293 match &statement.kind {
294 StatementKind::AscribeUserType(..) => {
295 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
296 self.fail(
297 location,
298 "`AscribeUserType` should have been removed after drop lowering phase",
299 );
300 }
301 }
302 StatementKind::FakeRead(..) => {
303 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
304 self.fail(
305 location,
306 "`FakeRead` should have been removed after drop lowering phase",
307 );
308 }
309 }
310 StatementKind::SetDiscriminant { .. } => {
311 if self.body.phase < MirPhase::Runtime(RuntimePhase::Initial) {
self.fail(location, "`SetDiscriminant` is not allowed until deaggregation");
313 }
314 }
315 StatementKind::Deinit(..) => {
316 if self.body.phase < MirPhase::Runtime(RuntimePhase::Initial) {
self.fail(location, "`Deinit` is not allowed until deaggregation");
318 }
319 }
320 StatementKind::Retag(kind, _) => {
321 if matches!(kind, RetagKind::TwoPhase) {
325 self.fail(location, format!("explicit `{kind:?}` is forbidden"));
326 }
327 }
328 StatementKind::Coverage(kind) => {
329 if self.body.phase >= MirPhase::Analysis(AnalysisPhase::PostCleanup)
330 && let CoverageKind::BlockMarker { .. } | CoverageKind::SpanMarker { .. } = kind
331 {
332 self.fail(
333 location,
334 format!("{kind:?} should have been removed after analysis"),
335 );
336 }
337 }
338 StatementKind::Assign(..)
339 | StatementKind::StorageLive(_)
340 | StatementKind::StorageDead(_)
341 | StatementKind::Intrinsic(_)
342 | StatementKind::ConstEvalCounter
343 | StatementKind::PlaceMention(..)
344 | StatementKind::BackwardIncompatibleDropHint { .. }
345 | StatementKind::Nop => {}
346 }
347
348 self.super_statement(statement, location);
349 }
350
351 fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) {
352 match &terminator.kind {
353 TerminatorKind::Goto { target } => {
354 self.check_edge(location, *target, EdgeKind::Normal);
355 }
356 TerminatorKind::SwitchInt { targets, discr: _ } => {
357 for (_, target) in targets.iter() {
358 self.check_edge(location, target, EdgeKind::Normal);
359 }
360 self.check_edge(location, targets.otherwise(), EdgeKind::Normal);
361
362 self.value_cache.clear();
363 self.value_cache.extend(targets.iter().map(|(value, _)| value));
364 let has_duplicates = targets.iter().len() != self.value_cache.len();
365 if has_duplicates {
366 self.fail(
367 location,
368 format!(
369 "duplicated values in `SwitchInt` terminator: {:?}",
370 terminator.kind,
371 ),
372 );
373 }
374 }
375 TerminatorKind::Drop { target, unwind, drop, .. } => {
376 self.check_edge(location, *target, EdgeKind::Normal);
377 self.check_unwind_edge(location, *unwind);
378 if let Some(drop) = drop {
379 self.check_edge(location, *drop, EdgeKind::Normal);
380 }
381 }
382 TerminatorKind::Call { func, args, .. }
383 | TerminatorKind::TailCall { func, args, .. } => {
384 if let TerminatorKind::Call { target, unwind, destination, .. } = terminator.kind {
386 if let Some(target) = target {
387 self.check_edge(location, target, EdgeKind::Normal);
388 }
389 self.check_unwind_edge(location, unwind);
390
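// Optimized runtime MIR must not contain critical call edges: codegen relies on being
// able to insert code along the return edge of a call.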
391 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Optimized)
397 && self.is_critical_call_edge(target, unwind)
398 {
399 self.fail(
400 location,
401 format!(
402 "encountered critical edge in `Call` terminator {:?}",
403 terminator.kind,
404 ),
405 );
406 }
407
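// The callee may receive the destination (and, below, any `Move` argument) by reference,
// so these places must not be packed.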
408 if is_within_packed(self.tcx, &self.body.local_decls, destination).is_some() {
411 self.fail(
413 location,
414 format!(
415 "encountered packed place in `Call` terminator destination: {:?}",
416 terminator.kind,
417 ),
418 );
419 }
420 }
421
422 for arg in args {
423 if let Operand::Move(place) = &arg.node {
424 if is_within_packed(self.tcx, &self.body.local_decls, *place).is_some() {
425 self.fail(
427 location,
428 format!(
429 "encountered `Move` of a packed place in `Call` terminator: {:?}",
430 terminator.kind,
431 ),
432 );
433 }
434 }
435 }
436
437 if let ty::FnDef(did, ..) = func.ty(&self.body.local_decls, self.tcx).kind()
438 && self.body.phase >= MirPhase::Runtime(RuntimePhase::Optimized)
439 && matches!(self.tcx.codegen_fn_attrs(did).inline, InlineAttr::Force { .. })
440 {
441 self.fail(location, "`#[rustc_force_inline]`-annotated function not inlined");
442 }
443 }
444 TerminatorKind::Assert { target, unwind, .. } => {
445 self.check_edge(location, *target, EdgeKind::Normal);
446 self.check_unwind_edge(location, *unwind);
447 }
448 TerminatorKind::Yield { resume, drop, .. } => {
449 if self.body.coroutine.is_none() {
450 self.fail(location, "`Yield` cannot appear outside coroutine bodies");
451 }
452 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
453 self.fail(location, "`Yield` should have been replaced by coroutine lowering");
454 }
455 self.check_edge(location, *resume, EdgeKind::Normal);
456 if let Some(drop) = drop {
457 self.check_edge(location, *drop, EdgeKind::Normal);
458 }
459 }
460 TerminatorKind::FalseEdge { real_target, imaginary_target } => {
461 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
462 self.fail(
463 location,
464 "`FalseEdge` should have been removed after drop elaboration",
465 );
466 }
467 self.check_edge(location, *real_target, EdgeKind::Normal);
468 self.check_edge(location, *imaginary_target, EdgeKind::Normal);
469 }
470 TerminatorKind::FalseUnwind { real_target, unwind } => {
471 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
472 self.fail(
473 location,
474 "`FalseUnwind` should have been removed after drop elaboration",
475 );
476 }
477 self.check_edge(location, *real_target, EdgeKind::Normal);
478 self.check_unwind_edge(location, *unwind);
479 }
480 TerminatorKind::InlineAsm { targets, unwind, .. } => {
481 for &target in targets {
482 self.check_edge(location, target, EdgeKind::Normal);
483 }
484 self.check_unwind_edge(location, *unwind);
485 }
486 TerminatorKind::CoroutineDrop => {
487 if self.body.coroutine.is_none() {
488 self.fail(location, "`CoroutineDrop` cannot appear outside coroutine bodies");
489 }
490 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
491 self.fail(
492 location,
493 "`CoroutineDrop` should have been replaced by coroutine lowering",
494 );
495 }
496 }
497 TerminatorKind::UnwindResume => {
498 let bb = location.block;
499 if !self.body.basic_blocks[bb].is_cleanup {
500 self.fail(location, "Cannot `UnwindResume` from non-cleanup basic block")
501 }
502 if !self.can_unwind {
503 self.fail(location, "Cannot `UnwindResume` in a function that cannot unwind")
504 }
505 }
506 TerminatorKind::UnwindTerminate(_) => {
507 let bb = location.block;
508 if !self.body.basic_blocks[bb].is_cleanup {
509 self.fail(location, "Cannot `UnwindTerminate` from non-cleanup basic block")
510 }
511 }
512 TerminatorKind::Return => {
513 let bb = location.block;
514 if self.body.basic_blocks[bb].is_cleanup {
515 self.fail(location, "Cannot `Return` from cleanup basic block")
516 }
517 }
518 TerminatorKind::Unreachable => {}
519 }
520
521 self.super_terminator(terminator, location);
522 }
523
524 fn visit_source_scope(&mut self, scope: SourceScope) {
525 if self.body.source_scopes.get(scope).is_none() {
526 self.tcx.dcx().span_bug(
527 self.body.span,
528 format!(
529 "broken MIR in {:?} ({}):\ninvalid source scope {:?}",
530 self.body.source.instance, self.when, scope,
531 ),
532 );
533 }
534 }
535}
536
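/// Type-level validation of `body`: checks that operands, places, projections, rvalues,
/// statements and terminators are consistent with the types recorded in `local_decls`.
///
/// `caller_body` is the body that `body` belongs to (the two are identical for a plain
/// validation run); it is consulted when resolving coroutine layouts for field projections.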
537pub(super) fn validate_types<'tcx>(
543 tcx: TyCtxt<'tcx>,
544 typing_env: ty::TypingEnv<'tcx>,
545 body: &Body<'tcx>,
546 caller_body: &Body<'tcx>,
547) -> Vec<(Location, String)> {
548 let mut type_checker = TypeChecker { body, caller_body, tcx, typing_env, failures: Vec::new() };
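// Failure messages below embed type names; use untrimmed paths so they are unambiguous.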
549 with_no_trimmed_paths!({
554 type_checker.visit_body(body);
555 });
556 type_checker.failures
557}
558
559struct TypeChecker<'a, 'tcx> {
560 body: &'a Body<'tcx>,
561 caller_body: &'a Body<'tcx>,
562 tcx: TyCtxt<'tcx>,
563 typing_env: ty::TypingEnv<'tcx>,
564 failures: Vec<(Location, String)>,
565}
566
567impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
568 fn fail(&mut self, location: Location, msg: impl Into<String>) {
569 self.failures.push((location, msg.into()));
570 }
571
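/// Returns `true` if a value of type `src` may be assigned to a place of type `dest`.
/// Opaque types are not checked. Before runtime MIR the relation is covariant (subtyping is
/// allowed); from runtime MIR onwards subtyping must be explicit, so the types are related
/// invariantly.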
572 fn mir_assign_valid_types(&self, src: Ty<'tcx>, dest: Ty<'tcx>) -> bool {
575 if src == dest {
577 return true;
579 }
580
581 if (src, dest).has_opaque_types() {
587 return true;
588 }
589
590 let variance = if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
593 Variance::Invariant
594 } else {
595 Variance::Covariant
596 };
597
598 crate::util::relate_types(self.tcx, self.typing_env, variance, src, dest)
599 }
600
601 fn predicate_must_hold_modulo_regions(
603 &self,
604 pred: impl Upcast<TyCtxt<'tcx>, ty::Predicate<'tcx>>,
605 ) -> bool {
606 let pred: ty::Predicate<'tcx> = pred.upcast(self.tcx);
607
608 if pred.has_opaque_types() {
614 return true;
615 }
616
617 let (infcx, param_env) = self.tcx.infer_ctxt().build_with_typing_env(self.typing_env);
618 let ocx = ObligationCtxt::new(&infcx);
619 ocx.register_obligation(Obligation::new(
620 self.tcx,
621 ObligationCause::dummy(),
622 param_env,
623 pred,
624 ));
625 ocx.select_all_or_error().is_empty()
626 }
627}
628
629impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> {
630 fn visit_operand(&mut self, operand: &Operand<'tcx>, location: Location) {
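// `Operand::Copy` must only be used with `Copy` types in analysis MIR. The check is
// relatively expensive, so it only runs when `-Zvalidate-mir` is passed.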
631 if self.tcx.sess.opts.unstable_opts.validate_mir
633 && self.body.phase < MirPhase::Runtime(RuntimePhase::Initial)
634 {
635 if let Operand::Copy(place) = operand {
637 let ty = place.ty(&self.body.local_decls, self.tcx).ty;
638
639 if !self.tcx.type_is_copy_modulo_regions(self.typing_env, ty) {
640 self.fail(location, format!("`Operand::Copy` with non-`Copy` type {ty}"));
641 }
642 }
643 }
644
645 self.super_operand(operand, location);
646 }
647
648 fn visit_projection_elem(
649 &mut self,
650 place_ref: PlaceRef<'tcx>,
651 elem: PlaceElem<'tcx>,
652 context: PlaceContext,
653 location: Location,
654 ) {
655 match elem {
656 ProjectionElem::OpaqueCast(ty)
657 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) =>
658 {
659 self.fail(
660 location,
661 format!("explicit opaque type cast to `{ty}` after `PostAnalysisNormalize`"),
662 )
663 }
664 ProjectionElem::Index(index) => {
665 let index_ty = self.body.local_decls[index].ty;
666 if index_ty != self.tcx.types.usize {
667 self.fail(location, format!("bad index ({index_ty} != usize)"))
668 }
669 }
670 ProjectionElem::Deref
671 if self.body.phase >= MirPhase::Runtime(RuntimePhase::PostCleanup) =>
672 {
673 let base_ty = place_ref.ty(&self.body.local_decls, self.tcx).ty;
674
675 if base_ty.is_box() {
676 self.fail(location, format!("{base_ty} dereferenced after ElaborateBoxDerefs"))
677 }
678 }
679 ProjectionElem::Field(f, ty) => {
680 let parent_ty = place_ref.ty(&self.body.local_decls, self.tcx);
681 let fail_out_of_bounds = |this: &mut Self, location| {
682 this.fail(location, format!("Out of bounds field {f:?} for {parent_ty:?}"));
683 };
684 let check_equal = |this: &mut Self, location, f_ty| {
685 if !this.mir_assign_valid_types(ty, f_ty) {
686 this.fail(
687 location,
688 format!(
689 "Field projection `{place_ref:?}.{f:?}` specified type `{ty}`, but actual type is `{f_ty}`"
690 )
691 )
692 }
693 };
694
695 let kind = match parent_ty.ty.kind() {
696 &ty::Alias(ty::Opaque, ty::AliasTy { def_id, args, .. }) => {
697 self.tcx.type_of(def_id).instantiate(self.tcx, args).kind()
698 }
699 kind => kind,
700 };
701
702 match kind {
703 ty::Tuple(fields) => {
704 let Some(f_ty) = fields.get(f.as_usize()) else {
705 fail_out_of_bounds(self, location);
706 return;
707 };
708 check_equal(self, location, *f_ty);
709 }
710 ty::Adt(adt_def, args) => {
711 if self.tcx.is_lang_item(adt_def.did(), LangItem::DynMetadata) {
713 self.fail(
714 location,
715 format!(
716 "You can't project to field {f:?} of `DynMetadata` because \
717 layout is weird and thinks it doesn't have fields."
718 ),
719 );
720 }
721
722 let var = parent_ty.variant_index.unwrap_or(FIRST_VARIANT);
723 let Some(field) = adt_def.variant(var).fields.get(f) else {
724 fail_out_of_bounds(self, location);
725 return;
726 };
727 check_equal(self, location, field.ty(self.tcx, args));
728 }
729 ty::Closure(_, args) => {
730 let args = args.as_closure();
731 let Some(&f_ty) = args.upvar_tys().get(f.as_usize()) else {
732 fail_out_of_bounds(self, location);
733 return;
734 };
735 check_equal(self, location, f_ty);
736 }
737 ty::CoroutineClosure(_, args) => {
738 let args = args.as_coroutine_closure();
739 let Some(&f_ty) = args.upvar_tys().get(f.as_usize()) else {
740 fail_out_of_bounds(self, location);
741 return;
742 };
743 check_equal(self, location, f_ty);
744 }
745 &ty::Coroutine(def_id, args) => {
746 let f_ty = if let Some(var) = parent_ty.variant_index {
747 let layout = if def_id == self.caller_body.source.def_id() {
753 self.caller_body
754 .coroutine_layout_raw()
755 .or_else(|| self.tcx.coroutine_layout(def_id, args).ok())
756 } else if self.tcx.needs_coroutine_by_move_body_def_id(def_id)
757 && let ty::ClosureKind::FnOnce =
758 args.as_coroutine().kind_ty().to_opt_closure_kind().unwrap()
759 && self.caller_body.source.def_id()
760 == self.tcx.coroutine_by_move_body_def_id(def_id)
761 {
762 self.caller_body.coroutine_layout_raw()
764 } else {
765 self.tcx.coroutine_layout(def_id, args).ok()
766 };
767
768 let Some(layout) = layout else {
769 self.fail(
770 location,
771 format!("No coroutine layout for {parent_ty:?}"),
772 );
773 return;
774 };
775
776 let Some(&local) = layout.variant_fields[var].get(f) else {
777 fail_out_of_bounds(self, location);
778 return;
779 };
780
781 let Some(f_ty) = layout.field_tys.get(local) else {
782 self.fail(
783 location,
784 format!("Out of bounds local {local:?} for {parent_ty:?}"),
785 );
786 return;
787 };
788
789 ty::EarlyBinder::bind(f_ty.ty).instantiate(self.tcx, args)
790 } else {
791 let Some(&f_ty) = args.as_coroutine().prefix_tys().get(f.index())
792 else {
793 fail_out_of_bounds(self, location);
794 return;
795 };
796
797 f_ty
798 };
799
800 check_equal(self, location, f_ty);
801 }
802 _ => {
803 self.fail(location, format!("{:?} does not have fields", parent_ty.ty));
804 }
805 }
806 }
807 ProjectionElem::Subtype(ty) => {
808 if !util::sub_types(
809 self.tcx,
810 self.typing_env,
811 ty,
812 place_ref.ty(&self.body.local_decls, self.tcx).ty,
813 ) {
814 self.fail(
815 location,
816 format!(
817 "Failed subtyping {ty} and {}",
818 place_ref.ty(&self.body.local_decls, self.tcx).ty
819 ),
820 )
821 }
822 }
823 ProjectionElem::UnwrapUnsafeBinder(unwrapped_ty) => {
824 let binder_ty = place_ref.ty(&self.body.local_decls, self.tcx);
825 let ty::UnsafeBinder(binder_ty) = *binder_ty.ty.kind() else {
826 self.fail(
827 location,
format!("UnwrapUnsafeBinder on a place that is not a ty::UnsafeBinder"),
829 );
830 return;
831 };
832 let binder_inner_ty = self.tcx.instantiate_bound_regions_with_erased(*binder_ty);
833 if !self.mir_assign_valid_types(unwrapped_ty, binder_inner_ty) {
834 self.fail(
835 location,
836 format!(
837 "Cannot unwrap unsafe binder {binder_ty:?} into type {unwrapped_ty}"
838 ),
839 );
840 }
841 }
842 _ => {}
843 }
844 self.super_projection_elem(place_ref, elem, context, location);
845 }
846
847 fn visit_var_debug_info(&mut self, debuginfo: &VarDebugInfo<'tcx>) {
848 if let Some(box VarDebugInfoFragment { ty, ref projection }) = debuginfo.composite {
849 if ty.is_union() || ty.is_enum() {
850 self.fail(
851 START_BLOCK.start_location(),
852 format!("invalid type {ty} in debuginfo for {:?}", debuginfo.name),
853 );
854 }
855 if projection.is_empty() {
856 self.fail(
857 START_BLOCK.start_location(),
858 format!("invalid empty projection in debuginfo for {:?}", debuginfo.name),
859 );
860 }
861 if projection.iter().any(|p| !matches!(p, PlaceElem::Field(..))) {
862 self.fail(
863 START_BLOCK.start_location(),
864 format!(
865 "illegal projection {:?} in debuginfo for {:?}",
866 projection, debuginfo.name
867 ),
868 );
869 }
870 }
871 match debuginfo.value {
872 VarDebugInfoContents::Const(_) => {}
873 VarDebugInfoContents::Place(place) => {
874 if place.projection.iter().any(|p| !p.can_use_in_debuginfo()) {
875 self.fail(
876 START_BLOCK.start_location(),
877 format!("illegal place {:?} in debuginfo for {:?}", place, debuginfo.name),
878 );
879 }
880 }
881 }
882 self.super_var_debug_info(debuginfo);
883 }
884
885 fn visit_place(&mut self, place: &Place<'tcx>, cntxt: PlaceContext, location: Location) {
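// Computing the place's type will `bug!` if the place is structurally malformed.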
886 let _ = place.ty(&self.body.local_decls, self.tcx);
888
889 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial)
890 && place.projection.len() > 1
891 && cntxt != PlaceContext::NonUse(NonUseContext::VarDebugInfo)
892 && place.projection[1..].contains(&ProjectionElem::Deref)
893 {
894 self.fail(
895 location,
896 format!("place {place:?} has deref as a later projection (it is only permitted as the first projection)"),
897 );
898 }
899
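// Every `Downcast` projection must be immediately followed by a `Field` projection.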
900 let mut projections_iter = place.projection.iter();
902 while let Some(proj) = projections_iter.next() {
903 if matches!(proj, ProjectionElem::Downcast(..)) {
904 if !matches!(projections_iter.next(), Some(ProjectionElem::Field(..))) {
905 self.fail(
906 location,
907 format!(
908 "place {place:?} has `Downcast` projection not followed by `Field`"
909 ),
910 );
911 }
912 }
913 }
914
915 self.super_place(place, cntxt, location);
916 }
917
918 fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
919 macro_rules! check_kinds {
920 ($t:expr, $text:literal, $typat:pat) => {
921 if !matches!(($t).kind(), $typat) {
922 self.fail(location, format!($text, $t));
923 }
924 };
925 }
926 match rvalue {
927 Rvalue::Use(_) | Rvalue::CopyForDeref(_) => {}
928 Rvalue::Aggregate(kind, fields) => match **kind {
929 AggregateKind::Tuple => {}
930 AggregateKind::Array(dest) => {
931 for src in fields {
932 if !self.mir_assign_valid_types(src.ty(self.body, self.tcx), dest) {
933 self.fail(location, "array field has the wrong type");
934 }
935 }
936 }
937 AggregateKind::Adt(def_id, idx, args, _, Some(field)) => {
938 let adt_def = self.tcx.adt_def(def_id);
939 assert!(adt_def.is_union());
940 assert_eq!(idx, FIRST_VARIANT);
941 let dest_ty = self.tcx.normalize_erasing_regions(
942 self.typing_env,
943 adt_def.non_enum_variant().fields[field].ty(self.tcx, args),
944 );
945 if let [field] = fields.raw.as_slice() {
946 let src_ty = field.ty(self.body, self.tcx);
947 if !self.mir_assign_valid_types(src_ty, dest_ty) {
948 self.fail(location, "union field has the wrong type");
949 }
950 } else {
951 self.fail(location, "unions should have one initialized field");
952 }
953 }
954 AggregateKind::Adt(def_id, idx, args, _, None) => {
955 let adt_def = self.tcx.adt_def(def_id);
956 assert!(!adt_def.is_union());
957 let variant = &adt_def.variants()[idx];
958 if variant.fields.len() != fields.len() {
959 self.fail(location, "adt has the wrong number of initialized fields");
960 }
961 for (src, dest) in std::iter::zip(fields, &variant.fields) {
962 let dest_ty = self
963 .tcx
964 .normalize_erasing_regions(self.typing_env, dest.ty(self.tcx, args));
965 if !self.mir_assign_valid_types(src.ty(self.body, self.tcx), dest_ty) {
966 self.fail(location, "adt field has the wrong type");
967 }
968 }
969 }
970 AggregateKind::Closure(_, args) => {
971 let upvars = args.as_closure().upvar_tys();
972 if upvars.len() != fields.len() {
973 self.fail(location, "closure has the wrong number of initialized fields");
974 }
975 for (src, dest) in std::iter::zip(fields, upvars) {
976 if !self.mir_assign_valid_types(src.ty(self.body, self.tcx), dest) {
977 self.fail(location, "closure field has the wrong type");
978 }
979 }
980 }
981 AggregateKind::Coroutine(_, args) => {
982 let upvars = args.as_coroutine().upvar_tys();
983 if upvars.len() != fields.len() {
984 self.fail(location, "coroutine has the wrong number of initialized fields");
985 }
986 for (src, dest) in std::iter::zip(fields, upvars) {
987 if !self.mir_assign_valid_types(src.ty(self.body, self.tcx), dest) {
988 self.fail(location, "coroutine field has the wrong type");
989 }
990 }
991 }
992 AggregateKind::CoroutineClosure(_, args) => {
993 let upvars = args.as_coroutine_closure().upvar_tys();
994 if upvars.len() != fields.len() {
995 self.fail(
996 location,
997 "coroutine-closure has the wrong number of initialized fields",
998 );
999 }
1000 for (src, dest) in std::iter::zip(fields, upvars) {
1001 if !self.mir_assign_valid_types(src.ty(self.body, self.tcx), dest) {
1002 self.fail(location, "coroutine-closure field has the wrong type");
1003 }
1004 }
1005 }
1006 AggregateKind::RawPtr(pointee_ty, mutability) => {
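// `RawPtr` aggregates are only introduced by runtime lowering, so they are only valid in
// runtime MIR.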
1007 if !matches!(self.body.phase, MirPhase::Runtime(_)) {
1008 self.fail(location, "RawPtr should be in runtime MIR only");
1012 }
1013
1014 if let [data_ptr, metadata] = fields.raw.as_slice() {
1015 let data_ptr_ty = data_ptr.ty(self.body, self.tcx);
1016 let metadata_ty = metadata.ty(self.body, self.tcx);
1017 if let ty::RawPtr(in_pointee, in_mut) = data_ptr_ty.kind() {
1018 if *in_mut != mutability {
1019 self.fail(location, "input and output mutability must match");
1020 }
1021
1022 if !in_pointee.is_sized(self.tcx, self.typing_env) {
1024 self.fail(location, "input pointer must be thin");
1025 }
1026 } else {
1027 self.fail(
1028 location,
1029 "first operand to raw pointer aggregate must be a raw pointer",
1030 );
1031 }
1032
1033 if pointee_ty.is_slice() {
1035 if !self.mir_assign_valid_types(metadata_ty, self.tcx.types.usize) {
1036 self.fail(location, "slice metadata must be usize");
1037 }
1038 } else if pointee_ty.is_sized(self.tcx, self.typing_env) {
1039 if metadata_ty != self.tcx.types.unit {
1040 self.fail(location, "metadata for pointer-to-thin must be unit");
1041 }
1042 }
1043 } else {
1044 self.fail(location, "raw pointer aggregate must have 2 fields");
1045 }
1046 }
1047 },
1048 Rvalue::Ref(_, BorrowKind::Fake(_), _) => {
1049 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
1050 self.fail(
1051 location,
1052 "`Assign` statement with a `Fake` borrow should have been removed in runtime MIR",
1053 );
1054 }
1055 }
1056 Rvalue::Ref(..) => {}
1057 Rvalue::Len(p) => {
1058 let pty = p.ty(&self.body.local_decls, self.tcx).ty;
1059 check_kinds!(
1060 pty,
1061 "Cannot compute length of non-array type {:?}",
1062 ty::Array(..) | ty::Slice(..)
1063 );
1064 }
1065 Rvalue::BinaryOp(op, vals) => {
1066 use BinOp::*;
1067 let a = vals.0.ty(&self.body.local_decls, self.tcx);
1068 let b = vals.1.ty(&self.body.local_decls, self.tcx);
1069 if crate::util::binop_right_homogeneous(*op) {
1070 if let Eq | Lt | Le | Ne | Ge | Gt = op {
1071 if !self.mir_assign_valid_types(a, b) {
1073 self.fail(
1074 location,
1075 format!("Cannot {op:?} compare incompatible types {a} and {b}"),
1076 );
1077 }
1078 } else if a != b {
1079 self.fail(
1080 location,
1081 format!("Cannot perform binary op {op:?} on unequal types {a} and {b}"),
1082 );
1083 }
1084 }
1085
1086 match op {
1087 Offset => {
1088 check_kinds!(a, "Cannot offset non-pointer type {:?}", ty::RawPtr(..));
1089 if b != self.tcx.types.isize && b != self.tcx.types.usize {
self.fail(location, format!("Cannot offset by non-isize/usize type {b}"));
1091 }
1092 }
1093 Eq | Lt | Le | Ne | Ge | Gt => {
1094 for x in [a, b] {
1095 check_kinds!(
1096 x,
1097 "Cannot {op:?} compare type {:?}",
1098 ty::Bool
1099 | ty::Char
1100 | ty::Int(..)
1101 | ty::Uint(..)
1102 | ty::Float(..)
1103 | ty::RawPtr(..)
1104 | ty::FnPtr(..)
1105 )
1106 }
1107 }
1108 Cmp => {
1109 for x in [a, b] {
1110 check_kinds!(
1111 x,
1112 "Cannot three-way compare non-integer type {:?}",
1113 ty::Char | ty::Uint(..) | ty::Int(..)
1114 )
1115 }
1116 }
1117 AddUnchecked | AddWithOverflow | SubUnchecked | SubWithOverflow
1118 | MulUnchecked | MulWithOverflow | Shl | ShlUnchecked | Shr | ShrUnchecked => {
1119 for x in [a, b] {
1120 check_kinds!(
1121 x,
1122 "Cannot {op:?} non-integer type {:?}",
1123 ty::Uint(..) | ty::Int(..)
1124 )
1125 }
1126 }
1127 BitAnd | BitOr | BitXor => {
1128 for x in [a, b] {
1129 check_kinds!(
1130 x,
1131 "Cannot perform bitwise op {op:?} on type {:?}",
1132 ty::Uint(..) | ty::Int(..) | ty::Bool
1133 )
1134 }
1135 }
1136 Add | Sub | Mul | Div | Rem => {
1137 for x in [a, b] {
1138 check_kinds!(
1139 x,
1140 "Cannot perform arithmetic {op:?} on type {:?}",
1141 ty::Uint(..) | ty::Int(..) | ty::Float(..)
1142 )
1143 }
1144 }
1145 }
1146 }
1147 Rvalue::UnaryOp(op, operand) => {
1148 let a = operand.ty(&self.body.local_decls, self.tcx);
1149 match op {
1150 UnOp::Neg => {
1151 check_kinds!(a, "Cannot negate type {:?}", ty::Int(..) | ty::Float(..))
1152 }
1153 UnOp::Not => {
1154 check_kinds!(
1155 a,
1156 "Cannot binary not type {:?}",
1157 ty::Int(..) | ty::Uint(..) | ty::Bool
1158 );
1159 }
1160 UnOp::PtrMetadata => {
1161 check_kinds!(
1162 a,
1163 "Cannot PtrMetadata non-pointer non-reference type {:?}",
1164 ty::RawPtr(..) | ty::Ref(..)
1165 );
1166 }
1167 }
1168 }
1169 Rvalue::ShallowInitBox(operand, _) => {
1170 let a = operand.ty(&self.body.local_decls, self.tcx);
1171 check_kinds!(a, "Cannot shallow init type {:?}", ty::RawPtr(..));
1172 }
1173 Rvalue::Cast(kind, operand, target_type) => {
1174 let op_ty = operand.ty(self.body, self.tcx);
1175 match kind {
1176 CastKind::PointerWithExposedProvenance | CastKind::PointerExposeProvenance => {}
1178 CastKind::PointerCoercion(PointerCoercion::ReifyFnPointer, _) => {
1179 check_kinds!(
1181 op_ty,
1182 "CastKind::{kind:?} input must be a fn item, not {:?}",
1183 ty::FnDef(..)
1184 );
1185 check_kinds!(
1186 target_type,
1187 "CastKind::{kind:?} output must be a fn pointer, not {:?}",
1188 ty::FnPtr(..)
1189 );
1190 }
1191 CastKind::PointerCoercion(PointerCoercion::UnsafeFnPointer, _) => {
1192 check_kinds!(
1194 op_ty,
1195 "CastKind::{kind:?} input must be a fn pointer, not {:?}",
1196 ty::FnPtr(..)
1197 );
1198 check_kinds!(
1199 target_type,
1200 "CastKind::{kind:?} output must be a fn pointer, not {:?}",
1201 ty::FnPtr(..)
1202 );
1203 }
1204 CastKind::PointerCoercion(PointerCoercion::ClosureFnPointer(..), _) => {
1205 check_kinds!(
1207 op_ty,
1208 "CastKind::{kind:?} input must be a closure, not {:?}",
1209 ty::Closure(..)
1210 );
1211 check_kinds!(
1212 target_type,
1213 "CastKind::{kind:?} output must be a fn pointer, not {:?}",
1214 ty::FnPtr(..)
1215 );
1216 }
1217 CastKind::PointerCoercion(PointerCoercion::MutToConstPointer, _) => {
1218 check_kinds!(
1220 op_ty,
1221 "CastKind::{kind:?} input must be a raw mut pointer, not {:?}",
1222 ty::RawPtr(_, Mutability::Mut)
1223 );
1224 check_kinds!(
1225 target_type,
1226 "CastKind::{kind:?} output must be a raw const pointer, not {:?}",
1227 ty::RawPtr(_, Mutability::Not)
1228 );
1229 if self.body.phase >= MirPhase::Analysis(AnalysisPhase::PostCleanup) {
1230 self.fail(location, format!("After borrowck, MIR disallows {kind:?}"));
1231 }
1232 }
1233 CastKind::PointerCoercion(PointerCoercion::ArrayToPointer, _) => {
1234 check_kinds!(
1236 op_ty,
1237 "CastKind::{kind:?} input must be a raw pointer, not {:?}",
1238 ty::RawPtr(..)
1239 );
1240 check_kinds!(
1241 target_type,
1242 "CastKind::{kind:?} output must be a raw pointer, not {:?}",
1243 ty::RawPtr(..)
1244 );
1245 if self.body.phase >= MirPhase::Analysis(AnalysisPhase::PostCleanup) {
1246 self.fail(location, format!("After borrowck, MIR disallows {kind:?}"));
1247 }
1248 }
1249 CastKind::PointerCoercion(PointerCoercion::Unsize, _) => {
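// The source of an unsize coercion must at least be coercible to the target via the
// `CoerceUnsized` trait.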
1250 if !self.predicate_must_hold_modulo_regions(ty::TraitRef::new(
1253 self.tcx,
1254 self.tcx.require_lang_item(
1255 LangItem::CoerceUnsized,
1256 Some(self.body.source_info(location).span),
1257 ),
1258 [op_ty, *target_type],
1259 )) {
1260 self.fail(location, format!("Unsize coercion, but `{op_ty}` isn't coercible to `{target_type}`"));
1261 }
1262 }
1263 CastKind::PointerCoercion(PointerCoercion::DynStar, _) => {
1264 }
1266 CastKind::IntToInt | CastKind::IntToFloat => {
1267 let input_valid = op_ty.is_integral() || op_ty.is_char() || op_ty.is_bool();
1268 let target_valid = target_type.is_numeric() || target_type.is_char();
1269 if !input_valid || !target_valid {
1270 self.fail(
1271 location,
1272 format!("Wrong cast kind {kind:?} for the type {op_ty}"),
1273 );
1274 }
1275 }
1276 CastKind::FnPtrToPtr => {
1277 check_kinds!(
1278 op_ty,
1279 "CastKind::{kind:?} input must be a fn pointer, not {:?}",
1280 ty::FnPtr(..)
1281 );
1282 check_kinds!(
1283 target_type,
1284 "CastKind::{kind:?} output must be a raw pointer, not {:?}",
1285 ty::RawPtr(..)
1286 );
1287 }
1288 CastKind::PtrToPtr => {
1289 check_kinds!(
1290 op_ty,
1291 "CastKind::{kind:?} input must be a raw pointer, not {:?}",
1292 ty::RawPtr(..)
1293 );
1294 check_kinds!(
1295 target_type,
1296 "CastKind::{kind:?} output must be a raw pointer, not {:?}",
1297 ty::RawPtr(..)
1298 );
1299 }
1300 CastKind::FloatToFloat | CastKind::FloatToInt => {
1301 if !op_ty.is_floating_point() || !target_type.is_numeric() {
1302 self.fail(
1303 location,
1304 format!(
"Trying to cast a non-`Float` type as {kind:?} into {target_type:?}"
1306 ),
1307 );
1308 }
1309 }
1310 CastKind::Transmute => {
1311 if let MirPhase::Runtime(..) = self.body.phase {
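// A MIR `Transmute` is accepted between any two `Sized` types (executing it may still be
// UB), so only sizedness is checked here.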
1312 if !self
1316 .tcx
1317 .normalize_erasing_regions(self.typing_env, op_ty)
1318 .is_sized(self.tcx, self.typing_env)
1319 {
1320 self.fail(
1321 location,
1322 format!("Cannot transmute from non-`Sized` type {op_ty}"),
1323 );
1324 }
1325 if !self
1326 .tcx
1327 .normalize_erasing_regions(self.typing_env, *target_type)
1328 .is_sized(self.tcx, self.typing_env)
1329 {
1330 self.fail(
1331 location,
1332 format!("Cannot transmute to non-`Sized` type {target_type:?}"),
1333 );
1334 }
1335 } else {
1336 self.fail(
1337 location,
1338 format!(
1339 "Transmute is not supported in non-runtime phase {:?}.",
1340 self.body.phase
1341 ),
1342 );
1343 }
1344 }
1345 }
1346 }
1347 Rvalue::NullaryOp(NullOp::OffsetOf(indices), container) => {
1348 let fail_out_of_bounds = |this: &mut Self, location, field, ty| {
1349 this.fail(location, format!("Out of bounds field {field:?} for {ty}"));
1350 };
1351
1352 let mut current_ty = *container;
1353
1354 for (variant, field) in indices.iter() {
1355 match current_ty.kind() {
1356 ty::Tuple(fields) => {
1357 if variant != FIRST_VARIANT {
1358 self.fail(
1359 location,
1360 format!("tried to get variant {variant:?} of tuple"),
1361 );
1362 return;
1363 }
1364 let Some(&f_ty) = fields.get(field.as_usize()) else {
1365 fail_out_of_bounds(self, location, field, current_ty);
1366 return;
1367 };
1368
1369 current_ty = self.tcx.normalize_erasing_regions(self.typing_env, f_ty);
1370 }
1371 ty::Adt(adt_def, args) => {
1372 let Some(field) = adt_def.variant(variant).fields.get(field) else {
1373 fail_out_of_bounds(self, location, field, current_ty);
1374 return;
1375 };
1376
1377 let f_ty = field.ty(self.tcx, args);
1378 current_ty = self.tcx.normalize_erasing_regions(self.typing_env, f_ty);
1379 }
1380 _ => {
1381 self.fail(
1382 location,
1383 format!("Cannot get offset ({variant:?}, {field:?}) from type {current_ty}"),
1384 );
1385 return;
1386 }
1387 }
1388 }
1389 }
1390 Rvalue::Repeat(_, _)
1391 | Rvalue::ThreadLocalRef(_)
1392 | Rvalue::RawPtr(_, _)
1393 | Rvalue::NullaryOp(
1394 NullOp::SizeOf | NullOp::AlignOf | NullOp::UbChecks | NullOp::ContractChecks,
1395 _,
1396 )
1397 | Rvalue::Discriminant(_) => {}
1398
1399 Rvalue::WrapUnsafeBinder(op, ty) => {
1400 let unwrapped_ty = op.ty(self.body, self.tcx);
1401 let ty::UnsafeBinder(binder_ty) = *ty.kind() else {
1402 self.fail(
1403 location,
1404 format!("WrapUnsafeBinder does not produce a ty::UnsafeBinder"),
1405 );
1406 return;
1407 };
1408 let binder_inner_ty = self.tcx.instantiate_bound_regions_with_erased(*binder_ty);
1409 if !self.mir_assign_valid_types(unwrapped_ty, binder_inner_ty) {
1410 self.fail(
1411 location,
1412 format!("Cannot wrap {unwrapped_ty} into unsafe binder {binder_ty:?}"),
1413 );
1414 }
1415 }
1416 }
1417 self.super_rvalue(rvalue, location);
1418 }
1419
1420 fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
1421 match &statement.kind {
1422 StatementKind::Assign(box (dest, rvalue)) => {
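// The left- and right-hand sides of an assignment must have compatible types.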
1423 let left_ty = dest.ty(&self.body.local_decls, self.tcx).ty;
1425 let right_ty = rvalue.ty(&self.body.local_decls, self.tcx);
1426
1427 if !self.mir_assign_valid_types(right_ty, left_ty) {
1428 self.fail(
1429 location,
1430 format!(
1431 "encountered `{:?}` with incompatible types:\n\
1432 left-hand side has type: {}\n\
1433 right-hand side has type: {}",
1434 statement.kind, left_ty, right_ty,
1435 ),
1436 );
1437 }
1438 if let Rvalue::CopyForDeref(place) = rvalue {
1439 if place.ty(&self.body.local_decls, self.tcx).ty.builtin_deref(true).is_none() {
1440 self.fail(
1441 location,
1442 "`CopyForDeref` should only be used for dereferenceable types",
1443 )
1444 }
1445 }
1446 }
1447 StatementKind::AscribeUserType(..) => {
1448 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
1449 self.fail(
1450 location,
1451 "`AscribeUserType` should have been removed after drop lowering phase",
1452 );
1453 }
1454 }
1455 StatementKind::FakeRead(..) => {
1456 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
1457 self.fail(
1458 location,
1459 "`FakeRead` should have been removed after drop lowering phase",
1460 );
1461 }
1462 }
1463 StatementKind::Intrinsic(box NonDivergingIntrinsic::Assume(op)) => {
1464 let ty = op.ty(&self.body.local_decls, self.tcx);
1465 if !ty.is_bool() {
1466 self.fail(
1467 location,
1468 format!("`assume` argument must be `bool`, but got: `{ty}`"),
1469 );
1470 }
1471 }
1472 StatementKind::Intrinsic(box NonDivergingIntrinsic::CopyNonOverlapping(
1473 CopyNonOverlapping { src, dst, count },
1474 )) => {
1475 let src_ty = src.ty(&self.body.local_decls, self.tcx);
1476 let op_src_ty = if let Some(src_deref) = src_ty.builtin_deref(true) {
1477 src_deref
1478 } else {
1479 self.fail(
1480 location,
1481 format!("Expected src to be ptr in copy_nonoverlapping, got: {src_ty}"),
1482 );
1483 return;
1484 };
1485 let dst_ty = dst.ty(&self.body.local_decls, self.tcx);
1486 let op_dst_ty = if let Some(dst_deref) = dst_ty.builtin_deref(true) {
1487 dst_deref
1488 } else {
1489 self.fail(
1490 location,
1491 format!("Expected dst to be ptr in copy_nonoverlapping, got: {dst_ty}"),
1492 );
1493 return;
1494 };
1495 if !self.mir_assign_valid_types(op_src_ty, op_dst_ty) {
1498 self.fail(location, format!("bad arg ({op_src_ty} != {op_dst_ty})"));
1499 }
1500
1501 let op_cnt_ty = count.ty(&self.body.local_decls, self.tcx);
1502 if op_cnt_ty != self.tcx.types.usize {
1503 self.fail(location, format!("bad arg ({op_cnt_ty} != usize)"))
1504 }
1505 }
1506 StatementKind::SetDiscriminant { place, .. } => {
1507 if self.body.phase < MirPhase::Runtime(RuntimePhase::Initial) {
self.fail(location, "`SetDiscriminant` is not allowed until deaggregation");
1509 }
1510 let pty = place.ty(&self.body.local_decls, self.tcx).ty;
1511 if !matches!(
1512 pty.kind(),
1513 ty::Adt(..) | ty::Coroutine(..) | ty::Alias(ty::Opaque, ..)
1514 ) {
1515 self.fail(
1516 location,
1517 format!(
1518 "`SetDiscriminant` is only allowed on ADTs and coroutines, not {pty}"
1519 ),
1520 );
1521 }
1522 }
1523 StatementKind::Deinit(..) => {
1524 if self.body.phase < MirPhase::Runtime(RuntimePhase::Initial) {
self.fail(location, "`Deinit` is not allowed until deaggregation");
1526 }
1527 }
1528 StatementKind::Retag(kind, _) => {
1529 if matches!(kind, RetagKind::TwoPhase) {
1533 self.fail(location, format!("explicit `{kind:?}` is forbidden"));
1534 }
1535 }
1536 StatementKind::StorageLive(_)
1537 | StatementKind::StorageDead(_)
1538 | StatementKind::Coverage(_)
1539 | StatementKind::ConstEvalCounter
1540 | StatementKind::PlaceMention(..)
1541 | StatementKind::BackwardIncompatibleDropHint { .. }
1542 | StatementKind::Nop => {}
1543 }
1544
1545 self.super_statement(statement, location);
1546 }
1547
1548 fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) {
1549 match &terminator.kind {
1550 TerminatorKind::SwitchInt { targets, discr } => {
1551 let switch_ty = discr.ty(&self.body.local_decls, self.tcx);
1552
1553 let target_width = self.tcx.sess.target.pointer_width;
1554
1555 let size = Size::from_bits(match switch_ty.kind() {
1556 ty::Uint(uint) => uint.normalize(target_width).bit_width().unwrap(),
1557 ty::Int(int) => int.normalize(target_width).bit_width().unwrap(),
1558 ty::Char => 32,
1559 ty::Bool => 1,
1560 other => bug!("unhandled type: {:?}", other),
1561 });
1562
1563 for (value, _) in targets.iter() {
1564 if ScalarInt::try_from_uint(value, size).is_none() {
1565 self.fail(
1566 location,
1567 format!("the value {value:#x} is not a proper {switch_ty}"),
1568 )
1569 }
1570 }
1571 }
1572 TerminatorKind::Call { func, .. } | TerminatorKind::TailCall { func, .. } => {
1573 let func_ty = func.ty(&self.body.local_decls, self.tcx);
1574 match func_ty.kind() {
1575 ty::FnPtr(..) | ty::FnDef(..) => {}
1576 _ => self.fail(
1577 location,
1578 format!(
1579 "encountered non-callable type {func_ty} in `{}` terminator",
1580 terminator.kind.name()
1581 ),
1582 ),
1583 }
1584
1585 if let TerminatorKind::TailCall { .. } = terminator.kind {
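// No tail-call-specific checks (e.g. signature compatibility) are performed here yet.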
1586 }
1589 }
1590 TerminatorKind::Assert { cond, .. } => {
1591 let cond_ty = cond.ty(&self.body.local_decls, self.tcx);
1592 if cond_ty != self.tcx.types.bool {
1593 self.fail(
1594 location,
1595 format!(
1596 "encountered non-boolean condition of type {cond_ty} in `Assert` terminator"
1597 ),
1598 );
1599 }
1600 }
1601 TerminatorKind::Goto { .. }
1602 | TerminatorKind::Drop { .. }
1603 | TerminatorKind::Yield { .. }
1604 | TerminatorKind::FalseEdge { .. }
1605 | TerminatorKind::FalseUnwind { .. }
1606 | TerminatorKind::InlineAsm { .. }
1607 | TerminatorKind::CoroutineDrop
1608 | TerminatorKind::UnwindResume
1609 | TerminatorKind::UnwindTerminate(_)
1610 | TerminatorKind::Return
1611 | TerminatorKind::Unreachable => {}
1612 }
1613
1614 self.super_terminator(terminator, location);
1615 }
1616}