1use std::borrow::Cow;
88
89use either::Either;
90use rustc_abi::{self as abi, BackendRepr, FIRST_VARIANT, FieldIdx, Primitive, Size, VariantIdx};
91use rustc_const_eval::const_eval::DummyMachine;
92use rustc_const_eval::interpret::{
93 ImmTy, Immediate, InterpCx, MemPlaceMeta, MemoryKind, OpTy, Projectable, Scalar,
94 intern_const_alloc_for_constprop,
95};
96use rustc_data_structures::fx::{FxIndexSet, MutableValues};
97use rustc_data_structures::graph::dominators::Dominators;
98use rustc_hir::def::DefKind;
99use rustc_index::bit_set::DenseBitSet;
100use rustc_index::{IndexVec, newtype_index};
101use rustc_middle::bug;
102use rustc_middle::mir::interpret::GlobalAlloc;
103use rustc_middle::mir::visit::*;
104use rustc_middle::mir::*;
105use rustc_middle::ty::layout::HasTypingEnv;
106use rustc_middle::ty::{self, Ty, TyCtxt};
107use rustc_span::DUMMY_SP;
108use rustc_span::def_id::DefId;
109use smallvec::SmallVec;
110use tracing::{debug, instrument, trace};
111
112use crate::ssa::SsaLocals;
113
/// MIR pass performing global value numbering: each value produced by an SSA
/// assignment is interned as a symbolic [`Value`], and places/operands that are
/// proven to hold an already-available value are rewritten to reuse a local or
/// a constant.
pub(super) struct GVN;
115
impl<'tcx> crate::MirPass<'tcx> for GVN {
    fn is_enabled(&self, sess: &rustc_session::Session) -> bool {
        // Only run at `-Z mir-opt-level >= 2`.
        sess.mir_opt_level() >= 2
    }

    #[instrument(level = "trace", skip(self, tcx, body))]
    fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
        debug!(def_id = ?body.source.def_id());

        let typing_env = body.typing_env(tcx);
        let ssa = SsaLocals::new(tcx, body, typing_env);
        // Clone the dominators so they remain usable while the body is
        // mutably borrowed during the per-block visit below.
        let dominators = body.basic_blocks.dominators().clone();

        let mut state = VnState::new(tcx, body, typing_env, &ssa, dominators, &body.local_decls);

        // Function arguments are values we know nothing about: seed each SSA
        // argument with a fresh opaque value.
        for local in body.args_iter().filter(|&local| ssa.is_ssa(local)) {
            let opaque = state.new_opaque();
            state.assign(local, opaque);
        }

        // Visit blocks in reverse postorder so a value's definition is seen
        // before (non-loop) uses; the CFG itself is never changed.
        let reverse_postorder = body.basic_blocks.reverse_postorder().to_vec();
        for bb in reverse_postorder {
            let data = &mut body.basic_blocks.as_mut_preserves_cfg()[bb];
            state.visit_basic_block_data(bb, data);
        }

        // Locals that replacements now read may be used outside their original
        // storage range: drop their storage statements.
        StorageRemover { tcx, reused_locals: state.reused_locals }.visit_body_preserves_cfg(body);
    }

    fn is_required(&self) -> bool {
        false
    }
}
153
newtype_index! {
    /// Identifier of an interned [`Value`]: its index into `VnState::values`.
    struct VnIndex {}
}
157
/// Type information carried by an aggregate [`Value`].
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
enum AggregateTy<'tcx> {
    /// Array; the element type is recovered from the field values.
    Array,
    /// Tuple; the full type is recovered from the field types.
    Tuple,
    /// ADT, closure, coroutine or coroutine-closure, with its generic args.
    Def(DefId, ty::GenericArgsRef<'tcx>),
    /// Raw pointer built from a data pointer and a metadata field.
    RawPtr {
        /// Type of the data pointer operand (field 0).
        data_pointer_ty: Ty<'tcx>,
        /// Type of the resulting raw pointer.
        output_pointer_ty: Ty<'tcx>,
    },
}
173
/// How the address of a place was taken: by reference (`Rvalue::Ref`) or by
/// raw pointer (`Rvalue::RawPtr`).
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
enum AddressKind {
    /// `&place` / `&mut place`.
    Ref(BorrowKind),
    /// `&raw const place` / `&raw mut place`.
    Address(RawPtrKind),
}
179
/// Symbolic value tracked by value numbering. Two equal `Value`s intern to the
/// same [`VnIndex`] and are therefore known to denote the same runtime value.
#[derive(Debug, PartialEq, Eq, Hash)]
enum Value<'tcx> {
    /// A value about which nothing is known; the `usize` tag keeps distinct
    /// opaque values from comparing equal.
    Opaque(usize),
    /// An evaluated constant.
    Constant {
        value: Const<'tcx>,
        /// 0 for deterministic constants (which may unify with each other);
        /// a unique tag for every occurrence of a non-deterministic one.
        disambiguator: usize,
    },
    /// An aggregate with the given type, variant and field values.
    Aggregate(AggregateTy<'tcx>, VariantIdx, Vec<VnIndex>),
    /// An array built by repeating one value a constant number of times.
    Repeat(VnIndex, ty::Const<'tcx>),
    /// The address of a place.
    Address {
        place: Place<'tcx>,
        kind: AddressKind,
        /// Unique tag so two address-taking operations on the same place do
        /// not compare equal (they may have different provenance).
        provenance: usize,
    },

    /// A place projection applied to another value.
    Projection(VnIndex, ProjectionElem<VnIndex, Ty<'tcx>>),
    /// The discriminant of another value.
    Discriminant(VnIndex),
    /// The length of an array or slice value.
    Len(VnIndex),

    /// A nullary operation (`SizeOf`, `AlignOf`, …) on a type.
    NullaryOp(NullOp<'tcx>, Ty<'tcx>),
    UnaryOp(UnOp, VnIndex),
    BinaryOp(BinOp, VnIndex, VnIndex),
    /// A cast of `value` from type `from` to type `to`.
    Cast {
        kind: CastKind,
        value: VnIndex,
        from: Ty<'tcx>,
        to: Ty<'tcx>,
    },
}
226
struct VnState<'body, 'tcx> {
    tcx: TyCtxt<'tcx>,
    /// Interpreter used for layout queries and constant evaluation.
    ecx: InterpCx<'tcx, DummyMachine>,
    local_decls: &'body LocalDecls<'tcx>,
    /// Value currently held by each local, if known.
    locals: IndexVec<Local, Option<VnIndex>>,
    /// Reverse mapping: for each value, the locals known to hold it, in the
    /// order they were assigned.
    rev_locals: IndexVec<VnIndex, SmallVec<[Local; 1]>>,
    /// Value interner: a value's position in this set is its `VnIndex`.
    values: FxIndexSet<Value<'tcx>>,
    /// Cached constant evaluation of each value, filled on first insertion.
    /// Kept in lockstep with `values` (see `insert`).
    evaluated: IndexVec<VnIndex, Option<OpTy<'tcx>>>,
    /// Counter for fresh disambiguators. Starts at 1: 0 is reserved for
    /// deterministic constants (see `insert_constant`).
    next_opaque: usize,
    /// `Deref` projection values created so far, so they can be turned opaque
    /// when memory may have been written (see `invalidate_derefs`).
    derefs: Vec<VnIndex>,
    ssa: &'body SsaLocals,
    dominators: Dominators<BasicBlock>,
    /// Locals reused by a replacement; their storage statements get removed
    /// by `StorageRemover` after the pass.
    reused_locals: DenseBitSet<Local>,
}
247
248impl<'body, 'tcx> VnState<'body, 'tcx> {
    fn new(
        tcx: TyCtxt<'tcx>,
        body: &Body<'tcx>,
        typing_env: ty::TypingEnv<'tcx>,
        ssa: &'body SsaLocals,
        dominators: Dominators<BasicBlock>,
        local_decls: &'body LocalDecls<'tcx>,
    ) -> Self {
        // Pre-size the interner and its parallel vectors: a heuristic upper
        // bound of a couple of values per statement plus a few per block.
        let num_values =
            2 * body.basic_blocks.iter().map(|bbdata| bbdata.statements.len()).sum::<usize>()
                + 4 * body.basic_blocks.len();
        VnState {
            tcx,
            ecx: InterpCx::new(tcx, DUMMY_SP, typing_env, DummyMachine),
            local_decls,
            locals: IndexVec::from_elem(None, local_decls),
            rev_locals: IndexVec::with_capacity(num_values),
            values: FxIndexSet::with_capacity_and_hasher(num_values, Default::default()),
            evaluated: IndexVec::with_capacity(num_values),
            // 0 is reserved as the disambiguator of deterministic constants.
            next_opaque: 1,
            derefs: Vec::new(),
            ssa,
            dominators,
            reused_locals: DenseBitSet::new_empty(local_decls.len()),
        }
    }
279
    /// Typing environment the interpreter uses for layout and const-eval queries.
    fn typing_env(&self) -> ty::TypingEnv<'tcx> {
        self.ecx.typing_env()
    }
283
    /// Interns `value` and returns its index. On first insertion, also tries to
    /// evaluate the value to a constant, keeping `values`, `evaluated` and
    /// `rev_locals` in lockstep so they can all be indexed by the same `VnIndex`.
    #[instrument(level = "trace", skip(self), ret)]
    fn insert(&mut self, value: Value<'tcx>) -> VnIndex {
        let (index, new) = self.values.insert_full(value);
        let index = VnIndex::from_usize(index);
        if new {
            // Evaluate first (it only reads already-interned sub-values), then
            // grow the parallel vectors; the debug asserts check lockstep.
            let evaluated = self.eval_to_const(index);
            let _index = self.evaluated.push(evaluated);
            debug_assert_eq!(index, _index);
            let _index = self.rev_locals.push(SmallVec::new());
            debug_assert_eq!(index, _index);
        }
        index
    }
298
299 fn next_opaque(&mut self) -> usize {
300 let next_opaque = self.next_opaque;
301 self.next_opaque += 1;
302 next_opaque
303 }
304
305 #[instrument(level = "trace", skip(self), ret)]
308 fn new_opaque(&mut self) -> VnIndex {
309 let value = Value::Opaque(self.next_opaque());
310 self.insert(value)
311 }
312
313 #[instrument(level = "trace", skip(self), ret)]
315 fn new_pointer(&mut self, place: Place<'tcx>, kind: AddressKind) -> VnIndex {
316 let value = Value::Address { place, kind, provenance: self.next_opaque() };
317 self.insert(value)
318 }
319
    /// Returns the [`Value`] interned at `index`. Panics if `index` was not
    /// produced by this state's `insert`.
    fn get(&self, index: VnIndex) -> &Value<'tcx> {
        self.values.get_index(index.as_usize()).unwrap()
    }
323
    /// Records that SSA `local` holds `value`, and updates the reverse mapping
    /// so `value` can later be materialized by reading `local`.
    #[instrument(level = "trace", skip(self))]
    fn assign(&mut self, local: Local, value: VnIndex) {
        debug_assert!(self.ssa.is_ssa(local));
        self.locals[local] = Some(value);
        self.rev_locals[value].push(local);
    }
331
332 fn insert_constant(&mut self, value: Const<'tcx>) -> VnIndex {
333 let disambiguator = if value.is_deterministic() {
334 0
336 } else {
337 let disambiguator = self.next_opaque();
340 debug_assert_ne!(disambiguator, 0);
342 disambiguator
343 };
344 self.insert(Value::Constant { value, disambiguator })
345 }
346
347 fn insert_bool(&mut self, flag: bool) -> VnIndex {
348 let value = Const::from_bool(self.tcx, flag);
350 debug_assert!(value.is_deterministic());
351 self.insert(Value::Constant { value, disambiguator: 0 })
352 }
353
354 fn insert_scalar(&mut self, scalar: Scalar, ty: Ty<'tcx>) -> VnIndex {
355 let value = Const::from_scalar(self.tcx, scalar, ty);
357 debug_assert!(value.is_deterministic());
358 self.insert(Value::Constant { value, disambiguator: 0 })
359 }
360
361 fn insert_tuple(&mut self, values: Vec<VnIndex>) -> VnIndex {
362 self.insert(Value::Aggregate(AggregateTy::Tuple, VariantIdx::ZERO, values))
363 }
364
365 fn insert_deref(&mut self, value: VnIndex) -> VnIndex {
366 let value = self.insert(Value::Projection(value, ProjectionElem::Deref));
367 self.derefs.push(value);
368 value
369 }
370
371 fn invalidate_derefs(&mut self) {
372 for deref in std::mem::take(&mut self.derefs) {
373 let opaque = self.next_opaque();
374 *self.values.get_index_mut2(deref.index()).unwrap() = Value::Opaque(opaque);
375 }
376 }
377
    /// Tries to evaluate `value` to a compile-time interpreter value.
    /// Returns `None` when the value is not (or cannot be proven) constant.
    #[instrument(level = "trace", skip(self), ret)]
    fn eval_to_const(&mut self, value: VnIndex) -> Option<OpTy<'tcx>> {
        use Value::*;
        let op = match *self.get(value) {
            // By definition unknown.
            Opaque(_) => return None,
            Repeat(..) => return None,

            Constant { ref value, disambiguator: _ } => {
                self.ecx.eval_mir_constant(value, DUMMY_SP, None).discard_err()?
            }
            Aggregate(kind, variant, ref fields) => {
                // All fields must themselves already be evaluated.
                let fields = fields
                    .iter()
                    .map(|&f| self.evaluated[f].as_ref())
                    .collect::<Option<Vec<_>>>()?;
                // Reconstruct the aggregate's type from `kind` and the fields.
                let ty = match kind {
                    AggregateTy::Array => {
                        assert!(fields.len() > 0);
                        Ty::new_array(self.tcx, fields[0].layout.ty, fields.len() as u64)
                    }
                    AggregateTy::Tuple => {
                        Ty::new_tup_from_iter(self.tcx, fields.iter().map(|f| f.layout.ty))
                    }
                    AggregateTy::Def(def_id, args) => {
                        self.tcx.type_of(def_id).instantiate(self.tcx, args)
                    }
                    AggregateTy::RawPtr { output_pointer_ty, .. } => output_pointer_ty,
                };
                let variant = if ty.is_enum() { Some(variant) } else { None };
                let ty = self.ecx.layout_of(ty).ok()?;
                if ty.is_zst() {
                    ImmTy::uninit(ty).into()
                } else if matches!(kind, AggregateTy::RawPtr { .. }) {
                    // Rebuild a (possibly wide) pointer from data and metadata.
                    let data = self.ecx.read_pointer(fields[0]).discard_err()?;
                    let meta = if fields[1].layout.is_zst() {
                        MemPlaceMeta::None
                    } else {
                        MemPlaceMeta::Meta(self.ecx.read_scalar(fields[1]).discard_err()?)
                    };
                    let ptr_imm = Immediate::new_pointer_with_meta(data, meta, &self.ecx);
                    ImmTy::from_immediate(ptr_imm, ty).into()
                } else if matches!(
                    ty.backend_repr,
                    BackendRepr::Scalar(..) | BackendRepr::ScalarPair(..)
                ) {
                    // Materialize the aggregate into a fresh allocation,
                    // writing each field and the discriminant, then freeze it.
                    let dest = self.ecx.allocate(ty, MemoryKind::Stack).discard_err()?;
                    let variant_dest = if let Some(variant) = variant {
                        self.ecx.project_downcast(&dest, variant).discard_err()?
                    } else {
                        dest.clone()
                    };
                    for (field_index, op) in fields.into_iter().enumerate() {
                        let field_dest = self
                            .ecx
                            .project_field(&variant_dest, FieldIdx::from_usize(field_index))
                            .discard_err()?;
                        self.ecx.copy_op(op, &field_dest).discard_err()?;
                    }
                    self.ecx
                        .write_discriminant(variant.unwrap_or(FIRST_VARIANT), &dest)
                        .discard_err()?;
                    self.ecx
                        .alloc_mark_immutable(dest.ptr().provenance.unwrap().alloc_id())
                        .discard_err()?;
                    dest.into()
                } else {
                    return None;
                }
            }

            Projection(base, elem) => {
                let value = self.evaluated[base].as_ref()?;
                // Translate the `VnIndex`-based projection into the interpreter's
                // form; a runtime `Index` has no constant evaluation.
                let elem = match elem {
                    ProjectionElem::Deref => ProjectionElem::Deref,
                    ProjectionElem::Downcast(name, read_variant) => {
                        ProjectionElem::Downcast(name, read_variant)
                    }
                    ProjectionElem::Field(f, ty) => ProjectionElem::Field(f, ty),
                    ProjectionElem::ConstantIndex { offset, min_length, from_end } => {
                        ProjectionElem::ConstantIndex { offset, min_length, from_end }
                    }
                    ProjectionElem::Subslice { from, to, from_end } => {
                        ProjectionElem::Subslice { from, to, from_end }
                    }
                    ProjectionElem::OpaqueCast(ty) => ProjectionElem::OpaqueCast(ty),
                    ProjectionElem::Subtype(ty) => ProjectionElem::Subtype(ty),
                    ProjectionElem::UnwrapUnsafeBinder(ty) => {
                        ProjectionElem::UnwrapUnsafeBinder(ty)
                    }
                    ProjectionElem::Index(_) => return None,
                };
                self.ecx.project(value, elem).discard_err()?
            }
            Address { place, kind, provenance: _ } => {
                // Only addresses of `(*local).proj…` places are evaluated, by
                // dereferencing the already-known pointer in `local`.
                if !place.is_indirect_first_projection() {
                    return None;
                }
                let local = self.locals[place.local]?;
                let pointer = self.evaluated[local].as_ref()?;
                let mut mplace = self.ecx.deref_pointer(pointer).discard_err()?;
                for proj in place.projection.iter().skip(1) {
                    // Runtime indices are not constant.
                    if matches!(proj, ProjectionElem::Index(_)) {
                        return None;
                    }
                    mplace = self.ecx.project(&mplace, proj).discard_err()?;
                }
                let pointer = mplace.to_ref(&self.ecx);
                let ty = match kind {
                    AddressKind::Ref(bk) => Ty::new_ref(
                        self.tcx,
                        self.tcx.lifetimes.re_erased,
                        mplace.layout.ty,
                        bk.to_mutbl_lossy(),
                    ),
                    AddressKind::Address(mutbl) => {
                        Ty::new_ptr(self.tcx, mplace.layout.ty, mutbl.to_mutbl_lossy())
                    }
                };
                let layout = self.ecx.layout_of(ty).ok()?;
                ImmTy::from_immediate(pointer, layout).into()
            }

            Discriminant(base) => {
                let base = self.evaluated[base].as_ref()?;
                let variant = self.ecx.read_discriminant(base).discard_err()?;
                let discr_value =
                    self.ecx.discriminant_for_variant(base.layout.ty, variant).discard_err()?;
                discr_value.into()
            }
            Len(slice) => {
                let slice = self.evaluated[slice].as_ref()?;
                let usize_layout = self.ecx.layout_of(self.tcx.types.usize).unwrap();
                let len = slice.len(&self.ecx).discard_err()?;
                let imm = ImmTy::from_uint(len, usize_layout);
                imm.into()
            }
            NullaryOp(null_op, ty) => {
                let layout = self.ecx.layout_of(ty).ok()?;
                // Size/align of an unsized type is not a compile-time constant.
                if let NullOp::SizeOf | NullOp::AlignOf = null_op
                    && layout.is_unsized()
                {
                    return None;
                }
                let val = match null_op {
                    NullOp::SizeOf => layout.size.bytes(),
                    NullOp::AlignOf => layout.align.abi.bytes(),
                    NullOp::OffsetOf(fields) => self
                        .ecx
                        .tcx
                        .offset_of_subfield(self.typing_env(), layout, fields.iter())
                        .bytes(),
                    // These depend on compilation flags, not on the program.
                    NullOp::UbChecks => return None,
                    NullOp::ContractChecks => return None,
                };
                let usize_layout = self.ecx.layout_of(self.tcx.types.usize).unwrap();
                let imm = ImmTy::from_uint(val, usize_layout);
                imm.into()
            }
            UnaryOp(un_op, operand) => {
                let operand = self.evaluated[operand].as_ref()?;
                let operand = self.ecx.read_immediate(operand).discard_err()?;
                let val = self.ecx.unary_op(un_op, &operand).discard_err()?;
                val.into()
            }
            BinaryOp(bin_op, lhs, rhs) => {
                let lhs = self.evaluated[lhs].as_ref()?;
                let lhs = self.ecx.read_immediate(lhs).discard_err()?;
                let rhs = self.evaluated[rhs].as_ref()?;
                let rhs = self.ecx.read_immediate(rhs).discard_err()?;
                let val = self.ecx.binary_op(bin_op, &lhs, &rhs).discard_err()?;
                val.into()
            }
            Cast { kind, value, from: _, to } => match kind {
                CastKind::IntToInt | CastKind::IntToFloat => {
                    let value = self.evaluated[value].as_ref()?;
                    let value = self.ecx.read_immediate(value).discard_err()?;
                    let to = self.ecx.layout_of(to).ok()?;
                    let res = self.ecx.int_to_int_or_float(&value, to).discard_err()?;
                    res.into()
                }
                CastKind::FloatToFloat | CastKind::FloatToInt => {
                    let value = self.evaluated[value].as_ref()?;
                    let value = self.ecx.read_immediate(value).discard_err()?;
                    let to = self.ecx.layout_of(to).ok()?;
                    let res = self.ecx.float_to_float_or_int(&value, to).discard_err()?;
                    res.into()
                }
                CastKind::Transmute => {
                    let value = self.evaluated[value].as_ref()?;
                    let to = self.ecx.layout_of(to).ok()?;
                    // For immediates, only allow transmutes that keep each
                    // scalar's size and do not turn integers into pointers
                    // (which would require provenance out of thin air).
                    if value.as_mplace_or_imm().is_right() {
                        let can_transmute = match (value.layout.backend_repr, to.backend_repr) {
                            (BackendRepr::Scalar(s1), BackendRepr::Scalar(s2)) => {
                                s1.size(&self.ecx) == s2.size(&self.ecx)
                                    && !matches!(s1.primitive(), Primitive::Pointer(..))
                            }
                            (BackendRepr::ScalarPair(a1, b1), BackendRepr::ScalarPair(a2, b2)) => {
                                a1.size(&self.ecx) == a2.size(&self.ecx) &&
                                b1.size(&self.ecx) == b2.size(&self.ecx) &&
                                b1.align(&self.ecx) == b2.align(&self.ecx) &&
                                !matches!(a1.primitive(), Primitive::Pointer(..))
                                && !matches!(b1.primitive(), Primitive::Pointer(..))
                            }
                            _ => false,
                        };
                        if !can_transmute {
                            return None;
                        }
                    }
                    value.offset(Size::ZERO, to, &self.ecx).discard_err()?
                }
                CastKind::PointerCoercion(ty::adjustment::PointerCoercion::Unsize, _) => {
                    // Unsizing is done by materializing into an allocation.
                    let src = self.evaluated[value].as_ref()?;
                    let to = self.ecx.layout_of(to).ok()?;
                    let dest = self.ecx.allocate(to, MemoryKind::Stack).discard_err()?;
                    self.ecx.unsize_into(src, to, &dest.clone().into()).discard_err()?;
                    self.ecx
                        .alloc_mark_immutable(dest.ptr().provenance.unwrap().alloc_id())
                        .discard_err()?;
                    dest.into()
                }
                CastKind::FnPtrToPtr | CastKind::PtrToPtr => {
                    let src = self.evaluated[value].as_ref()?;
                    let src = self.ecx.read_immediate(src).discard_err()?;
                    let to = self.ecx.layout_of(to).ok()?;
                    let ret = self.ecx.ptr_to_ptr(&src, to).discard_err()?;
                    ret.into()
                }
                CastKind::PointerCoercion(ty::adjustment::PointerCoercion::UnsafeFnPointer, _) => {
                    // Only the type changes; the value is reused as-is.
                    let src = self.evaluated[value].as_ref()?;
                    let src = self.ecx.read_immediate(src).discard_err()?;
                    let to = self.ecx.layout_of(to).ok()?;
                    ImmTy::from_immediate(*src, to).into()
                }
                _ => return None,
            },
        };
        Some(op)
    }
628
    /// Applies projection `proj` on top of symbolic value `value`, returning
    /// the value of the projected place, or `None` if it cannot be tracked.
    ///
    /// `from_non_ssa_index` is set when an `Index` projection is resolved
    /// through a `Repeat` even though the index local's value is unknown.
    fn project(
        &mut self,
        place: PlaceRef<'tcx>,
        value: VnIndex,
        proj: PlaceElem<'tcx>,
        from_non_ssa_index: &mut bool,
    ) -> Option<VnIndex> {
        let proj = match proj {
            ProjectionElem::Deref => {
                let ty = place.ty(self.local_decls, self.tcx).ty;
                // Only track derefs of shared references to freeze pointees;
                // anything else may be mutated through another name, so the
                // loaded value cannot be given a stable identity.
                if let Some(Mutability::Not) = ty.ref_mutability()
                    && let Some(pointee_ty) = ty.builtin_deref(true)
                    && pointee_ty.is_freeze(self.tcx, self.typing_env())
                {
                    return Some(self.insert_deref(value));
                } else {
                    return None;
                }
            }
            ProjectionElem::Downcast(name, index) => ProjectionElem::Downcast(name, index),
            ProjectionElem::Field(f, ty) => {
                // Reading a field of a known aggregate yields the field value
                // directly — also through a downcast, when the variant the
                // aggregate was written with matches the one being read.
                if let Value::Aggregate(_, _, fields) = self.get(value) {
                    return Some(fields[f.as_usize()]);
                } else if let Value::Projection(outer_value, ProjectionElem::Downcast(_, read_variant)) = self.get(value)
                    && let Value::Aggregate(_, written_variant, fields) = self.get(*outer_value)
                    && written_variant == read_variant
                {
                    return Some(fields[f.as_usize()]);
                }
                ProjectionElem::Field(f, ty)
            }
            ProjectionElem::Index(idx) => {
                if let Value::Repeat(inner, _) = self.get(value) {
                    // Any in-bounds index into a repeat yields the repeated
                    // element; record whether the index local was unknown.
                    *from_non_ssa_index |= self.locals[idx].is_none();
                    return Some(*inner);
                }
                let idx = self.locals[idx]?;
                ProjectionElem::Index(idx)
            }
            ProjectionElem::ConstantIndex { offset, min_length, from_end } => {
                match self.get(value) {
                    Value::Repeat(inner, _) => {
                        return Some(*inner);
                    }
                    Value::Aggregate(AggregateTy::Array, _, operands) => {
                        // Resolve `from_end` against the known array length.
                        let offset = if from_end {
                            operands.len() - offset as usize
                        } else {
                            offset as usize
                        };
                        return operands.get(offset).copied();
                    }
                    _ => {}
                };
                ProjectionElem::ConstantIndex { offset, min_length, from_end }
            }
            ProjectionElem::Subslice { from, to, from_end } => {
                ProjectionElem::Subslice { from, to, from_end }
            }
            ProjectionElem::OpaqueCast(ty) => ProjectionElem::OpaqueCast(ty),
            ProjectionElem::Subtype(ty) => ProjectionElem::Subtype(ty),
            ProjectionElem::UnwrapUnsafeBinder(ty) => ProjectionElem::UnwrapUnsafeBinder(ty),
        };

        Some(self.insert(Value::Projection(value, proj)))
    }
712
    /// Simplifies `place`'s projections in-place: replaces the base local of an
    /// indirect place with an equivalent earlier local, and turns `Index`
    /// projections with a known constant index into `ConstantIndex`.
    #[instrument(level = "trace", skip(self))]
    fn simplify_place_projection(&mut self, place: &mut Place<'tcx>, location: Location) {
        // For `(*base).…` places, try to replace `base` with another local
        // known to hold the same pointer at `location`.
        if place.is_indirect_first_projection()
            && let Some(base) = self.locals[place.local]
            && let Some(new_local) = self.try_as_local(base, location)
            && place.local != new_local
        {
            place.local = new_local;
            self.reused_locals.insert(new_local);
        }

        // Copy-on-write: only rebuild the interned projection list if changed.
        let mut projection = Cow::Borrowed(&place.projection[..]);

        for i in 0..projection.len() {
            let elem = projection[i];
            if let ProjectionElem::Index(idx_local) = elem
                && let Some(idx) = self.locals[idx_local]
            {
                if let Some(offset) = self.evaluated[idx].as_ref()
                    && let Some(offset) = self.ecx.read_target_usize(offset).discard_err()
                    && let Some(min_length) = offset.checked_add(1)
                {
                    // Known constant index: drop the dependence on the local.
                    projection.to_mut()[i] =
                        ProjectionElem::ConstantIndex { offset, min_length, from_end: false };
                } else if let Some(new_idx_local) = self.try_as_local(idx, location)
                    && idx_local != new_idx_local
                {
                    // Otherwise reuse an equivalent local as the index.
                    projection.to_mut()[i] = ProjectionElem::Index(new_idx_local);
                    self.reused_locals.insert(new_idx_local);
                }
            }
        }

        if projection.is_owned() {
            place.projection = self.tcx.mk_place_elems(&projection);
        }

        trace!(?place);
    }
755
    /// Computes the value of `place`, rewriting `place` to the simplest
    /// equivalent form discovered along the way.
    #[instrument(level = "trace", skip(self), ret)]
    fn simplify_place_value(
        &mut self,
        place: &mut Place<'tcx>,
        location: Location,
    ) -> Option<VnIndex> {
        self.simplify_place_projection(place, location);

        // Invariant: `place_ref` is the best (shortest) spelling of `place`
        // found so far; it is updated as projections are consumed.
        let mut place_ref = place.as_ref();

        let mut value = self.locals[place.local]?;
        let mut from_non_ssa_index = false;
        for (index, proj) in place.projection.iter().enumerate() {
            // Look through `*&x` chains: the deref of the address of a shared
            // borrow is the borrowed place itself.
            if let Value::Projection(pointer, ProjectionElem::Deref) = *self.get(value)
                && let Value::Address { place: mut pointee, kind, .. } = *self.get(pointer)
                && let AddressKind::Ref(BorrowKind::Shared) = kind
                && let Some(v) = self.simplify_place_value(&mut pointee, location)
            {
                value = v;
                place_ref = pointee.project_deeper(&place.projection[index..], self.tcx).as_ref();
            }
            if let Some(local) = self.try_as_local(value, location) {
                // The intermediate value is available in a local: shorten the
                // place to start from it, keeping the remaining projections.
                place_ref = PlaceRef { local, projection: &place.projection[index..] };
            }

            let base = PlaceRef { local: place.local, projection: &place.projection[..index] };
            value = self.project(base, value, proj, &mut from_non_ssa_index)?;
        }

        // Same look-through for the fully-projected value.
        if let Value::Projection(pointer, ProjectionElem::Deref) = *self.get(value)
            && let Value::Address { place: mut pointee, kind, .. } = *self.get(pointer)
            && let AddressKind::Ref(BorrowKind::Shared) = kind
            && let Some(v) = self.simplify_place_value(&mut pointee, location)
        {
            value = v;
            place_ref = pointee.project_deeper(&[], self.tcx).as_ref();
        }
        if let Some(new_local) = self.try_as_local(value, location) {
            place_ref = PlaceRef { local: new_local, projection: &[] };
        } else if from_non_ssa_index {
            // The value was obtained through an `Index` whose index local is
            // unknown; without a replacement local it cannot be reused.
            return None;
        }

        if place_ref.local != place.local || place_ref.projection.len() < place.projection.len() {
            // A strictly simpler spelling was found: rewrite the place.
            *place = place_ref.project_deeper(&[], self.tcx);
            self.reused_locals.insert(place_ref.local);
        }

        Some(value)
    }
816
817 #[instrument(level = "trace", skip(self), ret)]
818 fn simplify_operand(
819 &mut self,
820 operand: &mut Operand<'tcx>,
821 location: Location,
822 ) -> Option<VnIndex> {
823 match *operand {
824 Operand::Constant(ref constant) => Some(self.insert_constant(constant.const_)),
825 Operand::Copy(ref mut place) | Operand::Move(ref mut place) => {
826 let value = self.simplify_place_value(place, location)?;
827 if let Some(const_) = self.try_as_constant(value) {
828 *operand = Operand::Constant(Box::new(const_));
829 }
830 Some(value)
831 }
832 }
833 }
834
    /// Computes the symbolic value of `rvalue`, rewriting it in-place when a
    /// cheaper equivalent form is found. Returns `None` for untracked rvalues.
    #[instrument(level = "trace", skip(self), ret)]
    fn simplify_rvalue(
        &mut self,
        lhs: &Place<'tcx>,
        rvalue: &mut Rvalue<'tcx>,
        location: Location,
    ) -> Option<VnIndex> {
        let value = match *rvalue {
            Rvalue::Use(ref mut operand) => return self.simplify_operand(operand, location),
            Rvalue::CopyForDeref(place) => {
                // Treated exactly like a plain copy of the place.
                let mut operand = Operand::Copy(place);
                let val = self.simplify_operand(&mut operand, location);
                *rvalue = Rvalue::Use(operand);
                return val;
            }

            Rvalue::Repeat(ref mut op, amount) => {
                let op = self.simplify_operand(op, location)?;
                Value::Repeat(op, amount)
            }
            Rvalue::NullaryOp(op, ty) => Value::NullaryOp(op, ty),
            Rvalue::Aggregate(..) => return self.simplify_aggregate(lhs, rvalue, location),
            Rvalue::Ref(_, borrow_kind, ref mut place) => {
                self.simplify_place_projection(place, location);
                return Some(self.new_pointer(*place, AddressKind::Ref(borrow_kind)));
            }
            Rvalue::RawPtr(mutbl, ref mut place) => {
                self.simplify_place_projection(place, location);
                return Some(self.new_pointer(*place, AddressKind::Address(mutbl)));
            }
            Rvalue::WrapUnsafeBinder(ref mut op, ty) => {
                // Modeled as a transmute from the operand's type to the binder.
                let value = self.simplify_operand(op, location)?;
                Value::Cast {
                    kind: CastKind::Transmute,
                    value,
                    from: op.ty(self.local_decls, self.tcx),
                    to: ty,
                }
            }

            Rvalue::Len(ref mut place) => return self.simplify_len(place, location),
            Rvalue::Cast(ref mut kind, ref mut value, to) => {
                return self.simplify_cast(kind, value, to, location);
            }
            Rvalue::BinaryOp(op, box (ref mut lhs, ref mut rhs)) => {
                return self.simplify_binary(op, lhs, rhs, location);
            }
            Rvalue::UnaryOp(op, ref mut arg_op) => {
                return self.simplify_unary(op, arg_op, location);
            }
            Rvalue::Discriminant(ref mut place) => {
                let place = self.simplify_place_value(place, location)?;
                // Prefer a directly computed discriminant constant if possible.
                if let Some(discr) = self.simplify_discriminant(place) {
                    return Some(discr);
                }
                Value::Discriminant(place)
            }

            // Not value-numbered.
            Rvalue::ThreadLocalRef(..) | Rvalue::ShallowInitBox(..) => return None,
        };
        debug!(?value);
        Some(self.insert(value))
    }
902
903 fn simplify_discriminant(&mut self, place: VnIndex) -> Option<VnIndex> {
904 if let Value::Aggregate(enum_ty, variant, _) = *self.get(place)
905 && let AggregateTy::Def(enum_did, enum_args) = enum_ty
906 && let DefKind::Enum = self.tcx.def_kind(enum_did)
907 {
908 let enum_ty = self.tcx.type_of(enum_did).instantiate(self.tcx, enum_args);
909 let discr = self.ecx.discriminant_for_variant(enum_ty, variant).discard_err()?;
910 return Some(self.insert_scalar(discr.to_scalar(), discr.layout.ty));
911 }
912
913 None
914 }
915
916 fn try_as_place_elem(
917 &mut self,
918 proj: ProjectionElem<VnIndex, Ty<'tcx>>,
919 loc: Location,
920 ) -> Option<PlaceElem<'tcx>> {
921 Some(match proj {
922 ProjectionElem::Deref => ProjectionElem::Deref,
923 ProjectionElem::Field(idx, ty) => ProjectionElem::Field(idx, ty),
924 ProjectionElem::Index(idx) => {
925 let Some(local) = self.try_as_local(idx, loc) else {
926 return None;
927 };
928 self.reused_locals.insert(local);
929 ProjectionElem::Index(local)
930 }
931 ProjectionElem::ConstantIndex { offset, min_length, from_end } => {
932 ProjectionElem::ConstantIndex { offset, min_length, from_end }
933 }
934 ProjectionElem::Subslice { from, to, from_end } => {
935 ProjectionElem::Subslice { from, to, from_end }
936 }
937 ProjectionElem::Downcast(symbol, idx) => ProjectionElem::Downcast(symbol, idx),
938 ProjectionElem::OpaqueCast(idx) => ProjectionElem::OpaqueCast(idx),
939 ProjectionElem::Subtype(idx) => ProjectionElem::Subtype(idx),
940 ProjectionElem::UnwrapUnsafeBinder(ty) => ProjectionElem::UnwrapUnsafeBinder(ty),
941 })
942 }
943
    /// If the aggregate exactly reassembles every field of an existing value
    /// (`Foo { a: x.a, b: x.b, … }`), replaces it by a copy of that value.
    fn simplify_aggregate_to_copy(
        &mut self,
        lhs: &Place<'tcx>,
        rvalue: &mut Rvalue<'tcx>,
        location: Location,
        fields: &[VnIndex],
        variant_index: VariantIdx,
    ) -> Option<VnIndex> {
        let Some(&first_field) = fields.first() else {
            return None;
        };
        let Value::Projection(copy_from_value, _) = *self.get(first_field) else {
            return None;
        };
        // Bail unless field `i` is exactly `copy_from_value.i` for every `i`.
        if fields.iter().enumerate().any(|(index, &v)| {
            if let Value::Projection(pointer, ProjectionElem::Field(from_index, _)) = *self.get(v)
                && copy_from_value == pointer
                && from_index.index() == index
            {
                return false;
            }
            true
        }) {
            return None;
        }

        // If the fields were read through a downcast, the copy source is the
        // enum value itself — but only when the variants match.
        let mut copy_from_local_value = copy_from_value;
        if let Value::Projection(pointer, proj) = *self.get(copy_from_value)
            && let ProjectionElem::Downcast(_, read_variant) = proj
        {
            if variant_index == read_variant {
                copy_from_local_value = pointer;
            } else {
                return None;
            }
        }

        // Only rewrite when the source is nameable as a place of the same type.
        if let Some(place) = self.try_as_place(copy_from_local_value, location, true)
            && rvalue.ty(self.local_decls, self.tcx) == place.ty(self.local_decls, self.tcx).ty
        {
            // NOTE(review): the rvalue is only rewritten when the destination
            // is a plain local; the value is returned to the caller either way.
            if lhs.as_local().is_some() {
                self.reused_locals.insert(place.local);
                *rvalue = Rvalue::Use(Operand::Copy(place));
            }
            return Some(copy_from_local_value);
        }

        None
    }
1000
    /// Computes the value of an `Rvalue::Aggregate`, simplifying it to a
    /// zero-sized constant, a `Repeat`, or a copy of an existing place when
    /// one of those forms applies.
    fn simplify_aggregate(
        &mut self,
        lhs: &Place<'tcx>,
        rvalue: &mut Rvalue<'tcx>,
        location: Location,
    ) -> Option<VnIndex> {
        let Rvalue::Aggregate(box ref kind, ref mut field_ops) = *rvalue else { bug!() };

        let tcx = self.tcx;
        if field_ops.is_empty() {
            // A fieldless aggregate may still carry data (enum discriminant,
            // coroutine state): only fold the genuinely zero-sized kinds.
            let is_zst = match *kind {
                AggregateKind::Array(..)
                | AggregateKind::Tuple
                | AggregateKind::Closure(..)
                | AggregateKind::CoroutineClosure(..) => true,
                AggregateKind::Adt(did, ..) => tcx.def_kind(did) != DefKind::Enum,
                AggregateKind::Coroutine(..) => false,
                AggregateKind::RawPtr(..) => bug!("MIR for RawPtr aggregate must have 2 fields"),
            };

            if is_zst {
                let ty = rvalue.ty(self.local_decls, tcx);
                return Some(self.insert_constant(Const::zero_sized(ty)));
            }
        }

        let (mut ty, variant_index) = match *kind {
            AggregateKind::Array(..) => {
                assert!(!field_ops.is_empty());
                (AggregateTy::Array, FIRST_VARIANT)
            }
            AggregateKind::Tuple => {
                assert!(!field_ops.is_empty());
                (AggregateTy::Tuple, FIRST_VARIANT)
            }
            AggregateKind::Closure(did, args)
            | AggregateKind::CoroutineClosure(did, args)
            | AggregateKind::Coroutine(did, args) => (AggregateTy::Def(did, args), FIRST_VARIANT),
            AggregateKind::Adt(did, variant_index, args, _, None) => {
                (AggregateTy::Def(did, args), variant_index)
            }
            // Active-field aggregates (unions) are not tracked.
            AggregateKind::Adt(_, _, _, _, Some(_)) => return None,
            AggregateKind::RawPtr(pointee_ty, mtbl) => {
                assert_eq!(field_ops.len(), 2);
                let data_pointer_ty = field_ops[FieldIdx::ZERO].ty(self.local_decls, self.tcx);
                let output_pointer_ty = Ty::new_ptr(self.tcx, pointee_ty, mtbl);
                (AggregateTy::RawPtr { data_pointer_ty, output_pointer_ty }, FIRST_VARIANT)
            }
        };

        // Fields with no simplifiable value still get a (fresh opaque) index.
        let mut fields: Vec<_> = field_ops
            .iter_mut()
            .map(|op| self.simplify_operand(op, location).unwrap_or_else(|| self.new_opaque()))
            .collect();

        if let AggregateTy::RawPtr { data_pointer_ty, output_pointer_ty } = &mut ty {
            let mut was_updated = false;

            // Peel ptr-to-ptr casts off the data pointer, as long as they keep
            // the mutability and the source pointee is sized.
            while let Value::Cast {
                kind: CastKind::PtrToPtr,
                value: cast_value,
                from: cast_from,
                to: _,
            } = self.get(fields[0])
                && let ty::RawPtr(from_pointee_ty, from_mtbl) = cast_from.kind()
                && let ty::RawPtr(_, output_mtbl) = output_pointer_ty.kind()
                && from_mtbl == output_mtbl
                && from_pointee_ty.is_sized(self.tcx, self.typing_env())
            {
                fields[0] = *cast_value;
                *data_pointer_ty = *cast_from;
                was_updated = true;
            }

            if was_updated && let Some(op) = self.try_as_operand(fields[0], location) {
                field_ops[FieldIdx::ZERO] = op;
            }
        }

        // Arrays of more than 4 copies of one value become `Repeat`.
        if let AggregateTy::Array = ty
            && fields.len() > 4
        {
            let first = fields[0];
            if fields.iter().all(|&v| v == first) {
                let len = ty::Const::from_target_usize(self.tcx, fields.len().try_into().unwrap());
                if let Some(op) = self.try_as_operand(first, location) {
                    *rvalue = Rvalue::Repeat(op, len);
                }
                return Some(self.insert(Value::Repeat(first, len)));
            }
        }

        // An ADT rebuilt field-by-field from one source collapses to a copy.
        if let AggregateTy::Def(_, _) = ty
            && let Some(value) =
                self.simplify_aggregate_to_copy(lhs, rvalue, location, &fields, variant_index)
        {
            return Some(value);
        }

        Some(self.insert(Value::Aggregate(ty, variant_index, fields)))
    }
1106
    /// Computes the value of a unary operation, applying algebraic rewrites:
    /// double negation, `!`/comparison fusion, and `PtrMetadata` of pointers
    /// whose metadata is statically known.
    #[instrument(level = "trace", skip(self), ret)]
    fn simplify_unary(
        &mut self,
        op: UnOp,
        arg_op: &mut Operand<'tcx>,
        location: Location,
    ) -> Option<VnIndex> {
        let mut arg_index = self.simplify_operand(arg_op, location)?;

        // `PtrMetadata` is unaffected by metadata-preserving pointer
        // transformations: look through them before matching below.
        if op == UnOp::PtrMetadata {
            let mut was_updated = false;
            loop {
                match self.get(arg_index) {
                    // Ptr-to-ptr cast between pointers with the same metadata
                    // kind: metadata is unchanged.
                    Value::Cast { kind: CastKind::PtrToPtr, value: inner, from, to }
                        if self.pointers_have_same_metadata(*from, *to) =>
                    {
                        arg_index = *inner;
                        was_updated = true;
                        continue;
                    }

                    // The address of `*p` (a bare deref of a local) has the
                    // same metadata as `p` itself.
                    Value::Address { place, kind: _, provenance: _ }
                        if let PlaceRef { local, projection: [PlaceElem::Deref] } =
                            place.as_ref()
                            && let Some(local_index) = self.locals[local] =>
                    {
                        arg_index = local_index;
                        was_updated = true;
                        continue;
                    }

                    _ => {
                        // If anything was peeled, reflect it in the operand.
                        if was_updated && let Some(op) = self.try_as_operand(arg_index, location) {
                            *arg_op = op;
                        }
                        break;
                    }
                }
            }
        }

        let value = match (op, self.get(arg_index)) {
            // Involutive operators cancel.
            (UnOp::Not, Value::UnaryOp(UnOp::Not, inner)) => return Some(*inner),
            (UnOp::Neg, Value::UnaryOp(UnOp::Neg, inner)) => return Some(*inner),
            // `!(a == b)` is `a != b`, and vice versa.
            (UnOp::Not, Value::BinaryOp(BinOp::Eq, lhs, rhs)) => {
                Value::BinaryOp(BinOp::Ne, *lhs, *rhs)
            }
            (UnOp::Not, Value::BinaryOp(BinOp::Ne, lhs, rhs)) => {
                Value::BinaryOp(BinOp::Eq, *lhs, *rhs)
            }
            // The metadata of an aggregate wide pointer is its second field.
            (UnOp::PtrMetadata, Value::Aggregate(AggregateTy::RawPtr { .. }, _, fields)) => {
                return Some(fields[1]);
            }
            // The metadata of an array-to-slice unsizing is the array length.
            (
                UnOp::PtrMetadata,
                Value::Cast {
                    kind: CastKind::PointerCoercion(ty::adjustment::PointerCoercion::Unsize, _),
                    from,
                    to,
                    ..
                },
            ) if let ty::Slice(..) = to.builtin_deref(true).unwrap().kind()
                && let ty::Array(_, len) = from.builtin_deref(true).unwrap().kind() =>
            {
                return Some(self.insert_constant(Const::Ty(self.tcx.types.usize, *len)));
            }
            _ => Value::UnaryOp(op, arg_index),
        };
        Some(self.insert(value))
    }
1189
    /// Value-numbers a binary operation, applying the simplifications from
    /// [`Self::simplify_binary_inner`]. Both operands may be rewritten in
    /// place to simpler forms.
    #[instrument(level = "trace", skip(self), ret)]
    fn simplify_binary(
        &mut self,
        op: BinOp,
        lhs_operand: &mut Operand<'tcx>,
        rhs_operand: &mut Operand<'tcx>,
        location: Location,
    ) -> Option<VnIndex> {
        // Simplify both operands before bailing out, so each gets rewritten
        // in place even if the other cannot be value-numbered.
        let lhs = self.simplify_operand(lhs_operand, location);
        let rhs = self.simplify_operand(rhs_operand, location);
        let mut lhs = lhs?;
        let mut rhs = rhs?;

        let lhs_ty = lhs_operand.ty(self.local_decls, self.tcx);

        // When comparing pointers that were both `PtrToPtr`-cast from the same
        // source type without changing the metadata type, compare the original
        // pointers instead: the casts cannot change the outcome.
        if let BinOp::Eq | BinOp::Ne | BinOp::Lt | BinOp::Le | BinOp::Gt | BinOp::Ge = op
            && lhs_ty.is_any_ptr()
            && let Value::Cast {
                kind: CastKind::PtrToPtr, value: lhs_value, from: lhs_from, ..
            } = self.get(lhs)
            && let Value::Cast {
                kind: CastKind::PtrToPtr, value: rhs_value, from: rhs_from, ..
            } = self.get(rhs)
            && lhs_from == rhs_from
            && self.pointers_have_same_metadata(*lhs_from, lhs_ty)
        {
            lhs = *lhs_value;
            rhs = *rhs_value;
            // Only rewrite the MIR when both sides can be materialized.
            if let Some(lhs_op) = self.try_as_operand(lhs, location)
                && let Some(rhs_op) = self.try_as_operand(rhs, location)
            {
                *lhs_operand = lhs_op;
                *rhs_operand = rhs_op;
            }
        }

        if let Some(value) = self.simplify_binary_inner(op, lhs_ty, lhs, rhs) {
            return Some(value);
        }
        let value = Value::BinaryOp(op, lhs, rhs);
        Some(self.insert(value))
    }
1236
    /// Applies algebraic identities to a value-numbered binary operation:
    /// neutral elements (`x + 0`, `x * 1`, ...), absorbing elements
    /// (`x * 0`, `x & 0`, ...) and trivial (in)equalities (`x - x`, `x == x`).
    fn simplify_binary_inner(
        &mut self,
        op: BinOp,
        lhs_ty: Ty<'tcx>,
        lhs: VnIndex,
        rhs: VnIndex,
    ) -> Option<VnIndex> {
        // Only integers, booleans, chars and pointers: the identities below do
        // not all hold for floats (e.g. `NaN - NaN` is not `0.0`).
        let reasonable_ty =
            lhs_ty.is_integral() || lhs_ty.is_bool() || lhs_ty.is_char() || lhs_ty.is_any_ptr();
        if !reasonable_ty {
            return None;
        }

        let layout = self.ecx.layout_of(lhs_ty).ok()?;

        // Reads an operand back as its raw bits, if it has been evaluated to a
        // scalar constant.
        let as_bits = |value: VnIndex| {
            let constant = self.evaluated[value].as_ref()?;
            if layout.backend_repr.is_scalar() {
                let scalar = self.ecx.read_scalar(constant).discard_err()?;
                scalar.to_bits(constant.layout.size).discard_err()
            } else {
                // `to_bits` only makes sense for scalar layouts.
                None
            }
        };

        // `Left` holds a known constant (as raw bits), `Right` a symbolic
        // value identified by its value number.
        use Either::{Left, Right};
        let a = as_bits(lhs).map_or(Right(lhs), Left);
        let b = as_bits(rhs).map_or(Right(rhs), Left);

        let result = match (op, a, b) {
            // Neutral elements: the operation returns the other operand
            // unchanged (`0 + x`, `x - 0`, `x | 0`, `x << 0`, `1 * x`, ...).
            (
                BinOp::Add
                | BinOp::AddWithOverflow
                | BinOp::AddUnchecked
                | BinOp::BitOr
                | BinOp::BitXor,
                Left(0),
                Right(p),
            )
            | (
                BinOp::Add
                | BinOp::AddWithOverflow
                | BinOp::AddUnchecked
                | BinOp::BitOr
                | BinOp::BitXor
                | BinOp::Sub
                | BinOp::SubWithOverflow
                | BinOp::SubUnchecked
                | BinOp::Offset
                | BinOp::Shl
                | BinOp::Shr,
                Right(p),
                Left(0),
            )
            | (BinOp::Mul | BinOp::MulWithOverflow | BinOp::MulUnchecked, Left(1), Right(p))
            | (
                BinOp::Mul | BinOp::MulWithOverflow | BinOp::MulUnchecked | BinOp::Div,
                Right(p),
                Left(1),
            ) => p,
            // `x & !0` is `x` (for `bool`, the all-ones value is `1`).
            (BinOp::BitAnd, Right(p), Left(ones)) | (BinOp::BitAnd, Left(ones), Right(p))
                if ones == layout.size.truncate(u128::MAX)
                    || (layout.ty.is_bool() && ones == 1) =>
            {
                p
            }
            // Absorbing elements: the result is always zero
            // (`x * 0`, `x & 0`, `x % 1`, `0 / x`, `0 << x`, ...).
            (
                BinOp::Mul | BinOp::MulWithOverflow | BinOp::MulUnchecked | BinOp::BitAnd,
                _,
                Left(0),
            )
            | (BinOp::Rem, _, Left(1))
            | (
                BinOp::Mul
                | BinOp::MulWithOverflow
                | BinOp::MulUnchecked
                | BinOp::Div
                | BinOp::Rem
                | BinOp::BitAnd
                | BinOp::Shl
                | BinOp::Shr,
                Left(0),
                _,
            ) => self.insert_scalar(Scalar::from_uint(0u128, layout.size), lhs_ty),
            // `x | !0` is `!0` (for `bool`, `x | true` is `true`).
            (BinOp::BitOr, _, Left(ones)) | (BinOp::BitOr, Left(ones), _)
                if ones == layout.size.truncate(u128::MAX)
                    || (layout.ty.is_bool() && ones == 1) =>
            {
                self.insert_scalar(Scalar::from_uint(ones, layout.size), lhs_ty)
            }
            // `x - x` and `x ^ x` are zero; this also matches two operands
            // with equal constant bits.
            (BinOp::Sub | BinOp::SubWithOverflow | BinOp::SubUnchecked | BinOp::BitXor, a, b)
                if a == b =>
            {
                self.insert_scalar(Scalar::from_uint(0u128, layout.size), lhs_ty)
            }
            // Fold comparisons of known constants, and comparisons of a value
            // with itself (valid here since floats were excluded above).
            (BinOp::Eq, Left(a), Left(b)) => self.insert_bool(a == b),
            (BinOp::Eq, a, b) if a == b => self.insert_bool(true),
            (BinOp::Ne, Left(a), Left(b)) => self.insert_bool(a != b),
            (BinOp::Ne, a, b) if a == b => self.insert_bool(false),
            _ => return None,
        };

        if op.is_overflowing() {
            // Overflowing ops yield `(value, overflowed)`; none of the
            // identities above can overflow, so pair the result with `false`.
            let false_val = self.insert_bool(false);
            Some(self.insert_tuple(vec![result, false_val]))
        } else {
            Some(result)
        }
    }
1358
    /// Value-numbers a cast, looking through chains of casts and through
    /// aggregates when doing so cannot change the resulting value. The cast
    /// kind and operand may be rewritten in place to the simplified form.
    fn simplify_cast(
        &mut self,
        initial_kind: &mut CastKind,
        initial_operand: &mut Operand<'tcx>,
        to: Ty<'tcx>,
        location: Location,
    ) -> Option<VnIndex> {
        use CastKind::*;
        use rustc_middle::ty::adjustment::PointerCoercion::*;

        let mut from = initial_operand.ty(self.local_decls, self.tcx);
        let mut kind = *initial_kind;
        let mut value = self.simplify_operand(initial_operand, location)?;
        // A cast to the identical type is a no-op.
        if from == to {
            return Some(value);
        }

        // Each reification of a function item or closure may produce a
        // different pointer, so such casts must not be deduplicated with one
        // another: treat each result as a fresh opaque value.
        if let CastKind::PointerCoercion(ReifyFnPointer | ClosureFnPointer(_), _) = kind {
            return Some(self.new_opaque());
        }

        // Iteratively simplify the cast until no rule applies anymore.
        let mut was_ever_updated = false;
        loop {
            let mut was_updated_this_iteration = false;

            // A transmute between raw pointers with the same metadata type is
            // just a pointer cast; the weaker kind unlocks the rules below.
            if let Transmute = kind
                && from.is_raw_ptr()
                && to.is_raw_ptr()
                && self.pointers_have_same_metadata(from, to)
            {
                kind = PtrToPtr;
                was_updated_this_iteration = true;
            }

            // Casting an aggregate-built raw pointer to a thin (sized-pointee)
            // pointer only keeps the data-pointer field, so cast that directly.
            if let PtrToPtr = kind
                && let Value::Aggregate(AggregateTy::RawPtr { data_pointer_ty, .. }, _, fields) =
                    self.get(value)
                && let ty::RawPtr(to_pointee, _) = to.kind()
                && to_pointee.is_sized(self.tcx, self.typing_env())
            {
                from = *data_pointer_ty;
                value = fields[0];
                was_updated_this_iteration = true;
                if *data_pointer_ty == to {
                    return Some(fields[0]);
                }
            }

            // Transmuting an aggregate whose value lives entirely in a single
            // field is the same as transmuting that field.
            if let Transmute = kind
                && let Value::Aggregate(_aggregate_ty, variant_idx, field_values) = self.get(value)
                && let Some((field_idx, field_ty)) =
                    self.value_is_all_in_one_field(from, *variant_idx)
            {
                from = field_ty;
                value = field_values[field_idx.as_usize()];
                was_updated_this_iteration = true;
                if field_ty == to {
                    return Some(value);
                }
            }

            // Try to merge a cast-of-a-cast into a single cast.
            if let Value::Cast {
                kind: inner_kind,
                value: inner_value,
                from: inner_from,
                to: inner_to,
            } = *self.get(value)
            {
                let new_kind = match (inner_kind, kind) {
                    // Two pointer casts compose into one.
                    (PtrToPtr, PtrToPtr) => Some(PtrToPtr),
                    // A metadata-preserving pointer cast followed by a
                    // transmute folds into a single transmute.
                    (PtrToPtr, Transmute)
                        if self.pointers_have_same_metadata(inner_from, inner_to) =>
                    {
                        Some(Transmute)
                    }
                    // A transmute followed by a metadata-preserving pointer
                    // cast likewise folds into a single transmute.
                    (Transmute, PtrToPtr) if self.pointers_have_same_metadata(from, to) => {
                        Some(Transmute)
                    }
                    // Two transmutes compose, but only if skipping the
                    // intermediate type cannot lose a validity niche that the
                    // backend might rely on.
                    (Transmute, Transmute)
                        if !self.type_may_have_niche_of_interest_to_backend(inner_to) =>
                    {
                        Some(Transmute)
                    }
                    _ => None,
                };
                if let Some(new_kind) = new_kind {
                    kind = new_kind;
                    from = inner_from;
                    value = inner_value;
                    was_updated_this_iteration = true;
                    // Composing turned the cast into a no-op.
                    if inner_from == to {
                        return Some(inner_value);
                    }
                }
            }

            if was_updated_this_iteration {
                was_ever_updated = true;
            } else {
                break;
            }
        }

        // Write the simplified cast back into the MIR when possible.
        if was_ever_updated && let Some(op) = self.try_as_operand(value, location) {
            *initial_operand = op;
            *initial_kind = kind;
        }

        Some(self.insert(Value::Cast { kind, value, from, to }))
    }
1490
    /// Value-numbers a `Len` rvalue, folding it to a constant for arrays and
    /// for slices created by unsizing references to arrays of known length.
    fn simplify_len(&mut self, place: &mut Place<'tcx>, location: Location) -> Option<VnIndex> {
        // The length of an array is a compile-time constant from its type.
        let place_ty = place.ty(self.local_decls, self.tcx).ty;
        if let ty::Array(_, len) = place_ty.kind() {
            return Some(self.insert_constant(Const::Ty(self.tcx.types.usize, *len)));
        }

        let mut inner = self.simplify_place_value(place, location)?;

        // An address-of-deref (`&raw? *p`) denotes the same pointer as `p`,
        // so look through such chains to the original pointer value.
        while let Value::Address { place: borrowed, .. } = self.get(inner)
            && let [PlaceElem::Deref] = borrowed.projection[..]
            && let Some(borrowed) = self.locals[borrowed.local]
        {
            inner = borrowed;
        }

        // The length of a slice produced by unsizing a pointer-to-array is the
        // array's length, which the source pointee type records.
        if let Value::Cast { kind, from, to, .. } = self.get(inner)
            && let CastKind::PointerCoercion(ty::adjustment::PointerCoercion::Unsize, _) = kind
            && let Some(from) = from.builtin_deref(true)
            && let ty::Array(_, len) = from.kind()
            && let Some(to) = to.builtin_deref(true)
            && let ty::Slice(..) = to.kind()
        {
            return Some(self.insert_constant(Const::Ty(self.tcx.types.usize, *len)));
        }

        Some(self.insert(Value::Len(inner)))
    }
1523
1524 fn pointers_have_same_metadata(&self, left_ptr_ty: Ty<'tcx>, right_ptr_ty: Ty<'tcx>) -> bool {
1525 let left_meta_ty = left_ptr_ty.pointee_metadata_ty_or_projection(self.tcx);
1526 let right_meta_ty = right_ptr_ty.pointee_metadata_ty_or_projection(self.tcx);
1527 if left_meta_ty == right_meta_ty {
1528 true
1529 } else if let Ok(left) =
1530 self.tcx.try_normalize_erasing_regions(self.typing_env(), left_meta_ty)
1531 && let Ok(right) =
1532 self.tcx.try_normalize_erasing_regions(self.typing_env(), right_meta_ty)
1533 {
1534 left == right
1535 } else {
1536 false
1537 }
1538 }
1539
1540 fn type_may_have_niche_of_interest_to_backend(&self, ty: Ty<'tcx>) -> bool {
1547 let Ok(layout) = self.ecx.layout_of(ty) else {
1548 return true;
1550 };
1551
1552 if layout.uninhabited {
1553 return true;
1554 }
1555
1556 match layout.backend_repr {
1557 BackendRepr::Scalar(a) => !a.is_always_valid(&self.ecx),
1558 BackendRepr::ScalarPair(a, b) => {
1559 !a.is_always_valid(&self.ecx) || !b.is_always_valid(&self.ecx)
1560 }
1561 BackendRepr::SimdVector { .. } | BackendRepr::Memory { .. } => false,
1562 }
1563 }
1564
1565 fn value_is_all_in_one_field(
1566 &self,
1567 ty: Ty<'tcx>,
1568 variant: VariantIdx,
1569 ) -> Option<(FieldIdx, Ty<'tcx>)> {
1570 if let Ok(layout) = self.ecx.layout_of(ty)
1571 && let abi::Variants::Single { index } = layout.variants
1572 && index == variant
1573 && let Some((field_idx, field_layout)) = layout.non_1zst_field(&self.ecx)
1574 && layout.size == field_layout.size
1575 {
1576 Some((field_idx, field_layout.ty))
1580 } else if let ty::Adt(adt, args) = ty.kind()
1581 && adt.is_struct()
1582 && adt.repr().transparent()
1583 && let [single_field] = adt.non_enum_variant().fields.raw.as_slice()
1584 {
1585 Some((FieldIdx::ZERO, single_field.ty(self.tcx, args)))
1586 } else {
1587 None
1588 }
1589 }
1590}
1591
1592fn op_to_prop_const<'tcx>(
1593 ecx: &mut InterpCx<'tcx, DummyMachine>,
1594 op: &OpTy<'tcx>,
1595) -> Option<ConstValue<'tcx>> {
1596 if op.layout.is_unsized() {
1598 return None;
1599 }
1600
1601 if op.layout.is_zst() {
1603 return Some(ConstValue::ZeroSized);
1604 }
1605
1606 if !matches!(op.layout.backend_repr, BackendRepr::Scalar(..) | BackendRepr::ScalarPair(..)) {
1609 return None;
1610 }
1611
1612 if let BackendRepr::Scalar(abi::Scalar::Initialized { .. }) = op.layout.backend_repr
1614 && let Some(scalar) = ecx.read_scalar(op).discard_err()
1615 {
1616 if !scalar.try_to_scalar_int().is_ok() {
1617 return None;
1621 }
1622 return Some(ConstValue::Scalar(scalar));
1623 }
1624
1625 if let Either::Left(mplace) = op.as_mplace_or_imm() {
1628 let (size, _align) = ecx.size_and_align_of_val(&mplace).discard_err()??;
1629
1630 let alloc_ref = ecx.get_ptr_alloc(mplace.ptr(), size).discard_err()??;
1634 if alloc_ref.has_provenance() {
1635 return None;
1636 }
1637
1638 let pointer = mplace.ptr().into_pointer_or_addr().ok()?;
1639 let (prov, offset) = pointer.into_parts();
1640 let alloc_id = prov.alloc_id();
1641 intern_const_alloc_for_constprop(ecx, alloc_id).discard_err()?;
1642
1643 if let GlobalAlloc::Memory(alloc) = ecx.tcx.global_alloc(alloc_id)
1647 && alloc.inner().align >= op.layout.align.abi
1650 {
1651 return Some(ConstValue::Indirect { alloc_id, offset });
1652 }
1653 }
1654
1655 let alloc_id =
1657 ecx.intern_with_temp_alloc(op.layout, |ecx, dest| ecx.copy_op(op, dest)).discard_err()?;
1658 let value = ConstValue::Indirect { alloc_id, offset: Size::ZERO };
1659
1660 if ecx.tcx.global_alloc(alloc_id).unwrap_memory().inner().provenance().ptrs().is_empty() {
1664 return Some(value);
1665 }
1666
1667 None
1668}
1669
impl<'tcx> VnState<'_, 'tcx> {
    /// Materializes a value as an operand: a constant if one can be produced,
    /// otherwise a copy of a place holding this value at `location`.
    fn try_as_operand(&mut self, index: VnIndex, location: Location) -> Option<Operand<'tcx>> {
        if let Some(const_) = self.try_as_constant(index) {
            Some(Operand::Constant(Box::new(const_)))
        } else if let Some(place) = self.try_as_place(index, location, false) {
            // Record the reuse so the local's storage statements and moves can
            // be fixed up after the pass.
            self.reused_locals.insert(place.local);
            Some(Operand::Copy(place))
        } else {
            None
        }
    }

    /// Materializes a value as a constant operand, if it is a deterministic
    /// constant that can be encoded as a MIR `Const`.
    fn try_as_constant(&mut self, index: VnIndex) -> Option<ConstOperand<'tcx>> {
        // `disambiguator: 0` is only used for deterministic constants (checked
        // below), which can safely be mentioned again elsewhere.
        if let Value::Constant { value, disambiguator: 0 } = *self.get(index) {
            debug_assert!(value.is_deterministic());
            return Some(ConstOperand { span: DUMMY_SP, user_ty: None, const_: value });
        }

        let op = self.evaluated[index].as_ref()?;
        // Unsized values cannot be encoded as a `ConstValue`.
        if op.layout.is_unsized() {
            return None;
        }

        let value = op_to_prop_const(&mut self.ecx, op)?;

        // `op_to_prop_const` must only produce provenance-free constants, so
        // the synthesized mention remains deterministic.
        assert!(!value.may_have_provenance(self.tcx, op.layout.size));

        let const_ = Const::Val(value, op.layout.ty);
        Some(ConstOperand { span: DUMMY_SP, user_ty: None, const_ })
    }

    /// Materializes a value as a place: a chain of projections over a local
    /// whose assignment dominates `loc`. Non-stable-offset projections are
    /// only admitted when `allow_complex_projection` is set.
    #[instrument(level = "trace", skip(self), ret)]
    fn try_as_place(
        &mut self,
        mut index: VnIndex,
        loc: Location,
        allow_complex_projection: bool,
    ) -> Option<Place<'tcx>> {
        // Projections are collected while walking up the value chain
        // (outermost first), then reversed into MIR order.
        let mut projection = SmallVec::<[PlaceElem<'tcx>; 1]>::new();
        loop {
            if let Some(local) = self.try_as_local(index, loc) {
                projection.reverse();
                let place =
                    Place { local, projection: self.tcx.mk_place_elems(projection.as_slice()) };
                return Some(place);
            } else if let Value::Projection(pointer, proj) = *self.get(index)
                && (allow_complex_projection || proj.is_stable_offset())
                && let Some(proj) = self.try_as_place_elem(proj, loc)
            {
                projection.push(proj);
                index = pointer;
            } else {
                return None;
            }
        }
    }

    /// Finds a local holding this value whose assignment dominates `loc`, so
    /// that reading the local at `loc` observes the intended value.
    fn try_as_local(&mut self, index: VnIndex, loc: Location) -> Option<Local> {
        let other = self.rev_locals.get(index)?;
        other
            .iter()
            .find(|&&other| self.ssa.assignment_dominates(&self.dominators, other, loc))
            .copied()
    }
}
1750
impl<'tcx> MutVisitor<'tcx> for VnState<'_, 'tcx> {
    fn tcx(&self) -> TyCtxt<'tcx> {
        self.tcx
    }

    fn visit_place(&mut self, place: &mut Place<'tcx>, context: PlaceContext, location: Location) {
        self.simplify_place_projection(place, location);
        // A mutation through a projection may write behind a pointer, so any
        // previously recorded dereferenced values may now be stale.
        if context.is_mutating_use() && !place.projection.is_empty() {
            self.invalidate_derefs();
        }
        self.super_place(place, context, location);
    }

    fn visit_operand(&mut self, operand: &mut Operand<'tcx>, location: Location) {
        self.simplify_operand(operand, location);
        self.super_operand(operand, location);
    }

    fn visit_statement(&mut self, stmt: &mut Statement<'tcx>, location: Location) {
        if let StatementKind::Assign(box (ref mut lhs, ref mut rvalue)) = stmt.kind {
            self.simplify_place_projection(lhs, location);

            let value = self.simplify_rvalue(lhs, rvalue, location);
            // When the LHS is a whole SSA local (and the rvalue's type matches
            // its declaration), record a value for it — opaque if no
            // simplification was found — so later uses can be rewritten.
            let value = if let Some(local) = lhs.as_local()
                && self.ssa.is_ssa(local)
                && self.local_decls[local].ty == rvalue.ty(self.local_decls, self.tcx)
            {
                let value = value.unwrap_or_else(|| self.new_opaque());
                self.assign(local, value);
                Some(value)
            } else {
                value
            };
            if let Some(value) = value {
                // Prefer rewriting the rvalue to a constant; otherwise to a
                // copy of an existing place — unless the rvalue already is
                // exactly that move/copy, in which case leave it alone.
                if let Some(const_) = self.try_as_constant(value) {
                    *rvalue = Rvalue::Use(Operand::Constant(Box::new(const_)));
                } else if let Some(place) = self.try_as_place(value, location, false)
                    && *rvalue != Rvalue::Use(Operand::Move(place))
                    && *rvalue != Rvalue::Use(Operand::Copy(place))
                {
                    *rvalue = Rvalue::Use(Operand::Copy(place));
                    self.reused_locals.insert(place.local);
                }
            }
        }
        self.super_statement(stmt, location);
    }

    fn visit_terminator(&mut self, terminator: &mut Terminator<'tcx>, location: Location) {
        if let Terminator { kind: TerminatorKind::Call { destination, .. }, .. } = terminator {
            // A call's return value is unknown: give an SSA destination a
            // fresh opaque value.
            if let Some(local) = destination.as_local()
                && self.ssa.is_ssa(local)
            {
                let opaque = self.new_opaque();
                self.assign(local, opaque);
            }
        }
        // Only plain control-flow terminators are known not to run code that
        // could write through pointers; for all others, invalidate recorded
        // dereferenced values.
        let safe_to_preserve_derefs = matches!(
            terminator.kind,
            TerminatorKind::SwitchInt { .. } | TerminatorKind::Goto { .. }
        );
        if !safe_to_preserve_derefs {
            self.invalidate_derefs();
        }
        self.super_terminator(terminator, location);
    }
}
1823
/// Post-pass visitor that cleans up locals the value-numbering reused: their
/// storage markers are removed and moves out of them are turned into copies
/// (see the `MutVisitor` impl below).
struct StorageRemover<'tcx> {
    tcx: TyCtxt<'tcx>,
    // Locals that GVN introduced additional uses of.
    reused_locals: DenseBitSet<Local>,
}
1828
1829impl<'tcx> MutVisitor<'tcx> for StorageRemover<'tcx> {
1830 fn tcx(&self) -> TyCtxt<'tcx> {
1831 self.tcx
1832 }
1833
1834 fn visit_operand(&mut self, operand: &mut Operand<'tcx>, _: Location) {
1835 if let Operand::Move(place) = *operand
1836 && !place.is_indirect_first_projection()
1837 && self.reused_locals.contains(place.local)
1838 {
1839 *operand = Operand::Copy(place);
1840 }
1841 }
1842
1843 fn visit_statement(&mut self, stmt: &mut Statement<'tcx>, loc: Location) {
1844 match stmt.kind {
1845 StatementKind::StorageLive(l) | StatementKind::StorageDead(l)
1847 if self.reused_locals.contains(l) =>
1848 {
1849 stmt.make_nop()
1850 }
1851 _ => self.super_statement(stmt, loc),
1852 }
1853 }
1854}