use std::fmt;

use itertools::Either;
use rustc_abi as abi;
use rustc_abi::{
    Align, BackendRepr, FIRST_VARIANT, FieldIdx, Primitive, Size, TagEncoding, VariantIdx, Variants,
};
use rustc_middle::mir::interpret::{Pointer, Scalar, alloc_range};
use rustc_middle::mir::{self, ConstValue};
use rustc_middle::ty::Ty;
use rustc_middle::ty::layout::{LayoutOf, TyAndLayout};
use rustc_middle::{bug, span_bug};
use rustc_session::config::OptLevel;
use tracing::{debug, instrument};

use super::place::{PlaceRef, PlaceValue};
use super::rvalue::transmute_scalar;
use super::{FunctionCx, LocalRef};
use crate::common::IntPredicate;
use crate::traits::*;
use crate::{MemFlags, size_of_val};

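/// The backend representation of a single Rust value during codegen.
///
/// Which variant is used for a given type is dictated by its layout (see
/// `is_expected_variant_for_type`): `Immediate` for single-scalar layouts,
/// `Pair` for scalar-pair layouts, `ZeroSized` for ZSTs, and `Ref` when the
/// value lives in memory and is handled through a pointer to it.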
#[derive(Copy, Clone, Debug)]
pub enum OperandValue<V> {
    Ref(PlaceValue<V>),
    Immediate(V),
    Pair(V, V),
    ZeroSized,
}

impl<V: CodegenObject> OperandValue<V> {
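    /// Treats this value as a pointer: returns the pointer itself together
    /// with the metadata half of a wide pointer, if any. ICEs (`bug!`) for
    /// variants that cannot represent a pointer.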
    pub(crate) fn pointer_parts(self) -> (V, Option<V>) {
        match self {
            OperandValue::Immediate(llptr) => (llptr, None),
            OperandValue::Pair(llptr, llextra) => (llptr, Some(llextra)),
            _ => bug!("OperandValue cannot be a pointer: {self:?}"),
        }
    }

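    /// Builds the `PlaceValue` for the memory this pointer value points to.
    /// The pointee's alignment must be supplied by the caller, since the
    /// pointer itself does not carry it.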
    pub(crate) fn deref(self, align: Align) -> PlaceValue<V> {
        let (llval, llextra) = self.pointer_parts();
        PlaceValue { llval, llextra, align }
    }

    pub(crate) fn is_expected_variant_for_type<'tcx, Cx: LayoutTypeCodegenMethods<'tcx>>(
        &self,
        cx: &Cx,
        ty: TyAndLayout<'tcx>,
    ) -> bool {
        match self {
            OperandValue::ZeroSized => ty.is_zst(),
            OperandValue::Immediate(_) => cx.is_backend_immediate(ty),
            OperandValue::Pair(_, _) => cx.is_backend_scalar_pair(ty),
            OperandValue::Ref(_) => cx.is_backend_ref(ty),
        }
    }
}

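/// An operand during codegen: an already-produced value (`val`) together
/// with the type and layout it has as a Rust value (`layout`).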
#[derive(Copy, Clone)]
pub struct OperandRef<'tcx, V> {
    pub val: OperandValue<V>,

    pub layout: TyAndLayout<'tcx>,
}

impl<V: CodegenObject> fmt::Debug for OperandRef<'_, V> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "OperandRef({:?} @ {:?})", self.val, self.layout)
    }
}

impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
    pub fn zero_sized(layout: TyAndLayout<'tcx>) -> OperandRef<'tcx, V> {
        assert!(layout.is_zst());
        OperandRef { val: OperandValue::ZeroSized, layout }
    }

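    /// Lowers an evaluated MIR constant into an operand: scalars become
    /// immediates, slices become pointer/length pairs, and indirect
    /// constants are read out of their backing allocation via
    /// `from_const_alloc`.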
    pub(crate) fn from_const<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        bx: &mut Bx,
        val: mir::ConstValue<'tcx>,
        ty: Ty<'tcx>,
    ) -> Self {
        let layout = bx.layout_of(ty);

        let val = match val {
            ConstValue::Scalar(x) => {
                let BackendRepr::Scalar(scalar) = layout.backend_repr else {
                    bug!("from_const: invalid ByVal layout: {:#?}", layout);
                };
                let llval = bx.scalar_to_backend(x, scalar, bx.immediate_backend_type(layout));
                OperandValue::Immediate(llval)
            }
            ConstValue::ZeroSized => return OperandRef::zero_sized(layout),
            ConstValue::Slice { data, meta } => {
                let BackendRepr::ScalarPair(a_scalar, _) = layout.backend_repr else {
                    bug!("from_const: invalid ScalarPair layout: {:#?}", layout);
                };
                let a = Scalar::from_pointer(
                    Pointer::new(bx.tcx().reserve_and_set_memory_alloc(data).into(), Size::ZERO),
                    &bx.tcx(),
                );
                let a_llval = bx.scalar_to_backend(
                    a,
                    a_scalar,
                    bx.scalar_pair_element_backend_type(layout, 0, true),
                );
                let b_llval = bx.const_usize(meta);
                OperandValue::Pair(a_llval, b_llval)
            }
            ConstValue::Indirect { alloc_id, offset } => {
                let alloc = bx.tcx().global_alloc(alloc_id).unwrap_memory();
                return Self::from_const_alloc(bx, layout, alloc, offset);
            }
        };

        OperandRef { val, layout }
    }

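    /// Builds an operand from a const allocation at `offset`. When the
    /// layout is one or two initialized scalars, the bytes are read directly
    /// out of the allocation; otherwise the allocation is emitted as backend
    /// static data and the operand is loaded back from it.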
    fn from_const_alloc<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        bx: &mut Bx,
        layout: TyAndLayout<'tcx>,
        alloc: rustc_middle::mir::interpret::ConstAllocation<'tcx>,
        offset: Size,
    ) -> Self {
        let alloc_align = alloc.inner().align;
        assert!(alloc_align >= layout.align.abi);

        let read_scalar = |start, size, s: abi::Scalar, ty| {
            match alloc.0.read_scalar(
                bx,
                alloc_range(start, size),
                matches!(s.primitive(), abi::Primitive::Pointer(_)),
            ) {
                Ok(val) => bx.scalar_to_backend(val, s, ty),
                Err(_) => bx.const_poison(ty),
            }
        };

        match layout.backend_repr {
            BackendRepr::Scalar(s @ abi::Scalar::Initialized { .. }) => {
                let size = s.size(bx);
                assert_eq!(size, layout.size, "abi::Scalar size does not match layout size");
                let val = read_scalar(offset, size, s, bx.immediate_backend_type(layout));
                OperandRef { val: OperandValue::Immediate(val), layout }
            }
            BackendRepr::ScalarPair(
                a @ abi::Scalar::Initialized { .. },
                b @ abi::Scalar::Initialized { .. },
            ) => {
                let (a_size, b_size) = (a.size(bx), b.size(bx));
                let b_offset = (offset + a_size).align_to(b.align(bx).abi);
                assert!(b_offset.bytes() > 0);
                let a_val = read_scalar(
                    offset,
                    a_size,
                    a,
                    bx.scalar_pair_element_backend_type(layout, 0, true),
                );
                let b_val = read_scalar(
                    b_offset,
                    b_size,
                    b,
                    bx.scalar_pair_element_backend_type(layout, 1, true),
                );
                OperandRef { val: OperandValue::Pair(a_val, b_val), layout }
            }
            _ if layout.is_zst() => OperandRef::zero_sized(layout),
            _ => {
                let init = bx.const_data_from_alloc(alloc);
                let base_addr = bx.static_addr_of(init, alloc_align, None);

                let llval = bx.const_ptr_byte_offset(base_addr, offset);
                bx.load_operand(PlaceRef::new_sized(llval, layout))
            }
        }
    }

    pub fn immediate(self) -> V {
        match self.val {
            OperandValue::Immediate(s) => s,
            _ => bug!("not immediate: {:?}", self),
        }
    }

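    /// Treats this operand as a pointer and produces the place it points to,
    /// using the pointee's layout. `Box` hits a `bug!` here; by this point
    /// box derefs are expected to have been lowered away earlier in MIR.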
    pub fn deref<Cx: CodegenMethods<'tcx>>(self, cx: &Cx) -> PlaceRef<'tcx, V> {
        if self.layout.ty.is_box() {
            bug!("dereferencing {:?} in codegen", self.layout.ty);
        }

        let projected_ty = self
            .layout
            .ty
            .builtin_deref(true)
            .unwrap_or_else(|| bug!("deref of non-pointer {:?}", self));

        let layout = cx.layout_of(projected_ty);
        self.val.deref(layout.align.abi).with_type(layout)
    }

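    /// If this operand is a `Pair`, packs both halves into a single backend
    /// aggregate value (for contexts that need exactly one value); otherwise
    /// returns the plain immediate.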
    pub fn immediate_or_packed_pair<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        self,
        bx: &mut Bx,
    ) -> V {
        if let OperandValue::Pair(a, b) = self.val {
            let llty = bx.cx().immediate_backend_type(self.layout);
            debug!("Operand::immediate_or_packed_pair: packing {:?} into {:?}", self, llty);
            let mut llpair = bx.cx().const_poison(llty);
            llpair = bx.insert_value(llpair, a, 0);
            llpair = bx.insert_value(llpair, b, 1);
            llpair
        } else {
            self.immediate()
        }
    }

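    /// Inverse of `immediate_or_packed_pair`: if `layout` is a scalar pair,
    /// unpacks `llval` back into its two components, otherwise wraps it as a
    /// single immediate.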
    pub fn from_immediate_or_packed_pair<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        bx: &mut Bx,
        llval: V,
        layout: TyAndLayout<'tcx>,
    ) -> Self {
        let val = if let BackendRepr::ScalarPair(..) = layout.backend_repr {
            debug!("Operand::from_immediate_or_packed_pair: unpacking {:?} @ {:?}", llval, layout);

            let a_llval = bx.extract_value(llval, 0);
            let b_llval = bx.extract_value(llval, 1);
            OperandValue::Pair(a_llval, b_llval)
        } else {
            OperandValue::Immediate(llval)
        };
        OperandRef { val, layout }
    }

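    /// Projects field `i` out of an operand that is not in memory, e.g. one
    /// scalar out of a scalar pair, a ZST field, or a same-size "newtype"
    /// field handled as a transmute. A SIMD vector whose field would need a
    /// memory representation is spilled to a stack slot and reloaded.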
    pub(crate) fn extract_field<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        &self,
        fx: &mut FunctionCx<'a, 'tcx, Bx>,
        bx: &mut Bx,
        i: usize,
    ) -> Self {
        let field = self.layout.field(bx.cx(), i);
        let offset = self.layout.fields.offset(i);

        if !bx.is_backend_ref(self.layout) && bx.is_backend_ref(field) {
            if let BackendRepr::SimdVector { count, .. } = self.layout.backend_repr
                && let BackendRepr::Memory { sized: true } = field.backend_repr
                && count.is_power_of_two()
            {
                assert_eq!(field.size, self.layout.size);
                let place = PlaceRef::alloca(bx, field);
                self.val.store(bx, place.val.with_type(self.layout));
                return bx.load_operand(place);
            } else {
                bug!("Non-ref type {self:?} cannot project to ref field type {field:?}");
            }
        }

        let val = if field.is_zst() {
            OperandValue::ZeroSized
        } else if let BackendRepr::SimdVector { .. } = self.layout.backend_repr {
            assert_eq!(field.size, self.layout.size);
            self.val
        } else if field.size == self.layout.size {
            assert_eq!(offset.bytes(), 0);
            fx.codegen_transmute_operand(bx, *self, field)
        } else {
            let (in_scalar, imm) = match (self.val, self.layout.backend_repr) {
                (OperandValue::Pair(a_llval, b_llval), BackendRepr::ScalarPair(a, b)) => {
                    if offset.bytes() == 0 {
                        assert_eq!(field.size, a.size(bx.cx()));
                        (Some(a), a_llval)
                    } else {
                        assert_eq!(offset, a.size(bx.cx()).align_to(b.align(bx.cx()).abi));
                        assert_eq!(field.size, b.size(bx.cx()));
                        (Some(b), b_llval)
                    }
                }

                _ => {
                    span_bug!(fx.mir.span, "OperandRef::extract_field({:?}): not applicable", self)
                }
            };
            OperandValue::Immediate(match field.backend_repr {
                BackendRepr::SimdVector { .. } => imm,
                BackendRepr::Scalar(out_scalar) => {
                    let Some(in_scalar) = in_scalar else {
                        span_bug!(
                            fx.mir.span,
                            "OperandRef::extract_field({:?}): missing input scalar for output scalar",
                            self
                        )
                    };
                    if in_scalar != out_scalar {
                        let backend = bx.from_immediate(imm);
                        bx.to_immediate_scalar(backend, out_scalar)
                    } else {
                        imm
                    }
                }
                BackendRepr::ScalarPair(_, _) | BackendRepr::Memory { .. } => bug!(),
            })
        };

        OperandRef { val, layout: field }
    }

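    /// Reads the discriminant of this enum operand and casts it to
    /// `cast_to`: single-variant layouts become a constant, direct tags an
    /// integer cast, and niche-encoded tags are decoded (see the comment in
    /// the `Niche` arm below).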
    #[instrument(level = "trace", skip(fx, bx))]
    pub fn codegen_get_discr<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        self,
        fx: &mut FunctionCx<'a, 'tcx, Bx>,
        bx: &mut Bx,
        cast_to: Ty<'tcx>,
    ) -> V {
        let dl = &bx.tcx().data_layout;
        let cast_to_layout = bx.cx().layout_of(cast_to);
        let cast_to = bx.cx().immediate_backend_type(cast_to_layout);

        if self.layout.is_uninhabited() {
            return bx.cx().const_poison(cast_to);
        }

        let (tag_scalar, tag_encoding, tag_field) = match self.layout.variants {
            Variants::Empty => unreachable!("we already handled uninhabited types"),
            Variants::Single { index } => {
                let discr_val =
                    if let Some(discr) = self.layout.ty.discriminant_for_variant(bx.tcx(), index) {
                        discr.val
                    } else {
                        assert_eq!(index, FIRST_VARIANT);
                        0
                    };
                return bx.cx().const_uint_big(cast_to, discr_val);
            }
            Variants::Multiple { tag, ref tag_encoding, tag_field, .. } => {
                (tag, tag_encoding, tag_field)
            }
        };

        let tag_op = match self.val {
            OperandValue::ZeroSized => bug!(),
            OperandValue::Immediate(_) | OperandValue::Pair(_, _) => {
                self.extract_field(fx, bx, tag_field.as_usize())
            }
            OperandValue::Ref(place) => {
                let tag = place.with_type(self.layout).project_field(bx, tag_field.as_usize());
                bx.load_operand(tag)
            }
        };
        let tag_imm = tag_op.immediate();

        match *tag_encoding {
            TagEncoding::Direct => {
                let signed = match tag_scalar.primitive() {
                    Primitive::Int(_, signed) => !tag_scalar.is_bool() && signed,
                    _ => false,
                };
                bx.intcast(tag_imm, cast_to, signed)
            }
            TagEncoding::Niche { untagged_variant, ref niche_variants, niche_start } => {
                let (tag, tag_llty) = match tag_scalar.primitive() {
                    Primitive::Pointer(_) => {
                        let t = bx.type_from_integer(dl.ptr_sized_integer());
                        let tag = bx.ptrtoint(tag_imm, t);
                        (tag, t)
                    }
                    _ => (tag_imm, bx.cx().immediate_backend_type(tag_op.layout)),
                };

                let relative_max = niche_variants.end().as_u32() - niche_variants.start().as_u32();

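                // Decode the niche. If it covers exactly one variant, compare the
                // tag against `niche_start` directly; otherwise shift the tag down
                // by `niche_start` and range-check it against `relative_max` to
                // decide whether this is a niche variant at all.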
                let (is_niche, tagged_discr, delta) = if relative_max == 0 {
                    let niche_start = bx.cx().const_uint_big(tag_llty, niche_start);
                    let is_niche = bx.icmp(IntPredicate::IntEQ, tag, niche_start);
                    let tagged_discr =
                        bx.cx().const_uint(cast_to, niche_variants.start().as_u32() as u64);
                    (is_niche, tagged_discr, 0)
                } else {
                    let relative_discr = bx.sub(tag, bx.cx().const_uint_big(tag_llty, niche_start));
                    let cast_tag = bx.intcast(relative_discr, cast_to, false);
                    let is_niche = bx.icmp(
                        IntPredicate::IntULE,
                        relative_discr,
                        bx.cx().const_uint(tag_llty, relative_max as u64),
                    );

                    if niche_variants.contains(&untagged_variant)
                        && bx.cx().sess().opts.optimize != OptLevel::No
                    {
                        let impossible =
                            u64::from(untagged_variant.as_u32() - niche_variants.start().as_u32());
                        let impossible = bx.cx().const_uint(tag_llty, impossible);
                        let ne = bx.icmp(IntPredicate::IntNE, relative_discr, impossible);
                        bx.assume(ne);
                    }

                    (is_niche, cast_tag, niche_variants.start().as_u32() as u128)
                };

                let tagged_discr = if delta == 0 {
                    tagged_discr
                } else {
                    bx.add(tagged_discr, bx.cx().const_uint_big(cast_to, delta))
                };

                let discr = bx.select(
                    is_niche,
                    tagged_discr,
                    bx.cx().const_uint(cast_to, untagged_variant.as_u32() as u64),
                );

                discr
            }
        }
    }

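    /// Creates a not-yet-initialized operand for `layout`, where every
    /// scalar slot starts out as `Either::Right(scalar)` and is later filled
    /// in via `insert_field`/`insert_imm` before `build` is called. Returns
    /// `None` for layouts this builder cannot handle (uninhabited, non-ZST
    /// memory, or SIMD layouts).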
    pub(crate) fn builder(
        layout: TyAndLayout<'tcx>,
    ) -> Option<OperandRef<'tcx, Either<V, abi::Scalar>>> {
        if layout.uninhabited {
            return None;
        }

        let val = match layout.backend_repr {
            BackendRepr::Memory { .. } if layout.is_zst() => OperandValue::ZeroSized,
            BackendRepr::Scalar(s) => OperandValue::Immediate(Either::Right(s)),
            BackendRepr::ScalarPair(a, b) => OperandValue::Pair(Either::Right(a), Either::Right(b)),
            BackendRepr::Memory { .. } | BackendRepr::SimdVector { .. } => return None,
        };
        Some(OperandRef { val, layout })
    }
}

impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, Either<V, abi::Scalar>> {
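    /// Writes `operand` into field `f` of variant `v` of the operand being
    /// built, transmuting each scalar component to the scalar expected in
    /// that slot. Only zero-sized, immediate, and pair operands can be
    /// inserted; anything else is a bug.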
    pub(crate) fn insert_field<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        &mut self,
        bx: &mut Bx,
        v: VariantIdx,
        f: FieldIdx,
        operand: OperandRef<'tcx, V>,
    ) {
        let (expect_zst, is_zero_offset) = if let abi::FieldsShape::Primitive = self.layout.fields {
            assert!(!self.layout.is_zst());
            assert_eq!(v, FIRST_VARIANT);
            let first_field = f == FieldIdx::ZERO;
            (!first_field, first_field)
        } else {
            let variant_layout = self.layout.for_variant(bx.cx(), v);
            let field_layout = variant_layout.field(bx.cx(), f.as_usize());
            let field_offset = variant_layout.fields.offset(f.as_usize());
            (field_layout.is_zst(), field_offset == Size::ZERO)
        };

        let mut update = |tgt: &mut Either<V, abi::Scalar>, src, from_scalar| {
            let to_scalar = tgt.unwrap_right();
            let imm = transmute_scalar(bx, src, from_scalar, to_scalar);
            *tgt = Either::Left(imm);
        };

        match (operand.val, operand.layout.backend_repr) {
            (OperandValue::ZeroSized, _) if expect_zst => {}
            (OperandValue::Immediate(v), BackendRepr::Scalar(from_scalar)) => match &mut self.val {
                OperandValue::Immediate(val @ Either::Right(_)) if is_zero_offset => {
                    update(val, v, from_scalar);
                }
                OperandValue::Pair(fst @ Either::Right(_), _) if is_zero_offset => {
                    update(fst, v, from_scalar);
                }
                OperandValue::Pair(_, snd @ Either::Right(_)) if !is_zero_offset => {
                    update(snd, v, from_scalar);
                }
                _ => bug!("Tried to insert {operand:?} into {v:?}.{f:?} of {self:?}"),
            },
            (OperandValue::Pair(a, b), BackendRepr::ScalarPair(from_sa, from_sb)) => {
                match &mut self.val {
                    OperandValue::Pair(fst @ Either::Right(_), snd @ Either::Right(_)) => {
                        update(fst, a, from_sa);
                        update(snd, b, from_sb);
                    }
                    _ => bug!("Tried to insert {operand:?} into {v:?}.{f:?} of {self:?}"),
                }
            }
            _ => bug!("Unsupported operand {operand:?} inserting into {v:?}.{f:?} of {self:?}"),
        }
    }

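    /// Writes an already-converted immediate into the slot for field `f`,
    /// chosen purely by the field's offset. Unlike `insert_field`, no scalar
    /// transmute is performed.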
    pub(super) fn insert_imm(&mut self, f: FieldIdx, imm: V) {
        let field_offset = self.layout.fields.offset(f.as_usize());
        let is_zero_offset = field_offset == Size::ZERO;
        match &mut self.val {
            OperandValue::Immediate(val @ Either::Right(_)) if is_zero_offset => {
                *val = Either::Left(imm);
            }
            OperandValue::Pair(fst @ Either::Right(_), _) if is_zero_offset => {
                *fst = Either::Left(imm);
            }
            OperandValue::Pair(_, snd @ Either::Right(_)) if !is_zero_offset => {
                *snd = Either::Left(imm);
            }
            _ => bug!("Tried to insert {imm:?} into field {f:?} of {self:?}"),
        }
    }

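    /// Finalizes the builder into a real `OperandRef`, replacing any slot
    /// that was never written with `undef`; this is only allowed when
    /// uninitialized bits are valid for that scalar.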
    pub fn build(&self, cx: &impl CodegenMethods<'tcx, Value = V>) -> OperandRef<'tcx, V> {
        let OperandRef { val, layout } = *self;

        let unwrap = |r: Either<V, abi::Scalar>| match r {
            Either::Left(v) => v,
            Either::Right(s) if s.is_uninit_valid() => {
                let bty = cx.type_from_scalar(s);
                cx.const_undef(bty)
            }
            Either::Right(_) => bug!("OperandRef::build called while fields are missing {self:?}"),
        };

        let val = match val {
            OperandValue::ZeroSized => OperandValue::ZeroSized,
            OperandValue::Immediate(v) => OperandValue::Immediate(unwrap(v)),
            OperandValue::Pair(a, b) => OperandValue::Pair(unwrap(a), unwrap(b)),
            OperandValue::Ref(_) => bug!(),
        };
        OperandRef { val, layout }
    }
}

impl<'a, 'tcx, V: CodegenObject> OperandValue<V> {
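    /// Produces an all-poison operand for `layout`, picking whichever
    /// variant (`ZeroSized`, `Immediate`, `Pair`, or `Ref`) the backend
    /// expects for that layout.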
    pub fn poison<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        bx: &mut Bx,
        layout: TyAndLayout<'tcx>,
    ) -> OperandValue<V> {
        assert!(layout.is_sized());
        if layout.is_zst() {
            OperandValue::ZeroSized
        } else if bx.cx().is_backend_immediate(layout) {
            let ibty = bx.cx().immediate_backend_type(layout);
            OperandValue::Immediate(bx.const_poison(ibty))
        } else if bx.cx().is_backend_scalar_pair(layout) {
            let ibty0 = bx.cx().scalar_pair_element_backend_type(layout, 0, true);
            let ibty1 = bx.cx().scalar_pair_element_backend_type(layout, 1, true);
            OperandValue::Pair(bx.const_poison(ibty0), bx.const_poison(ibty1))
        } else {
            let ptr = bx.cx().type_ptr();
            OperandValue::Ref(PlaceValue::new_sized(bx.const_poison(ptr), layout.align.abi))
        }
    }

    pub fn store<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        self,
        bx: &mut Bx,
        dest: PlaceRef<'tcx, V>,
    ) {
        self.store_with_flags(bx, dest, MemFlags::empty());
    }

    pub fn volatile_store<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        self,
        bx: &mut Bx,
        dest: PlaceRef<'tcx, V>,
    ) {
        self.store_with_flags(bx, dest, MemFlags::VOLATILE);
    }

    pub fn unaligned_volatile_store<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        self,
        bx: &mut Bx,
        dest: PlaceRef<'tcx, V>,
    ) {
        self.store_with_flags(bx, dest, MemFlags::VOLATILE | MemFlags::UNALIGNED);
    }

    pub fn nontemporal_store<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        self,
        bx: &mut Bx,
        dest: PlaceRef<'tcx, V>,
    ) {
        self.store_with_flags(bx, dest, MemFlags::NONTEMPORAL);
    }

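    /// Stores this operand into `dest`, applying `flags` (volatile,
    /// unaligned, nontemporal) to every store or copy that gets emitted.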
    pub(crate) fn store_with_flags<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        self,
        bx: &mut Bx,
        dest: PlaceRef<'tcx, V>,
        flags: MemFlags,
    ) {
        debug!("OperandRef::store: operand={:?}, dest={:?}", self, dest);
        match self {
            OperandValue::ZeroSized => {
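                // A zero-sized value occupies no memory, so there is nothing to store.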
            }
            OperandValue::Ref(val) => {
                assert!(dest.layout.is_sized(), "cannot directly store unsized values");
                if val.llextra.is_some() {
                    bug!("cannot directly store unsized values");
                }
                bx.typed_place_copy_with_flags(dest.val, val, dest.layout, flags);
            }
            OperandValue::Immediate(s) => {
                let val = bx.from_immediate(s);
                bx.store_with_flags(val, dest.val.llval, dest.val.align, flags);
            }
            OperandValue::Pair(a, b) => {
                let BackendRepr::ScalarPair(a_scalar, b_scalar) = dest.layout.backend_repr else {
                    bug!("store_with_flags: invalid ScalarPair layout: {:#?}", dest.layout);
                };
                let b_offset = a_scalar.size(bx).align_to(b_scalar.align(bx).abi);

                let val = bx.from_immediate(a);
                let align = dest.val.align;
                bx.store_with_flags(val, dest.val.llval, align, flags);

                let llptr = bx.inbounds_ptradd(dest.val.llval, bx.const_usize(b_offset.bytes()));
                let val = bx.from_immediate(b);
                let align = dest.val.align.restrict_for_offset(b_offset);
                bx.store_with_flags(val, llptr, align, flags);
            }
        }
    }

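    /// Stores an unsized operand (pointer plus metadata) through
    /// `indirect_dest`: the pointee is copied into a fresh stack allocation
    /// that is padded so its start can be rounded up to the required
    /// alignment by hand, and the resulting pointer/metadata pair is then
    /// stored into `indirect_dest`.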
    pub fn store_unsized<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        self,
        bx: &mut Bx,
        indirect_dest: PlaceRef<'tcx, V>,
    ) {
        debug!("OperandRef::store_unsized: operand={:?}, indirect_dest={:?}", self, indirect_dest);
        let unsized_ty = indirect_dest
            .layout
            .ty
            .builtin_deref(true)
            .unwrap_or_else(|| bug!("indirect_dest has non-pointer type: {:?}", indirect_dest));

        let OperandValue::Ref(PlaceValue { llval: llptr, llextra: Some(llextra), .. }) = self
        else {
            bug!("store_unsized called with a sized value (or with an extern type)")
        };

        let (size, align) = size_of_val::size_and_align_of_dst(bx, unsized_ty, Some(llextra));
        let one = bx.const_usize(1);
        let align_minus_1 = bx.sub(align, one);
        let size_extra = bx.add(size, align_minus_1);
        let min_align = Align::ONE;
        let alloca = bx.dynamic_alloca(size_extra, min_align);
        let address = bx.ptrtoint(alloca, bx.type_isize());
        let neg_address = bx.neg(address);
        let offset = bx.and(neg_address, align_minus_1);
        let dst = bx.inbounds_ptradd(alloca, offset);
        bx.memcpy(dst, min_align, llptr, min_align, size, MemFlags::empty());

        let indirect_operand = OperandValue::Pair(dst, llextra);
        indirect_operand.store(bx, indirect_dest);
    }
}

impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
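    /// Tries to produce the operand for `place_ref` without going through
    /// memory. This only works when the local is already kept as an SSA
    /// operand and every projection is a field access (or lands on a ZST
    /// element); otherwise `None` tells the caller to fall back to a load.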
    fn maybe_codegen_consume_direct(
        &mut self,
        bx: &mut Bx,
        place_ref: mir::PlaceRef<'tcx>,
    ) -> Option<OperandRef<'tcx, Bx::Value>> {
        debug!("maybe_codegen_consume_direct(place_ref={:?})", place_ref);

        match self.locals[place_ref.local] {
            LocalRef::Operand(mut o) => {
                for elem in place_ref.projection.iter() {
                    match elem {
                        mir::ProjectionElem::Field(f, _) => {
                            assert!(
                                !o.layout.ty.is_any_ptr(),
                                "Bad PlaceRef: destructuring pointers should use cast/PtrMetadata, \
                                 but tried to access field {f:?} of pointer {o:?}",
                            );
                            o = o.extract_field(self, bx, f.index());
                        }
                        mir::ProjectionElem::Index(_)
                        | mir::ProjectionElem::ConstantIndex { .. } => {
                            let elem = o.layout.field(bx.cx(), 0);
                            if elem.is_zst() {
                                o = OperandRef::zero_sized(elem);
                            } else {
                                return None;
                            }
                        }
                        _ => return None,
                    }
                }

                Some(o)
            }
            LocalRef::PendingOperand => {
                bug!("use of {:?} before def", place_ref);
            }
            LocalRef::Place(..) | LocalRef::UnsizedPlace(..) => {
                None
            }
        }
    }

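    /// Produces the operand for `place_ref`, preferring the direct
    /// (non-memory) path and otherwise codegenning the place and loading
    /// from it. ZSTs short-circuit to an empty operand.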
    pub fn codegen_consume(
        &mut self,
        bx: &mut Bx,
        place_ref: mir::PlaceRef<'tcx>,
    ) -> OperandRef<'tcx, Bx::Value> {
        debug!("codegen_consume(place_ref={:?})", place_ref);

        let ty = self.monomorphized_place_ty(place_ref);
        let layout = bx.cx().layout_of(ty);

        if layout.is_zst() {
            return OperandRef::zero_sized(layout);
        }

        if let Some(o) = self.maybe_codegen_consume_direct(bx, place_ref) {
            return o;
        }

        let place = self.codegen_place(bx, place_ref);
        bx.load_operand(place)
    }

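    /// Codegens a MIR operand: copies and moves are consumed as places,
    /// while constants are evaluated, with SIMD constants special-cased so
    /// they become immediate vector values.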
    pub fn codegen_operand(
        &mut self,
        bx: &mut Bx,
        operand: &mir::Operand<'tcx>,
    ) -> OperandRef<'tcx, Bx::Value> {
        debug!("codegen_operand(operand={:?})", operand);

        match *operand {
            mir::Operand::Copy(ref place) | mir::Operand::Move(ref place) => {
                self.codegen_consume(bx, place.as_ref())
            }

            mir::Operand::Constant(ref constant) => {
                let constant_ty = self.monomorphize(constant.ty());
                if constant_ty.is_simd() {
                    let layout = bx.layout_of(constant_ty);
                    if let BackendRepr::SimdVector { .. } = layout.backend_repr {
                        let (llval, ty) = self.immediate_const_vector(bx, constant);
                        return OperandRef {
                            val: OperandValue::Immediate(llval),
                            layout: bx.layout_of(ty),
                        };
                    }
                }
                self.eval_mir_constant_to_operand(bx, constant)
            }
        }
    }
}