use std::fmt;

use rustc_abi as abi;
use rustc_abi::{
    Align, BackendRepr, FIRST_VARIANT, FieldIdx, Primitive, Size, TagEncoding, VariantIdx, Variants,
};
use rustc_middle::mir::interpret::{Pointer, Scalar, alloc_range};
use rustc_middle::mir::{self, ConstValue};
use rustc_middle::ty::Ty;
use rustc_middle::ty::layout::{LayoutOf, TyAndLayout};
use rustc_middle::{bug, span_bug};
use rustc_session::config::OptLevel;
use tracing::{debug, instrument};

use super::place::{PlaceRef, PlaceValue};
use super::rvalue::transmute_immediate;
use super::{FunctionCx, LocalRef};
use crate::common::IntPredicate;
use crate::traits::*;
use crate::{MemFlags, size_of_val};

/// How an operand value is represented in the backend.
#[derive(Copy, Clone, Debug)]
pub enum OperandValue<V> {
    /// The operand lives in memory, behind a pointer (plus optional metadata).
    Ref(PlaceValue<V>),
    /// A single backend immediate value.
    Immediate(V),
    /// A pair of backend immediate values, for `ScalarPair` layouts.
    Pair(V, V),
    /// A value of zero size, with no backend representation at all.
    ZeroSized,
}

impl<V: CodegenObject> OperandValue<V> {
    /// Treats this operand as a pointer, returning the data pointer and the
    /// optional metadata (for wide pointers).
    pub(crate) fn pointer_parts(self) -> (V, Option<V>) {
        match self {
            OperandValue::Immediate(llptr) => (llptr, None),
            OperandValue::Pair(llptr, llextra) => (llptr, Some(llextra)),
            _ => bug!("OperandValue cannot be a pointer: {self:?}"),
        }
    }

    /// Treats this operand as a pointer and dereferences it to a place value
    /// with the given alignment.
    pub(crate) fn deref(self, align: Align) -> PlaceValue<V> {
        let (llval, llextra) = self.pointer_parts();
        PlaceValue { llval, llextra, align }
    }

    /// Checks that this operand's variant is the one the backend expects for
    /// the given layout.
    pub(crate) fn is_expected_variant_for_type<'tcx, Cx: LayoutTypeCodegenMethods<'tcx>>(
        &self,
        cx: &Cx,
        ty: TyAndLayout<'tcx>,
    ) -> bool {
        match self {
            OperandValue::ZeroSized => ty.is_zst(),
            OperandValue::Immediate(_) => cx.is_backend_immediate(ty),
            OperandValue::Pair(_, _) => cx.is_backend_scalar_pair(ty),
            OperandValue::Ref(_) => cx.is_backend_ref(ty),
        }
    }
}

/// An operand value together with its type and layout information.
#[derive(Copy, Clone)]
pub struct OperandRef<'tcx, V> {
    /// The value of the operand.
    pub val: OperandValue<V>,

    /// The layout of the value, including its type.
    pub layout: TyAndLayout<'tcx>,
}

impl<V: CodegenObject> fmt::Debug for OperandRef<'_, V> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "OperandRef({:?} @ {:?})", self.val, self.layout)
    }
}

impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
    pub fn zero_sized(layout: TyAndLayout<'tcx>) -> OperandRef<'tcx, V> {
        assert!(layout.is_zst());
        OperandRef { val: OperandValue::ZeroSized, layout }
    }

    pub(crate) fn from_const<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        bx: &mut Bx,
        val: mir::ConstValue<'tcx>,
        ty: Ty<'tcx>,
    ) -> Self {
        let layout = bx.layout_of(ty);

        let val = match val {
            ConstValue::Scalar(x) => {
                let BackendRepr::Scalar(scalar) = layout.backend_repr else {
                    bug!("from_const: invalid ByVal layout: {:#?}", layout);
                };
                let llval = bx.scalar_to_backend(x, scalar, bx.immediate_backend_type(layout));
                OperandValue::Immediate(llval)
            }
            ConstValue::ZeroSized => return OperandRef::zero_sized(layout),
            ConstValue::Slice { data, meta } => {
                let BackendRepr::ScalarPair(a_scalar, _) = layout.backend_repr else {
                    bug!("from_const: invalid ScalarPair layout: {:#?}", layout);
                };
                let a = Scalar::from_pointer(
                    Pointer::new(bx.tcx().reserve_and_set_memory_alloc(data).into(), Size::ZERO),
                    &bx.tcx(),
                );
                let a_llval = bx.scalar_to_backend(
                    a,
                    a_scalar,
                    bx.scalar_pair_element_backend_type(layout, 0, true),
                );
                let b_llval = bx.const_usize(meta);
                OperandValue::Pair(a_llval, b_llval)
            }
            ConstValue::Indirect { alloc_id, offset } => {
                let alloc = bx.tcx().global_alloc(alloc_id).unwrap_memory();
                return Self::from_const_alloc(bx, layout, alloc, offset);
            }
        };

        OperandRef { val, layout }
    }

    fn from_const_alloc<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        bx: &mut Bx,
        layout: TyAndLayout<'tcx>,
        alloc: rustc_middle::mir::interpret::ConstAllocation<'tcx>,
        offset: Size,
    ) -> Self {
        let alloc_align = alloc.inner().align;
        assert!(alloc_align >= layout.align.abi);

        let read_scalar = |start, size, s: abi::Scalar, ty| {
            match alloc.0.read_scalar(
                bx,
                alloc_range(start, size),
                matches!(s.primitive(), abi::Primitive::Pointer(_)),
            ) {
                Ok(val) => bx.scalar_to_backend(val, s, ty),
                Err(_) => bx.const_poison(ty),
            }
        };

        match layout.backend_repr {
            BackendRepr::Scalar(s @ abi::Scalar::Initialized { .. }) => {
                let size = s.size(bx);
                assert_eq!(size, layout.size, "abi::Scalar size does not match layout size");
                let val = read_scalar(offset, size, s, bx.immediate_backend_type(layout));
                OperandRef { val: OperandValue::Immediate(val), layout }
            }
            BackendRepr::ScalarPair(
                a @ abi::Scalar::Initialized { .. },
                b @ abi::Scalar::Initialized { .. },
            ) => {
                let (a_size, b_size) = (a.size(bx), b.size(bx));
                let b_offset = (offset + a_size).align_to(b.align(bx).abi);
                assert!(b_offset.bytes() > 0);
                let a_val = read_scalar(
                    offset,
                    a_size,
                    a,
                    bx.scalar_pair_element_backend_type(layout, 0, true),
                );
                let b_val = read_scalar(
                    b_offset,
                    b_size,
                    b,
                    bx.scalar_pair_element_backend_type(layout, 1, true),
                );
                OperandRef { val: OperandValue::Pair(a_val, b_val), layout }
            }
            _ if layout.is_zst() => OperandRef::zero_sized(layout),
            _ => {
                // Neither a scalar nor a scalar pair: materialize the
                // allocation as a global and load the operand back out of it.
                let init = bx.const_data_from_alloc(alloc);
                let base_addr = bx.static_addr_of(init, alloc_align, None);

                let llval = bx.const_ptr_byte_offset(base_addr, offset);
                bx.load_operand(PlaceRef::new_sized(llval, layout))
            }
        }
    }

    /// Returns the immediate value, panicking if this operand is not an
    /// `OperandValue::Immediate`.
    pub fn immediate(self) -> V {
        match self.val {
            OperandValue::Immediate(s) => s,
            _ => bug!("not immediate: {:?}", self),
        }
    }

    /// Dereferences this pointer operand, producing a place for the pointee.
    pub fn deref<Cx: CodegenMethods<'tcx>>(self, cx: &Cx) -> PlaceRef<'tcx, V> {
        if self.layout.ty.is_box() {
            bug!("dereferencing {:?} in codegen", self.layout.ty);
        }

        let projected_ty = self
            .layout
            .ty
            .builtin_deref(true)
            .unwrap_or_else(|| bug!("deref of non-pointer {:?}", self));

        let layout = cx.layout_of(projected_ty);
        self.val.deref(layout.align.abi).with_type(layout)
    }

    /// If this operand is a `Pair`, packs it into a single backend aggregate
    /// value; otherwise returns the immediate unchanged.
    pub fn immediate_or_packed_pair<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        self,
        bx: &mut Bx,
    ) -> V {
        if let OperandValue::Pair(a, b) = self.val {
            let llty = bx.cx().immediate_backend_type(self.layout);
            debug!("Operand::immediate_or_packed_pair: packing {:?} into {:?}", self, llty);
            let mut llpair = bx.cx().const_poison(llty);
            llpair = bx.insert_value(llpair, a, 0);
            llpair = bx.insert_value(llpair, b, 1);
            llpair
        } else {
            self.immediate()
        }
    }

    /// The inverse of `immediate_or_packed_pair`: unpacks a backend value into
    /// an `Immediate` or `Pair` operand, depending on the layout.
    pub fn from_immediate_or_packed_pair<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        bx: &mut Bx,
        llval: V,
        layout: TyAndLayout<'tcx>,
    ) -> Self {
        let val = if let BackendRepr::ScalarPair(..) = layout.backend_repr {
            debug!("Operand::from_immediate_or_packed_pair: unpacking {:?} @ {:?}", llval, layout);

            let a_llval = bx.extract_value(llval, 0);
            let b_llval = bx.extract_value(llval, 1);
            OperandValue::Pair(a_llval, b_llval)
        } else {
            OperandValue::Immediate(llval)
        };
        OperandRef { val, layout }
    }

    pub(crate) fn extract_field<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        &self,
        fx: &mut FunctionCx<'a, 'tcx, Bx>,
        bx: &mut Bx,
        i: usize,
    ) -> Self {
        let field = self.layout.field(bx.cx(), i);
        let offset = self.layout.fields.offset(i);

        if !bx.is_backend_ref(self.layout) && bx.is_backend_ref(field) {
            if let BackendRepr::SimdVector { count, .. } = self.layout.backend_repr
                && let BackendRepr::Memory { sized: true } = field.backend_repr
                && count.is_power_of_two()
            {
                assert_eq!(field.size, self.layout.size);
                // Projecting a memory-repr field out of a SIMD vector operand:
                // spill the vector to a stack slot and load the field back.
                let place = PlaceRef::alloca(bx, field);
                self.val.store(bx, place.val.with_type(self.layout));
                return bx.load_operand(place);
            } else {
                bug!("Non-ref type {self:?} cannot project to ref field type {field:?}");
            }
        }

        let val = if field.is_zst() {
            OperandValue::ZeroSized
        } else if field.size == self.layout.size {
            assert_eq!(offset.bytes(), 0);
            fx.codegen_transmute_operand(bx, *self, field).unwrap_or_else(|| {
                bug!(
                    "Expected `codegen_transmute_operand` to handle equal-size \
                     field {i:?} projection from {self:?} to {field:?}"
                )
            })
        } else {
            let (in_scalar, imm) = match (self.val, self.layout.backend_repr) {
                (OperandValue::Pair(a_llval, b_llval), BackendRepr::ScalarPair(a, b)) => {
                    if offset.bytes() == 0 {
                        assert_eq!(field.size, a.size(bx.cx()));
                        (Some(a), a_llval)
                    } else {
                        assert_eq!(offset, a.size(bx.cx()).align_to(b.align(bx.cx()).abi));
                        assert_eq!(field.size, b.size(bx.cx()));
                        (Some(b), b_llval)
                    }
                }

                _ => {
                    span_bug!(fx.mir.span, "OperandRef::extract_field({:?}): not applicable", self)
                }
            };
            OperandValue::Immediate(match field.backend_repr {
                BackendRepr::SimdVector { .. } => imm,
                BackendRepr::Scalar(out_scalar) => {
                    let Some(in_scalar) = in_scalar else {
                        span_bug!(
                            fx.mir.span,
                            "OperandRef::extract_field({:?}): missing input scalar for output scalar",
                            self
                        )
                    };
                    if in_scalar != out_scalar {
                        // The immediate representations of the two scalars may
                        // differ (e.g. `i1` vs `i8` for `bool`), so convert via
                        // the full backend type.
                        let backend = bx.from_immediate(imm);
                        bx.to_immediate_scalar(backend, out_scalar)
                    } else {
                        imm
                    }
                }
                BackendRepr::ScalarPair(_, _) | BackendRepr::Memory { .. } => bug!(),
            })
        };

        OperandRef { val, layout: field }
    }

    /// Returns the discriminant of this enum operand, cast to `cast_to`.
    #[instrument(level = "trace", skip(fx, bx))]
    pub fn codegen_get_discr<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        self,
        fx: &mut FunctionCx<'a, 'tcx, Bx>,
        bx: &mut Bx,
        cast_to: Ty<'tcx>,
    ) -> V {
        let dl = &bx.tcx().data_layout;
        let cast_to_layout = bx.cx().layout_of(cast_to);
        let cast_to = bx.cx().immediate_backend_type(cast_to_layout);

        if self.layout.is_uninhabited() {
            return bx.cx().const_poison(cast_to);
        }

        let (tag_scalar, tag_encoding, tag_field) = match self.layout.variants {
            Variants::Empty => unreachable!("we already handled uninhabited types"),
            Variants::Single { index } => {
                let discr_val =
                    if let Some(discr) = self.layout.ty.discriminant_for_variant(bx.tcx(), index) {
                        discr.val
                    } else {
                        // Types without a discriminant fall back to variant 0
                        // with a discriminant value of 0.
                        assert_eq!(index, FIRST_VARIANT);
                        0
                    };
                return bx.cx().const_uint_big(cast_to, discr_val);
            }
            Variants::Multiple { tag, ref tag_encoding, tag_field, .. } => {
                (tag, tag_encoding, tag_field)
            }
        };

        // Read the tag, either from the immediate(s) or from memory.
        let tag_op = match self.val {
            OperandValue::ZeroSized => bug!(),
            OperandValue::Immediate(_) | OperandValue::Pair(_, _) => {
                self.extract_field(fx, bx, tag_field.as_usize())
            }
            OperandValue::Ref(place) => {
                let tag = place.with_type(self.layout).project_field(bx, tag_field.as_usize());
                bx.load_operand(tag)
            }
        };
        let tag_imm = tag_op.immediate();

        match *tag_encoding {
            // The tag is the discriminant itself: just cast it.
            TagEncoding::Direct => {
                let signed = match tag_scalar.primitive() {
                    Primitive::Int(_, signed) => !tag_scalar.is_bool() && signed,
                    _ => false,
                };
                bx.intcast(tag_imm, cast_to, signed)
            }
            // Niche encoding: some variants are represented by otherwise
            // invalid values ("niches") of the untagged variant's payload.
            TagEncoding::Niche { untagged_variant, ref niche_variants, niche_start } => {
                let (tag, tag_llty) = match tag_scalar.primitive() {
                    Primitive::Pointer(_) => {
                        // Compare pointer tags as pointer-sized integers.
                        let t = bx.type_from_integer(dl.ptr_sized_integer());
                        let tag = bx.ptrtoint(tag_imm, t);
                        (tag, t)
                    }
                    _ => (tag_imm, bx.cx().immediate_backend_type(tag_op.layout)),
                };

                let relative_max = niche_variants.end().as_u32() - niche_variants.start().as_u32();

                let (is_niche, tagged_discr, delta) = if relative_max == 0 {
                    // Only one niche variant: a single equality check suffices.
                    let niche_start = bx.cx().const_uint_big(tag_llty, niche_start);
                    let is_niche = bx.icmp(IntPredicate::IntEQ, tag, niche_start);
                    let tagged_discr =
                        bx.cx().const_uint(cast_to, niche_variants.start().as_u32() as u64);
                    (is_niche, tagged_discr, 0)
                } else {
                    // Rebase the tag so the niche variants start at zero, then
                    // range-check against the number of niche variants.
                    let relative_discr = bx.sub(tag, bx.cx().const_uint_big(tag_llty, niche_start));
                    let cast_tag = bx.intcast(relative_discr, cast_to, false);
                    let is_niche = bx.icmp(
                        IntPredicate::IntULE,
                        relative_discr,
                        bx.cx().const_uint(tag_llty, relative_max as u64),
                    );

                    if niche_variants.contains(&untagged_variant)
                        && bx.cx().sess().opts.optimize != OptLevel::No
                    {
                        // The relative discriminant can never equal the value
                        // corresponding to the untagged variant; tell the
                        // backend so it can optimize accordingly.
                        let impossible =
                            u64::from(untagged_variant.as_u32() - niche_variants.start().as_u32());
                        let impossible = bx.cx().const_uint(tag_llty, impossible);
                        let ne = bx.icmp(IntPredicate::IntNE, relative_discr, impossible);
                        bx.assume(ne);
                    }

                    (is_niche, cast_tag, niche_variants.start().as_u32() as u128)
                };

                let tagged_discr = if delta == 0 {
                    tagged_discr
                } else {
                    bx.add(tagged_discr, bx.cx().const_uint_big(cast_to, delta))
                };

                let discr = bx.select(
                    is_niche,
                    tagged_discr,
                    bx.cx().const_uint(cast_to, untagged_variant.as_u32() as u64),
                );

                discr
            }
        }
    }

    /// Creates a builder operand whose immediates are still missing,
    /// represented as `Err(scalar)`. Fields are supplied via `insert_field`
    /// and the finished operand is obtained from `build`. Returns `None` for
    /// layouts that cannot be built from immediates (non-ZST memory layouts
    /// and SIMD vectors).
    pub(crate) fn builder(
        layout: TyAndLayout<'tcx>,
    ) -> Option<OperandRef<'tcx, Result<V, abi::Scalar>>> {
        let val = match layout.backend_repr {
            BackendRepr::Memory { .. } if layout.is_zst() => OperandValue::ZeroSized,
            BackendRepr::Scalar(s) => OperandValue::Immediate(Err(s)),
            BackendRepr::ScalarPair(a, b) => OperandValue::Pair(Err(a), Err(b)),
            BackendRepr::Memory { .. } | BackendRepr::SimdVector { .. } => return None,
        };
        Some(OperandRef { val, layout })
    }
}

impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, Result<V, abi::Scalar>> {
    pub(crate) fn insert_field<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        &mut self,
        bx: &mut Bx,
        v: VariantIdx,
        f: FieldIdx,
        operand: OperandRef<'tcx, V>,
    ) {
        let (expect_zst, is_zero_offset) = if let abi::FieldsShape::Primitive = self.layout.fields {
            // `FieldsShape::Primitive` has no per-field layout information,
            // so the general path below does not apply.
            assert!(!self.layout.is_zst());
            assert_eq!(v, FIRST_VARIANT);
            let first_field = f == FieldIdx::ZERO;
            (!first_field, first_field)
        } else {
            let variant_layout = self.layout.for_variant(bx.cx(), v);
            let field_layout = variant_layout.field(bx.cx(), f.as_usize());
            let field_offset = variant_layout.fields.offset(f.as_usize());
            (field_layout.is_zst(), field_offset == Size::ZERO)
        };

        let mut update = |tgt: &mut Result<V, abi::Scalar>, src, from_scalar| {
            let from_bty = bx.cx().type_from_scalar(from_scalar);
            let to_scalar = tgt.unwrap_err();
            let to_bty = bx.cx().type_from_scalar(to_scalar);
            let imm = transmute_immediate(bx, src, from_scalar, from_bty, to_scalar, to_bty);
            *tgt = Ok(imm);
        };

        match (operand.val, operand.layout.backend_repr) {
            (OperandValue::ZeroSized, _) if expect_zst => {}
            (OperandValue::Immediate(v), BackendRepr::Scalar(from_scalar)) => match &mut self.val {
                OperandValue::Immediate(val @ Err(_)) if is_zero_offset => {
                    update(val, v, from_scalar);
                }
                OperandValue::Pair(fst @ Err(_), _) if is_zero_offset => {
                    update(fst, v, from_scalar);
                }
                OperandValue::Pair(_, snd @ Err(_)) if !is_zero_offset => {
                    update(snd, v, from_scalar);
                }
                _ => bug!("Tried to insert {operand:?} into {v:?}.{f:?} of {self:?}"),
            },
            (OperandValue::Pair(a, b), BackendRepr::ScalarPair(from_sa, from_sb)) => {
                match &mut self.val {
                    OperandValue::Pair(fst @ Err(_), snd @ Err(_)) => {
                        update(fst, a, from_sa);
                        update(snd, b, from_sb);
                    }
                    _ => bug!("Tried to insert {operand:?} into {v:?}.{f:?} of {self:?}"),
                }
            }
            _ => bug!("Unsupported operand {operand:?} inserting into {v:?}.{f:?} of {self:?}"),
        }
    }

    /// Converts the partially-built operand into a finished one, panicking if
    /// any of its immediates were never filled in.
    pub fn build(&self) -> OperandRef<'tcx, V> {
        let OperandRef { val, layout } = *self;

        let unwrap = |r: Result<V, abi::Scalar>| match r {
            Ok(v) => v,
            Err(_) => bug!("OperandRef::build called while fields are missing {self:?}"),
        };

        let val = match val {
            OperandValue::ZeroSized => OperandValue::ZeroSized,
            OperandValue::Immediate(v) => OperandValue::Immediate(unwrap(v)),
            OperandValue::Pair(a, b) => OperandValue::Pair(unwrap(a), unwrap(b)),
            OperandValue::Ref(_) => bug!(),
        };
        OperandRef { val, layout }
    }
}

impl<'a, 'tcx, V: CodegenObject> OperandValue<V> {
    /// Returns a poison value of the given layout: `ZeroSized` for ZSTs,
    /// poison immediates for scalar and scalar-pair layouts, and a `Ref`
    /// through a poison pointer for everything else.
    pub fn poison<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        bx: &mut Bx,
        layout: TyAndLayout<'tcx>,
    ) -> OperandValue<V> {
        assert!(layout.is_sized());
        if layout.is_zst() {
            OperandValue::ZeroSized
        } else if bx.cx().is_backend_immediate(layout) {
            let ibty = bx.cx().immediate_backend_type(layout);
            OperandValue::Immediate(bx.const_poison(ibty))
        } else if bx.cx().is_backend_scalar_pair(layout) {
            let ibty0 = bx.cx().scalar_pair_element_backend_type(layout, 0, true);
            let ibty1 = bx.cx().scalar_pair_element_backend_type(layout, 1, true);
            OperandValue::Pair(bx.const_poison(ibty0), bx.const_poison(ibty1))
        } else {
            let ptr = bx.cx().type_ptr();
            OperandValue::Ref(PlaceValue::new_sized(bx.const_poison(ptr), layout.align.abi))
        }
    }

    pub fn store<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        self,
        bx: &mut Bx,
        dest: PlaceRef<'tcx, V>,
    ) {
        self.store_with_flags(bx, dest, MemFlags::empty());
    }

    pub fn volatile_store<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        self,
        bx: &mut Bx,
        dest: PlaceRef<'tcx, V>,
    ) {
        self.store_with_flags(bx, dest, MemFlags::VOLATILE);
    }

    pub fn unaligned_volatile_store<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        self,
        bx: &mut Bx,
        dest: PlaceRef<'tcx, V>,
    ) {
        self.store_with_flags(bx, dest, MemFlags::VOLATILE | MemFlags::UNALIGNED);
    }

    pub fn nontemporal_store<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        self,
        bx: &mut Bx,
        dest: PlaceRef<'tcx, V>,
    ) {
        self.store_with_flags(bx, dest, MemFlags::NONTEMPORAL);
    }

    pub(crate) fn store_with_flags<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        self,
        bx: &mut Bx,
        dest: PlaceRef<'tcx, V>,
        flags: MemFlags,
    ) {
        debug!("OperandRef::store: operand={:?}, dest={:?}", self, dest);
        match self {
            OperandValue::ZeroSized => {
                // Nothing to store for zero-sized values.
            }
            OperandValue::Ref(val) => {
                assert!(dest.layout.is_sized(), "cannot directly store unsized values");
                if val.llextra.is_some() {
                    bug!("cannot directly store unsized values");
                }
                bx.typed_place_copy_with_flags(dest.val, val, dest.layout, flags);
            }
            OperandValue::Immediate(s) => {
                let val = bx.from_immediate(s);
                bx.store_with_flags(val, dest.val.llval, dest.val.align, flags);
            }
            OperandValue::Pair(a, b) => {
                let BackendRepr::ScalarPair(a_scalar, b_scalar) = dest.layout.backend_repr else {
                    bug!("store_with_flags: invalid ScalarPair layout: {:#?}", dest.layout);
                };
                let b_offset = a_scalar.size(bx).align_to(b_scalar.align(bx).abi);

                let val = bx.from_immediate(a);
                let align = dest.val.align;
                bx.store_with_flags(val, dest.val.llval, align, flags);

                let llptr = bx.inbounds_ptradd(dest.val.llval, bx.const_usize(b_offset.bytes()));
                let val = bx.from_immediate(b);
                let align = dest.val.align.restrict_for_offset(b_offset);
                bx.store_with_flags(val, llptr, align, flags);
            }
        }
    }

    pub fn store_unsized<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        self,
        bx: &mut Bx,
        indirect_dest: PlaceRef<'tcx, V>,
    ) {
        debug!("OperandRef::store_unsized: operand={:?}, indirect_dest={:?}", self, indirect_dest);
        let unsized_ty = indirect_dest
            .layout
            .ty
            .builtin_deref(true)
            .unwrap_or_else(|| bug!("indirect_dest has non-pointer type: {:?}", indirect_dest));

        let OperandValue::Ref(PlaceValue { llval: llptr, llextra: Some(llextra), .. }) = self
        else {
            bug!("store_unsized called with a sized value (or with an extern type)")
        };

        // Allocate a stack slot for the unsized value and copy it there. The
        // alignment is only known at runtime, so over-allocate by `align - 1`
        // bytes and round the pointer up by hand.
        let (size, align) = size_of_val::size_and_align_of_dst(bx, unsized_ty, Some(llextra));
        let one = bx.const_usize(1);
        let align_minus_1 = bx.sub(align, one);
        let size_extra = bx.add(size, align_minus_1);
        let min_align = Align::ONE;
        let alloca = bx.dynamic_alloca(size_extra, min_align);
        let address = bx.ptrtoint(alloca, bx.type_isize());
        let neg_address = bx.neg(address);
        let offset = bx.and(neg_address, align_minus_1);
        let dst = bx.inbounds_ptradd(alloca, offset);
        bx.memcpy(dst, min_align, llptr, min_align, size, MemFlags::empty());

        // Store the pointer to the copied data together with its metadata.
        let indirect_operand = OperandValue::Pair(dst, llextra);
        indirect_operand.store(bx, indirect_dest);
    }
}

impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
    fn maybe_codegen_consume_direct(
        &mut self,
        bx: &mut Bx,
        place_ref: mir::PlaceRef<'tcx>,
    ) -> Option<OperandRef<'tcx, Bx::Value>> {
        debug!("maybe_codegen_consume_direct(place_ref={:?})", place_ref);

        match self.locals[place_ref.local] {
            LocalRef::Operand(mut o) => {
                // Moves out of scalar and scalar pair fields are trivial.
                for elem in place_ref.projection.iter() {
                    match elem {
                        mir::ProjectionElem::Field(f, _) => {
                            assert!(
                                !o.layout.ty.is_any_ptr(),
                                "Bad PlaceRef: destructing pointers should use cast/PtrMetadata, \
                                 but tried to access field {f:?} of pointer {o:?}",
                            );
                            o = o.extract_field(self, bx, f.index());
                        }
                        mir::ProjectionElem::Index(_)
                        | mir::ProjectionElem::ConstantIndex { .. } => {
                            // Indexing a direct operand only works when the
                            // element is a ZST, since then no memory access is
                            // needed; otherwise fall back to the memory path.
                            let elem = o.layout.field(bx.cx(), 0);
                            if elem.is_zst() {
                                o = OperandRef::zero_sized(elem);
                            } else {
                                return None;
                            }
                        }
                        _ => return None,
                    }
                }

                Some(o)
            }
            LocalRef::PendingOperand => {
                bug!("use of {:?} before def", place_ref);
            }
            LocalRef::Place(..) | LocalRef::UnsizedPlace(..) => {
                // These locals live in memory; use the place-based path.
                None
            }
        }
    }

    pub fn codegen_consume(
        &mut self,
        bx: &mut Bx,
        place_ref: mir::PlaceRef<'tcx>,
    ) -> OperandRef<'tcx, Bx::Value> {
        debug!("codegen_consume(place_ref={:?})", place_ref);

        let ty = self.monomorphized_place_ty(place_ref);
        let layout = bx.cx().layout_of(ty);

        // ZSTs don't require any actual memory access.
        if layout.is_zst() {
            return OperandRef::zero_sized(layout);
        }

        if let Some(o) = self.maybe_codegen_consume_direct(bx, place_ref) {
            return o;
        }

        // Otherwise, generate the place and load the operand from it.
        let place = self.codegen_place(bx, place_ref);
        bx.load_operand(place)
    }

    pub fn codegen_operand(
        &mut self,
        bx: &mut Bx,
        operand: &mir::Operand<'tcx>,
    ) -> OperandRef<'tcx, Bx::Value> {
        debug!("codegen_operand(operand={:?})", operand);

        match *operand {
            mir::Operand::Copy(ref place) | mir::Operand::Move(ref place) => {
                self.codegen_consume(bx, place.as_ref())
            }

            mir::Operand::Constant(ref constant) => {
                let constant_ty = self.monomorphize(constant.ty());
                // SIMD vector constants are passed as immediates, but only
                // when the backend representation is actually a vector.
                if constant_ty.is_simd() {
                    let layout = bx.layout_of(constant_ty);
                    if let BackendRepr::SimdVector { .. } = layout.backend_repr {
                        let (llval, ty) = self.immediate_const_vector(bx, constant);
                        return OperandRef {
                            val: OperandValue::Immediate(llval),
                            layout: bx.layout_of(ty),
                        };
                    }
                }
                self.eval_mir_constant_to_operand(bx, constant)
            }
        }
    }
}