use rustc_abi::{
    Align, BackendRepr, FieldIdx, FieldsShape, Size, TagEncoding, VariantIdx, Variants,
};
use rustc_middle::mir::PlaceTy;
use rustc_middle::mir::interpret::Scalar;
use rustc_middle::ty::layout::{HasTyCtxt, HasTypingEnv, LayoutOf, TyAndLayout};
use rustc_middle::ty::{self, Ty};
use rustc_middle::{bug, mir};
use tracing::{debug, instrument};

use super::operand::OperandValue;
use super::{FunctionCx, LocalRef};
use crate::common::IntPredicate;
use crate::size_of_val;
use crate::traits::*;

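/// The location and extra runtime properties of a place: a (possibly wide)
/// pointer to the data plus the alignment known for it.
///
/// As a location in memory, this carries no type information; pair it with a
/// layout via [`Self::with_type`] to obtain a [`PlaceRef`].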
#[derive(Copy, Clone, Debug)]
pub struct PlaceValue<V> {
    /// A pointer to the contents of the place.
    pub llval: V,

    /// Pointer metadata (e.g. a slice length or vtable) if the place is
    /// unsized, otherwise `None`.
    pub llextra: Option<V>,

    /// The alignment we know this place to have; it may be lower than the
    /// type's preferred alignment, e.g. for a field of a packed struct.
    pub align: Align,
}

impl<V: CodegenObject> PlaceValue<V> {
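    /// Constructs a `PlaceValue` for a sized place with no pointer metadata.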
    pub fn new_sized(llval: V, align: Align) -> PlaceValue<V> {
        PlaceValue { llval, llextra: None, align }
    }

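    /// Allocates a stack slot of the given size and alignment and wraps the
    /// resulting pointer in a sized `PlaceValue`.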
    pub fn alloca<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        bx: &mut Bx,
        size: Size,
        align: Align,
    ) -> PlaceValue<V> {
        let llval = bx.alloca(size, align);
        PlaceValue::new_sized(llval, align)
    }

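    /// Gives this value the given type's layout, producing a [`PlaceRef`].
    ///
    /// Sized, inhabited layouts must not carry pointer metadata; the assertion
    /// enforces this.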
    pub fn with_type<'tcx>(self, layout: TyAndLayout<'tcx>) -> PlaceRef<'tcx, V> {
        assert!(
            layout.is_unsized() || layout.is_uninhabited() || self.llextra.is_none(),
            "Had pointer metadata {:?} for sized type {layout:?}",
            self.llextra,
        );
        PlaceRef { val: self, layout }
    }

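    /// Gets the pointer to this place as an [`OperandValue::Immediate`], or as
    /// an [`OperandValue::Pair`] of the pointer and its metadata if there is
    /// any.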
    pub fn address(self) -> OperandValue<V> {
        if let Some(llextra) = self.llextra {
            OperandValue::Pair(self.llval, llextra)
        } else {
            OperandValue::Immediate(self.llval)
        }
    }
}

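/// A typed place: the raw [`PlaceValue`] plus the layout of the value stored
/// in it.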
#[derive(Copy, Clone, Debug)]
pub struct PlaceRef<'tcx, V> {
    /// The location and extra runtime properties of the place.
    pub val: PlaceValue<V>,

    /// The monomorphized type of this place, including variant information.
    ///
    /// Note that the alignment of the place is the one stored in
    /// `self.val.align`, which may be lower than `layout.align.abi`.
    pub layout: TyAndLayout<'tcx>,
}

impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> {
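    /// Creates a `PlaceRef` to a sized value at `llval`, using the layout's
    /// ABI alignment.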
    pub fn new_sized(llval: V, layout: TyAndLayout<'tcx>) -> PlaceRef<'tcx, V> {
        PlaceRef::new_sized_aligned(llval, layout, layout.align.abi)
    }

    pub fn new_sized_aligned(
        llval: V,
        layout: TyAndLayout<'tcx>,
        align: Align,
    ) -> PlaceRef<'tcx, V> {
        assert!(layout.is_sized());
        PlaceValue::new_sized(llval, align).with_type(layout)
    }

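    /// Allocates a stack slot for `layout` and returns a place referring to it.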
    pub fn alloca<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        bx: &mut Bx,
        layout: TyAndLayout<'tcx>,
    ) -> Self {
        Self::alloca_size(bx, layout.size, layout)
    }

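    /// Allocates a stack slot of the given `size` with the layout's ABI
    /// alignment and types it as `layout`, which must be sized.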
    pub fn alloca_size<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        bx: &mut Bx,
        size: Size,
        layout: TyAndLayout<'tcx>,
    ) -> Self {
        assert!(layout.is_sized(), "tried to statically allocate unsized place");
        PlaceValue::alloca(bx, size, layout.align.abi).with_type(layout)
    }

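    /// Returns a place for an indirect reference to an unsized place: it
    /// allocates a slot holding a `*mut layout.ty` rather than the
    /// (dynamically sized) value itself.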
    pub fn alloca_unsized_indirect<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        bx: &mut Bx,
        layout: TyAndLayout<'tcx>,
    ) -> Self {
        assert!(layout.is_unsized(), "tried to allocate indirect place for sized values");
        let ptr_ty = Ty::new_mut_ptr(bx.cx().tcx(), layout.ty);
        let ptr_layout = bx.cx().layout_of(ptr_ty);
        Self::alloca(bx, ptr_layout)
    }

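    /// Returns the length of this array or slice place as a `usize` value: the
    /// statically known array length, or the slice length carried in
    /// `llextra` for an unsized place.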
    pub fn len<Cx: ConstCodegenMethods<Value = V>>(&self, cx: &Cx) -> V {
        if let FieldsShape::Array { count, .. } = self.layout.fields {
            if self.layout.is_unsized() {
                assert_eq!(count, 0);
                self.val.llextra.unwrap()
            } else {
                cx.const_usize(count)
            }
        } else {
            bug!("unexpected layout `{:#?}` in PlaceRef::len", self.layout)
        }
    }
}

impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> {
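    /// Projects to the `ix`-th field of this place, offsetting the pointer and
    /// restricting the alignment accordingly.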
    pub fn project_field<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        self,
        bx: &mut Bx,
        ix: usize,
    ) -> Self {
        let field = self.layout.field(bx.cx(), ix);
        let offset = self.layout.fields.offset(ix);
        let effective_field_align = self.val.align.restrict_for_offset(offset);

        // For fields at a statically known offset, a plain inbounds pointer
        // addition is enough.
        let mut simple = || {
            let llval = if offset.bytes() == 0 {
                self.val.llval
            } else {
                bx.inbounds_ptradd(self.val.llval, bx.const_usize(offset.bytes()))
            };
            let val = PlaceValue {
                llval,
                llextra: if bx.cx().tcx().type_has_metadata(field.ty, bx.cx().typing_env()) {
                    self.val.llextra
                } else {
                    None
                },
                align: effective_field_align,
            };
            val.with_type(field)
        };

        // Simple cases, which don't need any DST adjustment:
        //   * sized types, `[T]`, and `str` have statically known alignment
        //   * at offset 0, rounding up to alignment cannot change the offset
        match field.ty.kind() {
            _ if field.is_sized() => return simple(),
            ty::Slice(..) | ty::Str => return simple(),
            _ if offset.bytes() == 0 => return simple(),
            _ => {}
        }

        // The remaining case is a field whose offset depends on the run-time
        // alignment of an unsized tail: the statically known offset must be
        // rounded up to that alignment.
        let meta = self.val.llextra;

        let unaligned_offset = bx.cx().const_usize(offset.bytes());

        let (_, mut unsized_align) = size_of_val::size_and_align_of_dst(bx, field.ty, meta);

        // For packed types, the dynamic alignment is capped by the
        // `repr(packed(N))` value.
        if let ty::Adt(def, _) = self.layout.ty.kind()
            && let Some(packed) = def.repr().pack
        {
            let packed = bx.const_usize(packed.bytes());
            let cmp = bx.icmp(IntPredicate::IntULT, unsized_align, packed);
            unsized_align = bx.select(cmp, unsized_align, packed)
        }

        let offset = round_up_const_value_to_alignment(bx, unaligned_offset, unsized_align);

        debug!("struct_field_ptr: DST field offset: {:?}", offset);

        let ptr = bx.inbounds_ptradd(self.val.llval, offset);
        let val =
            PlaceValue { llval: ptr, llextra: self.val.llextra, align: effective_field_align };
        val.with_type(field)
    }

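    /// Writes the discriminant of variant `variant_index` into this place, if
    /// the enum's layout requires a tag to be stored at all.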
    pub fn codegen_set_discr<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        &self,
        bx: &mut Bx,
        variant_index: VariantIdx,
    ) {
        match codegen_tag_value(bx.cx(), variant_index, self.layout) {
            Err(UninhabitedVariantError) => {
                // The variant is uninhabited, so setting its discriminant can
                // never actually be reached at run time; codegen an abort.
                bx.abort();
            }
            Ok(Some((tag_field, imm))) => {
                let tag_place = self.project_field(bx, tag_field.as_usize());
                OperandValue::Immediate(imm).store(bx, tag_place);
            }
            Ok(None) => {}
        }
    }

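    /// Projects to element `llindex` of this array or slice place.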
    pub fn project_index<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        &self,
        bx: &mut Bx,
        llindex: V,
    ) -> Self {
        let layout = self.layout.field(bx, 0);

        // Statically compute the byte offset if we can, otherwise just use the
        // element size, as this will yield the lowest alignment.
        let offset = if let Some(llindex) = bx.const_to_opt_uint(llindex) {
            layout.size.checked_mul(llindex, bx).unwrap_or(layout.size)
        } else {
            layout.size
        };

        let llval = bx.inbounds_nuw_gep(bx.cx().backend_type(layout), self.val.llval, &[llindex]);
        let align = self.val.align.restrict_for_offset(offset);
        PlaceValue::new_sized(llval, align).with_type(layout)
    }

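    /// Restricts this place to the given enum variant, adjusting only the
    /// layout; the pointer itself is unchanged.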
    pub fn project_downcast<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        &self,
        bx: &mut Bx,
        variant_index: VariantIdx,
    ) -> Self {
        let mut downcast = *self;
        downcast.layout = self.layout.for_variant(bx.cx(), variant_index);
        downcast
    }

    pub fn project_type<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        &self,
        bx: &mut Bx,
        ty: Ty<'tcx>,
    ) -> Self {
        let mut downcast = *self;
        downcast.layout = bx.cx().layout_of(ty);
        downcast
    }

    pub fn storage_live<Bx: BuilderMethods<'a, 'tcx, Value = V>>(&self, bx: &mut Bx) {
        bx.lifetime_start(self.val.llval, self.layout.size);
    }

    pub fn storage_dead<Bx: BuilderMethods<'a, 'tcx, Value = V>>(&self, bx: &mut Bx) {
        bx.lifetime_end(self.val.llval, self.layout.size);
    }
}

impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
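    /// Translates a MIR place (a local plus a chain of projections) into a
    /// backend [`PlaceRef`], applying each projection in turn.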
    #[instrument(level = "trace", skip(self, bx))]
    pub fn codegen_place(
        &mut self,
        bx: &mut Bx,
        place_ref: mir::PlaceRef<'tcx>,
    ) -> PlaceRef<'tcx, Bx::Value> {
        let cx = self.cx;
        let tcx = self.cx.tcx();

        let mut base = 0;
        let mut cg_base = match self.locals[place_ref.local] {
            LocalRef::Place(place) => place,
            LocalRef::UnsizedPlace(place) => bx.load_operand(place).deref(cx),
            LocalRef::Operand(..) => {
                if place_ref.is_indirect_first_projection() {
                    // The local is an SSA operand holding a pointer; consume it
                    // and use the pointee as the base place, skipping the
                    // leading `Deref` projection below.
                    base = 1;
                    let cg_base = self.codegen_consume(
                        bx,
                        mir::PlaceRef { projection: &place_ref.projection[..0], ..place_ref },
                    );
                    cg_base.deref(bx.cx())
                } else {
                    bug!("using operand local {:?} as place", place_ref);
                }
            }
            LocalRef::PendingOperand => {
                bug!("using still-pending operand local {:?} as place", place_ref);
            }
        };
        for elem in place_ref.projection[base..].iter() {
            cg_base = match *elem {
                mir::ProjectionElem::Deref => bx.load_operand(cg_base).deref(bx.cx()),
                mir::ProjectionElem::Field(ref field, _) => {
                    assert!(
                        !cg_base.layout.ty.is_any_ptr(),
                        "Bad PlaceRef: destructing pointers should use cast/PtrMetadata, \
                         but tried to access field {field:?} of pointer {cg_base:?}",
                    );
                    cg_base.project_field(bx, field.index())
                }
                mir::ProjectionElem::OpaqueCast(ty) => {
                    bug!("encountered OpaqueCast({ty}) in codegen")
                }
                mir::ProjectionElem::Subtype(ty) => cg_base.project_type(bx, self.monomorphize(ty)),
                mir::ProjectionElem::UnwrapUnsafeBinder(ty) => {
                    cg_base.project_type(bx, self.monomorphize(ty))
                }
                mir::ProjectionElem::Index(index) => {
                    let index = &mir::Operand::Copy(mir::Place::from(index));
                    let index = self.codegen_operand(bx, index);
                    let llindex = index.immediate();
                    cg_base.project_index(bx, llindex)
                }
                mir::ProjectionElem::ConstantIndex { offset, from_end: false, min_length: _ } => {
                    let lloffset = bx.cx().const_usize(offset);
                    cg_base.project_index(bx, lloffset)
                }
                mir::ProjectionElem::ConstantIndex { offset, from_end: true, min_length: _ } => {
                    let lloffset = bx.cx().const_usize(offset);
                    let lllen = cg_base.len(bx.cx());
                    let llindex = bx.sub(lllen, lloffset);
                    cg_base.project_index(bx, llindex)
                }
                mir::ProjectionElem::Subslice { from, to, from_end } => {
                    let mut subslice = cg_base.project_index(bx, bx.cx().const_usize(from));
                    let projected_ty =
                        PlaceTy::from_ty(cg_base.layout.ty).projection_ty(tcx, *elem).ty;
                    subslice.layout = bx.cx().layout_of(self.monomorphize(projected_ty));

                    if subslice.layout.is_unsized() {
                        assert!(from_end, "slice subslices should be `from_end`");
                        // The new slice length is `len - from - to`.
                        subslice.val.llextra = Some(
                            bx.sub(cg_base.val.llextra.unwrap(), bx.cx().const_usize(from + to)),
                        );
                    }

                    subslice
                }
                mir::ProjectionElem::Downcast(_, v) => cg_base.project_downcast(bx, v),
            };
        }
        debug!("codegen_place(place={:?}) => {:?}", place_ref, cg_base);
        cg_base
    }

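    /// Returns the monomorphized type of the given MIR place.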
    pub fn monomorphized_place_ty(&self, place_ref: mir::PlaceRef<'tcx>) -> Ty<'tcx> {
        let tcx = self.cx.tcx();
        let place_ty = place_ref.ty(self.mir, tcx);
        self.monomorphize(place_ty.ty)
    }
}

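/// Rounds `value` up to a multiple of `align` (which must be a power of two),
/// using only branch-free integer operations.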
fn round_up_const_value_to_alignment<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
    bx: &mut Bx,
    value: Bx::Value,
    align: Bx::Value,
) -> Bx::Value {
    // Since `align` is a power of two, `-value & (align - 1)` is the distance
    // from `value` up to the next multiple of `align` (zero if `value` is
    // already aligned). Adding that distance rounds `value` up without the
    // potential overflow of the more familiar `value + align - 1` form.
    let one = bx.const_usize(1);
    let align_minus_1 = bx.sub(align, one);
    let neg_value = bx.neg(value);
    let offset = bx.and(neg_value, align_minus_1);
    bx.add(value, offset)
}

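/// Computes the value to store into an enum's tag field (if any) in order to
/// set its discriminant to `variant_index`.
///
/// Returns `Ok(None)` if no tag needs to be written, and an error if the
/// requested variant is uninhabited.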
pub(super) fn codegen_tag_value<'tcx, V>(
    cx: &impl CodegenMethods<'tcx, Value = V>,
    variant_index: VariantIdx,
    layout: TyAndLayout<'tcx>,
) -> Result<Option<(FieldIdx, V)>, UninhabitedVariantError> {
    // An uninhabited variant can never be constructed, so there is no
    // meaningful tag value for it.
    if layout.for_variant(cx, variant_index).is_uninhabited() {
        return Err(UninhabitedVariantError);
    }

    Ok(match layout.variants {
        Variants::Empty => unreachable!("we already handled uninhabited types"),
        Variants::Single { index } => {
            assert_eq!(index, variant_index);
            None
        }

        Variants::Multiple { tag_encoding: TagEncoding::Direct, tag_field, .. } => {
            // The tag directly stores the discriminant value.
            let discr = layout.ty.discriminant_for_variant(cx.tcx(), variant_index);
            let to = discr.unwrap().val;
            let tag_layout = layout.field(cx, tag_field.as_usize());
            let tag_llty = cx.immediate_backend_type(tag_layout);
            let imm = cx.const_uint_big(tag_llty, to);
            Some((tag_field, imm))
        }
        Variants::Multiple {
            tag_encoding: TagEncoding::Niche { untagged_variant, ref niche_variants, niche_start },
            tag_field,
            ..
        } => {
            if variant_index != untagged_variant {
                let niche_layout = layout.field(cx, tag_field.as_usize());
                let niche_llty = cx.immediate_backend_type(niche_layout);
                let BackendRepr::Scalar(scalar) = niche_layout.backend_repr else {
                    bug!("expected a scalar placeref for the niche");
                };
                // The tag holds `niche_start + (variant_index - niche_variants.start())`,
                // truncated to the width of the niche field.
                let niche_value = variant_index.as_u32() - niche_variants.start().as_u32();
                let niche_value = (niche_value as u128).wrapping_add(niche_start);
                let niche_value = niche_value & niche_layout.size.unsigned_int_max();

                let niche_llval = cx.scalar_to_backend(
                    Scalar::from_uint(niche_value, niche_layout.size),
                    scalar,
                    niche_llty,
                );
                Some((tag_field, niche_llval))
            } else {
                // The untagged variant requires no tag to be written.
                None
            }
        }
    })
}

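/// Error returned by [`codegen_tag_value`] when asked to set the discriminant
/// to a variant that is uninhabited.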
#[derive(Debug)]
pub(super) struct UninhabitedVariantError;