1#![cfg_attr(feature = "nightly", allow(internal_features))]
3#![cfg_attr(feature = "nightly", doc(rust_logo))]
4#![cfg_attr(feature = "nightly", feature(assert_matches))]
5#![cfg_attr(feature = "nightly", feature(rustc_attrs))]
6#![cfg_attr(feature = "nightly", feature(rustdoc_internals))]
7#![cfg_attr(feature = "nightly", feature(step_trait))]
8use std::fmt;
43#[cfg(feature = "nightly")]
44use std::iter::Step;
45use std::num::{NonZeroUsize, ParseIntError};
46use std::ops::{Add, AddAssign, Deref, Mul, RangeInclusive, Sub};
47use std::str::FromStr;
48
49use bitflags::bitflags;
50#[cfg(feature = "nightly")]
51use rustc_data_structures::stable_hasher::StableOrd;
52use rustc_hashes::Hash64;
53use rustc_index::{Idx, IndexSlice, IndexVec};
54#[cfg(feature = "nightly")]
55use rustc_macros::{Decodable_NoContext, Encodable_NoContext, HashStable_Generic};
56
57mod callconv;
58mod canon_abi;
59mod extern_abi;
60mod layout;
61#[cfg(test)]
62mod tests;
63
64pub use callconv::{Heterogeneous, HomogeneousAggregate, Reg, RegKind};
65pub use canon_abi::{ArmCall, CanonAbi, InterruptKind, X86Call};
66pub use extern_abi::{ExternAbi, all_names};
67#[cfg(feature = "nightly")]
68pub use layout::{FIRST_VARIANT, FieldIdx, Layout, TyAbiInterface, TyAndLayout, VariantIdx};
69pub use layout::{LayoutCalculator, LayoutCalculatorError};
70
/// Marker trait for a hashing context; presumably required by the
/// `HashStable_Generic` derives used throughout this file — TODO confirm
/// against `rustc_macros`.
#[cfg(feature = "nightly")]
pub trait HashStableContext {}

/// Bit set recording `repr`-related properties of a type; the individual
/// bits (`IS_C`, `IS_SIMD`, …) are declared in the `bitflags!` block below
/// and queried through the accessors on [`ReprOptions`].
#[derive(Clone, Copy, PartialEq, Eq, Default)]
#[cfg_attr(
    feature = "nightly",
    derive(Encodable_NoContext, Decodable_NoContext, HashStable_Generic)
)]
pub struct ReprFlags(u8);
83
bitflags! {
    impl ReprFlags: u8 {
        // Queried via `ReprOptions::c()`.
        const IS_C = 1 << 0;
        // Queried via `ReprOptions::simd()`.
        const IS_SIMD = 1 << 1;
        // Queried via `ReprOptions::transparent()`.
        const IS_TRANSPARENT = 1 << 2;
        // Queried via `ReprOptions::linear()`.
        const IS_LINEAR = 1 << 3;
        // Opt-in to layout randomization; see `ReprOptions::can_randomize_type_layout()`.
        const RANDOMIZE_LAYOUT = 1 << 4;
        // Any of these flags pins fields to their source order
        // (see `ReprOptions::inhibit_struct_field_reordering()`).
        const FIELD_ORDER_UNOPTIMIZABLE = ReprFlags::IS_C.bits()
            | ReprFlags::IS_SIMD.bits()
            | ReprFlags::IS_LINEAR.bits();
        // Any of these flags rules out ABI optimizations
        // (see `ReprOptions::inhibit_newtype_abi_optimization()`).
        const ABI_UNOPTIMIZABLE = ReprFlags::IS_C.bits() | ReprFlags::IS_SIMD.bits();
    }
}
103
104impl std::fmt::Debug for ReprFlags {
107 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
108 bitflags::parser::to_writer(self, f)
109 }
110}
111
/// An explicitly requested integer type, e.g. for an enum discriminant.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
#[cfg_attr(
    feature = "nightly",
    derive(Encodable_NoContext, Decodable_NoContext, HashStable_Generic)
)]
pub enum IntegerType {
    /// A pointer-sized integer; the `bool` is the signedness (see `is_signed`).
    Pointer(bool),
    /// A fixed-width integer; the `bool` is the signedness.
    Fixed(Integer, bool),
}
125
126impl IntegerType {
127 pub fn is_signed(&self) -> bool {
128 match self {
129 IntegerType::Pointer(b) => *b,
130 IntegerType::Fixed(_, b) => *b,
131 }
132 }
133}
134
/// Parsed `repr`-style options for a type.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Default)]
#[cfg_attr(
    feature = "nightly",
    derive(Encodable_NoContext, Decodable_NoContext, HashStable_Generic)
)]
pub struct ReprOptions {
    /// Explicitly requested discriminant/integer type, if any; `discr_type()`
    /// falls back to a signed pointer-sized integer when this is `None`.
    pub int: Option<IntegerType>,
    /// Requested minimum alignment (presumably `repr(align(..))` — TODO confirm).
    pub align: Option<Align>,
    /// Requested packing (presumably `repr(packed(..))` — TODO confirm);
    /// `packed()` reports whether this is set.
    pub pack: Option<Align>,
    /// Boolean repr properties; see [`ReprFlags`].
    pub flags: ReprFlags,
    /// Seed used when field order may be shuffled — NOTE(review): presumably
    /// only meaningful when `RANDOMIZE_LAYOUT` is set; confirm in layout code.
    pub field_shuffle_seed: Hash64,
}
155
156impl ReprOptions {
157 #[inline]
158 pub fn simd(&self) -> bool {
159 self.flags.contains(ReprFlags::IS_SIMD)
160 }
161
162 #[inline]
163 pub fn c(&self) -> bool {
164 self.flags.contains(ReprFlags::IS_C)
165 }
166
167 #[inline]
168 pub fn packed(&self) -> bool {
169 self.pack.is_some()
170 }
171
172 #[inline]
173 pub fn transparent(&self) -> bool {
174 self.flags.contains(ReprFlags::IS_TRANSPARENT)
175 }
176
177 #[inline]
178 pub fn linear(&self) -> bool {
179 self.flags.contains(ReprFlags::IS_LINEAR)
180 }
181
182 pub fn discr_type(&self) -> IntegerType {
185 self.int.unwrap_or(IntegerType::Pointer(true))
186 }
187
188 pub fn inhibit_enum_layout_opt(&self) -> bool {
192 self.c() || self.int.is_some()
193 }
194
195 pub fn inhibit_newtype_abi_optimization(&self) -> bool {
196 self.flags.intersects(ReprFlags::ABI_UNOPTIMIZABLE)
197 }
198
199 pub fn inhibit_struct_field_reordering(&self) -> bool {
202 self.flags.intersects(ReprFlags::FIELD_ORDER_UNOPTIMIZABLE) || self.int.is_some()
203 }
204
205 pub fn can_randomize_type_layout(&self) -> bool {
208 !self.inhibit_struct_field_reordering() && self.flags.contains(ReprFlags::RANDOMIZE_LAYOUT)
209 }
210
211 pub fn inhibits_union_abi_opt(&self) -> bool {
213 self.c()
214 }
215}
216
/// Maximum number of SIMD lanes (2^15 = 32768) — NOTE(review): presumably
/// enforced by layout computation; confirm at the use sites.
pub const MAX_SIMD_LANES: u64 = 1 << 0xF;

/// Layout properties of pointers within one address space.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub struct PointerSpec {
    /// Total size of a pointer.
    pointer_size: Size,
    /// ABI alignment of a pointer.
    pointer_align: AbiAlign,
    /// Size of the portion used for address arithmetic — appears to correspond
    /// to LLVM's "index size" parsed from `p` specs; TODO confirm.
    pointer_offset: Size,
    /// Set when the spec was written as `pf...` in the layout string; currently
    /// unused beyond parsing (hence the leading underscore).
    _is_fat: bool,
}
237
/// Describes a target's data layout, in the spirit of LLVM's
/// [data layout string](https://llvm.org/docs/LangRef.html#data-layout).
#[derive(Debug, PartialEq, Eq)]
pub struct TargetDataLayout {
    pub endian: Endian,
    // ABI alignments for the integer widths.
    pub i1_align: AbiAlign,
    pub i8_align: AbiAlign,
    pub i16_align: AbiAlign,
    pub i32_align: AbiAlign,
    pub i64_align: AbiAlign,
    pub i128_align: AbiAlign,
    // ABI alignments for the float widths.
    pub f16_align: AbiAlign,
    pub f32_align: AbiAlign,
    pub f64_align: AbiAlign,
    pub f128_align: AbiAlign,
    pub aggregate_align: AbiAlign,

    /// Alignments for vector types, as `(size, alignment)` pairs.
    pub vector_align: Vec<(Size, AbiAlign)>,

    /// The address space that ordinary data pointers live in.
    pub default_address_space: AddressSpace,
    /// Pointer spec for `default_address_space`; kept out of
    /// `address_space_info` so the common lookup is a field access.
    pub default_address_space_pointer_spec: PointerSpec,

    /// Pointer specs for all *non-default* address spaces; consulted by the
    /// `*_in(AddressSpace)` accessors, which panic on unknown spaces.
    address_space_info: Vec<(AddressSpace, PointerSpec)>,

    pub instruction_address_space: AddressSpace,

    /// Minimum size of C-style enums; not encoded in the layout string
    /// (parsing leaves it at the default `I32`).
    pub c_enum_min_size: Integer,
}
276
impl Default for TargetDataLayout {
    /// Baseline layout used as the starting point by
    /// `parse_from_llvm_datalayout_string`, which overwrites the fields a
    /// layout string specifies — NOTE(review): the values appear to mirror
    /// LLVM's own defaults; confirm against the LLVM LangRef.
    fn default() -> TargetDataLayout {
        let align = |bits| Align::from_bits(bits).unwrap();
        TargetDataLayout {
            endian: Endian::Big,
            i1_align: AbiAlign::new(align(8)),
            i8_align: AbiAlign::new(align(8)),
            i16_align: AbiAlign::new(align(16)),
            i32_align: AbiAlign::new(align(32)),
            // Note: i64/i128 default to 32-bit alignment here and are raised
            // only when the layout string says so.
            i64_align: AbiAlign::new(align(32)),
            i128_align: AbiAlign::new(align(32)),
            f16_align: AbiAlign::new(align(16)),
            f32_align: AbiAlign::new(align(32)),
            f64_align: AbiAlign::new(align(64)),
            f128_align: AbiAlign::new(align(128)),
            aggregate_align: AbiAlign { abi: align(8) },
            vector_align: vec![
                (Size::from_bits(64), AbiAlign::new(align(64))),
                (Size::from_bits(128), AbiAlign::new(align(128))),
            ],
            default_address_space: AddressSpace::ZERO,
            // 64-bit, 64-bit-aligned thin pointers in the default address space.
            default_address_space_pointer_spec: PointerSpec {
                pointer_size: Size::from_bits(64),
                pointer_align: AbiAlign::new(align(64)),
                pointer_offset: Size::from_bits(64),
                _is_fat: false,
            },
            // No extra address spaces until the layout string declares them.
            address_space_info: vec![],
            instruction_address_space: AddressSpace::ZERO,
            // Not derivable from the layout string; see the field docs.
            c_enum_min_size: Integer::I32,
        }
    }
}
311
/// Errors from validating an LLVM data layout string
/// (see `TargetDataLayout::parse_from_llvm_datalayout_string`).
pub enum TargetDataLayoutErrors<'a> {
    /// An address-space number failed to parse as `u32`.
    InvalidAddressSpace { addr_space: &'a str, cause: &'a str, err: ParseIntError },
    /// A size/alignment bit count failed to parse as `u64`.
    InvalidBits { kind: &'a str, bit: &'a str, cause: &'a str, err: ParseIntError },
    /// An alignment sequence in the layout string was empty.
    MissingAlignment { cause: &'a str },
    /// A bit count did not form a valid alignment.
    InvalidAlignment { cause: &'a str, err: AlignFromBytesError },
    /// NOTE(review): not constructed in this file; presumably raised by callers
    /// comparing the layout against the target definition — confirm at call sites.
    InconsistentTargetArchitecture { dl: &'a str, target: &'a str },
    /// NOTE(review): not constructed in this file; see the previous variant.
    InconsistentTargetPointerWidth { pointer_size: u64, target: u32 },
    InvalidBitsSize { err: String },
    /// A `p...` spec had an unrecognized alphabetic prefix.
    UnknownPointerSpecification { err: String },
}
322
323impl TargetDataLayout {
324 pub fn parse_from_llvm_datalayout_string<'a>(
330 input: &'a str,
331 default_address_space: AddressSpace,
332 ) -> Result<TargetDataLayout, TargetDataLayoutErrors<'a>> {
333 let parse_address_space = |s: &'a str, cause: &'a str| {
335 s.parse::<u32>().map(AddressSpace).map_err(|err| {
336 TargetDataLayoutErrors::InvalidAddressSpace { addr_space: s, cause, err }
337 })
338 };
339
340 let parse_bits = |s: &'a str, kind: &'a str, cause: &'a str| {
342 s.parse::<u64>().map_err(|err| TargetDataLayoutErrors::InvalidBits {
343 kind,
344 bit: s,
345 cause,
346 err,
347 })
348 };
349
350 let parse_size =
352 |s: &'a str, cause: &'a str| parse_bits(s, "size", cause).map(Size::from_bits);
353
354 let parse_align_str = |s: &'a str, cause: &'a str| {
356 let align_from_bits = |bits| {
357 Align::from_bits(bits)
358 .map_err(|err| TargetDataLayoutErrors::InvalidAlignment { cause, err })
359 };
360 let abi = parse_bits(s, "alignment", cause)?;
361 Ok(AbiAlign::new(align_from_bits(abi)?))
362 };
363
364 let parse_align_seq = |s: &[&'a str], cause: &'a str| {
367 if s.is_empty() {
368 return Err(TargetDataLayoutErrors::MissingAlignment { cause });
369 }
370 parse_align_str(s[0], cause)
371 };
372
373 let mut dl = TargetDataLayout::default();
374 dl.default_address_space = default_address_space;
375
376 let mut i128_align_src = 64;
377 for spec in input.split('-') {
378 let spec_parts = spec.split(':').collect::<Vec<_>>();
379
380 match &*spec_parts {
381 ["e"] => dl.endian = Endian::Little,
382 ["E"] => dl.endian = Endian::Big,
383 [p] if p.starts_with('P') => {
384 dl.instruction_address_space = parse_address_space(&p[1..], "P")?
385 }
386 ["a", a @ ..] => dl.aggregate_align = parse_align_seq(a, "a")?,
387 ["f16", a @ ..] => dl.f16_align = parse_align_seq(a, "f16")?,
388 ["f32", a @ ..] => dl.f32_align = parse_align_seq(a, "f32")?,
389 ["f64", a @ ..] => dl.f64_align = parse_align_seq(a, "f64")?,
390 ["f128", a @ ..] => dl.f128_align = parse_align_seq(a, "f128")?,
391 [p, s, a @ ..] if p.starts_with("p") => {
392 let mut p = p.strip_prefix('p').unwrap();
393 let mut _is_fat = false;
394
395 if p.starts_with('f') {
399 p = p.strip_prefix('f').unwrap();
400 _is_fat = true;
401 }
402
403 if p.starts_with(char::is_alphabetic) {
406 return Err(TargetDataLayoutErrors::UnknownPointerSpecification {
407 err: p.to_string(),
408 });
409 }
410
411 let addr_space = if !p.is_empty() {
412 parse_address_space(p, "p-")?
413 } else {
414 AddressSpace::ZERO
415 };
416
417 let pointer_size = parse_size(s, "p-")?;
418 let pointer_align = parse_align_seq(a, "p-")?;
419 let info = PointerSpec {
420 pointer_offset: pointer_size,
421 pointer_size,
422 pointer_align,
423 _is_fat,
424 };
425 if addr_space == default_address_space {
426 dl.default_address_space_pointer_spec = info;
427 } else {
428 match dl.address_space_info.iter_mut().find(|(a, _)| *a == addr_space) {
429 Some(e) => e.1 = info,
430 None => {
431 dl.address_space_info.push((addr_space, info));
432 }
433 }
434 }
435 }
436 [p, s, a, _pr, i] if p.starts_with("p") => {
437 let mut p = p.strip_prefix('p').unwrap();
438 let mut _is_fat = false;
439
440 if p.starts_with('f') {
444 p = p.strip_prefix('f').unwrap();
445 _is_fat = true;
446 }
447
448 if p.starts_with(char::is_alphabetic) {
451 return Err(TargetDataLayoutErrors::UnknownPointerSpecification {
452 err: p.to_string(),
453 });
454 }
455
456 let addr_space = if !p.is_empty() {
457 parse_address_space(p, "p")?
458 } else {
459 AddressSpace::ZERO
460 };
461
462 let info = PointerSpec {
463 pointer_size: parse_size(s, "p-")?,
464 pointer_align: parse_align_str(a, "p-")?,
465 pointer_offset: parse_size(i, "p-")?,
466 _is_fat,
467 };
468
469 if addr_space == default_address_space {
470 dl.default_address_space_pointer_spec = info;
471 } else {
472 match dl.address_space_info.iter_mut().find(|(a, _)| *a == addr_space) {
473 Some(e) => e.1 = info,
474 None => {
475 dl.address_space_info.push((addr_space, info));
476 }
477 }
478 }
479 }
480
481 [s, a @ ..] if s.starts_with('i') => {
482 let Ok(bits) = s[1..].parse::<u64>() else {
483 parse_size(&s[1..], "i")?; continue;
485 };
486 let a = parse_align_seq(a, s)?;
487 match bits {
488 1 => dl.i1_align = a,
489 8 => dl.i8_align = a,
490 16 => dl.i16_align = a,
491 32 => dl.i32_align = a,
492 64 => dl.i64_align = a,
493 _ => {}
494 }
495 if bits >= i128_align_src && bits <= 128 {
496 i128_align_src = bits;
499 dl.i128_align = a;
500 }
501 }
502 [s, a @ ..] if s.starts_with('v') => {
503 let v_size = parse_size(&s[1..], "v")?;
504 let a = parse_align_seq(a, s)?;
505 if let Some(v) = dl.vector_align.iter_mut().find(|v| v.0 == v_size) {
506 v.1 = a;
507 continue;
508 }
509 dl.vector_align.push((v_size, a));
511 }
512 _ => {} }
514 }
515
516 if (dl.instruction_address_space != dl.default_address_space)
519 && dl
520 .address_space_info
521 .iter()
522 .find(|(a, _)| *a == dl.instruction_address_space)
523 .is_none()
524 {
525 dl.address_space_info.push((
526 dl.instruction_address_space,
527 dl.default_address_space_pointer_spec.clone(),
528 ));
529 }
530
531 Ok(dl)
532 }
533
534 #[inline]
545 pub fn obj_size_bound(&self) -> u64 {
546 match self.pointer_size().bits() {
547 16 => 1 << 15,
548 32 => 1 << 31,
549 64 => 1 << 61,
550 bits => panic!("obj_size_bound: unknown pointer bit size {bits}"),
551 }
552 }
553
554 #[inline]
564 pub fn obj_size_bound_in(&self, address_space: AddressSpace) -> u64 {
565 match self.pointer_size_in(address_space).bits() {
566 16 => 1 << 15,
567 32 => 1 << 31,
568 64 => 1 << 61,
569 bits => panic!("obj_size_bound: unknown pointer bit size {bits}"),
570 }
571 }
572
573 #[inline]
574 pub fn ptr_sized_integer(&self) -> Integer {
575 use Integer::*;
576 match self.pointer_offset().bits() {
577 16 => I16,
578 32 => I32,
579 64 => I64,
580 bits => panic!("ptr_sized_integer: unknown pointer bit size {bits}"),
581 }
582 }
583
584 #[inline]
585 pub fn ptr_sized_integer_in(&self, address_space: AddressSpace) -> Integer {
586 use Integer::*;
587 match self.pointer_offset_in(address_space).bits() {
588 16 => I16,
589 32 => I32,
590 64 => I64,
591 bits => panic!("ptr_sized_integer: unknown pointer bit size {bits}"),
592 }
593 }
594
595 #[inline]
597 fn cabi_vector_align(&self, vec_size: Size) -> Option<AbiAlign> {
598 self.vector_align
599 .iter()
600 .find(|(size, _align)| *size == vec_size)
601 .map(|(_size, align)| *align)
602 }
603
604 #[inline]
606 pub fn llvmlike_vector_align(&self, vec_size: Size) -> AbiAlign {
607 self.cabi_vector_align(vec_size).unwrap_or(AbiAlign::new(
608 Align::from_bytes(vec_size.bytes().next_power_of_two()).unwrap(),
609 ))
610 }
611
612 #[inline]
614 pub fn pointer_size(&self) -> Size {
615 self.default_address_space_pointer_spec.pointer_size
616 }
617
618 #[inline]
620 pub fn pointer_size_in(&self, c: AddressSpace) -> Size {
621 if c == self.default_address_space {
622 return self.default_address_space_pointer_spec.pointer_size;
623 }
624
625 if let Some(e) = self.address_space_info.iter().find(|(a, _)| a == &c) {
626 e.1.pointer_size
627 } else {
628 panic!("Use of unknown address space {c:?}");
629 }
630 }
631
632 #[inline]
634 pub fn pointer_offset(&self) -> Size {
635 self.default_address_space_pointer_spec.pointer_offset
636 }
637
638 #[inline]
640 pub fn pointer_offset_in(&self, c: AddressSpace) -> Size {
641 if c == self.default_address_space {
642 return self.default_address_space_pointer_spec.pointer_offset;
643 }
644
645 if let Some(e) = self.address_space_info.iter().find(|(a, _)| a == &c) {
646 e.1.pointer_offset
647 } else {
648 panic!("Use of unknown address space {c:?}");
649 }
650 }
651
652 #[inline]
654 pub fn pointer_align(&self) -> AbiAlign {
655 self.default_address_space_pointer_spec.pointer_align
656 }
657
658 #[inline]
660 pub fn pointer_align_in(&self, c: AddressSpace) -> AbiAlign {
661 if c == self.default_address_space {
662 return self.default_address_space_pointer_spec.pointer_align;
663 }
664
665 if let Some(e) = self.address_space_info.iter().find(|(a, _)| a == &c) {
666 e.1.pointer_align
667 } else {
668 panic!("Use of unknown address space {c:?}");
669 }
670 }
671}
672
/// Something that can provide the target's [`TargetDataLayout`].
pub trait HasDataLayout {
    fn data_layout(&self) -> &TargetDataLayout;
}

impl HasDataLayout for TargetDataLayout {
    #[inline]
    fn data_layout(&self) -> &TargetDataLayout {
        self
    }
}

// Allows a `&TargetDataLayout` to be used wherever `impl HasDataLayout`
// is expected.
impl HasDataLayout for &TargetDataLayout {
    #[inline]
    fn data_layout(&self) -> &TargetDataLayout {
        (**self).data_layout()
    }
}
691
/// Byte order of the target.
#[derive(Copy, Clone, PartialEq, Eq)]
pub enum Endian {
    Little,
    Big,
}

impl Endian {
    /// Lowercase name of this byte order.
    pub fn as_str(&self) -> &'static str {
        match self {
            Self::Big => "big",
            Self::Little => "little",
        }
    }
}

// Debug prints the same lowercase name as `as_str`.
impl fmt::Debug for Endian {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.as_str())
    }
}

// Inverse of `as_str`: accepts exactly "little" and "big".
impl FromStr for Endian {
    type Err = String;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s {
            "big" => Ok(Self::Big),
            "little" => Ok(Self::Little),
            _ => Err(format!(r#"unknown endian: "{s}""#)),
        }
    }
}
725
/// A size, in bytes, of a value or type on the target.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[cfg_attr(
    feature = "nightly",
    derive(Encodable_NoContext, Decodable_NoContext, HashStable_Generic)
)]
pub struct Size {
    // Byte count; bit-oriented constructors round up to whole bytes.
    raw: u64,
}

#[cfg(feature = "nightly")]
impl StableOrd for Size {
    const CAN_USE_UNSTABLE_SORT: bool = true;

    // The derived `Ord` compares the plain `u64` byte count, so the ordering
    // is deterministic across compilation sessions.
    const THIS_IMPLEMENTATION_HAS_BEEN_TRIPLE_CHECKED: () = ();
}

impl fmt::Debug for Size {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "Size({} bytes)", self.bytes())
    }
}
751
752impl Size {
753 pub const ZERO: Size = Size { raw: 0 };
754
755 pub fn from_bits(bits: impl TryInto<u64>) -> Size {
758 let bits = bits.try_into().ok().unwrap();
759 Size { raw: bits.div_ceil(8) }
760 }
761
762 #[inline]
763 pub fn from_bytes(bytes: impl TryInto<u64>) -> Size {
764 let bytes: u64 = bytes.try_into().ok().unwrap();
765 Size { raw: bytes }
766 }
767
768 #[inline]
769 pub fn bytes(self) -> u64 {
770 self.raw
771 }
772
773 #[inline]
774 pub fn bytes_usize(self) -> usize {
775 self.bytes().try_into().unwrap()
776 }
777
778 #[inline]
779 pub fn bits(self) -> u64 {
780 #[cold]
781 fn overflow(bytes: u64) -> ! {
782 panic!("Size::bits: {bytes} bytes in bits doesn't fit in u64")
783 }
784
785 self.bytes().checked_mul(8).unwrap_or_else(|| overflow(self.bytes()))
786 }
787
788 #[inline]
789 pub fn bits_usize(self) -> usize {
790 self.bits().try_into().unwrap()
791 }
792
793 #[inline]
794 pub fn align_to(self, align: Align) -> Size {
795 let mask = align.bytes() - 1;
796 Size::from_bytes((self.bytes() + mask) & !mask)
797 }
798
799 #[inline]
800 pub fn is_aligned(self, align: Align) -> bool {
801 let mask = align.bytes() - 1;
802 self.bytes() & mask == 0
803 }
804
805 #[inline]
806 pub fn checked_add<C: HasDataLayout>(self, offset: Size, cx: &C) -> Option<Size> {
807 let dl = cx.data_layout();
808
809 let bytes = self.bytes().checked_add(offset.bytes())?;
810
811 if bytes < dl.obj_size_bound() { Some(Size::from_bytes(bytes)) } else { None }
812 }
813
814 #[inline]
815 pub fn checked_mul<C: HasDataLayout>(self, count: u64, cx: &C) -> Option<Size> {
816 let dl = cx.data_layout();
817
818 let bytes = self.bytes().checked_mul(count)?;
819 if bytes < dl.obj_size_bound() { Some(Size::from_bytes(bytes)) } else { None }
820 }
821
822 #[inline]
825 pub fn sign_extend(self, value: u128) -> i128 {
826 let size = self.bits();
827 if size == 0 {
828 return 0;
830 }
831 let shift = 128 - size;
833 ((value << shift) as i128) >> shift
836 }
837
838 #[inline]
840 pub fn truncate(self, value: u128) -> u128 {
841 let size = self.bits();
842 if size == 0 {
843 return 0;
845 }
846 let shift = 128 - size;
847 (value << shift) >> shift
849 }
850
851 #[inline]
852 pub fn signed_int_min(&self) -> i128 {
853 self.sign_extend(1_u128 << (self.bits() - 1))
854 }
855
856 #[inline]
857 pub fn signed_int_max(&self) -> i128 {
858 i128::MAX >> (128 - self.bits())
859 }
860
861 #[inline]
862 pub fn unsigned_int_max(&self) -> u128 {
863 u128::MAX >> (128 - self.bits())
864 }
865}
866
867impl Add for Size {
871 type Output = Size;
872 #[inline]
873 fn add(self, other: Size) -> Size {
874 Size::from_bytes(self.bytes().checked_add(other.bytes()).unwrap_or_else(|| {
875 panic!("Size::add: {} + {} doesn't fit in u64", self.bytes(), other.bytes())
876 }))
877 }
878}
879
880impl Sub for Size {
881 type Output = Size;
882 #[inline]
883 fn sub(self, other: Size) -> Size {
884 Size::from_bytes(self.bytes().checked_sub(other.bytes()).unwrap_or_else(|| {
885 panic!("Size::sub: {} - {} would result in negative size", self.bytes(), other.bytes())
886 }))
887 }
888}
889
890impl Mul<Size> for u64 {
891 type Output = Size;
892 #[inline]
893 fn mul(self, size: Size) -> Size {
894 size * self
895 }
896}
897
898impl Mul<u64> for Size {
899 type Output = Size;
900 #[inline]
901 fn mul(self, count: u64) -> Size {
902 match self.bytes().checked_mul(count) {
903 Some(bytes) => Size::from_bytes(bytes),
904 None => panic!("Size::mul: {} * {} doesn't fit in u64", self.bytes(), count),
905 }
906 }
907}
908
909impl AddAssign for Size {
910 #[inline]
911 fn add_assign(&mut self, other: Size) {
912 *self = *self + other;
913 }
914}
915
// Lets `Size` be used in ranges (`a..b`) on nightly; every method delegates
// to `u64`'s `Step` impl on the byte count.
#[cfg(feature = "nightly")]
impl Step for Size {
    #[inline]
    fn steps_between(start: &Self, end: &Self) -> (usize, Option<usize>) {
        u64::steps_between(&start.bytes(), &end.bytes())
    }

    #[inline]
    fn forward_checked(start: Self, count: usize) -> Option<Self> {
        u64::forward_checked(start.bytes(), count).map(Self::from_bytes)
    }

    #[inline]
    fn forward(start: Self, count: usize) -> Self {
        Self::from_bytes(u64::forward(start.bytes(), count))
    }

    #[inline]
    unsafe fn forward_unchecked(start: Self, count: usize) -> Self {
        // SAFETY: delegates to `u64::forward_unchecked`; the caller upholds
        // `Step`'s no-overflow contract.
        Self::from_bytes(unsafe { u64::forward_unchecked(start.bytes(), count) })
    }

    #[inline]
    fn backward_checked(start: Self, count: usize) -> Option<Self> {
        u64::backward_checked(start.bytes(), count).map(Self::from_bytes)
    }

    #[inline]
    fn backward(start: Self, count: usize) -> Self {
        Self::from_bytes(u64::backward(start.bytes(), count))
    }

    #[inline]
    unsafe fn backward_unchecked(start: Self, count: usize) -> Self {
        // SAFETY: delegates to `u64::backward_unchecked`; the caller upholds
        // `Step`'s no-underflow contract.
        Self::from_bytes(unsafe { u64::backward_unchecked(start.bytes(), count) })
    }
}
953
/// An alignment, stored as the base-2 logarithm of its byte count:
/// the alignment is `2^pow2` bytes (see `Align::bytes`).
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[cfg_attr(
    feature = "nightly",
    derive(Encodable_NoContext, Decodable_NoContext, HashStable_Generic)
)]
pub struct Align {
    // Exponent; capped at 29 by `Align::MAX`.
    pow2: u8,
}

impl fmt::Debug for Align {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "Align({} bytes)", self.bytes())
    }
}
970
/// Error produced when a byte count cannot be converted into an alignment.
#[derive(Clone, Copy)]
pub enum AlignFromBytesError {
    NotPowerOfTwo(u64),
    TooLarge(u64),
}

impl AlignFromBytesError {
    /// Stable identifier naming this error kind for diagnostics.
    pub fn diag_ident(self) -> &'static str {
        if let Self::NotPowerOfTwo(_) = self { "not_power_of_two" } else { "too_large" }
    }

    /// The offending alignment value, in bytes.
    pub fn align(self) -> u64 {
        match self {
            Self::NotPowerOfTwo(align) => align,
            Self::TooLarge(align) => align,
        }
    }
}

// Debug output intentionally mirrors the user-facing Display text.
impl fmt::Debug for AlignFromBytesError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Display::fmt(self, f)
    }
}

impl fmt::Display for AlignFromBytesError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match *self {
            AlignFromBytesError::NotPowerOfTwo(align) => {
                write!(f, "`{align}` is not a power of 2")
            }
            AlignFromBytesError::TooLarge(align) => write!(f, "`{align}` is too large"),
        }
    }
}
1005
1006impl Align {
1007 pub const ONE: Align = Align { pow2: 0 };
1008 pub const EIGHT: Align = Align { pow2: 3 };
1009 pub const MAX: Align = Align { pow2: 29 };
1011
1012 #[inline]
1013 pub fn from_bits(bits: u64) -> Result<Align, AlignFromBytesError> {
1014 Align::from_bytes(Size::from_bits(bits).bytes())
1015 }
1016
1017 #[inline]
1018 pub const fn from_bytes(align: u64) -> Result<Align, AlignFromBytesError> {
1019 if align == 0 {
1021 return Ok(Align::ONE);
1022 }
1023
1024 #[cold]
1025 const fn not_power_of_2(align: u64) -> AlignFromBytesError {
1026 AlignFromBytesError::NotPowerOfTwo(align)
1027 }
1028
1029 #[cold]
1030 const fn too_large(align: u64) -> AlignFromBytesError {
1031 AlignFromBytesError::TooLarge(align)
1032 }
1033
1034 let tz = align.trailing_zeros();
1035 if align != (1 << tz) {
1036 return Err(not_power_of_2(align));
1037 }
1038
1039 let pow2 = tz as u8;
1040 if pow2 > Self::MAX.pow2 {
1041 return Err(too_large(align));
1042 }
1043
1044 Ok(Align { pow2 })
1045 }
1046
1047 #[inline]
1048 pub const fn bytes(self) -> u64 {
1049 1 << self.pow2
1050 }
1051
1052 #[inline]
1053 pub fn bytes_usize(self) -> usize {
1054 self.bytes().try_into().unwrap()
1055 }
1056
1057 #[inline]
1058 pub const fn bits(self) -> u64 {
1059 self.bytes() * 8
1060 }
1061
1062 #[inline]
1063 pub fn bits_usize(self) -> usize {
1064 self.bits().try_into().unwrap()
1065 }
1066
1067 #[inline]
1072 pub fn max_aligned_factor(size: Size) -> Align {
1073 Align { pow2: size.bytes().trailing_zeros() as u8 }
1074 }
1075
1076 #[inline]
1078 pub fn restrict_for_offset(self, size: Size) -> Align {
1079 self.min(Align::max_aligned_factor(size))
1080 }
1081}
1082
/// Newtype wrapper marking an alignment as the ABI-mandated one (as opposed to
/// a preferred alignment); derefs to the underlying [`Align`].
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub struct AbiAlign {
    pub abi: Align,
}
1097
1098impl AbiAlign {
1099 #[inline]
1100 pub fn new(align: Align) -> AbiAlign {
1101 AbiAlign { abi: align }
1102 }
1103
1104 #[inline]
1105 pub fn min(self, other: AbiAlign) -> AbiAlign {
1106 AbiAlign { abi: self.abi.min(other.abi) }
1107 }
1108
1109 #[inline]
1110 pub fn max(self, other: AbiAlign) -> AbiAlign {
1111 AbiAlign { abi: self.abi.max(other.abi) }
1112 }
1113}
1114
// Exposes `Align`'s methods directly on `AbiAlign` (e.g. `abi_align.bytes()`).
impl Deref for AbiAlign {
    type Target = Align;

    fn deref(&self) -> &Self::Target {
        &self.abi
    }
}
1122
/// Integer widths supported for layout purposes, ordered smallest to largest.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
#[cfg_attr(
    feature = "nightly",
    derive(Encodable_NoContext, Decodable_NoContext, HashStable_Generic)
)]
pub enum Integer {
    I8,
    I16,
    I32,
    I64,
    I128,
}
1136
1137impl Integer {
1138 pub fn int_ty_str(self) -> &'static str {
1139 use Integer::*;
1140 match self {
1141 I8 => "i8",
1142 I16 => "i16",
1143 I32 => "i32",
1144 I64 => "i64",
1145 I128 => "i128",
1146 }
1147 }
1148
1149 pub fn uint_ty_str(self) -> &'static str {
1150 use Integer::*;
1151 match self {
1152 I8 => "u8",
1153 I16 => "u16",
1154 I32 => "u32",
1155 I64 => "u64",
1156 I128 => "u128",
1157 }
1158 }
1159
1160 #[inline]
1161 pub fn size(self) -> Size {
1162 use Integer::*;
1163 match self {
1164 I8 => Size::from_bytes(1),
1165 I16 => Size::from_bytes(2),
1166 I32 => Size::from_bytes(4),
1167 I64 => Size::from_bytes(8),
1168 I128 => Size::from_bytes(16),
1169 }
1170 }
1171
1172 pub fn from_attr<C: HasDataLayout>(cx: &C, ity: IntegerType) -> Integer {
1174 let dl = cx.data_layout();
1175
1176 match ity {
1177 IntegerType::Pointer(_) => dl.ptr_sized_integer(),
1178 IntegerType::Fixed(x, _) => x,
1179 }
1180 }
1181
1182 pub fn align<C: HasDataLayout>(self, cx: &C) -> AbiAlign {
1183 use Integer::*;
1184 let dl = cx.data_layout();
1185
1186 match self {
1187 I8 => dl.i8_align,
1188 I16 => dl.i16_align,
1189 I32 => dl.i32_align,
1190 I64 => dl.i64_align,
1191 I128 => dl.i128_align,
1192 }
1193 }
1194
1195 #[inline]
1197 pub fn signed_max(self) -> i128 {
1198 use Integer::*;
1199 match self {
1200 I8 => i8::MAX as i128,
1201 I16 => i16::MAX as i128,
1202 I32 => i32::MAX as i128,
1203 I64 => i64::MAX as i128,
1204 I128 => i128::MAX,
1205 }
1206 }
1207
1208 #[inline]
1210 pub fn fit_signed(x: i128) -> Integer {
1211 use Integer::*;
1212 match x {
1213 -0x0000_0000_0000_0080..=0x0000_0000_0000_007f => I8,
1214 -0x0000_0000_0000_8000..=0x0000_0000_0000_7fff => I16,
1215 -0x0000_0000_8000_0000..=0x0000_0000_7fff_ffff => I32,
1216 -0x8000_0000_0000_0000..=0x7fff_ffff_ffff_ffff => I64,
1217 _ => I128,
1218 }
1219 }
1220
1221 #[inline]
1223 pub fn fit_unsigned(x: u128) -> Integer {
1224 use Integer::*;
1225 match x {
1226 0..=0x0000_0000_0000_00ff => I8,
1227 0..=0x0000_0000_0000_ffff => I16,
1228 0..=0x0000_0000_ffff_ffff => I32,
1229 0..=0xffff_ffff_ffff_ffff => I64,
1230 _ => I128,
1231 }
1232 }
1233
1234 pub fn for_align<C: HasDataLayout>(cx: &C, wanted: Align) -> Option<Integer> {
1236 use Integer::*;
1237 let dl = cx.data_layout();
1238
1239 [I8, I16, I32, I64, I128].into_iter().find(|&candidate| {
1240 wanted == candidate.align(dl).abi && wanted.bytes() == candidate.size().bytes()
1241 })
1242 }
1243
1244 pub fn approximate_align<C: HasDataLayout>(cx: &C, wanted: Align) -> Integer {
1246 use Integer::*;
1247 let dl = cx.data_layout();
1248
1249 for candidate in [I64, I32, I16] {
1251 if wanted >= candidate.align(dl).abi && wanted.bytes() >= candidate.size().bytes() {
1252 return candidate;
1253 }
1254 }
1255 I8
1256 }
1257
1258 #[inline]
1261 pub fn from_size(size: Size) -> Result<Self, String> {
1262 match size.bits() {
1263 8 => Ok(Integer::I8),
1264 16 => Ok(Integer::I16),
1265 32 => Ok(Integer::I32),
1266 64 => Ok(Integer::I64),
1267 128 => Ok(Integer::I128),
1268 _ => Err(format!("rust does not support integers with {} bits", size.bits())),
1269 }
1270 }
1271}
1272
/// Floating-point widths supported for layout purposes.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub enum Float {
    F16,
    F32,
    F64,
    F128,
}
1282
1283impl Float {
1284 pub fn size(self) -> Size {
1285 use Float::*;
1286
1287 match self {
1288 F16 => Size::from_bits(16),
1289 F32 => Size::from_bits(32),
1290 F64 => Size::from_bits(64),
1291 F128 => Size::from_bits(128),
1292 }
1293 }
1294
1295 pub fn align<C: HasDataLayout>(self, cx: &C) -> AbiAlign {
1296 use Float::*;
1297 let dl = cx.data_layout();
1298
1299 match self {
1300 F16 => dl.f16_align,
1301 F32 => dl.f32_align,
1302 F64 => dl.f64_align,
1303 F128 => dl.f128_align,
1304 }
1305 }
1306}
1307
/// A primitive value type as far as layout is concerned.
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub enum Primitive {
    /// An integer; the `bool` is the signedness (`true` = signed, see
    /// `Scalar::is_signed`).
    Int(Integer, bool),
    Float(Float),
    /// A pointer into the given address space.
    Pointer(AddressSpace),
}
1323
1324impl Primitive {
1325 pub fn size<C: HasDataLayout>(self, cx: &C) -> Size {
1326 use Primitive::*;
1327 let dl = cx.data_layout();
1328
1329 match self {
1330 Int(i, _) => i.size(),
1331 Float(f) => f.size(),
1332 Pointer(a) => dl.pointer_size_in(a),
1333 }
1334 }
1335
1336 pub fn align<C: HasDataLayout>(self, cx: &C) -> AbiAlign {
1337 use Primitive::*;
1338 let dl = cx.data_layout();
1339
1340 match self {
1341 Int(i, _) => i.align(dl),
1342 Float(f) => f.align(dl),
1343 Pointer(a) => dl.pointer_align_in(a),
1344 }
1345 }
1346}
1347
/// An inclusive range `start..=end` of valid values that may wrap around its
/// type's maximum: when `start > end`, the valid values are `..=end` together
/// with `start..` (see `contains` and the `Debug` impl).
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub struct WrappingRange {
    pub start: u128,
    pub end: u128,
}
1363
1364impl WrappingRange {
1365 pub fn full(size: Size) -> Self {
1366 Self { start: 0, end: size.unsigned_int_max() }
1367 }
1368
1369 #[inline(always)]
1371 pub fn contains(&self, v: u128) -> bool {
1372 if self.start <= self.end {
1373 self.start <= v && v <= self.end
1374 } else {
1375 self.start <= v || v <= self.end
1376 }
1377 }
1378
1379 #[inline(always)]
1381 fn with_start(mut self, start: u128) -> Self {
1382 self.start = start;
1383 self
1384 }
1385
1386 #[inline(always)]
1388 fn with_end(mut self, end: u128) -> Self {
1389 self.end = end;
1390 self
1391 }
1392
1393 #[inline]
1395 fn is_full_for(&self, size: Size) -> bool {
1396 let max_value = size.unsigned_int_max();
1397 debug_assert!(self.start <= max_value && self.end <= max_value);
1398 self.start == (self.end.wrapping_add(1) & max_value)
1399 }
1400}
1401
1402impl fmt::Debug for WrappingRange {
1403 fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
1404 if self.start > self.end {
1405 write!(fmt, "(..={}) | ({}..)", self.end, self.start)?;
1406 } else {
1407 write!(fmt, "{}..={}", self.start, self.end)?;
1408 }
1409 Ok(())
1410 }
1411}
1412
/// Layout of a scalar value.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub enum Scalar {
    /// A scalar that is known to be initialized.
    Initialized {
        value: Primitive,

        /// The range the value must lie in; used e.g. by `is_always_valid`.
        valid_range: WrappingRange,
    },
    /// A scalar that may be uninitialized (see `is_uninit_valid`); it has no
    /// valid-range restriction beyond its size.
    Union {
        value: Primitive,
    },
}
1434
1435impl Scalar {
1436 #[inline]
1437 pub fn is_bool(&self) -> bool {
1438 use Integer::*;
1439 matches!(
1440 self,
1441 Scalar::Initialized {
1442 value: Primitive::Int(I8, false),
1443 valid_range: WrappingRange { start: 0, end: 1 }
1444 }
1445 )
1446 }
1447
1448 pub fn primitive(&self) -> Primitive {
1451 match *self {
1452 Scalar::Initialized { value, .. } | Scalar::Union { value } => value,
1453 }
1454 }
1455
1456 pub fn align(self, cx: &impl HasDataLayout) -> AbiAlign {
1457 self.primitive().align(cx)
1458 }
1459
1460 pub fn size(self, cx: &impl HasDataLayout) -> Size {
1461 self.primitive().size(cx)
1462 }
1463
1464 #[inline]
1465 pub fn to_union(&self) -> Self {
1466 Self::Union { value: self.primitive() }
1467 }
1468
1469 #[inline]
1470 pub fn valid_range(&self, cx: &impl HasDataLayout) -> WrappingRange {
1471 match *self {
1472 Scalar::Initialized { valid_range, .. } => valid_range,
1473 Scalar::Union { value } => WrappingRange::full(value.size(cx)),
1474 }
1475 }
1476
1477 #[inline]
1478 pub fn valid_range_mut(&mut self) -> &mut WrappingRange {
1481 match self {
1482 Scalar::Initialized { valid_range, .. } => valid_range,
1483 Scalar::Union { .. } => panic!("cannot change the valid range of a union"),
1484 }
1485 }
1486
1487 #[inline]
1490 pub fn is_always_valid<C: HasDataLayout>(&self, cx: &C) -> bool {
1491 match *self {
1492 Scalar::Initialized { valid_range, .. } => valid_range.is_full_for(self.size(cx)),
1493 Scalar::Union { .. } => true,
1494 }
1495 }
1496
1497 #[inline]
1499 pub fn is_uninit_valid(&self) -> bool {
1500 match *self {
1501 Scalar::Initialized { .. } => false,
1502 Scalar::Union { .. } => true,
1503 }
1504 }
1505
1506 #[inline]
1508 pub fn is_signed(&self) -> bool {
1509 match self.primitive() {
1510 Primitive::Int(_, signed) => signed,
1511 _ => false,
1512 }
1513 }
1514}
1515
/// Describes how the fields of a type are located in memory.
#[derive(PartialEq, Eq, Hash, Clone, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub enum FieldsShape<FieldIdx: Idx> {
    /// A shape with no fields at all (see `FieldsShape::count`).
    Primitive,

    /// All fields start at offset zero; the payload is the field count.
    Union(NonZeroUsize),

    /// Array-like placement: `count` equally-spaced fields, `stride`
    /// bytes apart.
    Array { stride: Size, count: u64 },

    /// Struct-like placement with individually precomputed offsets.
    Arbitrary {
        /// Byte offset of each field, indexed in source order.
        offsets: IndexVec<FieldIdx, Size>,

        /// Maps each source-order field index to its rank in memory
        /// (increasing-offset) order; a bijection, inverted by
        /// `index_by_increasing_offset`.
        memory_index: IndexVec<FieldIdx, u32>,
    },
}
1559
impl<FieldIdx: Idx> FieldsShape<FieldIdx> {
    /// Number of fields described by this shape (`0` for `Primitive`).
    #[inline]
    pub fn count(&self) -> usize {
        match *self {
            FieldsShape::Primitive => 0,
            FieldsShape::Union(count) => count.get(),
            FieldsShape::Array { count, .. } => count.try_into().unwrap(),
            FieldsShape::Arbitrary { ref offsets, .. } => offsets.len(),
        }
    }

    /// Byte offset of field `i` (source-order index).
    ///
    /// Panics on `Primitive` shapes and on out-of-bounds indices.
    #[inline]
    pub fn offset(&self, i: usize) -> Size {
        match *self {
            FieldsShape::Primitive => {
                unreachable!("FieldsShape::offset: `Primitive`s have no fields")
            }
            FieldsShape::Union(count) => {
                // Every union field lives at offset zero.
                assert!(i < count.get(), "tried to access field {i} of union with {count} fields");
                Size::ZERO
            }
            FieldsShape::Array { stride, count } => {
                let i = u64::try_from(i).unwrap();
                assert!(i < count, "tried to access field {i} of array with {count} fields");
                stride * i
            }
            FieldsShape::Arbitrary { ref offsets, .. } => offsets[FieldIdx::new(i)],
        }
    }

    /// Rank of source-order field `i` in memory (increasing-offset)
    /// order; the identity mapping for unions and arrays.
    #[inline]
    pub fn memory_index(&self, i: usize) -> usize {
        match *self {
            FieldsShape::Primitive => {
                unreachable!("FieldsShape::memory_index: `Primitive`s have no fields")
            }
            FieldsShape::Union(_) | FieldsShape::Array { .. } => i,
            FieldsShape::Arbitrary { ref memory_index, .. } => {
                memory_index[FieldIdx::new(i)].try_into().unwrap()
            }
        }
    }

    /// Iterates source-order field indices sorted by increasing memory
    /// offset — i.e. the inverse of the `memory_index` mapping.
    #[inline]
    pub fn index_by_increasing_offset(&self) -> impl ExactSizeIterator<Item = usize> {
        // For small field counts, invert `memory_index` into a stack
        // buffer (u8 indices suffice for <= 64 fields); otherwise fall
        // back to a heap-allocated inverse mapping.
        let mut inverse_small = [0u8; 64];
        let mut inverse_big = IndexVec::new();
        let use_small = self.count() <= inverse_small.len();

        if let FieldsShape::Arbitrary { ref memory_index, .. } = *self {
            if use_small {
                for (field_idx, &mem_idx) in memory_index.iter_enumerated() {
                    inverse_small[mem_idx as usize] = field_idx.index() as u8;
                }
            } else {
                inverse_big = memory_index.invert_bijective_mapping();
            }
        }

        // `Primitive` reports a count of 0, yet this iterator still
        // yields one pseudo-field index. NOTE(review): presumably so
        // callers can treat the primitive value itself as a single field;
        // confirm at call sites.
        let pseudofield_count = if let FieldsShape::Primitive = self { 1 } else { self.count() };

        (0..pseudofield_count).map(move |i| match *self {
            FieldsShape::Primitive | FieldsShape::Union(_) | FieldsShape::Array { .. } => i,
            FieldsShape::Arbitrary { .. } => {
                if use_small {
                    inverse_small[i] as usize
                } else {
                    inverse_big[i as u32].index()
                }
            }
        })
    }
}
1638
/// A numeric identifier for a target address space. NOTE(review): the
/// meaning of nonzero values is target/backend-defined; confirm against
/// the data-layout consumers.
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub struct AddressSpace(pub u32);
1645
impl AddressSpace {
    /// Address space `0`, the default on most targets.
    pub const ZERO: Self = AddressSpace(0);
}
1650
/// How a value is represented for codegen/ABI purposes.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub enum BackendRepr {
    /// A single scalar value.
    Scalar(Scalar),
    /// Two scalar values.
    ScalarPair(Scalar, Scalar),
    /// A SIMD vector of `count` elements, each represented as `element`.
    SimdVector {
        element: Scalar,
        count: u64,
    },
    /// Everything else: the value lives in memory.
    Memory {
        /// If `false`, the type is unsized (see `is_unsized`).
        sized: bool,
    },
}
1676
1677impl BackendRepr {
1678 #[inline]
1680 pub fn is_unsized(&self) -> bool {
1681 match *self {
1682 BackendRepr::Scalar(_)
1683 | BackendRepr::ScalarPair(..)
1684 | BackendRepr::SimdVector { .. } => false,
1685 BackendRepr::Memory { sized } => !sized,
1686 }
1687 }
1688
1689 #[inline]
1690 pub fn is_sized(&self) -> bool {
1691 !self.is_unsized()
1692 }
1693
1694 #[inline]
1697 pub fn is_signed(&self) -> bool {
1698 match self {
1699 BackendRepr::Scalar(scal) => scal.is_signed(),
1700 _ => panic!("`is_signed` on non-scalar ABI {self:?}"),
1701 }
1702 }
1703
1704 #[inline]
1706 pub fn is_scalar(&self) -> bool {
1707 matches!(*self, BackendRepr::Scalar(_))
1708 }
1709
1710 #[inline]
1712 pub fn is_bool(&self) -> bool {
1713 matches!(*self, BackendRepr::Scalar(s) if s.is_bool())
1714 }
1715
1716 pub fn scalar_align<C: HasDataLayout>(&self, cx: &C) -> Option<Align> {
1720 match *self {
1721 BackendRepr::Scalar(s) => Some(s.align(cx).abi),
1722 BackendRepr::ScalarPair(s1, s2) => Some(s1.align(cx).max(s2.align(cx)).abi),
1723 BackendRepr::SimdVector { .. } | BackendRepr::Memory { .. } => None,
1725 }
1726 }
1727
1728 pub fn scalar_size<C: HasDataLayout>(&self, cx: &C) -> Option<Size> {
1732 match *self {
1733 BackendRepr::Scalar(s) => Some(s.size(cx)),
1735 BackendRepr::ScalarPair(s1, s2) => {
1737 let field2_offset = s1.size(cx).align_to(s2.align(cx).abi);
1738 let size = (field2_offset + s2.size(cx)).align_to(
1739 self.scalar_align(cx)
1740 .unwrap(),
1742 );
1743 Some(size)
1744 }
1745 BackendRepr::SimdVector { .. } | BackendRepr::Memory { .. } => None,
1747 }
1748 }
1749
1750 pub fn to_union(&self) -> Self {
1752 match *self {
1753 BackendRepr::Scalar(s) => BackendRepr::Scalar(s.to_union()),
1754 BackendRepr::ScalarPair(s1, s2) => {
1755 BackendRepr::ScalarPair(s1.to_union(), s2.to_union())
1756 }
1757 BackendRepr::SimdVector { element, count } => {
1758 BackendRepr::SimdVector { element: element.to_union(), count }
1759 }
1760 BackendRepr::Memory { .. } => BackendRepr::Memory { sized: true },
1761 }
1762 }
1763
1764 pub fn eq_up_to_validity(&self, other: &Self) -> bool {
1765 match (self, other) {
1766 (BackendRepr::Scalar(l), BackendRepr::Scalar(r)) => l.primitive() == r.primitive(),
1769 (
1770 BackendRepr::SimdVector { element: element_l, count: count_l },
1771 BackendRepr::SimdVector { element: element_r, count: count_r },
1772 ) => element_l.primitive() == element_r.primitive() && count_l == count_r,
1773 (BackendRepr::ScalarPair(l1, l2), BackendRepr::ScalarPair(r1, r2)) => {
1774 l1.primitive() == r1.primitive() && l2.primitive() == r2.primitive()
1775 }
1776 _ => self == other,
1778 }
1779 }
1780}
1781
/// The variant structure of a layout.
#[derive(PartialEq, Eq, Hash, Clone, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub enum Variants<FieldIdx: Idx, VariantIdx: Idx> {
    /// No variants at all.
    Empty,

    /// Exactly one variant.
    Single {
        /// Which variant this layout represents.
        index: VariantIdx,
    },

    /// Multiple variants, discriminated at runtime by a tag scalar
    /// stored in field `tag_field` and decoded per `tag_encoding`.
    Multiple {
        tag: Scalar,
        tag_encoding: TagEncoding<VariantIdx>,
        tag_field: FieldIdx,
        variants: IndexVec<VariantIdx, LayoutData<FieldIdx, VariantIdx>>,
    },
}
1808
/// How the tag of `Variants::Multiple` identifies the active variant.
#[derive(PartialEq, Eq, Hash, Clone, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub enum TagEncoding<VariantIdx: Idx> {
    /// The tag directly stores the discriminant.
    Direct,

    /// Niche encoding: the tag distinguishes `niche_variants` from the
    /// `untagged_variant`. NOTE(review): the tag<->variant arithmetic is
    /// not visible in this chunk; confirm against the layout code.
    Niche {
        /// The variant encoded by tag values outside the niche range.
        untagged_variant: VariantIdx,
        /// The variants encoded inside the niche.
        niche_variants: RangeInclusive<VariantIdx>,
        /// The tag value corresponding to the first niche variant.
        niche_start: u128,
    },
}
1850
/// A scalar within a layout whose valid range does not cover every bit
/// pattern of its primitive: the unused ("niche") values are available to
/// encode enum tags without enlarging the layout (see `TagEncoding::Niche`).
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub struct Niche {
    /// Byte offset of the niche scalar within the enclosing layout.
    pub offset: Size,
    /// The primitive the niche scalar is stored as.
    pub value: Primitive,
    /// The valid values; everything outside this range is free for tag
    /// encoding (see `Niche::available`).
    pub valid_range: WrappingRange,
}
1858
impl Niche {
    /// Builds a `Niche` from an initialized scalar at `offset`, returning
    /// `None` for union scalars or when no invalid values are available.
    pub fn from_scalar<C: HasDataLayout>(cx: &C, offset: Size, scalar: Scalar) -> Option<Self> {
        let Scalar::Initialized { value, valid_range } = scalar else { return None };
        let niche = Niche { offset, value, valid_range };
        if niche.available(cx) > 0 { Some(niche) } else { None }
    }

    /// Number of invalid values: the gap between the end of the valid
    /// range and its start, computed as `(start - (end + 1)) mod 2^bits`.
    pub fn available<C: HasDataLayout>(&self, cx: &C) -> u128 {
        let Self { value, valid_range: v, .. } = *self;
        let size = value.size(cx);
        assert!(size.bits() <= 128);
        let max_value = size.unsigned_int_max();

        // The niche is the wrap-around gap `(end + 1)..start`.
        let niche = v.end.wrapping_add(1)..v.start;
        niche.end.wrapping_sub(niche.start) & max_value
    }

    /// Tries to reserve `count` of the invalid values adjacent to the
    /// valid range for tag encoding. On success, returns the first
    /// reserved value and a new `Scalar` whose valid range is widened to
    /// also cover the reserved values.
    pub fn reserve<C: HasDataLayout>(&self, cx: &C, count: u128) -> Option<(u128, Scalar)> {
        assert!(count > 0);

        let Self { value, valid_range: v, .. } = *self;
        let size = value.size(cx);
        assert!(size.bits() <= 128);
        let max_value = size.unsigned_int_max();

        // Same computation as `available`; bail out if the gap is too
        // small to hold `count` values.
        let niche = v.end.wrapping_add(1)..v.start;
        let available = niche.end.wrapping_sub(niche.start) & max_value;
        if count > available {
            return None;
        }

        // Claim the `count` values immediately below `start`...
        let move_start = |v: WrappingRange| {
            let start = v.start.wrapping_sub(count) & max_value;
            Some((start, Scalar::Initialized { value, valid_range: v.with_start(start) }))
        };
        // ...or the `count` values immediately above `end`.
        let move_end = |v: WrappingRange| {
            let start = v.end.wrapping_add(1) & max_value;
            let end = v.end.wrapping_add(count) & max_value;
            Some((start, Scalar::Initialized { value, valid_range: v.with_end(end) }))
        };
        let distance_end_zero = max_value - v.end;
        if v.start > v.end {
            // Already a wrapping range: growing the end eats directly
            // into the gap.
            move_end(v)
        } else if v.start <= distance_end_zero {
            if count <= v.start {
                move_start(v)
            } else {
                // Not enough room below `start` without wrapping past 0;
                // grow upward instead.
                move_end(v)
            }
        } else {
            // NOTE(review): heuristic side selection — grow upward unless
            // the grown end would wrap past zero back into the valid
            // range; confirm the rationale against layout callers.
            let end = v.end.wrapping_add(count) & max_value;
            let overshot_zero = (1..=v.end).contains(&end);
            if overshot_zero {
                move_start(v)
            } else {
                move_end(v)
            }
        }
    }
}
1936
/// The computed layout of a type: size, alignment, field placement,
/// variant structure and backend representation.
#[derive(PartialEq, Eq, Hash, Clone)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub struct LayoutData<FieldIdx: Idx, VariantIdx: Idx> {
    /// Where the fields of this layout are located in memory.
    pub fields: FieldsShape<FieldIdx>,

    /// The variant structure (empty, single, or multiple with a tag).
    pub variants: Variants<FieldIdx, VariantIdx>,

    /// How values of this layout are represented for codegen/ABI purposes.
    pub backend_repr: BackendRepr,

    /// The largest niche found in this layout, if any (see `Niche`).
    pub largest_niche: Option<Niche>,
    /// Whether the type has no valid values at all.
    pub uninhabited: bool,

    /// ABI alignment of the type.
    pub align: AbiAlign,
    /// Total size of the type in bytes.
    pub size: Size,

    /// NOTE(review): inferred from name — the largest alignment coming
    /// from a `repr(align)`-style attribute, if any; confirm.
    pub max_repr_align: Option<Align>,

    /// NOTE(review): inferred from name — the ABI alignment before any
    /// repr-based adjustment; confirm against `LayoutCalculator`.
    pub unadjusted_abi_align: Align,

    /// Seed for layout randomization (cf. `ReprFlags::RANDOMIZE_LAYOUT`).
    pub randomization_seed: Hash64,
}
1996
1997impl<FieldIdx: Idx, VariantIdx: Idx> LayoutData<FieldIdx, VariantIdx> {
1998 pub fn is_aggregate(&self) -> bool {
2000 match self.backend_repr {
2001 BackendRepr::Scalar(_) | BackendRepr::SimdVector { .. } => false,
2002 BackendRepr::ScalarPair(..) | BackendRepr::Memory { .. } => true,
2003 }
2004 }
2005
2006 pub fn is_uninhabited(&self) -> bool {
2008 self.uninhabited
2009 }
2010}
2011
2012impl<FieldIdx: Idx, VariantIdx: Idx> fmt::Debug for LayoutData<FieldIdx, VariantIdx>
2013where
2014 FieldsShape<FieldIdx>: fmt::Debug,
2015 Variants<FieldIdx, VariantIdx>: fmt::Debug,
2016{
2017 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
2018 let LayoutData {
2022 size,
2023 align,
2024 backend_repr,
2025 fields,
2026 largest_niche,
2027 uninhabited,
2028 variants,
2029 max_repr_align,
2030 unadjusted_abi_align,
2031 randomization_seed,
2032 } = self;
2033 f.debug_struct("Layout")
2034 .field("size", size)
2035 .field("align", align)
2036 .field("backend_repr", backend_repr)
2037 .field("fields", fields)
2038 .field("largest_niche", largest_niche)
2039 .field("uninhabited", uninhabited)
2040 .field("variants", variants)
2041 .field("max_repr_align", max_repr_align)
2042 .field("unadjusted_abi_align", unadjusted_abi_align)
2043 .field("randomization_seed", randomization_seed)
2044 .finish()
2045 }
2046}
2047
/// The kind of safe pointer recorded in `PointeeInfo::safe`.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub enum PointerKind {
    /// A shared reference. NOTE(review): `frozen` presumably means "no
    /// interior mutability"; confirm against the producer.
    SharedRef { frozen: bool },
    /// A mutable reference. NOTE(review): `unpin` presumably tracks
    /// whether the pointee is `Unpin`; confirm.
    MutableRef { unpin: bool },
    /// A `Box`-like owning pointer. NOTE(review): `global` presumably
    /// marks use of the global allocator; confirm.
    Box { unpin: bool, global: bool },
}
2058
/// Information known about the value a pointer points to.
#[derive(Copy, Clone, Debug)]
pub struct PointeeInfo {
    /// The kind of safe pointer, if this pointer is known to be one of
    /// the safe kinds; `None` otherwise. NOTE(review): inferred from the
    /// field/type names; confirm against consumers.
    pub safe: Option<PointerKind>,
    /// Size of the pointee.
    pub size: Size,
    /// Alignment of the pointee.
    pub align: Align,
}
2077
2078impl<FieldIdx: Idx, VariantIdx: Idx> LayoutData<FieldIdx, VariantIdx> {
2079 #[inline]
2081 pub fn is_unsized(&self) -> bool {
2082 self.backend_repr.is_unsized()
2083 }
2084
2085 #[inline]
2086 pub fn is_sized(&self) -> bool {
2087 self.backend_repr.is_sized()
2088 }
2089
2090 pub fn is_1zst(&self) -> bool {
2092 self.is_sized() && self.size.bytes() == 0 && self.align.abi.bytes() == 1
2093 }
2094
2095 pub fn is_zst(&self) -> bool {
2100 match self.backend_repr {
2101 BackendRepr::Scalar(_)
2102 | BackendRepr::ScalarPair(..)
2103 | BackendRepr::SimdVector { .. } => false,
2104 BackendRepr::Memory { sized } => sized && self.size.bytes() == 0,
2105 }
2106 }
2107
2108 pub fn eq_abi(&self, other: &Self) -> bool {
2114 self.size == other.size
2118 && self.is_sized() == other.is_sized()
2119 && self.backend_repr.eq_up_to_validity(&other.backend_repr)
2120 && self.backend_repr.is_bool() == other.backend_repr.is_bool()
2121 && self.align.abi == other.align.abi
2122 && self.max_repr_align == other.max_repr_align
2123 && self.unadjusted_abi_align == other.unadjusted_abi_align
2124 }
2125}
2126
/// Sizedness constraint applied when laying out a struct-like type.
#[derive(Copy, Clone, Debug)]
pub enum StructKind {
    /// Every instantiation is statically sized.
    AlwaysSized,
    /// The last field may be unsized.
    MaybeUnsized,
    /// A prefix of the given size and alignment precedes the fields.
    /// NOTE(review): inferred from name; confirm against `LayoutCalculator`.
    Prefixed(Size, Align),
}
2136
/// Error produced when parsing an ABI name from a string. NOTE(review):
/// the parse site is not visible in this chunk; presumably `ExternAbi`'s
/// `FromStr` — confirm.
#[derive(Clone, Debug)]
pub enum AbiFromStrErr {
    /// The string does not name a known ABI.
    Unknown,
    /// NOTE(review): inferred from variant name — the ABI string lacked a
    /// required explicit unwind suffix; confirm.
    NoExplicitUnwind,
}