1#![cfg_attr(feature = "nightly", allow(internal_features))]
3#![cfg_attr(feature = "nightly", doc(rust_logo))]
4#![cfg_attr(feature = "nightly", feature(assert_matches))]
5#![cfg_attr(feature = "nightly", feature(rustc_attrs))]
6#![cfg_attr(feature = "nightly", feature(rustdoc_internals))]
7#![cfg_attr(feature = "nightly", feature(step_trait))]
8use std::fmt;
43#[cfg(feature = "nightly")]
44use std::iter::Step;
45use std::num::{NonZeroUsize, ParseIntError};
46use std::ops::{Add, AddAssign, Deref, Mul, RangeFull, RangeInclusive, Sub};
47use std::str::FromStr;
48
49use bitflags::bitflags;
50#[cfg(feature = "nightly")]
51use rustc_data_structures::stable_hasher::StableOrd;
52use rustc_hashes::Hash64;
53use rustc_index::{Idx, IndexSlice, IndexVec};
54#[cfg(feature = "nightly")]
55use rustc_macros::{Decodable_NoContext, Encodable_NoContext, HashStable_Generic};
56
57mod callconv;
58mod canon_abi;
59mod extern_abi;
60mod layout;
61#[cfg(test)]
62mod tests;
63
64pub use callconv::{Heterogeneous, HomogeneousAggregate, Reg, RegKind};
65pub use canon_abi::{ArmCall, CanonAbi, InterruptKind, X86Call};
66pub use extern_abi::{ExternAbi, all_names};
67#[cfg(feature = "nightly")]
68pub use layout::{FIRST_VARIANT, FieldIdx, Layout, TyAbiInterface, TyAndLayout, VariantIdx};
69pub use layout::{LayoutCalculator, LayoutCalculatorError};
70
#[cfg(feature = "nightly")]
/// Marker trait for contexts usable with this crate's stable-hashing derives.
/// Only available with the `nightly` feature.
pub trait HashStableContext {}
76
/// Bitflags recording which `#[repr(..)]` attributes were applied to a type.
/// The individual flags are declared in the `bitflags!` block below.
#[derive(Clone, Copy, PartialEq, Eq, Default)]
#[cfg_attr(
    feature = "nightly",
    derive(Encodable_NoContext, Decodable_NoContext, HashStable_Generic)
)]
pub struct ReprFlags(u8);
83
bitflags! {
    impl ReprFlags: u8 {
        const IS_C               = 1 << 0;
        const IS_SIMD            = 1 << 1;
        const IS_TRANSPARENT     = 1 << 2;
        const IS_LINEAR          = 1 << 3;
        /// If true, the type's crate has opted into layout randomization.
        const RANDOMIZE_LAYOUT   = 1 << 4;
        /// Any of these flags being set prevents field reordering.
        const FIELD_ORDER_UNOPTIMIZABLE   = ReprFlags::IS_C.bits()
                                 | ReprFlags::IS_SIMD.bits()
                                 | ReprFlags::IS_LINEAR.bits();
        /// Any of these flags being set prevents ABI optimizations.
        const ABI_UNOPTIMIZABLE = ReprFlags::IS_C.bits() | ReprFlags::IS_SIMD.bits();
    }
}
103
impl std::fmt::Debug for ReprFlags {
    // Delegate to bitflags' textual writer so set flags print by name
    // (e.g. `IS_C | IS_SIMD`) instead of as a raw integer.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        bitflags::parser::to_writer(self, f)
    }
}
111
/// The discriminant type selected by `#[repr(<int>)]`.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
#[cfg_attr(
    feature = "nightly",
    derive(Encodable_NoContext, Decodable_NoContext, HashStable_Generic)
)]
pub enum IntegerType {
    /// Pointer-sized integer; the `bool` records signedness
    /// (`true` = signed, see `is_signed` below).
    Pointer(bool),
    /// Fixed-width integer of the given size; the `bool` records signedness.
    Fixed(Integer, bool),
}
125
126impl IntegerType {
127    pub fn is_signed(&self) -> bool {
128        match self {
129            IntegerType::Pointer(b) => *b,
130            IntegerType::Fixed(_, b) => *b,
131        }
132    }
133}
134
/// Parsed `#[repr(..)]` attributes of a type, plus the layout-randomization seed.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Default)]
#[cfg_attr(
    feature = "nightly",
    derive(Encodable_NoContext, Decodable_NoContext, HashStable_Generic)
)]
pub struct ReprOptions {
    /// Explicit discriminant type from `#[repr(<int>)]`, if any.
    pub int: Option<IntegerType>,
    /// Raised alignment from `#[repr(align(..))]`, if any.
    pub align: Option<Align>,
    /// Lowered alignment from `#[repr(packed(..))]`, if any.
    pub pack: Option<Align>,
    /// Which repr flags apply (C, simd, transparent, linear, randomize).
    pub flags: ReprFlags,
    /// Seed used when randomizing layout — consumed only when
    /// `can_randomize_type_layout` holds (see impl below).
    pub field_shuffle_seed: Hash64,
}
155
impl ReprOptions {
    /// Returns `true` if `#[repr(simd)]` was applied.
    #[inline]
    pub fn simd(&self) -> bool {
        self.flags.contains(ReprFlags::IS_SIMD)
    }

    /// Returns `true` if `#[repr(C)]` was applied.
    #[inline]
    pub fn c(&self) -> bool {
        self.flags.contains(ReprFlags::IS_C)
    }

    /// Returns `true` if a packed alignment was requested.
    #[inline]
    pub fn packed(&self) -> bool {
        self.pack.is_some()
    }

    /// Returns `true` if `#[repr(transparent)]` was applied.
    #[inline]
    pub fn transparent(&self) -> bool {
        self.flags.contains(ReprFlags::IS_TRANSPARENT)
    }

    /// Returns `true` if the linear (declaration-order) flag is set.
    #[inline]
    pub fn linear(&self) -> bool {
        self.flags.contains(ReprFlags::IS_LINEAR)
    }

    /// The discriminant type to use, defaulting to a signed
    /// pointer-sized integer when `#[repr(<int>)]` was not given.
    pub fn discr_type(&self) -> IntegerType {
        self.int.unwrap_or(IntegerType::Pointer(true))
    }

    /// Returns `true` if enum layout optimizations are inhibited:
    /// either `repr(C)` or an explicit discriminant type.
    pub fn inhibit_enum_layout_opt(&self) -> bool {
        self.c() || self.int.is_some()
    }

    /// Returns `true` if any ABI-affecting repr flag (C or simd) is set.
    pub fn inhibit_newtype_abi_optimization(&self) -> bool {
        self.flags.intersects(ReprFlags::ABI_UNOPTIMIZABLE)
    }

    /// Returns `true` if struct fields must keep declaration order:
    /// any field-order flag (C, simd, linear) or an explicit integer repr.
    pub fn inhibit_struct_field_reordering(&self) -> bool {
        self.flags.intersects(ReprFlags::FIELD_ORDER_UNOPTIMIZABLE) || self.int.is_some()
    }

    /// Returns `true` if this type's layout may be randomized: reordering
    /// must be allowed *and* the crate opted into randomization.
    pub fn can_randomize_type_layout(&self) -> bool {
        !self.inhibit_struct_field_reordering() && self.flags.contains(ReprFlags::RANDOMIZE_LAYOUT)
    }

    /// Returns `true` if union ABI optimizations are inhibited (`repr(C)`).
    pub fn inhibits_union_abi_opt(&self) -> bool {
        self.c()
    }
}
216
/// The maximum supported number of lanes in a SIMD vector: `1 << 0xF` = 32768.
pub const MAX_SIMD_LANES: u64 = 1 << 0xF;
223
/// The properties of pointers in a single address space, as parsed from an
/// LLVM datalayout `p...` specification (see `parse_from_llvm_datalayout_string`).
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub struct PointerSpec {
    /// Total size of a pointer value.
    pointer_size: Size,
    /// ABI alignment of a pointer value.
    pointer_align: AbiAlign,
    /// Size of the index/offset component; defaults to `pointer_size` when the
    /// datalayout string gives no explicit index size.
    pointer_offset: Size,
    /// Set when the spec used the `pf...` (fat-pointer) form; currently
    /// unused, hence the leading underscore.
    _is_fat: bool,
}
237
/// Parsed [Data layout](https://llvm.org/docs/LangRef.html#data-layout)
/// for a target: endianness, primitive alignments, and pointer specs.
#[derive(Debug, PartialEq, Eq)]
pub struct TargetDataLayout {
    pub endian: Endian,
    pub i1_align: AbiAlign,
    pub i8_align: AbiAlign,
    pub i16_align: AbiAlign,
    pub i32_align: AbiAlign,
    pub i64_align: AbiAlign,
    pub i128_align: AbiAlign,
    pub f16_align: AbiAlign,
    pub f32_align: AbiAlign,
    pub f64_align: AbiAlign,
    pub f128_align: AbiAlign,
    pub aggregate_align: AbiAlign,

    /// Alignments for vector types, keyed by total vector size.
    pub vector_align: Vec<(Size, AbiAlign)>,

    /// The address space data/`Size`-bound computations refer to by default.
    pub default_address_space: AddressSpace,
    /// Pointer spec for the default address space, stored inline so the
    /// common case avoids a table lookup.
    pub default_address_space_pointer_spec: PointerSpec,

    /// Pointer specs for all *non-default* address spaces that were declared.
    address_space_info: Vec<(AddressSpace, PointerSpec)>,

    pub instruction_address_space: AddressSpace,

    /// Minimum size of #[repr(C)] enums (default c_int::BITS, usually 32).
    pub c_enum_min_size: Integer,
}
276
impl Default for TargetDataLayout {
    /// Creates an instance of `TargetDataLayout`.
    ///
    /// NOTE(review): the values appear to mirror LLVM's default datalayout
    /// (big-endian, 64-bit pointers, i64 aligned to 32 bits) — confirm against
    /// the LLVM LangRef before relying on any individual default.
    fn default() -> TargetDataLayout {
        let align = |bits| Align::from_bits(bits).unwrap();
        TargetDataLayout {
            endian: Endian::Big,
            i1_align: AbiAlign::new(align(8)),
            i8_align: AbiAlign::new(align(8)),
            i16_align: AbiAlign::new(align(16)),
            i32_align: AbiAlign::new(align(32)),
            i64_align: AbiAlign::new(align(32)),
            i128_align: AbiAlign::new(align(32)),
            f16_align: AbiAlign::new(align(16)),
            f32_align: AbiAlign::new(align(32)),
            f64_align: AbiAlign::new(align(64)),
            f128_align: AbiAlign::new(align(128)),
            aggregate_align: AbiAlign { abi: align(8) },
            vector_align: vec![
                (Size::from_bits(64), AbiAlign::new(align(64))),
                (Size::from_bits(128), AbiAlign::new(align(128))),
            ],
            default_address_space: AddressSpace::ZERO,
            default_address_space_pointer_spec: PointerSpec {
                pointer_size: Size::from_bits(64),
                pointer_align: AbiAlign::new(align(64)),
                pointer_offset: Size::from_bits(64),
                _is_fat: false,
            },
            address_space_info: vec![],
            instruction_address_space: AddressSpace::ZERO,
            c_enum_min_size: Integer::I32,
        }
    }
}
311
/// Errors produced while parsing a datalayout string or reconciling it with
/// the target specification. The `cause` fields carry the offending spec
/// fragment for diagnostics.
pub enum TargetDataLayoutErrors<'a> {
    InvalidAddressSpace { addr_space: &'a str, cause: &'a str, err: ParseIntError },
    InvalidBits { kind: &'a str, bit: &'a str, cause: &'a str, err: ParseIntError },
    MissingAlignment { cause: &'a str },
    InvalidAlignment { cause: &'a str, err: AlignFromBytesError },
    InconsistentTargetArchitecture { dl: &'a str, target: &'a str },
    InconsistentTargetPointerWidth { pointer_size: u64, target: u32 },
    InvalidBitsSize { err: String },
    UnknownPointerSpecification { err: String },
}
322
323impl TargetDataLayout {
324    pub fn parse_from_llvm_datalayout_string<'a>(
330        input: &'a str,
331        default_address_space: AddressSpace,
332    ) -> Result<TargetDataLayout, TargetDataLayoutErrors<'a>> {
333        let parse_address_space = |s: &'a str, cause: &'a str| {
335            s.parse::<u32>().map(AddressSpace).map_err(|err| {
336                TargetDataLayoutErrors::InvalidAddressSpace { addr_space: s, cause, err }
337            })
338        };
339
340        let parse_bits = |s: &'a str, kind: &'a str, cause: &'a str| {
342            s.parse::<u64>().map_err(|err| TargetDataLayoutErrors::InvalidBits {
343                kind,
344                bit: s,
345                cause,
346                err,
347            })
348        };
349
350        let parse_size =
352            |s: &'a str, cause: &'a str| parse_bits(s, "size", cause).map(Size::from_bits);
353
354        let parse_align_str = |s: &'a str, cause: &'a str| {
356            let align_from_bits = |bits| {
357                Align::from_bits(bits)
358                    .map_err(|err| TargetDataLayoutErrors::InvalidAlignment { cause, err })
359            };
360            let abi = parse_bits(s, "alignment", cause)?;
361            Ok(AbiAlign::new(align_from_bits(abi)?))
362        };
363
364        let parse_align_seq = |s: &[&'a str], cause: &'a str| {
367            if s.is_empty() {
368                return Err(TargetDataLayoutErrors::MissingAlignment { cause });
369            }
370            parse_align_str(s[0], cause)
371        };
372
373        let mut dl = TargetDataLayout::default();
374        dl.default_address_space = default_address_space;
375
376        let mut i128_align_src = 64;
377        for spec in input.split('-') {
378            let spec_parts = spec.split(':').collect::<Vec<_>>();
379
380            match &*spec_parts {
381                ["e"] => dl.endian = Endian::Little,
382                ["E"] => dl.endian = Endian::Big,
383                [p] if p.starts_with('P') => {
384                    dl.instruction_address_space = parse_address_space(&p[1..], "P")?
385                }
386                ["a", a @ ..] => dl.aggregate_align = parse_align_seq(a, "a")?,
387                ["f16", a @ ..] => dl.f16_align = parse_align_seq(a, "f16")?,
388                ["f32", a @ ..] => dl.f32_align = parse_align_seq(a, "f32")?,
389                ["f64", a @ ..] => dl.f64_align = parse_align_seq(a, "f64")?,
390                ["f128", a @ ..] => dl.f128_align = parse_align_seq(a, "f128")?,
391                [p, s, a @ ..] if p.starts_with("p") => {
392                    let mut p = p.strip_prefix('p').unwrap();
393                    let mut _is_fat = false;
394
395                    if p.starts_with('f') {
399                        p = p.strip_prefix('f').unwrap();
400                        _is_fat = true;
401                    }
402
403                    if p.starts_with(char::is_alphabetic) {
406                        return Err(TargetDataLayoutErrors::UnknownPointerSpecification {
407                            err: p.to_string(),
408                        });
409                    }
410
411                    let addr_space = if !p.is_empty() {
412                        parse_address_space(p, "p-")?
413                    } else {
414                        AddressSpace::ZERO
415                    };
416
417                    let pointer_size = parse_size(s, "p-")?;
418                    let pointer_align = parse_align_seq(a, "p-")?;
419                    let info = PointerSpec {
420                        pointer_offset: pointer_size,
421                        pointer_size,
422                        pointer_align,
423                        _is_fat,
424                    };
425                    if addr_space == default_address_space {
426                        dl.default_address_space_pointer_spec = info;
427                    } else {
428                        match dl.address_space_info.iter_mut().find(|(a, _)| *a == addr_space) {
429                            Some(e) => e.1 = info,
430                            None => {
431                                dl.address_space_info.push((addr_space, info));
432                            }
433                        }
434                    }
435                }
436                [p, s, a, _pr, i] if p.starts_with("p") => {
437                    let mut p = p.strip_prefix('p').unwrap();
438                    let mut _is_fat = false;
439
440                    if p.starts_with('f') {
444                        p = p.strip_prefix('f').unwrap();
445                        _is_fat = true;
446                    }
447
448                    if p.starts_with(char::is_alphabetic) {
451                        return Err(TargetDataLayoutErrors::UnknownPointerSpecification {
452                            err: p.to_string(),
453                        });
454                    }
455
456                    let addr_space = if !p.is_empty() {
457                        parse_address_space(p, "p")?
458                    } else {
459                        AddressSpace::ZERO
460                    };
461
462                    let info = PointerSpec {
463                        pointer_size: parse_size(s, "p-")?,
464                        pointer_align: parse_align_str(a, "p-")?,
465                        pointer_offset: parse_size(i, "p-")?,
466                        _is_fat,
467                    };
468
469                    if addr_space == default_address_space {
470                        dl.default_address_space_pointer_spec = info;
471                    } else {
472                        match dl.address_space_info.iter_mut().find(|(a, _)| *a == addr_space) {
473                            Some(e) => e.1 = info,
474                            None => {
475                                dl.address_space_info.push((addr_space, info));
476                            }
477                        }
478                    }
479                }
480
481                [s, a @ ..] if s.starts_with('i') => {
482                    let Ok(bits) = s[1..].parse::<u64>() else {
483                        parse_size(&s[1..], "i")?; continue;
485                    };
486                    let a = parse_align_seq(a, s)?;
487                    match bits {
488                        1 => dl.i1_align = a,
489                        8 => dl.i8_align = a,
490                        16 => dl.i16_align = a,
491                        32 => dl.i32_align = a,
492                        64 => dl.i64_align = a,
493                        _ => {}
494                    }
495                    if bits >= i128_align_src && bits <= 128 {
496                        i128_align_src = bits;
499                        dl.i128_align = a;
500                    }
501                }
502                [s, a @ ..] if s.starts_with('v') => {
503                    let v_size = parse_size(&s[1..], "v")?;
504                    let a = parse_align_seq(a, s)?;
505                    if let Some(v) = dl.vector_align.iter_mut().find(|v| v.0 == v_size) {
506                        v.1 = a;
507                        continue;
508                    }
509                    dl.vector_align.push((v_size, a));
511                }
512                _ => {} }
514        }
515
516        if (dl.instruction_address_space != dl.default_address_space)
519            && dl
520                .address_space_info
521                .iter()
522                .find(|(a, _)| *a == dl.instruction_address_space)
523                .is_none()
524        {
525            dl.address_space_info.push((
526                dl.instruction_address_space,
527                dl.default_address_space_pointer_spec.clone(),
528            ));
529        }
530
531        Ok(dl)
532    }
533
534    #[inline]
545    pub fn obj_size_bound(&self) -> u64 {
546        match self.pointer_size().bits() {
547            16 => 1 << 15,
548            32 => 1 << 31,
549            64 => 1 << 61,
550            bits => panic!("obj_size_bound: unknown pointer bit size {bits}"),
551        }
552    }
553
554    #[inline]
564    pub fn obj_size_bound_in(&self, address_space: AddressSpace) -> u64 {
565        match self.pointer_size_in(address_space).bits() {
566            16 => 1 << 15,
567            32 => 1 << 31,
568            64 => 1 << 61,
569            bits => panic!("obj_size_bound: unknown pointer bit size {bits}"),
570        }
571    }
572
573    #[inline]
574    pub fn ptr_sized_integer(&self) -> Integer {
575        use Integer::*;
576        match self.pointer_offset().bits() {
577            16 => I16,
578            32 => I32,
579            64 => I64,
580            bits => panic!("ptr_sized_integer: unknown pointer bit size {bits}"),
581        }
582    }
583
584    #[inline]
585    pub fn ptr_sized_integer_in(&self, address_space: AddressSpace) -> Integer {
586        use Integer::*;
587        match self.pointer_offset_in(address_space).bits() {
588            16 => I16,
589            32 => I32,
590            64 => I64,
591            bits => panic!("ptr_sized_integer: unknown pointer bit size {bits}"),
592        }
593    }
594
595    #[inline]
597    fn cabi_vector_align(&self, vec_size: Size) -> Option<AbiAlign> {
598        self.vector_align
599            .iter()
600            .find(|(size, _align)| *size == vec_size)
601            .map(|(_size, align)| *align)
602    }
603
604    #[inline]
606    pub fn llvmlike_vector_align(&self, vec_size: Size) -> AbiAlign {
607        self.cabi_vector_align(vec_size).unwrap_or(AbiAlign::new(
608            Align::from_bytes(vec_size.bytes().next_power_of_two()).unwrap(),
609        ))
610    }
611
612    #[inline]
614    pub fn pointer_size(&self) -> Size {
615        self.default_address_space_pointer_spec.pointer_size
616    }
617
618    #[inline]
620    pub fn pointer_size_in(&self, c: AddressSpace) -> Size {
621        if c == self.default_address_space {
622            return self.default_address_space_pointer_spec.pointer_size;
623        }
624
625        if let Some(e) = self.address_space_info.iter().find(|(a, _)| a == &c) {
626            e.1.pointer_size
627        } else {
628            panic!("Use of unknown address space {c:?}");
629        }
630    }
631
632    #[inline]
634    pub fn pointer_offset(&self) -> Size {
635        self.default_address_space_pointer_spec.pointer_offset
636    }
637
638    #[inline]
640    pub fn pointer_offset_in(&self, c: AddressSpace) -> Size {
641        if c == self.default_address_space {
642            return self.default_address_space_pointer_spec.pointer_offset;
643        }
644
645        if let Some(e) = self.address_space_info.iter().find(|(a, _)| a == &c) {
646            e.1.pointer_offset
647        } else {
648            panic!("Use of unknown address space {c:?}");
649        }
650    }
651
652    #[inline]
654    pub fn pointer_align(&self) -> AbiAlign {
655        self.default_address_space_pointer_spec.pointer_align
656    }
657
658    #[inline]
660    pub fn pointer_align_in(&self, c: AddressSpace) -> AbiAlign {
661        if c == self.default_address_space {
662            return self.default_address_space_pointer_spec.pointer_align;
663        }
664
665        if let Some(e) = self.address_space_info.iter().find(|(a, _)| a == &c) {
666            e.1.pointer_align
667        } else {
668            panic!("Use of unknown address space {c:?}");
669        }
670    }
671}
672
/// Anything that can hand out a `TargetDataLayout` reference.
pub trait HasDataLayout {
    fn data_layout(&self) -> &TargetDataLayout;
}
676
// A layout trivially provides itself.
impl HasDataLayout for TargetDataLayout {
    #[inline]
    fn data_layout(&self) -> &TargetDataLayout {
        self
    }
}
683
// Forwarding impl so `&TargetDataLayout` also satisfies `HasDataLayout`.
impl HasDataLayout for &TargetDataLayout {
    #[inline]
    fn data_layout(&self) -> &TargetDataLayout {
        (**self).data_layout()
    }
}
691
/// Endianness of the target.
#[derive(Copy, Clone, PartialEq, Eq)]
pub enum Endian {
    Little,
    Big,
}
698
impl Endian {
    /// Lowercase name used by both `Debug` and `FromStr` ("little"/"big").
    pub fn as_str(&self) -> &'static str {
        match self {
            Self::Little => "little",
            Self::Big => "big",
        }
    }
}
707
impl fmt::Debug for Endian {
    // Print the same lowercase name `as_str` returns.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(self.as_str())
    }
}
713
714impl FromStr for Endian {
715    type Err = String;
716
717    fn from_str(s: &str) -> Result<Self, Self::Err> {
718        match s {
719            "little" => Ok(Self::Little),
720            "big" => Ok(Self::Big),
721            _ => Err(format!(r#"unknown endian: "{s}""#)),
722        }
723    }
724}
725
/// Size of a type in bytes.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[cfg_attr(
    feature = "nightly",
    derive(Encodable_NoContext, Decodable_NoContext, HashStable_Generic)
)]
pub struct Size {
    // Stored as a byte count; see `bytes()`/`bits()`.
    raw: u64,
}
735
736#[cfg(feature = "nightly")]
737impl StableOrd for Size {
738    const CAN_USE_UNSTABLE_SORT: bool = true;
739
740    const THIS_IMPLEMENTATION_HAS_BEEN_TRIPLE_CHECKED: () = ();
743}
744
impl fmt::Debug for Size {
    // Human-readable form, e.g. `Size(8 bytes)`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "Size({} bytes)", self.bytes())
    }
}
751
impl Size {
    pub const ZERO: Size = Size { raw: 0 };

    /// Rounds `bits` up to the next-higher byte boundary, if `bits` is
    /// not a multiple of 8.
    pub fn from_bits(bits: impl TryInto<u64>) -> Size {
        let bits = bits.try_into().ok().unwrap();
        Size { raw: bits.div_ceil(8) }
    }

    #[inline]
    pub fn from_bytes(bytes: impl TryInto<u64>) -> Size {
        let bytes: u64 = bytes.try_into().ok().unwrap();
        Size { raw: bytes }
    }

    #[inline]
    pub fn bytes(self) -> u64 {
        self.raw
    }

    #[inline]
    pub fn bytes_usize(self) -> usize {
        self.bytes().try_into().unwrap()
    }

    /// Size in bits; panics if the byte count times 8 overflows `u64`.
    #[inline]
    pub fn bits(self) -> u64 {
        #[cold]
        fn overflow(bytes: u64) -> ! {
            panic!("Size::bits: {bytes} bytes in bits doesn't fit in u64")
        }

        self.bytes().checked_mul(8).unwrap_or_else(|| overflow(self.bytes()))
    }

    #[inline]
    pub fn bits_usize(self) -> usize {
        self.bits().try_into().unwrap()
    }

    /// Rounds this size up to the nearest multiple of `align`
    /// (which is always a power of two, so bit masking suffices).
    #[inline]
    pub fn align_to(self, align: Align) -> Size {
        let mask = align.bytes() - 1;
        Size::from_bytes((self.bytes() + mask) & !mask)
    }

    /// Whether this size is already a multiple of `align`.
    #[inline]
    pub fn is_aligned(self, align: Align) -> bool {
        let mask = align.bytes() - 1;
        self.bytes() & mask == 0
    }

    /// Adds `offset`, returning `None` on `u64` overflow or if the result
    /// reaches the target's object size bound.
    #[inline]
    pub fn checked_add<C: HasDataLayout>(self, offset: Size, cx: &C) -> Option<Size> {
        let dl = cx.data_layout();

        let bytes = self.bytes().checked_add(offset.bytes())?;

        if bytes < dl.obj_size_bound() { Some(Size::from_bytes(bytes)) } else { None }
    }

    /// Multiplies by `count`, returning `None` on `u64` overflow or if the
    /// result reaches the target's object size bound.
    #[inline]
    pub fn checked_mul<C: HasDataLayout>(self, count: u64, cx: &C) -> Option<Size> {
        let dl = cx.data_layout();

        let bytes = self.bytes().checked_mul(count)?;
        if bytes < dl.obj_size_bound() { Some(Size::from_bytes(bytes)) } else { None }
    }

    /// Treats `value` as the low `self.bits()` bits of a two's-complement
    /// integer and sign-extends it to `i128`. A zero-bit size yields 0.
    #[inline]
    pub fn sign_extend(self, value: u128) -> i128 {
        let size = self.bits();
        if size == 0 {
            // Avoid a shift by 128 below, which is UB-adjacent (panics).
            return 0;
        }
        // Shift the relevant bits to the top, then arithmetic-shift back down
        // so the sign bit propagates.
        let shift = 128 - size;
        ((value << shift) as i128) >> shift
    }

    /// Keeps only the low `self.bits()` bits of `value` (zero-extension).
    /// A zero-bit size yields 0.
    #[inline]
    pub fn truncate(self, value: u128) -> u128 {
        let size = self.bits();
        if size == 0 {
            // Avoid a shift by 128 below.
            return 0;
        }
        let shift = 128 - size;
        (value << shift) >> shift
    }

    /// Minimum signed value representable in `self.bits()` bits.
    /// NOTE(review): `self.bits() - 1` underflows for a zero-sized `Size` —
    /// callers must pass a non-zero size.
    #[inline]
    pub fn signed_int_min(&self) -> i128 {
        self.sign_extend(1_u128 << (self.bits() - 1))
    }

    /// Maximum signed value representable in `self.bits()` bits
    /// (shift by 128 panics for a zero-sized `Size`).
    #[inline]
    pub fn signed_int_max(&self) -> i128 {
        i128::MAX >> (128 - self.bits())
    }

    /// Maximum unsigned value representable in `self.bits()` bits
    /// (shift by 128 panics for a zero-sized `Size`).
    #[inline]
    pub fn unsigned_int_max(&self) -> u128 {
        u128::MAX >> (128 - self.bits())
    }
}
866
867impl Add for Size {
871    type Output = Size;
872    #[inline]
873    fn add(self, other: Size) -> Size {
874        Size::from_bytes(self.bytes().checked_add(other.bytes()).unwrap_or_else(|| {
875            panic!("Size::add: {} + {} doesn't fit in u64", self.bytes(), other.bytes())
876        }))
877    }
878}
879
880impl Sub for Size {
881    type Output = Size;
882    #[inline]
883    fn sub(self, other: Size) -> Size {
884        Size::from_bytes(self.bytes().checked_sub(other.bytes()).unwrap_or_else(|| {
885            panic!("Size::sub: {} - {} would result in negative size", self.bytes(), other.bytes())
886        }))
887    }
888}
889
impl Mul<Size> for u64 {
    type Output = Size;
    // `n * size` delegates to `size * n` below.
    #[inline]
    fn mul(self, size: Size) -> Size {
        size * self
    }
}
897
898impl Mul<u64> for Size {
899    type Output = Size;
900    #[inline]
901    fn mul(self, count: u64) -> Size {
902        match self.bytes().checked_mul(count) {
903            Some(bytes) => Size::from_bytes(bytes),
904            None => panic!("Size::mul: {} * {} doesn't fit in u64", self.bytes(), count),
905        }
906    }
907}
908
impl AddAssign for Size {
    // Inherits the overflow panic from `Add` above.
    #[inline]
    fn add_assign(&mut self, other: Size) {
        *self = *self + other;
    }
}
915
916#[cfg(feature = "nightly")]
917impl Step for Size {
918    #[inline]
919    fn steps_between(start: &Self, end: &Self) -> (usize, Option<usize>) {
920        u64::steps_between(&start.bytes(), &end.bytes())
921    }
922
923    #[inline]
924    fn forward_checked(start: Self, count: usize) -> Option<Self> {
925        u64::forward_checked(start.bytes(), count).map(Self::from_bytes)
926    }
927
928    #[inline]
929    fn forward(start: Self, count: usize) -> Self {
930        Self::from_bytes(u64::forward(start.bytes(), count))
931    }
932
933    #[inline]
934    unsafe fn forward_unchecked(start: Self, count: usize) -> Self {
935        Self::from_bytes(unsafe { u64::forward_unchecked(start.bytes(), count) })
936    }
937
938    #[inline]
939    fn backward_checked(start: Self, count: usize) -> Option<Self> {
940        u64::backward_checked(start.bytes(), count).map(Self::from_bytes)
941    }
942
943    #[inline]
944    fn backward(start: Self, count: usize) -> Self {
945        Self::from_bytes(u64::backward(start.bytes(), count))
946    }
947
948    #[inline]
949    unsafe fn backward_unchecked(start: Self, count: usize) -> Self {
950        Self::from_bytes(unsafe { u64::backward_unchecked(start.bytes(), count) })
951    }
952}
953
/// Alignment of a type in bytes (always a power of two).
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[cfg_attr(
    feature = "nightly",
    derive(Encodable_NoContext, Decodable_NoContext, HashStable_Generic)
)]
pub struct Align {
    // log2 of the alignment in bytes; `bytes()` is `1 << pow2`.
    pow2: u8,
}
963
impl fmt::Debug for Align {
    // Human-readable form, e.g. `Align(8 bytes)`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "Align({} bytes)", self.bytes())
    }
}
970
/// Why `Align::from_bytes` rejected a value; each variant carries the
/// offending byte count.
#[derive(Clone, Copy)]
pub enum AlignFromBytesError {
    NotPowerOfTwo(u64),
    TooLarge(u64),
}
976
977impl AlignFromBytesError {
978    pub fn diag_ident(self) -> &'static str {
979        match self {
980            Self::NotPowerOfTwo(_) => "not_power_of_two",
981            Self::TooLarge(_) => "too_large",
982        }
983    }
984
985    pub fn align(self) -> u64 {
986        let (Self::NotPowerOfTwo(align) | Self::TooLarge(align)) = self;
987        align
988    }
989}
990
impl fmt::Debug for AlignFromBytesError {
    // Same rendering as `Display`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Display::fmt(self, f)
    }
}
996
impl fmt::Display for AlignFromBytesError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            AlignFromBytesError::NotPowerOfTwo(align) => write!(f, "`{align}` is not a power of 2"),
            AlignFromBytesError::TooLarge(align) => write!(f, "`{align}` is too large"),
        }
    }
}
1005
impl Align {
    pub const ONE: Align = Align { pow2: 0 };
    pub const EIGHT: Align = Align { pow2: 3 };
    /// The highest representable alignment: 2^29 bytes.
    pub const MAX: Align = Align { pow2: 29 };

    /// Builds an `Align` from a bit count (rounded up to whole bytes).
    #[inline]
    pub fn from_bits(bits: u64) -> Result<Align, AlignFromBytesError> {
        Align::from_bytes(Size::from_bits(bits).bytes())
    }

    /// Builds an `Align` from a byte count, which must be a power of two
    /// no larger than `Align::MAX`. An alignment of 0 is treated as 1.
    #[inline]
    pub const fn from_bytes(align: u64) -> Result<Align, AlignFromBytesError> {
        if align == 0 {
            return Ok(Align::ONE);
        }

        // Out-of-line `#[cold]` constructors keep the error path off the hot path.
        #[cold]
        const fn not_power_of_2(align: u64) -> AlignFromBytesError {
            AlignFromBytesError::NotPowerOfTwo(align)
        }

        #[cold]
        const fn too_large(align: u64) -> AlignFromBytesError {
            AlignFromBytesError::TooLarge(align)
        }

        // A power of two has exactly one set bit, so it equals 1 << trailing_zeros.
        let tz = align.trailing_zeros();
        if align != (1 << tz) {
            return Err(not_power_of_2(align));
        }

        let pow2 = tz as u8;
        if pow2 > Self::MAX.pow2 {
            return Err(too_large(align));
        }

        Ok(Align { pow2 })
    }

    #[inline]
    pub const fn bytes(self) -> u64 {
        1 << self.pow2
    }

    #[inline]
    pub fn bytes_usize(self) -> usize {
        self.bytes().try_into().unwrap()
    }

    #[inline]
    pub const fn bits(self) -> u64 {
        self.bytes() * 8
    }

    #[inline]
    pub fn bits_usize(self) -> usize {
        self.bits().try_into().unwrap()
    }

    /// The largest alignment dividing `size`: the highest power of two
    /// that `size.bytes()` is a multiple of.
    #[inline]
    pub fn max_aligned_factor(size: Size) -> Align {
        Align { pow2: size.bytes().trailing_zeros() as u8 }
    }

    /// Caps this alignment to one that is still guaranteed at an offset
    /// that is a multiple of `size`.
    #[inline]
    pub fn restrict_for_offset(self, size: Size) -> Align {
        self.min(Align::max_aligned_factor(size))
    }
}
1082
/// Newtype wrapper marking an alignment as the ABI-mandated one
/// (as opposed to a merely preferred alignment).
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub struct AbiAlign {
    pub abi: Align,
}
1097
1098impl AbiAlign {
1099    #[inline]
1100    pub fn new(align: Align) -> AbiAlign {
1101        AbiAlign { abi: align }
1102    }
1103
1104    #[inline]
1105    pub fn min(self, other: AbiAlign) -> AbiAlign {
1106        AbiAlign { abi: self.abi.min(other.abi) }
1107    }
1108
1109    #[inline]
1110    pub fn max(self, other: AbiAlign) -> AbiAlign {
1111        AbiAlign { abi: self.abi.max(other.abi) }
1112    }
1113}
1114
// Lets `AbiAlign` be used wherever an `&Align` method is wanted
// (e.g. `abi_align.bytes()`).
impl Deref for AbiAlign {
    type Target = Align;

    fn deref(&self) -> &Self::Target {
        &self.abi
    }
}
1122
/// Integers, also used for enum discriminants.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
#[cfg_attr(
    feature = "nightly",
    derive(Encodable_NoContext, Decodable_NoContext, HashStable_Generic)
)]
pub enum Integer {
    I8,
    I16,
    I32,
    I64,
    I128,
}
1136
1137impl Integer {
1138    pub fn int_ty_str(self) -> &'static str {
1139        use Integer::*;
1140        match self {
1141            I8 => "i8",
1142            I16 => "i16",
1143            I32 => "i32",
1144            I64 => "i64",
1145            I128 => "i128",
1146        }
1147    }
1148
1149    pub fn uint_ty_str(self) -> &'static str {
1150        use Integer::*;
1151        match self {
1152            I8 => "u8",
1153            I16 => "u16",
1154            I32 => "u32",
1155            I64 => "u64",
1156            I128 => "u128",
1157        }
1158    }
1159
1160    #[inline]
1161    pub fn size(self) -> Size {
1162        use Integer::*;
1163        match self {
1164            I8 => Size::from_bytes(1),
1165            I16 => Size::from_bytes(2),
1166            I32 => Size::from_bytes(4),
1167            I64 => Size::from_bytes(8),
1168            I128 => Size::from_bytes(16),
1169        }
1170    }
1171
1172    pub fn from_attr<C: HasDataLayout>(cx: &C, ity: IntegerType) -> Integer {
1174        let dl = cx.data_layout();
1175
1176        match ity {
1177            IntegerType::Pointer(_) => dl.ptr_sized_integer(),
1178            IntegerType::Fixed(x, _) => x,
1179        }
1180    }
1181
1182    pub fn align<C: HasDataLayout>(self, cx: &C) -> AbiAlign {
1183        use Integer::*;
1184        let dl = cx.data_layout();
1185
1186        match self {
1187            I8 => dl.i8_align,
1188            I16 => dl.i16_align,
1189            I32 => dl.i32_align,
1190            I64 => dl.i64_align,
1191            I128 => dl.i128_align,
1192        }
1193    }
1194
1195    #[inline]
1197    pub fn signed_max(self) -> i128 {
1198        use Integer::*;
1199        match self {
1200            I8 => i8::MAX as i128,
1201            I16 => i16::MAX as i128,
1202            I32 => i32::MAX as i128,
1203            I64 => i64::MAX as i128,
1204            I128 => i128::MAX,
1205        }
1206    }
1207
1208    #[inline]
1210    pub fn fit_signed(x: i128) -> Integer {
1211        use Integer::*;
1212        match x {
1213            -0x0000_0000_0000_0080..=0x0000_0000_0000_007f => I8,
1214            -0x0000_0000_0000_8000..=0x0000_0000_0000_7fff => I16,
1215            -0x0000_0000_8000_0000..=0x0000_0000_7fff_ffff => I32,
1216            -0x8000_0000_0000_0000..=0x7fff_ffff_ffff_ffff => I64,
1217            _ => I128,
1218        }
1219    }
1220
1221    #[inline]
1223    pub fn fit_unsigned(x: u128) -> Integer {
1224        use Integer::*;
1225        match x {
1226            0..=0x0000_0000_0000_00ff => I8,
1227            0..=0x0000_0000_0000_ffff => I16,
1228            0..=0x0000_0000_ffff_ffff => I32,
1229            0..=0xffff_ffff_ffff_ffff => I64,
1230            _ => I128,
1231        }
1232    }
1233
1234    pub fn for_align<C: HasDataLayout>(cx: &C, wanted: Align) -> Option<Integer> {
1236        use Integer::*;
1237        let dl = cx.data_layout();
1238
1239        [I8, I16, I32, I64, I128].into_iter().find(|&candidate| {
1240            wanted == candidate.align(dl).abi && wanted.bytes() == candidate.size().bytes()
1241        })
1242    }
1243
1244    pub fn approximate_align<C: HasDataLayout>(cx: &C, wanted: Align) -> Integer {
1246        use Integer::*;
1247        let dl = cx.data_layout();
1248
1249        for candidate in [I64, I32, I16] {
1251            if wanted >= candidate.align(dl).abi && wanted.bytes() >= candidate.size().bytes() {
1252                return candidate;
1253            }
1254        }
1255        I8
1256    }
1257
1258    #[inline]
1261    pub fn from_size(size: Size) -> Result<Self, String> {
1262        match size.bits() {
1263            8 => Ok(Integer::I8),
1264            16 => Ok(Integer::I16),
1265            32 => Ok(Integer::I32),
1266            64 => Ok(Integer::I64),
1267            128 => Ok(Integer::I128),
1268            _ => Err(format!("rust does not support integers with {} bits", size.bits())),
1269        }
1270    }
1271}
1272
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
/// A floating-point width: 16, 32, 64, or 128 bits.
pub enum Float {
    F16,
    F32,
    F64,
    F128,
}
1282
1283impl Float {
1284    pub fn size(self) -> Size {
1285        use Float::*;
1286
1287        match self {
1288            F16 => Size::from_bits(16),
1289            F32 => Size::from_bits(32),
1290            F64 => Size::from_bits(64),
1291            F128 => Size::from_bits(128),
1292        }
1293    }
1294
1295    pub fn align<C: HasDataLayout>(self, cx: &C) -> AbiAlign {
1296        use Float::*;
1297        let dl = cx.data_layout();
1298
1299        match self {
1300            F16 => dl.f16_align,
1301            F32 => dl.f32_align,
1302            F64 => dl.f64_align,
1303            F128 => dl.f128_align,
1304        }
1305    }
1306}
1307
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
/// A fundamental machine-level value kind.
pub enum Primitive {
    /// An integer of the given width; the `bool` is `true` when signed.
    Int(Integer, bool),
    /// A floating-point value of the given width.
    Float(Float),
    /// A pointer in the given address space.
    Pointer(AddressSpace),
}
1323
1324impl Primitive {
1325    pub fn size<C: HasDataLayout>(self, cx: &C) -> Size {
1326        use Primitive::*;
1327        let dl = cx.data_layout();
1328
1329        match self {
1330            Int(i, _) => i.size(),
1331            Float(f) => f.size(),
1332            Pointer(a) => dl.pointer_size_in(a),
1333        }
1334    }
1335
1336    pub fn align<C: HasDataLayout>(self, cx: &C) -> AbiAlign {
1337        use Primitive::*;
1338        let dl = cx.data_layout();
1339
1340        match self {
1341            Int(i, _) => i.align(dl),
1342            Float(f) => f.align(dl),
1343            Pointer(a) => dl.pointer_align_in(a),
1344        }
1345    }
1346}
1347
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
/// An inclusive range of valid values that may wrap around the type's
/// maximum: `start > end` denotes `start..=MAX` together with `0..=end`
/// (see `contains` below).
pub struct WrappingRange {
    pub start: u128,
    pub end: u128,
}
1363
1364impl WrappingRange {
1365    pub fn full(size: Size) -> Self {
1366        Self { start: 0, end: size.unsigned_int_max() }
1367    }
1368
1369    #[inline(always)]
1371    pub fn contains(&self, v: u128) -> bool {
1372        if self.start <= self.end {
1373            self.start <= v && v <= self.end
1374        } else {
1375            self.start <= v || v <= self.end
1376        }
1377    }
1378
1379    #[inline(always)]
1381    fn with_start(mut self, start: u128) -> Self {
1382        self.start = start;
1383        self
1384    }
1385
1386    #[inline(always)]
1388    fn with_end(mut self, end: u128) -> Self {
1389        self.end = end;
1390        self
1391    }
1392
1393    #[inline]
1399    fn is_full_for(&self, size: Size) -> bool {
1400        let max_value = size.unsigned_int_max();
1401        debug_assert!(self.start <= max_value && self.end <= max_value);
1402        self.start == (self.end.wrapping_add(1) & max_value)
1403    }
1404
1405    #[inline]
1411    pub fn no_unsigned_wraparound(&self, size: Size) -> Result<bool, RangeFull> {
1412        if self.is_full_for(size) { Err(..) } else { Ok(self.start <= self.end) }
1413    }
1414
1415    #[inline]
1424    pub fn no_signed_wraparound(&self, size: Size) -> Result<bool, RangeFull> {
1425        if self.is_full_for(size) {
1426            Err(..)
1427        } else {
1428            let start: i128 = size.sign_extend(self.start);
1429            let end: i128 = size.sign_extend(self.end);
1430            Ok(start <= end)
1431        }
1432    }
1433}
1434
1435impl fmt::Debug for WrappingRange {
1436    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
1437        if self.start > self.end {
1438            write!(fmt, "(..={}) | ({}..)", self.end, self.start)?;
1439        } else {
1440            write!(fmt, "{}..={}", self.start, self.end)?;
1441        }
1442        Ok(())
1443    }
1444}
1445
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
/// A scalar component of a layout: the primitive used, plus validity
/// requirements on its value.
pub enum Scalar {
    /// A scalar that must be initialized and lie within `valid_range`.
    Initialized {
        value: Primitive,

        /// The range of valid values, possibly wrapping (see `WrappingRange`).
        valid_range: WrappingRange,
    },
    /// A scalar inside a union: it may be uninitialized, and every bit
    /// pattern counts as valid (see `valid_range`/`is_uninit_valid` below).
    Union {
        value: Primitive,
    },
}
1467
1468impl Scalar {
1469    #[inline]
1470    pub fn is_bool(&self) -> bool {
1471        use Integer::*;
1472        matches!(
1473            self,
1474            Scalar::Initialized {
1475                value: Primitive::Int(I8, false),
1476                valid_range: WrappingRange { start: 0, end: 1 }
1477            }
1478        )
1479    }
1480
1481    pub fn primitive(&self) -> Primitive {
1484        match *self {
1485            Scalar::Initialized { value, .. } | Scalar::Union { value } => value,
1486        }
1487    }
1488
1489    pub fn align(self, cx: &impl HasDataLayout) -> AbiAlign {
1490        self.primitive().align(cx)
1491    }
1492
1493    pub fn size(self, cx: &impl HasDataLayout) -> Size {
1494        self.primitive().size(cx)
1495    }
1496
1497    #[inline]
1498    pub fn to_union(&self) -> Self {
1499        Self::Union { value: self.primitive() }
1500    }
1501
1502    #[inline]
1503    pub fn valid_range(&self, cx: &impl HasDataLayout) -> WrappingRange {
1504        match *self {
1505            Scalar::Initialized { valid_range, .. } => valid_range,
1506            Scalar::Union { value } => WrappingRange::full(value.size(cx)),
1507        }
1508    }
1509
1510    #[inline]
1511    pub fn valid_range_mut(&mut self) -> &mut WrappingRange {
1514        match self {
1515            Scalar::Initialized { valid_range, .. } => valid_range,
1516            Scalar::Union { .. } => panic!("cannot change the valid range of a union"),
1517        }
1518    }
1519
1520    #[inline]
1523    pub fn is_always_valid<C: HasDataLayout>(&self, cx: &C) -> bool {
1524        match *self {
1525            Scalar::Initialized { valid_range, .. } => valid_range.is_full_for(self.size(cx)),
1526            Scalar::Union { .. } => true,
1527        }
1528    }
1529
1530    #[inline]
1532    pub fn is_uninit_valid(&self) -> bool {
1533        match *self {
1534            Scalar::Initialized { .. } => false,
1535            Scalar::Union { .. } => true,
1536        }
1537    }
1538
1539    #[inline]
1541    pub fn is_signed(&self) -> bool {
1542        match self.primitive() {
1543            Primitive::Int(_, signed) => signed,
1544            _ => false,
1545        }
1546    }
1547}
1548
#[derive(PartialEq, Eq, Hash, Clone, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
/// Describes how the fields of a layout are placed in memory.
pub enum FieldsShape<FieldIdx: Idx> {
    /// A primitive value with no fields of its own.
    Primitive,

    /// A union with the given number of fields, all at offset zero.
    Union(NonZeroUsize),

    /// `count` equally-spaced elements, `stride` bytes apart.
    Array { stride: Size, count: u64 },

    /// Arbitrary per-field placement.
    Arbitrary {
        /// Byte offset of each field, indexed in source order.
        offsets: IndexVec<FieldIdx, Size>,

        /// For each source-order field, its rank in memory order; the
        /// inverse mapping is computed in `index_by_increasing_offset`.
        memory_index: IndexVec<FieldIdx, u32>,
    },
}
1592
impl<FieldIdx: Idx> FieldsShape<FieldIdx> {
    /// Number of fields (0 for `Primitive`).
    #[inline]
    pub fn count(&self) -> usize {
        match *self {
            FieldsShape::Primitive => 0,
            FieldsShape::Union(count) => count.get(),
            FieldsShape::Array { count, .. } => count.try_into().unwrap(),
            FieldsShape::Arbitrary { ref offsets, .. } => offsets.len(),
        }
    }

    /// Byte offset of source-order field `i`.
    ///
    /// # Panics
    /// Panics on `Primitive` (no fields) and on out-of-range indices.
    #[inline]
    pub fn offset(&self, i: usize) -> Size {
        match *self {
            FieldsShape::Primitive => {
                unreachable!("FieldsShape::offset: `Primitive`s have no fields")
            }
            FieldsShape::Union(count) => {
                assert!(i < count.get(), "tried to access field {i} of union with {count} fields");
                Size::ZERO
            }
            FieldsShape::Array { stride, count } => {
                let i = u64::try_from(i).unwrap();
                assert!(i < count, "tried to access field {i} of array with {count} fields");
                stride * i
            }
            FieldsShape::Arbitrary { ref offsets, .. } => offsets[FieldIdx::new(i)],
        }
    }

    /// Memory-order rank of source-order field `i`; the identity mapping
    /// except for `Arbitrary`.
    #[inline]
    pub fn memory_index(&self, i: usize) -> usize {
        match *self {
            FieldsShape::Primitive => {
                unreachable!("FieldsShape::memory_index: `Primitive`s have no fields")
            }
            FieldsShape::Union(_) | FieldsShape::Array { .. } => i,
            FieldsShape::Arbitrary { ref memory_index, .. } => {
                memory_index[FieldIdx::new(i)].try_into().unwrap()
            }
        }
    }

    /// Iterates over source-order field indices in increasing memory-offset
    /// order.
    #[inline]
    pub fn index_by_increasing_offset(&self) -> impl ExactSizeIterator<Item = usize> {
        // Invert `memory_index`: a stack array for up to 64 fields, an
        // `IndexVec` otherwise.
        let mut inverse_small = [0u8; 64];
        let mut inverse_big = IndexVec::new();
        let use_small = self.count() <= inverse_small.len();

        if let FieldsShape::Arbitrary { ref memory_index, .. } = *self {
            if use_small {
                for (field_idx, &mem_idx) in memory_index.iter_enumerated() {
                    inverse_small[mem_idx as usize] = field_idx.index() as u8;
                }
            } else {
                inverse_big = memory_index.invert_bijective_mapping();
            }
        }

        // `Primitive` has no real fields but still yields one pseudo-field.
        // NOTE(review): presumably so callers can treat scalars uniformly —
        // confirm with the call sites.
        let pseudofield_count = if let FieldsShape::Primitive = self { 1 } else { self.count() };

        (0..pseudofield_count).map(move |i| match *self {
            FieldsShape::Primitive | FieldsShape::Union(_) | FieldsShape::Array { .. } => i,
            FieldsShape::Arbitrary { .. } => {
                if use_small {
                    inverse_small[i] as usize
                } else {
                    inverse_big[i as u32].index()
                }
            }
        })
    }
}
1671
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
/// A numbered address space, as used for pointers in target data layouts.
pub struct AddressSpace(pub u32);
1678
impl AddressSpace {
    /// Address space `0`, the default.
    pub const ZERO: Self = AddressSpace(0);
}
1683
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
/// How a value of some layout is presented to the codegen backend.
pub enum BackendRepr {
    /// A single scalar value.
    Scalar(Scalar),
    /// A pair of scalar values.
    ScalarPair(Scalar, Scalar),
    /// A SIMD vector of `count` copies of `element`.
    SimdVector {
        element: Scalar,
        count: u64,
    },
    /// Plain bytes in memory — anything not handed over as scalars.
    Memory {
        /// `false` when the type's size is not statically known.
        sized: bool,
    },
}
1709
1710impl BackendRepr {
1711    #[inline]
1713    pub fn is_unsized(&self) -> bool {
1714        match *self {
1715            BackendRepr::Scalar(_)
1716            | BackendRepr::ScalarPair(..)
1717            | BackendRepr::SimdVector { .. } => false,
1718            BackendRepr::Memory { sized } => !sized,
1719        }
1720    }
1721
1722    #[inline]
1723    pub fn is_sized(&self) -> bool {
1724        !self.is_unsized()
1725    }
1726
1727    #[inline]
1730    pub fn is_signed(&self) -> bool {
1731        match self {
1732            BackendRepr::Scalar(scal) => scal.is_signed(),
1733            _ => panic!("`is_signed` on non-scalar ABI {self:?}"),
1734        }
1735    }
1736
1737    #[inline]
1739    pub fn is_scalar(&self) -> bool {
1740        matches!(*self, BackendRepr::Scalar(_))
1741    }
1742
1743    #[inline]
1745    pub fn is_bool(&self) -> bool {
1746        matches!(*self, BackendRepr::Scalar(s) if s.is_bool())
1747    }
1748
1749    pub fn scalar_align<C: HasDataLayout>(&self, cx: &C) -> Option<Align> {
1753        match *self {
1754            BackendRepr::Scalar(s) => Some(s.align(cx).abi),
1755            BackendRepr::ScalarPair(s1, s2) => Some(s1.align(cx).max(s2.align(cx)).abi),
1756            BackendRepr::SimdVector { .. } | BackendRepr::Memory { .. } => None,
1758        }
1759    }
1760
1761    pub fn scalar_size<C: HasDataLayout>(&self, cx: &C) -> Option<Size> {
1765        match *self {
1766            BackendRepr::Scalar(s) => Some(s.size(cx)),
1768            BackendRepr::ScalarPair(s1, s2) => {
1770                let field2_offset = s1.size(cx).align_to(s2.align(cx).abi);
1771                let size = (field2_offset + s2.size(cx)).align_to(
1772                    self.scalar_align(cx)
1773                        .unwrap(),
1775                );
1776                Some(size)
1777            }
1778            BackendRepr::SimdVector { .. } | BackendRepr::Memory { .. } => None,
1780        }
1781    }
1782
1783    pub fn to_union(&self) -> Self {
1785        match *self {
1786            BackendRepr::Scalar(s) => BackendRepr::Scalar(s.to_union()),
1787            BackendRepr::ScalarPair(s1, s2) => {
1788                BackendRepr::ScalarPair(s1.to_union(), s2.to_union())
1789            }
1790            BackendRepr::SimdVector { element, count } => {
1791                BackendRepr::SimdVector { element: element.to_union(), count }
1792            }
1793            BackendRepr::Memory { .. } => BackendRepr::Memory { sized: true },
1794        }
1795    }
1796
1797    pub fn eq_up_to_validity(&self, other: &Self) -> bool {
1798        match (self, other) {
1799            (BackendRepr::Scalar(l), BackendRepr::Scalar(r)) => l.primitive() == r.primitive(),
1802            (
1803                BackendRepr::SimdVector { element: element_l, count: count_l },
1804                BackendRepr::SimdVector { element: element_r, count: count_r },
1805            ) => element_l.primitive() == element_r.primitive() && count_l == count_r,
1806            (BackendRepr::ScalarPair(l1, l2), BackendRepr::ScalarPair(r1, r2)) => {
1807                l1.primitive() == r1.primitive() && l2.primitive() == r2.primitive()
1808            }
1809            _ => self == other,
1811        }
1812    }
1813}
1814
#[derive(PartialEq, Eq, Hash, Clone, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
/// How the variants of a type are laid out.
pub enum Variants<FieldIdx: Idx, VariantIdx: Idx> {
    /// No variants at all.
    Empty,

    /// Exactly one variant; no tag is needed.
    Single {
        /// Index of that variant.
        index: VariantIdx,
    },

    /// Multiple variants discriminated by a tag.
    Multiple {
        /// Layout of the tag value itself.
        tag: Scalar,
        /// How tag values map to variant indices.
        tag_encoding: TagEncoding<VariantIdx>,
        /// Which field of this layout holds the tag.
        tag_field: FieldIdx,
        /// Layout of each variant.
        variants: IndexVec<VariantIdx, LayoutData<FieldIdx, VariantIdx>>,
    },
}
1841
#[derive(PartialEq, Eq, Hash, Clone, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
/// How a tag value encodes a variant index.
pub enum TagEncoding<VariantIdx: Idx> {
    /// The tag stores the discriminant directly.
    Direct,

    /// Niche encoding: the variants in `niche_variants` are encoded by tag
    /// values beginning at `niche_start`, and other tag values denote the
    /// `untagged_variant`. (NOTE(review): summary inferred from field names —
    /// confirm against the layout code that consumes this.)
    Niche {
        untagged_variant: VariantIdx,
        niche_variants: RangeInclusive<VariantIdx>,
        niche_start: u128,
    },
}
1883
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
/// An invalid-value gap within a layout, usable for enum optimizations.
pub struct Niche {
    /// Byte offset of the scalar providing the niche within its layout.
    pub offset: Size,
    /// The primitive of that scalar.
    pub value: Primitive,
    /// Its valid range; values outside it form the niche.
    pub valid_range: WrappingRange,
}
1891
impl Niche {
    /// Builds a `Niche` from an `Initialized` scalar at `offset`; returns
    /// `None` for union scalars or when no invalid values exist.
    pub fn from_scalar<C: HasDataLayout>(cx: &C, offset: Size, scalar: Scalar) -> Option<Self> {
        let Scalar::Initialized { value, valid_range } = scalar else { return None };
        let niche = Niche { offset, value, valid_range };
        if niche.available(cx) > 0 { Some(niche) } else { None }
    }

    /// Number of invalid values available in this niche.
    pub fn available<C: HasDataLayout>(&self, cx: &C) -> u128 {
        let Self { value, valid_range: v, .. } = *self;
        let size = value.size(cx);
        assert!(size.bits() <= 128);
        let max_value = size.unsigned_int_max();

        // The invalid values sit between `end` and `start`, wrapping.
        let niche = v.end.wrapping_add(1)..v.start;
        niche.end.wrapping_sub(niche.start) & max_value
    }

    /// Tries to reserve `count` invalid values for use as tag values.
    /// On success returns the first reserved value and the scalar whose
    /// valid range has been widened to include the reserved values.
    pub fn reserve<C: HasDataLayout>(&self, cx: &C, count: u128) -> Option<(u128, Scalar)> {
        assert!(count > 0);

        let Self { value, valid_range: v, .. } = *self;
        let size = value.size(cx);
        assert!(size.bits() <= 128);
        let max_value = size.unsigned_int_max();

        let niche = v.end.wrapping_add(1)..v.start;
        let available = niche.end.wrapping_sub(niche.start) & max_value;
        if count > available {
            return None;
        }

        // Claim the `count` values just below `start` (wrapping).
        let move_start = |v: WrappingRange| {
            let start = v.start.wrapping_sub(count) & max_value;
            Some((start, Scalar::Initialized { value, valid_range: v.with_start(start) }))
        };
        // Claim the `count` values just above `end` (wrapping).
        let move_end = |v: WrappingRange| {
            let start = v.end.wrapping_add(1) & max_value;
            let end = v.end.wrapping_add(count) & max_value;
            Some((start, Scalar::Initialized { value, valid_range: v.with_end(end) }))
        };
        let distance_end_zero = max_value - v.end;
        if v.start > v.end {
            // Range already wraps: grow the end further into the gap.
            move_end(v)
        } else if v.start <= distance_end_zero {
            if count <= v.start {
                move_start(v)
            } else {
                // Not enough room below `start`; grow the end instead.
                move_end(v)
            }
        } else {
            let end = v.end.wrapping_add(count) & max_value;
            // Growing the end here would wrap past zero back into `1..=end`;
            // prefer moving the start in that case.
            let overshot_zero = (1..=v.end).contains(&end);
            if overshot_zero {
                move_start(v)
            } else {
                move_end(v)
            }
        }
    }
}
1969
#[derive(PartialEq, Eq, Hash, Clone)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
/// The computed layout of a type: size, alignment, field placement, and
/// how the value is handed to the codegen backend.
pub struct LayoutData<FieldIdx: Idx, VariantIdx: Idx> {
    /// Where this layout's fields are located.
    pub fields: FieldsShape<FieldIdx>,

    /// Layout of the variants, for multi-variant types.
    pub variants: Variants<FieldIdx, VariantIdx>,

    /// How a value with this layout is represented to the backend.
    pub backend_repr: BackendRepr,

    /// The best available niche for enum-layout optimizations, if any.
    pub largest_niche: Option<Niche>,
    /// Whether this type has no valid values.
    pub uninhabited: bool,

    /// ABI-required alignment.
    pub align: AbiAlign,
    /// Total size of the layout.
    pub size: Size,

    /// Largest alignment explicitly requested via `repr` attributes, if any
    /// — NOTE(review): inferred from the name; confirm in the layout
    /// calculator.
    pub max_repr_align: Option<Align>,

    /// Alignment before `repr` adjustments — NOTE(review): inferred from the
    /// name; confirm in the layout calculator.
    pub unadjusted_abi_align: Align,

    /// Seed for randomized field ordering — NOTE(review): inferred from the
    /// name; confirm how it is derived and consumed.
    pub randomization_seed: Hash64,
}
2029
2030impl<FieldIdx: Idx, VariantIdx: Idx> LayoutData<FieldIdx, VariantIdx> {
2031    pub fn is_aggregate(&self) -> bool {
2033        match self.backend_repr {
2034            BackendRepr::Scalar(_) | BackendRepr::SimdVector { .. } => false,
2035            BackendRepr::ScalarPair(..) | BackendRepr::Memory { .. } => true,
2036        }
2037    }
2038
2039    pub fn is_uninhabited(&self) -> bool {
2041        self.uninhabited
2042    }
2043}
2044
impl<FieldIdx: Idx, VariantIdx: Idx> fmt::Debug for LayoutData<FieldIdx, VariantIdx>
where
    FieldsShape<FieldIdx>: fmt::Debug,
    Variants<FieldIdx, VariantIdx>: fmt::Debug,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Destructure exhaustively so that adding a field to `LayoutData`
        // forces this impl to be updated.
        let LayoutData {
            size,
            align,
            backend_repr,
            fields,
            largest_niche,
            uninhabited,
            variants,
            max_repr_align,
            unadjusted_abi_align,
            randomization_seed,
        } = self;
        f.debug_struct("Layout")
            .field("size", size)
            .field("align", align)
            .field("backend_repr", backend_repr)
            .field("fields", fields)
            .field("largest_niche", largest_niche)
            .field("uninhabited", uninhabited)
            .field("variants", variants)
            .field("max_repr_align", max_repr_align)
            .field("unadjusted_abi_align", unadjusted_abi_align)
            .field("randomization_seed", randomization_seed)
            .finish()
    }
}
2080
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
/// Kinds of pointers with known guarantees.
pub enum PointerKind {
    /// A shared reference. NOTE(review): `frozen` presumably means "no
    /// interior mutability" — confirm at the construction site.
    SharedRef { frozen: bool },
    /// A mutable reference. NOTE(review): `unpin` presumably tracks whether
    /// the pointee is `Unpin` — confirm at the construction site.
    MutableRef { unpin: bool },
    /// An owning `Box`. NOTE(review): `global` presumably means "uses the
    /// global allocator" — confirm at the construction site.
    Box { unpin: bool, global: bool },
}
2091
#[derive(Copy, Clone, Debug)]
/// Metadata about what a pointer points to.
pub struct PointeeInfo {
    /// The pointer's kind when guarantees are known; `None` otherwise.
    pub safe: Option<PointerKind>,
    /// Size of the pointee.
    pub size: Size,
    /// Alignment of the pointee.
    pub align: Align,
}
2110
2111impl<FieldIdx: Idx, VariantIdx: Idx> LayoutData<FieldIdx, VariantIdx> {
2112    #[inline]
2114    pub fn is_unsized(&self) -> bool {
2115        self.backend_repr.is_unsized()
2116    }
2117
2118    #[inline]
2119    pub fn is_sized(&self) -> bool {
2120        self.backend_repr.is_sized()
2121    }
2122
2123    pub fn is_1zst(&self) -> bool {
2125        self.is_sized() && self.size.bytes() == 0 && self.align.abi.bytes() == 1
2126    }
2127
2128    pub fn is_zst(&self) -> bool {
2133        match self.backend_repr {
2134            BackendRepr::Scalar(_)
2135            | BackendRepr::ScalarPair(..)
2136            | BackendRepr::SimdVector { .. } => false,
2137            BackendRepr::Memory { sized } => sized && self.size.bytes() == 0,
2138        }
2139    }
2140
2141    pub fn eq_abi(&self, other: &Self) -> bool {
2147        self.size == other.size
2151            && self.is_sized() == other.is_sized()
2152            && self.backend_repr.eq_up_to_validity(&other.backend_repr)
2153            && self.backend_repr.is_bool() == other.backend_repr.is_bool()
2154            && self.align.abi == other.align.abi
2155            && self.max_repr_align == other.max_repr_align
2156            && self.unadjusted_abi_align == other.unadjusted_abi_align
2157    }
2158}
2159
#[derive(Copy, Clone, Debug)]
/// How a struct-like layout should be computed.
pub enum StructKind {
    /// Every field is sized.
    AlwaysSized,
    /// The last field may be unsized.
    MaybeUnsized,
    /// A prefix of the given size and alignment precedes the fields —
    /// NOTE(review): inferred from the name; confirm in the layout
    /// calculator.
    Prefixed(Size, Align),
}
2169
#[derive(Clone, Debug)]
/// Error produced when parsing an ABI name from a string.
pub enum AbiFromStrErr {
    /// The string matches no known ABI name.
    Unknown,
    /// NOTE(review): presumably the ABI string needed an explicit
    /// unwind/no-unwind variant — confirm against `ExternAbi`'s `FromStr`.
    NoExplicitUnwind,
}