rustc_codegen_llvm/consts.rs

use std::ops::Range;

use rustc_abi::{
    Align, AlignFromBytesError, HasDataLayout, Primitive, Scalar, Size, WrappingRange,
};
use rustc_codegen_ssa::common;
use rustc_codegen_ssa::traits::*;
use rustc_hir::LangItem;
use rustc_hir::def::DefKind;
use rustc_hir::def_id::DefId;
use rustc_middle::middle::codegen_fn_attrs::{CodegenFnAttrFlags, CodegenFnAttrs};
use rustc_middle::mir::interpret::{
    Allocation, ConstAllocation, ErrorHandled, InitChunk, Pointer, Scalar as InterpScalar,
    read_target_uint,
};
use rustc_middle::mir::mono::{Linkage, MonoItem};
use rustc_middle::ty::layout::{HasTypingEnv, LayoutOf};
use rustc_middle::ty::{self, Instance};
use rustc_middle::{bug, span_bug};
use tracing::{debug, instrument, trace};

use crate::common::{AsCCharPtr, CodegenCx};
use crate::errors::{
    InvalidMinimumAlignmentNotPowerOfTwo, InvalidMinimumAlignmentTooLarge, SymbolAlreadyDefined,
};
use crate::llvm::{self, True};
use crate::type_::Type;
use crate::type_of::LayoutLlvmExt;
use crate::value::Value;
use crate::{base, debuginfo};

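/// Translates a constant allocation into an LLVM constant value.
///
/// As a rough illustration (a sketch; names and byte values are assumed, not
/// real compiler output), an allocation holding a little-endian `1u32` followed
/// by a pointer to another allocation lowers to something like
///
///     <{ [4 x i8] c"\01\00\00\00", ptr @other }>
///
/// i.e. runs of plain bytes interleaved with a pointer constant for each
/// provenance entry.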
pub(crate) fn const_alloc_to_llvm<'ll>(
    cx: &CodegenCx<'ll, '_>,
    alloc: ConstAllocation<'_>,
    is_static: bool,
) -> &'ll Value {
    let alloc = alloc.inner();
    // We expect that callers of const_alloc_to_llvm will instead directly codegen a pointer or
    // integer for any &ZST where the ZST is a constant (i.e. not a static). We should never be
    // producing empty LLVM allocations, as they just add noise to binaries and force less
    // optimal codegen.
    //
    // Statics have a guaranteed meaningful address, so it's less clear that we want to do
    // something like this; it's also harder.
    if !is_static {
        assert!(alloc.len() != 0);
    }
    let mut llvals = Vec::with_capacity(alloc.provenance().ptrs().len() + 1);
    let dl = cx.data_layout();
    let pointer_size = dl.pointer_size.bytes() as usize;

    // Note: this function may call `inspect_with_uninit_and_ptr_outside_interpreter`, so `range`
    // must be within the bounds of `alloc` and not contain or overlap a pointer provenance.
    fn append_chunks_of_init_and_uninit_bytes<'ll, 'a, 'b>(
        llvals: &mut Vec<&'ll Value>,
        cx: &'a CodegenCx<'ll, 'b>,
        alloc: &'a Allocation,
        range: Range<usize>,
    ) {
        let chunks = alloc.init_mask().range_as_init_chunks(range.clone().into());

        let chunk_to_llval = move |chunk| match chunk {
            InitChunk::Init(range) => {
                let range = (range.start.bytes() as usize)..(range.end.bytes() as usize);
                let bytes = alloc.inspect_with_uninit_and_ptr_outside_interpreter(range);
                cx.const_bytes(bytes)
            }
            InitChunk::Uninit(range) => {
                let len = range.end.bytes() - range.start.bytes();
                cx.const_undef(cx.type_array(cx.type_i8(), len))
            }
        };

        // Generating partially-uninit consts is limited to small numbers of chunks,
        // to avoid the cost of generating large complex const expressions.
        // For example, `[(u32, u8); 1024 * 1024]` contains uninit padding in each element, and
        // would result in `{ [5 x i8] zeroinitializer, [3 x i8] undef, ...repeat 1M times... }`.
        let max = cx.sess().opts.unstable_opts.uninit_const_chunk_threshold;
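        // The count below is capped at `max + 1` chunks, so a huge chunk iterator is never
        // traversed in full just to learn that it exceeds the threshold.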
        let allow_uninit_chunks = chunks.clone().take(max.saturating_add(1)).count() <= max;

        if allow_uninit_chunks {
            llvals.extend(chunks.map(chunk_to_llval));
        } else {
            // If this allocation contains any uninit bytes, codegen as if it was initialized
            // (using some arbitrary value for uninit bytes).
            let bytes = alloc.inspect_with_uninit_and_ptr_outside_interpreter(range);
            llvals.push(cx.const_bytes(bytes));
        }
    }

    let mut next_offset = 0;
    for &(offset, prov) in alloc.provenance().ptrs().iter() {
        let offset = offset.bytes();
        assert_eq!(offset as usize as u64, offset);
        let offset = offset as usize;
        if offset > next_offset {
            // This `inspect` is okay since we have checked that there is no provenance, it
            // is within the bounds of the allocation, and it doesn't affect interpreter execution
            // (we inspect the result after interpreter execution).
            append_chunks_of_init_and_uninit_bytes(&mut llvals, cx, alloc, next_offset..offset);
        }
        let ptr_offset = read_target_uint(
            dl.endian,
            // This `inspect` is okay since it is within the bounds of the allocation, it doesn't
            // affect interpreter execution (we inspect the result after interpreter execution),
            // and we properly interpret the provenance as a relocation pointer offset.
            alloc.inspect_with_uninit_and_ptr_outside_interpreter(offset..(offset + pointer_size)),
        )
        .expect("const_alloc_to_llvm: could not read relocation pointer")
            as u64;

        let address_space = cx.tcx.global_alloc(prov.alloc_id()).address_space(cx);

        llvals.push(cx.scalar_to_backend(
            InterpScalar::from_pointer(Pointer::new(prov, Size::from_bytes(ptr_offset)), &cx.tcx),
            Scalar::Initialized {
                value: Primitive::Pointer(address_space),
                valid_range: WrappingRange::full(dl.pointer_size),
            },
            cx.type_ptr_ext(address_space),
        ));
        next_offset = offset + pointer_size;
    }
    if alloc.len() >= next_offset {
        let range = next_offset..alloc.len();
        // This `inspect` is okay since we have checked that it is after all provenance, it is
        // within the bounds of the allocation, and it doesn't affect interpreter execution (we
        // inspect the result after interpreter execution).
        append_chunks_of_init_and_uninit_bytes(&mut llvals, cx, alloc, range);
    }

    // Avoid wrapping in a struct if there is only a single value. This ensures
    // that LLVM is able to perform the string merging optimization if the constant
    // is a valid C string. LLVM only considers bare arrays for this optimization,
    // not arrays wrapped in a struct. LLVM handles this at:
    // https://github.com/rust-lang/llvm-project/blob/acaea3d2bb8f351b740db7ebce7d7a40b9e21488/llvm/lib/Target/TargetLoweringObjectFile.cpp#L249-L280
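    // For example (an illustrative sketch, not actual compiler output), `b"foo\0"` is emitted
    // as the bare `[4 x i8] c"foo\00"` rather than `{ [4 x i8] }`, keeping it eligible for
    // that merging.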
    if let &[data] = &*llvals { data } else { cx.const_struct(&llvals, true) }
}

fn codegen_static_initializer<'ll, 'tcx>(
    cx: &CodegenCx<'ll, 'tcx>,
    def_id: DefId,
) -> Result<(&'ll Value, ConstAllocation<'tcx>), ErrorHandled> {
    let alloc = cx.tcx.eval_static_initializer(def_id)?;
    Ok((const_alloc_to_llvm(cx, alloc, /*is_static*/ true), alloc))
}

fn set_global_alignment<'ll>(cx: &CodegenCx<'ll, '_>, gv: &'ll Value, mut align: Align) {
    // The target may require greater alignment for globals than the type does.
    // Note: GCC and Clang also allow `__attribute__((aligned))` on variables,
    // which can force it to be smaller. Rust doesn't support this yet.
    if let Some(min) = cx.sess().target.min_global_align {
        match Align::from_bits(min) {
            Ok(min) => align = align.max(min),
            Err(err) => match err {
                AlignFromBytesError::NotPowerOfTwo(align) => {
                    cx.sess().dcx().emit_err(InvalidMinimumAlignmentNotPowerOfTwo { align });
                }
                AlignFromBytesError::TooLarge(align) => {
                    cx.sess().dcx().emit_err(InvalidMinimumAlignmentTooLarge { align });
                }
            },
        }
    }
    unsafe {
        llvm::LLVMSetAlignment(gv, align.bytes() as u32);
    }
}

fn check_and_apply_linkage<'ll, 'tcx>(
    cx: &CodegenCx<'ll, 'tcx>,
    attrs: &CodegenFnAttrs,
    llty: &'ll Type,
    sym: &str,
    def_id: DefId,
) -> &'ll Value {
    if let Some(linkage) = attrs.import_linkage {
        debug!("get_static: sym={} linkage={:?}", sym, linkage);

        // Declare a symbol `foo`. If `foo` is an extern_weak symbol, we declare
        // an extern_weak function, otherwise a global with the desired linkage.
        let g1 = if matches!(attrs.import_linkage, Some(Linkage::ExternalWeak)) {
            // An `extern_weak` function is represented as an `Option<unsafe extern ...>`;
            // we extract the function signature and declare it as an extern_weak function
            // instead of an extern_weak i8.
            let instance = Instance::mono(cx.tcx, def_id);
            if let ty::Adt(struct_def, args) = instance.ty(cx.tcx, cx.typing_env()).kind()
                && cx.tcx.is_lang_item(struct_def.did(), LangItem::Option)
                && let ty::FnPtr(sig, header) = args.type_at(0).kind()
            {
                let fn_sig = sig.with(*header);

                let fn_abi = cx.fn_abi_of_fn_ptr(fn_sig, ty::List::empty());
                cx.declare_fn(sym, fn_abi, None)
            } else {
                cx.declare_global(sym, cx.type_i8())
            }
        } else {
            cx.declare_global(sym, cx.type_i8())
        };
        llvm::set_linkage(g1, base::linkage_to_llvm(linkage));

        // Declare an internal global `extern_with_linkage_foo` which
        // is initialized with the address of `foo`. If `foo` is
        // discarded during linking (for example, if `foo` has weak
        // linkage and there are no definitions), then
        // `extern_with_linkage_foo` will instead be initialized to
        // zero.
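        // Sketch of the resulting IR (symbol names assumed, not actual output):
        //   @foo = extern_weak global i8
        //   @_rust_extern_with_linkage_foo = internal global ptr @foo
        // so a load through the internal global yields null when the linker
        // discarded `foo`.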
        let mut real_name = "_rust_extern_with_linkage_".to_string();
        real_name.push_str(sym);
        let g2 = cx.define_global(&real_name, llty).unwrap_or_else(|| {
            cx.sess().dcx().emit_fatal(SymbolAlreadyDefined {
                span: cx.tcx.def_span(def_id),
                symbol_name: sym,
            })
        });
        llvm::set_linkage(g2, llvm::Linkage::InternalLinkage);
        llvm::set_initializer(g2, g1);
        g2
    } else if cx.tcx.sess.target.arch == "x86"
        && common::is_mingw_gnu_toolchain(&cx.tcx.sess.target)
        && let Some(dllimport) = crate::common::get_dllimport(cx.tcx, def_id, sym)
    {
        cx.declare_global(&common::i686_decorated_name(dllimport, true, true, false), llty)
    } else {
        // Generate an external declaration.
        // FIXME(nagisa): investigate whether it can be changed into define_global
        cx.declare_global(sym, llty)
    }
}

impl<'ll> CodegenCx<'ll, '_> {
    pub(crate) fn const_bitcast(&self, val: &'ll Value, ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMConstBitCast(val, ty) }
    }

    pub(crate) fn const_pointercast(&self, val: &'ll Value, ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMConstPointerCast(val, ty) }
    }

    /// Create a global variable.
    ///
    /// The returned global variable is a pointer in the default address space for globals.
    /// Fails if a symbol with the given name already exists.
    pub(crate) fn static_addr_of_mut(
        &self,
        cv: &'ll Value,
        align: Align,
        kind: Option<&str>,
    ) -> &'ll Value {
        let gv = match kind {
            Some(kind) if !self.tcx.sess.fewer_names() => {
                let name = self.generate_local_symbol_name(kind);
                let gv = self.define_global(&name, self.val_ty(cv)).unwrap_or_else(|| {
                    bug!("symbol `{}` is already defined", name);
                });
                llvm::set_linkage(gv, llvm::Linkage::PrivateLinkage);
                gv
            }
            _ => self.define_private_global(self.val_ty(cv)),
        };
        llvm::set_initializer(gv, cv);
        set_global_alignment(self, gv, align);
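        // `unnamed_addr` tells LLVM that only the content of this global matters, not its
        // address, which allows it to be merged with other equivalent constants.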
        llvm::SetUnnamedAddress(gv, llvm::UnnamedAddr::Global);
        gv
    }

    /// Create a global constant.
    ///
    /// The returned global variable is a pointer in the default address space for globals.
    pub(crate) fn static_addr_of_impl(
        &self,
        cv: &'ll Value,
        align: Align,
        kind: Option<&str>,
    ) -> &'ll Value {
        if let Some(&gv) = self.const_globals.borrow().get(&cv) {
            unsafe {
                // Upgrade the alignment in cases where the same constant is used with different
                // alignment requirements.
                let llalign = align.bytes() as u32;
                if llalign > llvm::LLVMGetAlignment(gv) {
                    llvm::LLVMSetAlignment(gv, llalign);
                }
            }
            return gv;
        }
        let gv = self.static_addr_of_mut(cv, align, kind);
        unsafe {
            llvm::LLVMSetGlobalConstant(gv, True);
        }
        self.const_globals.borrow_mut().insert(cv, gv);
        gv
    }

    #[instrument(level = "debug", skip(self))]
    pub(crate) fn get_static(&self, def_id: DefId) -> &'ll Value {
        let instance = Instance::mono(self.tcx, def_id);
        trace!(?instance);

        let DefKind::Static { nested, .. } = self.tcx.def_kind(def_id) else { bug!() };
        // Nested statics do not have a type, so pick a dummy type and let `codegen_static` figure
        // out the LLVM type from the actual evaluated initializer.
        let llty = if nested {
            self.type_i8()
        } else {
            let ty = instance.ty(self.tcx, self.typing_env());
            trace!(?ty);
            self.layout_of(ty).llvm_type(self)
        };
        self.get_static_inner(def_id, llty)
    }

    #[instrument(level = "debug", skip(self, llty))]
    fn get_static_inner(&self, def_id: DefId, llty: &'ll Type) -> &'ll Value {
        let instance = Instance::mono(self.tcx, def_id);
        if let Some(&g) = self.instances.borrow().get(&instance) {
            trace!("used cached value");
            return g;
        }

        let defined_in_current_codegen_unit =
            self.codegen_unit.items().contains_key(&MonoItem::Static(def_id));
        assert!(
            !defined_in_current_codegen_unit,
            "consts::get_static() should always hit the cache for \
                 statics defined in the same CGU, but did not for `{def_id:?}`"
        );

        let sym = self.tcx.symbol_name(instance).name;
        let fn_attrs = self.tcx.codegen_fn_attrs(def_id);

        debug!(?sym, ?fn_attrs);

        let g = if def_id.is_local() && !self.tcx.is_foreign_item(def_id) {
            if let Some(g) = self.get_declared_value(sym) {
                if self.val_ty(g) != self.type_ptr() {
                    span_bug!(self.tcx.def_span(def_id), "Conflicting types for static");
                }
            }

            let g = self.declare_global(sym, llty);

            if !self.tcx.is_reachable_non_generic(def_id) {
                llvm::set_visibility(g, llvm::Visibility::Hidden);
            }

            g
        } else {
            check_and_apply_linkage(self, fn_attrs, llty, sym, def_id)
        };

        // Thread-local statics in some other crate need to *always* be linked
        // against in a thread-local fashion, so we need to be sure to apply the
        // thread-local attribute locally if it was present remotely. If we
        // don't do this then linker errors can be generated where the linker
        // complains that one object file has a thread-local version of the
        // symbol and another one doesn't.
        if fn_attrs.flags.contains(CodegenFnAttrFlags::THREAD_LOCAL) {
            llvm::set_thread_local_mode(g, self.tls_model);
        }

        let dso_local = self.assume_dso_local(g, true);

        if !def_id.is_local() {
            let needs_dll_storage_attr = self.use_dll_storage_attrs
                && !self.tcx.is_foreign_item(def_id)
                // Local definitions can never be imported, so we must not apply
                // the DLLImport annotation.
                && !dso_local
                // Linker plugin ThinLTO doesn't create the self-dllimport Rust uses for rlibs
                // as the code generation happens out of process. Instead we assume static linkage
                // and disallow dynamic linking when linker plugin based LTO is enabled.
                // Regular in-process ThinLTO doesn't need this workaround.
                && !self.tcx.sess.opts.cg.linker_plugin_lto.enabled();

            // If this assertion triggers, there's something wrong with command-line
            // argument validation.
            assert!(
                !(self.tcx.sess.opts.cg.linker_plugin_lto.enabled()
                    && self.tcx.sess.target.is_like_windows
                    && self.tcx.sess.opts.cg.prefer_dynamic)
            );

            if needs_dll_storage_attr {
                // This item is external but not foreign, i.e., it originates from an external Rust
                // crate. Since we don't know whether this crate will be linked dynamically or
                // statically in the final application, we always mark such symbols as 'dllimport'.
                // If final linkage happens to be static, we rely on compiler-emitted __imp_ stubs
                // to make things work.
                //
                // However, in some scenarios we defer emission of statics to downstream
                // crates, so there are cases where a static with an upstream DefId
                // is actually present in the current crate. We can find out via the
                // is_codegened_item query.
                if !self.tcx.is_codegened_item(def_id) {
                    llvm::set_dllimport_storage_class(g);
                }
            }
        }

        if self.use_dll_storage_attrs
            && let Some(library) = self.tcx.native_library(def_id)
            && library.kind.is_dllimport()
        {
            // For foreign (native) libs we know the exact storage type to use.
            llvm::set_dllimport_storage_class(g);
        }

        self.instances.borrow_mut().insert(instance, g);
        g
    }

    fn codegen_static_item(&self, def_id: DefId) {
        unsafe {
            assert!(
                llvm::LLVMGetInitializer(
                    self.instances.borrow().get(&Instance::mono(self.tcx, def_id)).unwrap()
                )
                .is_none()
            );
            let attrs = self.tcx.codegen_fn_attrs(def_id);

            let Ok((v, alloc)) = codegen_static_initializer(self, def_id) else {
                // Error has already been reported.
                return;
            };
            let alloc = alloc.inner();

            let val_llty = self.val_ty(v);

            let g = self.get_static_inner(def_id, val_llty);
            let llty = llvm::LLVMGlobalGetValueType(g);

            let g = if val_llty == llty {
                g
            } else {
                // codegen_static_initializer creates the global value just from the
                // `Allocation` data by generating one big struct value that is just
                // all the bytes and pointers after each other. This will almost never
                // match the type that the static was declared with. Unfortunately
                // we can't just LLVMConstBitCast our way out of it because that has very
                // specific rules on what can be cast. So instead of adding a new way to
                // generate static initializers that match the static's type, we picked
                // the easier option and retroactively change the type of the static item itself.
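                // Illustrative sketch (types assumed, not actual output): a static declared as
                // `{ i32, ptr }` may get an initializer built as the packed
                // `<{ [4 x i8], ptr }>`, so the global is re-created with the initializer's
                // type and the old one is replaced later.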
                let name = llvm::get_value_name(g).to_vec();
                llvm::set_value_name(g, b"");

                let linkage = llvm::get_linkage(g);
                let visibility = llvm::get_visibility(g);

                let new_g = llvm::LLVMRustGetOrInsertGlobal(
                    self.llmod,
                    name.as_c_char_ptr(),
                    name.len(),
                    val_llty,
                );

                llvm::set_linkage(new_g, linkage);
                llvm::set_visibility(new_g, visibility);

                // The old global has had its name removed but is returned by
                // get_static since it is in the instance cache. Provide an
                // alternative lookup that points to the new global so that
                // global_asm! can compute the correct mangled symbol name
                // for the global.
                self.renamed_statics.borrow_mut().insert(def_id, new_g);

                // To avoid breaking any invariants, we leave around the old
                // global for the moment; we'll replace all references to it
                // with the new global later. (See base::codegen_backend.)
                self.statics_to_rauw.borrow_mut().push((g, new_g));
                new_g
            };
            set_global_alignment(self, g, alloc.align);
            llvm::set_initializer(g, v);

            self.assume_dso_local(g, true);

            // Forward the allocation's mutability (picked by the const interner) to LLVM.
            if alloc.mutability.is_not() {
                llvm::LLVMSetGlobalConstant(g, llvm::True);
            }

            debuginfo::build_global_var_di_node(self, def_id, g);

            if attrs.flags.contains(CodegenFnAttrFlags::THREAD_LOCAL) {
                llvm::set_thread_local_mode(g, self.tls_model);
            }

            // Wasm statics with custom link sections get special treatment as they
            // go into custom sections of the wasm executable. The exception to this
            // is `.init_array` sections, which are treated specially by the wasm linker.
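            // Sketch of the resulting metadata (section name assumed, not actual output):
            //   !wasm.custom_sections = !{!0}
            //   !0 = !{!"foo", !"<initializer bytes>"}
            // which the wasm backend emits as a custom section named "foo".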
            if self.tcx.sess.target.is_like_wasm
                && attrs
                    .link_section
                    .map(|link_section| !link_section.as_str().starts_with(".init_array"))
                    .unwrap_or(true)
            {
                if let Some(section) = attrs.link_section {
                    let section = llvm::LLVMMDStringInContext2(
                        self.llcx,
                        section.as_str().as_c_char_ptr(),
                        section.as_str().len(),
                    );
                    assert!(alloc.provenance().ptrs().is_empty());

                    // The `inspect` method is okay here because we checked for provenance, and
                    // because we are doing this access to inspect the final interpreter state (not
                    // as part of the interpreter execution).
                    let bytes =
                        alloc.inspect_with_uninit_and_ptr_outside_interpreter(0..alloc.len());
                    let alloc =
                        llvm::LLVMMDStringInContext2(self.llcx, bytes.as_c_char_ptr(), bytes.len());
                    let data = [section, alloc];
                    let meta = llvm::LLVMMDNodeInContext2(self.llcx, data.as_ptr(), data.len());
                    let val = self.get_metadata_value(meta);
                    llvm::LLVMAddNamedMetadataOperand(
                        self.llmod,
                        c"wasm.custom_sections".as_ptr(),
                        val,
                    );
                }
            } else {
                base::set_link_section(g, attrs);
            }

            base::set_variable_sanitizer_attrs(g, attrs);

            if attrs.flags.contains(CodegenFnAttrFlags::USED) {
                // `USED` and `USED_LINKER` can't be used together.
                assert!(!attrs.flags.contains(CodegenFnAttrFlags::USED_LINKER));

                // The semantics of #[used] in Rust only require the symbol to make it into the
                // object file. It is explicitly allowed for the linker to strip the symbol if it
                // is dead, which means we are allowed to use `llvm.compiler.used` instead of
                // `llvm.used` here.
                //
                // Additionally, https://reviews.llvm.org/D97448 in LLVM 13 started emitting unique
                // sections with the SHF_GNU_RETAIN flag for llvm.used symbols, which may trigger
                // bugs in the handling of `.init_array` (the static constructor list) in versions
                // of the gold linker (prior to the one released with binutils 2.36).
                //
                // That said, we only ever emit these when compiling for ELF targets, unless
                // `#[used(compiler)]` is explicitly requested. This is to avoid similar breakage
                // on other targets; in particular, MachO targets have *their* static constructor
                // lists broken if `llvm.compiler.used` is emitted rather than `llvm.used`. However,
                // that check happens when assigning the `CodegenFnAttrFlags` in
                // `rustc_hir_analysis`, so we don't need to take care of it here.
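                // Sketch of the resulting module-level array (the name of `@g` is assumed):
                //   @llvm.compiler.used = appending global [1 x ptr] [ptr @g], section "llvm.metadata"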
                self.add_compiler_used_global(g);
            }
            if attrs.flags.contains(CodegenFnAttrFlags::USED_LINKER) {
                // `USED` and `USED_LINKER` can't be used together.
                assert!(!attrs.flags.contains(CodegenFnAttrFlags::USED));

                self.add_used_global(g);
            }
        }
    }
}

impl<'ll> StaticCodegenMethods for CodegenCx<'ll, '_> {
    /// Get a pointer to a global variable.
    ///
    /// The pointer will always be in the default address space. If global variables default to a
    /// different address space, an addrspacecast is inserted.
    fn static_addr_of(&self, cv: &'ll Value, align: Align, kind: Option<&str>) -> &'ll Value {
        let gv = self.static_addr_of_impl(cv, align, kind);
        // static_addr_of_impl returns the bare global variable, which might not be in the default
        // address space. Cast to the default address space if necessary.
        self.const_pointercast(gv, self.type_ptr())
    }

    fn codegen_static(&self, def_id: DefId) {
        self.codegen_static_item(def_id)
    }

    /// Add a global value to a list to be stored in the `llvm.used` variable, an array of ptr.
    fn add_used_global(&self, global: &'ll Value) {
        self.used_statics.borrow_mut().push(global);
    }

    /// Add a global value to a list to be stored in the `llvm.compiler.used` variable,
    /// an array of ptr.
    fn add_compiler_used_global(&self, global: &'ll Value) {
        self.compiler_used_statics.borrow_mut().push(global);
    }
}