rustc_codegen_llvm/consts.rs
use std::ops::Range;

use rustc_abi::{Align, HasDataLayout, Primitive, Scalar, Size, WrappingRange};
use rustc_codegen_ssa::common;
use rustc_codegen_ssa::traits::*;
use rustc_hir::LangItem;
use rustc_hir::def::DefKind;
use rustc_hir::def_id::DefId;
use rustc_middle::middle::codegen_fn_attrs::{CodegenFnAttrFlags, CodegenFnAttrs};
use rustc_middle::mir::interpret::{
    Allocation, ConstAllocation, ErrorHandled, InitChunk, Pointer, Scalar as InterpScalar,
    read_target_uint,
};
use rustc_middle::mir::mono::{Linkage, MonoItem};
use rustc_middle::ty::layout::{HasTypingEnv, LayoutOf};
use rustc_middle::ty::{self, Instance};
use rustc_middle::{bug, span_bug};
use tracing::{debug, instrument, trace};

use crate::common::{AsCCharPtr, CodegenCx};
use crate::errors::SymbolAlreadyDefined;
use crate::llvm::{self, True};
use crate::type_::Type;
use crate::type_of::LayoutLlvmExt;
use crate::value::Value;
use crate::{base, debuginfo};

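/// Lowers a constant allocation (`ConstAllocation`) into an LLVM constant value.
///
/// Runs of initialized bytes become byte-array constants, runs of uninitialized bytes become
/// `undef`, and each provenance entry is spliced in as a relocated pointer constant at its
/// offset. If more than one piece results, they are concatenated into a packed LLVM struct.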
pub(crate) fn const_alloc_to_llvm<'ll>(
    cx: &CodegenCx<'ll, '_>,
    alloc: ConstAllocation<'_>,
    is_static: bool,
) -> &'ll Value {
    let alloc = alloc.inner();
    // We expect that callers of const_alloc_to_llvm will instead directly codegen a pointer or
    // integer for any &ZST where the ZST is a constant (i.e. not a static). We should never be
    // producing empty LLVM allocations as they're just adding noise to binaries and forcing less
    // optimal codegen.
    //
    // Statics have a guaranteed meaningful address so it's less clear that we want to do
    // something like this; it's also harder.
    if !is_static {
        assert!(alloc.len() != 0);
    }
    let mut llvals = Vec::with_capacity(alloc.provenance().ptrs().len() + 1);
    let dl = cx.data_layout();
    let pointer_size = dl.pointer_size();
    let pointer_size_bytes = pointer_size.bytes() as usize;

    // Note: this function may call `inspect_with_uninit_and_ptr_outside_interpreter`, so `range`
    // must be within the bounds of `alloc` and not contain or overlap a pointer provenance.
    fn append_chunks_of_init_and_uninit_bytes<'ll, 'a, 'b>(
        llvals: &mut Vec<&'ll Value>,
        cx: &'a CodegenCx<'ll, 'b>,
        alloc: &'a Allocation,
        range: Range<usize>,
    ) {
        let chunks = alloc.init_mask().range_as_init_chunks(range.clone().into());

        let chunk_to_llval = move |chunk| match chunk {
            InitChunk::Init(range) => {
                let range = (range.start.bytes() as usize)..(range.end.bytes() as usize);
                let bytes = alloc.inspect_with_uninit_and_ptr_outside_interpreter(range);
                cx.const_bytes(bytes)
            }
            InitChunk::Uninit(range) => {
                let len = range.end.bytes() - range.start.bytes();
                cx.const_undef(cx.type_array(cx.type_i8(), len))
            }
        };

        // Generating partially-uninit consts is limited to small numbers of chunks,
        // to avoid the cost of generating large complex const expressions.
        // For example, `[(u32, u8); 1024 * 1024]` contains uninit padding in each element, and
        // would result in `{ [5 x i8] zeroinitializer, [3 x i8] undef, ...repeat 1M times... }`.
        let max = cx.sess().opts.unstable_opts.uninit_const_chunk_threshold;
        let allow_uninit_chunks = chunks.clone().take(max.saturating_add(1)).count() <= max;

        if allow_uninit_chunks {
            llvals.extend(chunks.map(chunk_to_llval));
        } else {
            // If this allocation contains any uninit bytes, codegen as if it was initialized
            // (using some arbitrary value for uninit bytes).
            let bytes = alloc.inspect_with_uninit_and_ptr_outside_interpreter(range);
            llvals.push(cx.const_bytes(bytes));
        }
    }

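    // Walk the provenance entries in offset order, emitting the plain bytes that precede each
    // pointer followed by the relocated pointer constant itself; the tail after the last pointer
    // is handled below.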
    let mut next_offset = 0;
    for &(offset, prov) in alloc.provenance().ptrs().iter() {
        let offset = offset.bytes();
        assert_eq!(offset as usize as u64, offset);
        let offset = offset as usize;
        if offset > next_offset {
            // This `inspect` is okay since we have checked that there is no provenance, it
            // is within the bounds of the allocation, and it doesn't affect interpreter execution
            // (we inspect the result after interpreter execution).
            append_chunks_of_init_and_uninit_bytes(&mut llvals, cx, alloc, next_offset..offset);
        }
        let ptr_offset = read_target_uint(
            dl.endian,
            // This `inspect` is okay since it is within the bounds of the allocation, it doesn't
            // affect interpreter execution (we inspect the result after interpreter execution),
            // and we properly interpret the provenance as a relocation pointer offset.
            alloc.inspect_with_uninit_and_ptr_outside_interpreter(
                offset..(offset + pointer_size_bytes),
            ),
        )
        .expect("const_alloc_to_llvm: could not read relocation pointer")
            as u64;

        let address_space = cx.tcx.global_alloc(prov.alloc_id()).address_space(cx);

        llvals.push(cx.scalar_to_backend(
            InterpScalar::from_pointer(Pointer::new(prov, Size::from_bytes(ptr_offset)), &cx.tcx),
            Scalar::Initialized {
                value: Primitive::Pointer(address_space),
                valid_range: WrappingRange::full(pointer_size),
            },
            cx.type_ptr_ext(address_space),
        ));
        next_offset = offset + pointer_size_bytes;
    }
    if alloc.len() >= next_offset {
        let range = next_offset..alloc.len();
        // This `inspect` is okay since we have checked that it is after all provenance, it is
        // within the bounds of the allocation, and it doesn't affect interpreter execution (we
        // inspect the result after interpreter execution).
        append_chunks_of_init_and_uninit_bytes(&mut llvals, cx, alloc, range);
    }

    // Avoid wrapping in a struct if there is only a single value. This ensures
    // that LLVM is able to perform the string merging optimization if the constant
    // is a valid C string. LLVM only considers bare arrays for this optimization,
    // not arrays wrapped in a struct. LLVM handles this at:
    // https://github.com/rust-lang/llvm-project/blob/acaea3d2bb8f351b740db7ebce7d7a40b9e21488/llvm/lib/Target/TargetLoweringObjectFile.cpp#L249-L280
    if let &[data] = &*llvals { data } else { cx.const_struct(&llvals, true) }
}

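/// Evaluates the initializer of the static `def_id` and lowers it to an LLVM constant.
///
/// Returns both the LLVM value and the evaluated allocation; callers also need the allocation
/// for its alignment, mutability, and (on wasm) its raw bytes.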
fn codegen_static_initializer<'ll, 'tcx>(
    cx: &CodegenCx<'ll, 'tcx>,
    def_id: DefId,
) -> Result<(&'ll Value, ConstAllocation<'tcx>), ErrorHandled> {
    let alloc = cx.tcx.eval_static_initializer(def_id)?;
    Ok((const_alloc_to_llvm(cx, alloc, /*static*/ true), alloc))
}

fn set_global_alignment<'ll>(cx: &CodegenCx<'ll, '_>, gv: &'ll Value, mut align: Align) {
    // The target may require greater alignment for globals than the type does.
    // Note: GCC and Clang also allow `__attribute__((aligned))` on variables,
    // which can force it to be smaller. Rust doesn't support this yet.
    if let Some(min_global) = cx.sess().target.min_global_align {
        align = Ord::max(align, min_global);
    }
    llvm::set_alignment(gv, align);
}

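/// Declares the LLVM global for a static that is foreign or defined in another crate, applying
/// the appropriate linkage.
///
/// When an import linkage is present (set via the `#[linkage]` attribute on an extern static),
/// an internal indirection global named `_rust_extern_with_linkage_<sym>` is emitted that holds
/// the address of the declared symbol; if that symbol is discarded at link time (e.g. weak
/// linkage with no definition), the indirection is initialized to zero instead.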
fn check_and_apply_linkage<'ll, 'tcx>(
    cx: &CodegenCx<'ll, 'tcx>,
    attrs: &CodegenFnAttrs,
    llty: &'ll Type,
    sym: &str,
    def_id: DefId,
) -> &'ll Value {
    if let Some(linkage) = attrs.import_linkage {
        debug!("get_static: sym={} linkage={:?}", sym, linkage);

        // Declare a symbol `foo`. If `foo` is an extern_weak symbol, we declare
        // an extern_weak function, otherwise a global with the desired linkage.
        let g1 = if matches!(attrs.import_linkage, Some(Linkage::ExternalWeak)) {
            // An `extern_weak` function is represented as an `Option<unsafe extern ...>`;
            // we extract the function signature and declare it as an extern_weak function
            // instead of an extern_weak i8.
            let instance = Instance::mono(cx.tcx, def_id);
            if let ty::Adt(struct_def, args) = instance.ty(cx.tcx, cx.typing_env()).kind()
                && cx.tcx.is_lang_item(struct_def.did(), LangItem::Option)
                && let ty::FnPtr(sig, header) = args.type_at(0).kind()
            {
                let fn_sig = sig.with(*header);

                let fn_abi = cx.fn_abi_of_fn_ptr(fn_sig, ty::List::empty());
                cx.declare_fn(sym, &fn_abi, None)
            } else {
                cx.declare_global(sym, cx.type_i8())
            }
        } else {
            cx.declare_global(sym, cx.type_i8())
        };
        llvm::set_linkage(g1, base::linkage_to_llvm(linkage));

        // Declare an internal global `extern_with_linkage_foo` which
        // is initialized with the address of `foo`. If `foo` is
        // discarded during linking (for example, if `foo` has weak
        // linkage and there are no definitions), then
        // `extern_with_linkage_foo` will instead be initialized to
        // zero.
        let mut real_name = "_rust_extern_with_linkage_".to_string();
        real_name.push_str(sym);
        let g2 = cx.define_global(&real_name, llty).unwrap_or_else(|| {
            cx.sess().dcx().emit_fatal(SymbolAlreadyDefined {
                span: cx.tcx.def_span(def_id),
                symbol_name: sym,
            })
        });
        llvm::set_linkage(g2, llvm::Linkage::InternalLinkage);
        llvm::set_initializer(g2, g1);
        g2
    } else if cx.tcx.sess.target.arch == "x86"
        && common::is_mingw_gnu_toolchain(&cx.tcx.sess.target)
        && let Some(dllimport) = crate::common::get_dllimport(cx.tcx, def_id, sym)
    {
        cx.declare_global(&common::i686_decorated_name(dllimport, true, true, false), llty)
    } else {
        // Generate an external declaration.
        // FIXME(nagisa): investigate whether it can be changed into define_global
        cx.declare_global(sym, llty)
    }
}

impl<'ll> CodegenCx<'ll, '_> {
    pub(crate) fn const_bitcast(&self, val: &'ll Value, ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMConstBitCast(val, ty) }
    }

    pub(crate) fn const_pointercast(&self, val: &'ll Value, ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMConstPointerCast(val, ty) }
    }

    /// Create a global variable.
    ///
    /// The returned global variable is a pointer in the default address space for globals.
    /// Fails if a symbol with the given name already exists.
    pub(crate) fn static_addr_of_mut(
        &self,
        cv: &'ll Value,
        align: Align,
        kind: Option<&str>,
    ) -> &'ll Value {
        let gv = match kind {
            Some(kind) if !self.tcx.sess.fewer_names() => {
                let name = self.generate_local_symbol_name(kind);
                let gv = self.define_global(&name, self.val_ty(cv)).unwrap_or_else(|| {
                    bug!("symbol `{}` is already defined", name);
                });
                llvm::set_linkage(gv, llvm::Linkage::PrivateLinkage);
                gv
            }
            _ => self.define_private_global(self.val_ty(cv)),
        };
        llvm::set_initializer(gv, cv);
        set_global_alignment(self, gv, align);
        llvm::SetUnnamedAddress(gv, llvm::UnnamedAddr::Global);
        gv
    }

    /// Create a global constant.
    ///
    /// The returned global variable is a pointer in the default address space for globals.
    pub(crate) fn static_addr_of_impl(
        &self,
        cv: &'ll Value,
        align: Align,
        kind: Option<&str>,
    ) -> &'ll Value {
        if let Some(&gv) = self.const_globals.borrow().get(&cv) {
            unsafe {
                // Upgrade the alignment in cases where the same constant is used with different
                // alignment requirements.
                let llalign = align.bytes() as u32;
                if llalign > llvm::LLVMGetAlignment(gv) {
                    llvm::LLVMSetAlignment(gv, llalign);
                }
            }
            return gv;
        }
        let gv = self.static_addr_of_mut(cv, align, kind);
        unsafe {
            llvm::LLVMSetGlobalConstant(gv, True);
        }
        self.const_globals.borrow_mut().insert(cv, gv);
        gv
    }

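    /// Returns the LLVM global for the static `def_id`, declaring it if this codegen unit has
    /// not referenced it before. Nested statics get a dummy `i8` type here; their real type is
    /// recovered later from the evaluated initializer (see `codegen_static`).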
    #[instrument(level = "debug", skip(self))]
    pub(crate) fn get_static(&self, def_id: DefId) -> &'ll Value {
        let instance = Instance::mono(self.tcx, def_id);
        trace!(?instance);

        let DefKind::Static { nested, .. } = self.tcx.def_kind(def_id) else { bug!() };
        // Nested statics do not have a type, so pick a dummy type and let `codegen_static` figure
        // out the llvm type from the actual evaluated initializer.
        let llty = if nested {
            self.type_i8()
        } else {
            let ty = instance.ty(self.tcx, self.typing_env());
            trace!(?ty);
            self.layout_of(ty).llvm_type(self)
        };
        self.get_static_inner(def_id, llty)
    }

    #[instrument(level = "debug", skip(self, llty))]
    fn get_static_inner(&self, def_id: DefId, llty: &'ll Type) -> &'ll Value {
        let instance = Instance::mono(self.tcx, def_id);
        if let Some(&g) = self.instances.borrow().get(&instance) {
            trace!("used cached value");
            return g;
        }

        let defined_in_current_codegen_unit =
            self.codegen_unit.items().contains_key(&MonoItem::Static(def_id));
        assert!(
            !defined_in_current_codegen_unit,
            "consts::get_static() should always hit the cache for \
             statics defined in the same CGU, but did not for `{def_id:?}`"
        );

        let sym = self.tcx.symbol_name(instance).name;
        let fn_attrs = self.tcx.codegen_fn_attrs(def_id);

        debug!(?sym, ?fn_attrs);

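        // Statics defined in the current crate are declared directly; foreign statics and
        // statics from other crates go through `check_and_apply_linkage`, which also handles
        // `#[linkage]` imports and dllimport decoration.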
        let g = if def_id.is_local() && !self.tcx.is_foreign_item(def_id) {
            if let Some(g) = self.get_declared_value(sym) {
                if self.val_ty(g) != self.type_ptr() {
                    span_bug!(self.tcx.def_span(def_id), "Conflicting types for static");
                }
            }

            let g = self.declare_global(sym, llty);

            if !self.tcx.is_reachable_non_generic(def_id) {
                llvm::set_visibility(g, llvm::Visibility::Hidden);
            }

            g
        } else {
            check_and_apply_linkage(self, fn_attrs, llty, sym, def_id)
        };

        // Thread-local statics in some other crate need to *always* be linked
        // against in a thread-local fashion, so we need to be sure to apply the
        // thread-local attribute locally if it was present remotely. If we
        // don't do this then linker errors can be generated where the linker
        // complains that one object file has a thread-local version of the
        // symbol and another one doesn't.
        if fn_attrs.flags.contains(CodegenFnAttrFlags::THREAD_LOCAL) {
            llvm::set_thread_local_mode(g, self.tls_model);
        }

        let dso_local = self.assume_dso_local(g, true);

        if !def_id.is_local() {
            let needs_dll_storage_attr = self.use_dll_storage_attrs
                && !self.tcx.is_foreign_item(def_id)
                // Local definitions can never be imported, so we must not apply
                // the DLLImport annotation.
                && !dso_local
                // Linker-plugin ThinLTO doesn't create the self-dllimport Rust uses for rlibs
                // as the code generation happens out of process. Instead we assume static linkage
                // and disallow dynamic linking when linker-plugin-based LTO is enabled.
                // Regular in-process ThinLTO doesn't need this workaround.
                && !self.tcx.sess.opts.cg.linker_plugin_lto.enabled();

            // If this assertion triggers, there's something wrong with command-line
            // argument validation.
            assert!(
                !(self.tcx.sess.opts.cg.linker_plugin_lto.enabled()
                    && self.tcx.sess.target.is_like_windows
                    && self.tcx.sess.opts.cg.prefer_dynamic)
            );

            if needs_dll_storage_attr {
                // This item is external but not foreign, i.e., it originates from an external Rust
                // crate. Since we don't know whether this crate will be linked dynamically or
                // statically in the final application, we always mark such symbols as 'dllimport'.
                // If final linkage happens to be static, we rely on compiler-emitted __imp_ stubs
                // to make things work.
                //
                // However, in some scenarios we defer emission of statics to downstream
                // crates, so there are cases where a static with an upstream DefId
                // is actually present in the current crate. We can find out via the
                // is_codegened_item query.
                if !self.tcx.is_codegened_item(def_id) {
                    llvm::set_dllimport_storage_class(g);
                }
            }
        }

        if self.use_dll_storage_attrs
            && let Some(library) = self.tcx.native_library(def_id)
            && library.kind.is_dllimport()
        {
            // For foreign (native) libs we know the exact storage type to use.
            llvm::set_dllimport_storage_class(g);
        }

        self.instances.borrow_mut().insert(instance, g);
        g
    }

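    /// Emits the initializer of the static `def_id`: evaluates it, attaches it to the LLVM
    /// global (re-creating the global with the initializer's exact type when it differs from
    /// the declared one), and applies alignment, constness, TLS, link-section, and `#[used]`
    /// attributes.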
    fn codegen_static_item(&mut self, def_id: DefId) {
        unsafe {
            assert!(
                llvm::LLVMGetInitializer(
                    self.instances.borrow().get(&Instance::mono(self.tcx, def_id)).unwrap()
                )
                .is_none()
            );
            let attrs = self.tcx.codegen_fn_attrs(def_id);

            let Ok((v, alloc)) = codegen_static_initializer(self, def_id) else {
                // Error has already been reported.
                return;
            };
            let alloc = alloc.inner();

            let val_llty = self.val_ty(v);

            let g = self.get_static_inner(def_id, val_llty);
            let llty = self.get_type_of_global(g);

            let g = if val_llty == llty {
                g
            } else {
                // codegen_static_initializer creates the global value just from the
                // `Allocation` data by generating one big struct value that is just
                // all the bytes and pointers after each other. This will almost never
                // match the type that the static was declared with. Unfortunately
                // we can't just LLVMConstBitCast our way out of it because that has very
                // specific rules on what can be cast. So instead of adding a new way to
                // generate static initializers that match the static's type, we picked
                // the easier option and retroactively change the type of the static item itself.
                let name = llvm::get_value_name(g).to_vec();
                llvm::set_value_name(g, b"");

                let linkage = llvm::get_linkage(g);
                let visibility = llvm::get_visibility(g);

                let new_g = llvm::LLVMRustGetOrInsertGlobal(
                    self.llmod,
                    name.as_c_char_ptr(),
                    name.len(),
                    val_llty,
                );

                llvm::set_linkage(new_g, linkage);
                llvm::set_visibility(new_g, visibility);

                // The old global has had its name removed but is returned by
                // get_static since it is in the instance cache. Provide an
                // alternative lookup that points to the new global so that
                // global_asm! can compute the correct mangled symbol name
                // for the global.
                self.renamed_statics.borrow_mut().insert(def_id, new_g);

                // To avoid breaking any invariants, we leave around the old
                // global for the moment; we'll replace all references to it
                // with the new global later. (See base::codegen_backend.)
                self.statics_to_rauw.borrow_mut().push((g, new_g));
                new_g
            };
            set_global_alignment(self, g, alloc.align);
            llvm::set_initializer(g, v);

            self.assume_dso_local(g, true);

            // Forward the allocation's mutability (picked by the const interner) to LLVM.
            if alloc.mutability.is_not() {
                llvm::LLVMSetGlobalConstant(g, llvm::True);
            }

            debuginfo::build_global_var_di_node(self, def_id, g);

            if attrs.flags.contains(CodegenFnAttrFlags::THREAD_LOCAL) {
                llvm::set_thread_local_mode(g, self.tls_model);
            }

            // Wasm statics with custom link sections get special treatment as they
            // go into custom sections of the wasm executable. The exception to this
            // is the `.init_array` section, which is treated specially by the wasm linker.
            if self.tcx.sess.target.is_like_wasm
                && attrs
                    .link_section
                    .map(|link_section| !link_section.as_str().starts_with(".init_array"))
                    .unwrap_or(true)
            {
                if let Some(section) = attrs.link_section {
                    let section = llvm::LLVMMDStringInContext2(
                        self.llcx,
                        section.as_str().as_c_char_ptr(),
                        section.as_str().len(),
                    );
                    assert!(alloc.provenance().ptrs().is_empty());

                    // The `inspect` method is okay here because we checked for provenance, and
                    // because we are doing this access to inspect the final interpreter state (not
                    // as part of the interpreter execution).
                    let bytes =
                        alloc.inspect_with_uninit_and_ptr_outside_interpreter(0..alloc.len());
                    let alloc =
                        llvm::LLVMMDStringInContext2(self.llcx, bytes.as_c_char_ptr(), bytes.len());
                    let data = [section, alloc];
                    let meta = llvm::LLVMMDNodeInContext2(self.llcx, data.as_ptr(), data.len());
                    let val = self.get_metadata_value(meta);
                    llvm::LLVMAddNamedMetadataOperand(
                        self.llmod,
                        c"wasm.custom_sections".as_ptr(),
                        val,
                    );
                }
            } else {
                base::set_link_section(g, attrs);
            }

            base::set_variable_sanitizer_attrs(g, attrs);

            if attrs.flags.contains(CodegenFnAttrFlags::USED_COMPILER) {
                // `USED` and `USED_LINKER` can't be used together.
                assert!(!attrs.flags.contains(CodegenFnAttrFlags::USED_LINKER));

                // The semantics of #[used] in Rust only require the symbol to make it into the
                // object file. It is explicitly allowed for the linker to strip the symbol if it
                // is dead, which means we are allowed to use `llvm.compiler.used` instead of
                // `llvm.used` here.
                //
                // Additionally, https://reviews.llvm.org/D97448 in LLVM 13 started emitting unique
                // sections with SHF_GNU_RETAIN flag for llvm.used symbols, which may trigger bugs
                // in the handling of `.init_array` (the static constructor list) in versions of
                // the gold linker (prior to the one released with binutils 2.36).
                //
                // That said, we only ever emit these when `#[used(compiler)]` is explicitly
                // requested. This is to avoid similar breakage on other targets, in particular
                // MachO targets have *their* static constructor lists broken if `llvm.compiler.used`
                // is emitted rather than `llvm.used`. However, that check happens when assigning
                // the `CodegenFnAttrFlags` in the `codegen_fn_attrs` query, so we don't need to
                // take care of it here.
                self.add_compiler_used_global(g);
            }
            if attrs.flags.contains(CodegenFnAttrFlags::USED_LINKER) {
                // `USED` and `USED_LINKER` can't be used together.
                assert!(!attrs.flags.contains(CodegenFnAttrFlags::USED_COMPILER));

                self.add_used_global(g);
            }
        }
    }

    /// Add a global value to a list to be stored in the `llvm.used` variable, an array of ptr.
    pub(crate) fn add_used_global(&mut self, global: &'ll Value) {
        self.used_statics.push(global);
    }

    /// Add a global value to a list to be stored in the `llvm.compiler.used` variable,
    /// an array of ptr.
    pub(crate) fn add_compiler_used_global(&mut self, global: &'ll Value) {
        self.compiler_used_statics.push(global);
    }
}

impl<'ll> StaticCodegenMethods for CodegenCx<'ll, '_> {
    /// Get a pointer to a global variable.
    ///
    /// The pointer will always be in the default address space. If global variables default to a
    /// different address space, an addrspacecast is inserted.
    fn static_addr_of(&self, cv: &'ll Value, align: Align, kind: Option<&str>) -> &'ll Value {
        let gv = self.static_addr_of_impl(cv, align, kind);
        // static_addr_of_impl returns the bare global variable, which might not be in the default
        // address space. Cast to the default address space if necessary.
        self.const_pointercast(gv, self.type_ptr())
    }

    fn codegen_static(&mut self, def_id: DefId) {
        self.codegen_static_item(def_id)
    }
}