rustc_codegen_llvm/consts.rs
use std::ops::Range;

use rustc_abi::{Align, HasDataLayout, Primitive, Scalar, Size, WrappingRange};
use rustc_codegen_ssa::common;
use rustc_codegen_ssa::traits::*;
use rustc_hir::LangItem;
use rustc_hir::def::DefKind;
use rustc_hir::def_id::DefId;
use rustc_middle::middle::codegen_fn_attrs::{CodegenFnAttrFlags, CodegenFnAttrs};
use rustc_middle::mir::interpret::{
    Allocation, ConstAllocation, ErrorHandled, InitChunk, Pointer, Scalar as InterpScalar,
    read_target_uint,
};
use rustc_middle::mir::mono::{Linkage, MonoItem};
use rustc_middle::ty::layout::{HasTypingEnv, LayoutOf};
use rustc_middle::ty::{self, Instance};
use rustc_middle::{bug, span_bug};
use tracing::{debug, instrument, trace};

use crate::common::{AsCCharPtr, CodegenCx};
use crate::errors::SymbolAlreadyDefined;
use crate::llvm::{self, True};
use crate::type_::Type;
use crate::type_of::LayoutLlvmExt;
use crate::value::Value;
use crate::{base, debuginfo};

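/// Lowers a `ConstAllocation` to an LLVM constant value, interleaving runs of
/// plain (possibly uninitialized) bytes with the relocated pointers recorded in
/// the allocation's provenance map.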
pub(crate) fn const_alloc_to_llvm<'ll>(
    cx: &CodegenCx<'ll, '_>,
    alloc: ConstAllocation<'_>,
    is_static: bool,
) -> &'ll Value {
    let alloc = alloc.inner();
    // We expect that callers of const_alloc_to_llvm will instead directly codegen a pointer or
    // integer for any &ZST where the ZST is a constant (i.e. not a static). We should never be
    // producing empty LLVM allocations as they're just adding noise to binaries and forcing less
    // optimal codegen.
    //
    // Statics have a guaranteed meaningful address so it's less clear that we want to do
    // something like this; it's also harder.
    if !is_static {
        assert!(alloc.len() != 0);
    }
    let mut llvals = Vec::with_capacity(alloc.provenance().ptrs().len() + 1);
    let dl = cx.data_layout();
    let pointer_size = dl.pointer_size.bytes() as usize;

    // Note: this function may call `inspect_with_uninit_and_ptr_outside_interpreter`, so `range`
    // must be within the bounds of `alloc` and not contain or overlap a pointer provenance.
    fn append_chunks_of_init_and_uninit_bytes<'ll, 'a, 'b>(
        llvals: &mut Vec<&'ll Value>,
        cx: &'a CodegenCx<'ll, 'b>,
        alloc: &'a Allocation,
        range: Range<usize>,
    ) {
        let chunks = alloc.init_mask().range_as_init_chunks(range.clone().into());

        let chunk_to_llval = move |chunk| match chunk {
            InitChunk::Init(range) => {
                let range = (range.start.bytes() as usize)..(range.end.bytes() as usize);
                let bytes = alloc.inspect_with_uninit_and_ptr_outside_interpreter(range);
                cx.const_bytes(bytes)
            }
            InitChunk::Uninit(range) => {
                let len = range.end.bytes() - range.start.bytes();
                cx.const_undef(cx.type_array(cx.type_i8(), len))
            }
        };

        // Generating partially-uninit consts is limited to small numbers of chunks,
        // to avoid the cost of generating large complex const expressions.
        // For example, `[(u32, u8); 1024 * 1024]` contains uninit padding in each element, and
        // would result in `{ [5 x i8] zeroinitializer, [3 x i8] undef, ...repeat 1M times... }`.
        let max = cx.sess().opts.unstable_opts.uninit_const_chunk_threshold;
        let allow_uninit_chunks = chunks.clone().take(max.saturating_add(1)).count() <= max;

        if allow_uninit_chunks {
            llvals.extend(chunks.map(chunk_to_llval));
        } else {
            // If this allocation contains any uninit bytes, codegen as if it was initialized
            // (using some arbitrary value for uninit bytes).
            let bytes = alloc.inspect_with_uninit_and_ptr_outside_interpreter(range);
            llvals.push(cx.const_bytes(bytes));
        }
    }

    let mut next_offset = 0;
    for &(offset, prov) in alloc.provenance().ptrs().iter() {
        let offset = offset.bytes();
        assert_eq!(offset as usize as u64, offset);
        let offset = offset as usize;
        if offset > next_offset {
            // This `inspect` is okay since we have checked that there is no provenance, it
            // is within the bounds of the allocation, and it doesn't affect interpreter execution
            // (we inspect the result after interpreter execution).
            append_chunks_of_init_and_uninit_bytes(&mut llvals, cx, alloc, next_offset..offset);
        }
        let ptr_offset = read_target_uint(
            dl.endian,
            // This `inspect` is okay since it is within the bounds of the allocation, it doesn't
            // affect interpreter execution (we inspect the result after interpreter execution),
            // and we properly interpret the provenance as a relocation pointer offset.
            alloc.inspect_with_uninit_and_ptr_outside_interpreter(offset..(offset + pointer_size)),
        )
        .expect("const_alloc_to_llvm: could not read relocation pointer")
            as u64;

        let address_space = cx.tcx.global_alloc(prov.alloc_id()).address_space(cx);

        llvals.push(cx.scalar_to_backend(
            InterpScalar::from_pointer(Pointer::new(prov, Size::from_bytes(ptr_offset)), &cx.tcx),
            Scalar::Initialized {
                value: Primitive::Pointer(address_space),
                valid_range: WrappingRange::full(dl.pointer_size),
            },
            cx.type_ptr_ext(address_space),
        ));
        next_offset = offset + pointer_size;
    }
    if alloc.len() >= next_offset {
        let range = next_offset..alloc.len();
        // This `inspect` is okay since we have checked that it is after all provenance, it is
        // within the bounds of the allocation, and it doesn't affect interpreter execution (we
        // inspect the result after interpreter execution).
        append_chunks_of_init_and_uninit_bytes(&mut llvals, cx, alloc, range);
    }

    // Avoid wrapping in a struct if there is only a single value. This ensures
    // that LLVM is able to perform the string merging optimization if the constant
    // is a valid C string. LLVM only considers bare arrays for this optimization,
    // not arrays wrapped in a struct. LLVM handles this at:
    // https://github.com/rust-lang/llvm-project/blob/acaea3d2bb8f351b740db7ebce7d7a40b9e21488/llvm/lib/Target/TargetLoweringObjectFile.cpp#L249-L280
    if let &[data] = &*llvals { data } else { cx.const_struct(&llvals, true) }
}

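/// Evaluates a static's initializer through the const interpreter and lowers the
/// resulting allocation to an LLVM constant.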
fn codegen_static_initializer<'ll, 'tcx>(
    cx: &CodegenCx<'ll, 'tcx>,
    def_id: DefId,
) -> Result<(&'ll Value, ConstAllocation<'tcx>), ErrorHandled> {
    let alloc = cx.tcx.eval_static_initializer(def_id)?;
    Ok((const_alloc_to_llvm(cx, alloc, /*static*/ true), alloc))
}

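/// Sets the alignment of an LLVM global, raising it to the target's minimum
/// global alignment if necessary.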
fn set_global_alignment<'ll>(cx: &CodegenCx<'ll, '_>, gv: &'ll Value, mut align: Align) {
    // The target may require greater alignment for globals than the type does.
    // Note: GCC and Clang also allow `__attribute__((aligned))` on variables,
    // which can force it to be smaller. Rust doesn't support this yet.
    if let Some(min_global) = cx.sess().target.min_global_align {
        align = Ord::max(align, min_global);
    }
    llvm::set_alignment(gv, align);
}

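/// Declares the LLVM global for an extern static, honoring any explicit import
/// linkage attribute. For weak imports, the declaration is wrapped in an internal
/// global so a symbol discarded at link time reads as zero instead of failing to
/// link.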
fn check_and_apply_linkage<'ll, 'tcx>(
    cx: &CodegenCx<'ll, 'tcx>,
    attrs: &CodegenFnAttrs,
    llty: &'ll Type,
    sym: &str,
    def_id: DefId,
) -> &'ll Value {
    if let Some(linkage) = attrs.import_linkage {
        debug!("get_static: sym={} linkage={:?}", sym, linkage);

        // Declare a symbol `foo`. If `foo` is an extern_weak symbol, we declare
        // an extern_weak function, otherwise a global with the desired linkage.
        let g1 = if matches!(attrs.import_linkage, Some(Linkage::ExternalWeak)) {
            // An `extern_weak` function is represented as an `Option<unsafe extern ...>`,
            // so we extract the function signature and declare it as an extern_weak
            // function instead of an extern_weak i8.
            let instance = Instance::mono(cx.tcx, def_id);
            if let ty::Adt(struct_def, args) = instance.ty(cx.tcx, cx.typing_env()).kind()
                && cx.tcx.is_lang_item(struct_def.did(), LangItem::Option)
                && let ty::FnPtr(sig, header) = args.type_at(0).kind()
            {
                let fn_sig = sig.with(*header);

                let fn_abi = cx.fn_abi_of_fn_ptr(fn_sig, ty::List::empty());
                cx.declare_fn(sym, &fn_abi, None)
            } else {
                cx.declare_global(sym, cx.type_i8())
            }
        } else {
            cx.declare_global(sym, cx.type_i8())
        };
        llvm::set_linkage(g1, base::linkage_to_llvm(linkage));

        // Declare an internal global `extern_with_linkage_foo` which
        // is initialized with the address of `foo`. If `foo` is
        // discarded during linking (for example, if `foo` has weak
        // linkage and there are no definitions), then
        // `extern_with_linkage_foo` will instead be initialized to
        // zero.
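        //
        // Roughly, the resulting IR for a weakly-imported byte-sized `foo` is:
        //   @foo = extern_weak global i8
        //   @_rust_extern_with_linkage_foo = internal global ptr @foo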
        let mut real_name = "_rust_extern_with_linkage_".to_string();
        real_name.push_str(sym);
        let g2 = cx.define_global(&real_name, llty).unwrap_or_else(|| {
            cx.sess().dcx().emit_fatal(SymbolAlreadyDefined {
                span: cx.tcx.def_span(def_id),
                symbol_name: sym,
            })
        });
        llvm::set_linkage(g2, llvm::Linkage::InternalLinkage);
        llvm::set_initializer(g2, g1);
        g2
    } else if cx.tcx.sess.target.arch == "x86"
        && common::is_mingw_gnu_toolchain(&cx.tcx.sess.target)
        && let Some(dllimport) = crate::common::get_dllimport(cx.tcx, def_id, sym)
    {
        cx.declare_global(&common::i686_decorated_name(dllimport, true, true, false), llty)
    } else {
        // Generate an external declaration.
        // FIXME(nagisa): investigate whether it can be changed into define_global
        cx.declare_global(sym, llty)
    }
}

impl<'ll> CodegenCx<'ll, '_> {
    pub(crate) fn const_bitcast(&self, val: &'ll Value, ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMConstBitCast(val, ty) }
    }

    pub(crate) fn const_pointercast(&self, val: &'ll Value, ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMConstPointerCast(val, ty) }
    }

    /// Create a global variable.
    ///
    /// The returned global variable is a pointer in the default address space for globals.
    /// Fails if a symbol with the given name already exists.
    pub(crate) fn static_addr_of_mut(
        &self,
        cv: &'ll Value,
        align: Align,
        kind: Option<&str>,
    ) -> &'ll Value {
        let gv = match kind {
            Some(kind) if !self.tcx.sess.fewer_names() => {
                let name = self.generate_local_symbol_name(kind);
                let gv = self.define_global(&name, self.val_ty(cv)).unwrap_or_else(|| {
                    bug!("symbol `{}` is already defined", name);
                });
                llvm::set_linkage(gv, llvm::Linkage::PrivateLinkage);
                gv
            }
            _ => self.define_private_global(self.val_ty(cv)),
        };
        llvm::set_initializer(gv, cv);
        set_global_alignment(self, gv, align);
        llvm::SetUnnamedAddress(gv, llvm::UnnamedAddr::Global);
        gv
    }

    /// Create a global constant.
    ///
    /// The returned global variable is a pointer in the default address space for globals.
    pub(crate) fn static_addr_of_impl(
        &self,
        cv: &'ll Value,
        align: Align,
        kind: Option<&str>,
    ) -> &'ll Value {
        if let Some(&gv) = self.const_globals.borrow().get(&cv) {
            unsafe {
                // Upgrade the alignment in cases where the same constant is used with different
                // alignment requirements.
                let llalign = align.bytes() as u32;
                if llalign > llvm::LLVMGetAlignment(gv) {
                    llvm::LLVMSetAlignment(gv, llalign);
                }
            }
            return gv;
        }
        let gv = self.static_addr_of_mut(cv, align, kind);
        unsafe {
            llvm::LLVMSetGlobalConstant(gv, True);
        }
        self.const_globals.borrow_mut().insert(cv, gv);
        gv
    }

    #[instrument(level = "debug", skip(self))]
    pub(crate) fn get_static(&self, def_id: DefId) -> &'ll Value {
        let instance = Instance::mono(self.tcx, def_id);
        trace!(?instance);

        let DefKind::Static { nested, .. } = self.tcx.def_kind(def_id) else { bug!() };
        // Nested statics do not have a type, so pick a dummy type and let `codegen_static` figure
        // out the llvm type from the actual evaluated initializer.
        let llty = if nested {
            self.type_i8()
        } else {
            let ty = instance.ty(self.tcx, self.typing_env());
            trace!(?ty);
            self.layout_of(ty).llvm_type(self)
        };
        self.get_static_inner(def_id, llty)
    }

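    /// Declares the LLVM global backing a static with the given LLVM type, or
    /// returns the cached declaration if one already exists for this instance.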
    #[instrument(level = "debug", skip(self, llty))]
    fn get_static_inner(&self, def_id: DefId, llty: &'ll Type) -> &'ll Value {
        let instance = Instance::mono(self.tcx, def_id);
        if let Some(&g) = self.instances.borrow().get(&instance) {
            trace!("used cached value");
            return g;
        }

        let defined_in_current_codegen_unit =
            self.codegen_unit.items().contains_key(&MonoItem::Static(def_id));
        assert!(
            !defined_in_current_codegen_unit,
            "consts::get_static() should always hit the cache for \
             statics defined in the same CGU, but did not for `{def_id:?}`"
        );

        let sym = self.tcx.symbol_name(instance).name;
        let fn_attrs = self.tcx.codegen_fn_attrs(def_id);

        debug!(?sym, ?fn_attrs);

        let g = if def_id.is_local() && !self.tcx.is_foreign_item(def_id) {
            if let Some(g) = self.get_declared_value(sym) {
                if self.val_ty(g) != self.type_ptr() {
                    span_bug!(self.tcx.def_span(def_id), "Conflicting types for static");
                }
            }

            let g = self.declare_global(sym, llty);

            if !self.tcx.is_reachable_non_generic(def_id) {
                llvm::set_visibility(g, llvm::Visibility::Hidden);
            }

            g
        } else {
            check_and_apply_linkage(self, fn_attrs, llty, sym, def_id)
        };

        // Thread-local statics in some other crate need to *always* be linked
        // against in a thread-local fashion, so we need to be sure to apply the
        // thread-local attribute locally if it was present remotely. If we
        // don't do this then linker errors can be generated where the linker
        // complains that one object file has a thread local version of the
        // symbol and another one doesn't.
        if fn_attrs.flags.contains(CodegenFnAttrFlags::THREAD_LOCAL) {
            llvm::set_thread_local_mode(g, self.tls_model);
        }

        let dso_local = self.assume_dso_local(g, true);

        if !def_id.is_local() {
            let needs_dll_storage_attr = self.use_dll_storage_attrs
                && !self.tcx.is_foreign_item(def_id)
                // Local definitions can never be imported, so we must not apply
                // the DLLImport annotation.
                && !dso_local
                // Linker plugin ThinLTO doesn't create the self-dllimport Rust uses for rlibs
                // as the code generation happens out of process. Instead we assume static linkage
                // and disallow dynamic linking when linker plugin based LTO is enabled.
                // Regular in-process ThinLTO doesn't need this workaround.
                && !self.tcx.sess.opts.cg.linker_plugin_lto.enabled();

            // If this assertion triggers, there's something wrong with command-line
            // argument validation.
            assert!(
                !(self.tcx.sess.opts.cg.linker_plugin_lto.enabled()
                    && self.tcx.sess.target.is_like_windows
                    && self.tcx.sess.opts.cg.prefer_dynamic)
            );

            if needs_dll_storage_attr {
                // This item is external but not foreign, i.e., it originates from an external Rust
                // crate. Since we don't know whether this crate will be linked dynamically or
                // statically in the final application, we always mark such symbols as 'dllimport'.
                // If final linkage happens to be static, we rely on compiler-emitted __imp_ stubs
                // to make things work.
                //
                // However, in some scenarios we defer emission of statics to downstream
                // crates, so there are cases where a static with an upstream DefId
                // is actually present in the current crate. We can find out via the
                // is_codegened_item query.
                if !self.tcx.is_codegened_item(def_id) {
                    llvm::set_dllimport_storage_class(g);
                }
            }
        }

        if self.use_dll_storage_attrs
            && let Some(library) = self.tcx.native_library(def_id)
            && library.kind.is_dllimport()
        {
            // For foreign (native) libs we know the exact storage type to use.
            llvm::set_dllimport_storage_class(g);
        }

        self.instances.borrow_mut().insert(instance, g);
        g
    }

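    /// Evaluates a static's initializer, attaches it to the static's LLVM global,
    /// and applies attributes such as alignment, mutability, TLS mode, link
    /// section, and `#[used]` handling.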
    fn codegen_static_item(&mut self, def_id: DefId) {
        unsafe {
            assert!(
                llvm::LLVMGetInitializer(
                    self.instances.borrow().get(&Instance::mono(self.tcx, def_id)).unwrap()
                )
                .is_none()
            );
            let attrs = self.tcx.codegen_fn_attrs(def_id);

            let Ok((v, alloc)) = codegen_static_initializer(self, def_id) else {
                // Error has already been reported
                return;
            };
            let alloc = alloc.inner();

            let val_llty = self.val_ty(v);

            let g = self.get_static_inner(def_id, val_llty);
            let llty = self.get_type_of_global(g);

            let g = if val_llty == llty {
                g
            } else {
                // codegen_static_initializer creates the global value just from the
                // `Allocation` data by generating one big struct value that is just
                // all the bytes and pointers after each other. This will almost never
                // match the type that the static was declared with. Unfortunately
                // we can't just LLVMConstBitCast our way out of it because that has very
                // specific rules on what can be cast. So instead of adding a new way to
                // generate static initializers that match the static's type, we picked
                // the easier option and retroactively change the type of the static item itself.
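                //
                // For example, a `static X: (u32, u8)` would be declared with its layout
                // type (roughly `{ i32, i8, [3 x i8] }`), while the initializer built by
                // const_alloc_to_llvm is a flat struct of byte arrays and pointers, so
                // the two types rarely match.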
                let name = llvm::get_value_name(g).to_vec();
                llvm::set_value_name(g, b"");

                let linkage = llvm::get_linkage(g);
                let visibility = llvm::get_visibility(g);

                let new_g = llvm::LLVMRustGetOrInsertGlobal(
                    self.llmod,
                    name.as_c_char_ptr(),
                    name.len(),
                    val_llty,
                );

                llvm::set_linkage(new_g, linkage);
                llvm::set_visibility(new_g, visibility);

                // The old global has had its name removed but is returned by
                // get_static since it is in the instance cache. Provide an
                // alternative lookup that points to the new global so that
                // global_asm! can compute the correct mangled symbol name
                // for the global.
                self.renamed_statics.borrow_mut().insert(def_id, new_g);

                // To avoid breaking any invariants, we leave around the old
                // global for the moment; we'll replace all references to it
                // with the new global later. (See base::codegen_backend.)
                self.statics_to_rauw.borrow_mut().push((g, new_g));
                new_g
            };
            set_global_alignment(self, g, alloc.align);
            llvm::set_initializer(g, v);

            self.assume_dso_local(g, true);

            // Forward the allocation's mutability (picked by the const interner) to LLVM.
            if alloc.mutability.is_not() {
                llvm::LLVMSetGlobalConstant(g, llvm::True);
            }

            debuginfo::build_global_var_di_node(self, def_id, g);

            if attrs.flags.contains(CodegenFnAttrFlags::THREAD_LOCAL) {
                llvm::set_thread_local_mode(g, self.tls_model);
            }

            // Wasm statics with custom link sections get special treatment as they
            // go into custom sections of the wasm executable. The exception to this
            // is the `.init_array` section, which is treated specially by the wasm linker.
            if self.tcx.sess.target.is_like_wasm
                && attrs
                    .link_section
                    .map(|link_section| !link_section.as_str().starts_with(".init_array"))
                    .unwrap_or(true)
            {
                if let Some(section) = attrs.link_section {
                    let section = llvm::LLVMMDStringInContext2(
                        self.llcx,
                        section.as_str().as_c_char_ptr(),
                        section.as_str().len(),
                    );
                    assert!(alloc.provenance().ptrs().is_empty());

                    // The `inspect` method is okay here because we checked for provenance, and
                    // because we are doing this access to inspect the final interpreter state (not
                    // as part of the interpreter execution).
                    let bytes =
                        alloc.inspect_with_uninit_and_ptr_outside_interpreter(0..alloc.len());
                    let alloc =
                        llvm::LLVMMDStringInContext2(self.llcx, bytes.as_c_char_ptr(), bytes.len());
                    let data = [section, alloc];
                    let meta = llvm::LLVMMDNodeInContext2(self.llcx, data.as_ptr(), data.len());
                    let val = self.get_metadata_value(meta);
                    llvm::LLVMAddNamedMetadataOperand(
                        self.llmod,
                        c"wasm.custom_sections".as_ptr(),
                        val,
                    );
                }
            } else {
                base::set_link_section(g, attrs);
            }

            base::set_variable_sanitizer_attrs(g, attrs);

            if attrs.flags.contains(CodegenFnAttrFlags::USED_COMPILER) {
                // `USED` and `USED_LINKER` can't be used together.
                assert!(!attrs.flags.contains(CodegenFnAttrFlags::USED_LINKER));

                // The semantics of #[used] in Rust only require the symbol to make it into the
                // object file. It is explicitly allowed for the linker to strip the symbol if it
                // is dead, which means we are allowed to use `llvm.compiler.used` instead of
                // `llvm.used` here.
                //
                // Additionally, https://reviews.llvm.org/D97448 in LLVM 13 started emitting unique
                // sections with the SHF_GNU_RETAIN flag for llvm.used symbols, which may trigger
                // bugs in the handling of `.init_array` (the static constructor list) in versions
                // of the gold linker (prior to the one released with binutils 2.36).
                //
                // That said, we only ever emit these when `#[used(compiler)]` is explicitly
                // requested. This is to avoid similar breakage on other targets, in particular
                // MachO targets have *their* static constructor lists broken if `llvm.compiler.used`
                // is emitted rather than `llvm.used`. However, that check happens when assigning
                // the `CodegenFnAttrFlags` in the `codegen_fn_attrs` query, so we don't need to
                // take care of it here.
                self.add_compiler_used_global(g);
            }
            if attrs.flags.contains(CodegenFnAttrFlags::USED_LINKER) {
                // `USED` and `USED_LINKER` can't be used together.
                assert!(!attrs.flags.contains(CodegenFnAttrFlags::USED_COMPILER));

                self.add_used_global(g);
            }
        }
    }

    /// Add a global value to a list to be stored in the `llvm.used` variable, an array of ptr.
    pub(crate) fn add_used_global(&mut self, global: &'ll Value) {
        self.used_statics.push(global);
    }

    /// Add a global value to a list to be stored in the `llvm.compiler.used` variable,
    /// an array of ptr.
    pub(crate) fn add_compiler_used_global(&mut self, global: &'ll Value) {
        self.compiler_used_statics.push(global);
    }
}

impl<'ll> StaticCodegenMethods for CodegenCx<'ll, '_> {
    /// Get a pointer to a global variable.
    ///
    /// The pointer will always be in the default address space. If global variables default to a
    /// different address space, an addrspacecast is inserted.
    fn static_addr_of(&self, cv: &'ll Value, align: Align, kind: Option<&str>) -> &'ll Value {
        let gv = self.static_addr_of_impl(cv, align, kind);
        // static_addr_of_impl returns the bare global variable, which might not be in the default
        // address space. Cast to the default address space if necessary.
        self.const_pointercast(gv, self.type_ptr())
    }

    fn codegen_static(&mut self, def_id: DefId) {
        self.codegen_static_item(def_id)
    }
}