use std::borrow::Borrow;
use std::cmp;

use libc::c_uint;
use rustc_abi::{
    ArmCall, BackendRepr, CanonAbi, HasDataLayout, InterruptKind, Primitive, Reg, RegKind, Size,
    X86Call,
};
use rustc_codegen_ssa::MemFlags;
use rustc_codegen_ssa::mir::operand::{OperandRef, OperandValue};
use rustc_codegen_ssa::mir::place::{PlaceRef, PlaceValue};
use rustc_codegen_ssa::traits::*;
use rustc_middle::ty::Ty;
use rustc_middle::ty::layout::LayoutOf;
use rustc_middle::{bug, ty};
use rustc_session::config;
use rustc_target::callconv::{
    ArgAbi, ArgAttribute, ArgAttributes, ArgExtension, CastTarget, FnAbi, PassMode,
};
use rustc_target::spec::SanitizerSet;
use smallvec::SmallVec;

use crate::attributes::{self, llfn_attrs_from_instance};
use crate::builder::Builder;
use crate::context::CodegenCx;
use crate::llvm::{self, Attribute, AttributePlace};
use crate::type_::Type;
use crate::type_of::LayoutLlvmExt;
use crate::value::Value;

trait ArgAttributesExt {
    fn apply_attrs_to_llfn(&self, idx: AttributePlace, cx: &CodegenCx<'_, '_>, llfn: &Value);
    fn apply_attrs_to_callsite(
        &self,
        idx: AttributePlace,
        cx: &CodegenCx<'_, '_>,
        callsite: &Value,
    );
}

const ABI_AFFECTING_ATTRIBUTES: [(ArgAttribute, llvm::AttributeKind); 1] =
    [(ArgAttribute::InReg, llvm::AttributeKind::InReg)];

const OPTIMIZATION_ATTRIBUTES: [(ArgAttribute, llvm::AttributeKind); 5] = [
    (ArgAttribute::NoAlias, llvm::AttributeKind::NoAlias),
    (ArgAttribute::NoCapture, llvm::AttributeKind::NoCapture),
    (ArgAttribute::NonNull, llvm::AttributeKind::NonNull),
    (ArgAttribute::ReadOnly, llvm::AttributeKind::ReadOnly),
    (ArgAttribute::NoUndef, llvm::AttributeKind::NoUndef),
];

fn get_attrs<'ll>(this: &ArgAttributes, cx: &CodegenCx<'ll, '_>) -> SmallVec<[&'ll Attribute; 8]> {
    let mut regular = this.regular;

    let mut attrs = SmallVec::new();

    for (attr, llattr) in ABI_AFFECTING_ATTRIBUTES {
        if regular.contains(attr) {
            attrs.push(llattr.create_attr(cx.llcx));
        }
    }
    if let Some(align) = this.pointee_align {
        attrs.push(llvm::CreateAlignmentAttr(cx.llcx, align.bytes()));
    }
    match this.arg_ext {
        ArgExtension::None => {}
        ArgExtension::Zext => attrs.push(llvm::AttributeKind::ZExt.create_attr(cx.llcx)),
        ArgExtension::Sext => attrs.push(llvm::AttributeKind::SExt.create_attr(cx.llcx)),
    }

    if cx.sess().opts.optimize != config::OptLevel::No {
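        // The attributes below only aid optimization, so they are skipped at
        // `-Copt-level=0`; the exception is `noundef`, which MemorySanitizer relies on
        // and which the `else if` branch below still applies.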
        let deref = this.pointee_size.bytes();
        if deref != 0 {
            if regular.contains(ArgAttribute::NonNull) {
                attrs.push(llvm::CreateDereferenceableAttr(cx.llcx, deref));
            } else {
                attrs.push(llvm::CreateDereferenceableOrNullAttr(cx.llcx, deref));
            }
            regular -= ArgAttribute::NonNull;
        }
        for (attr, llattr) in OPTIMIZATION_ATTRIBUTES {
            if regular.contains(attr) {
                attrs.push(llattr.create_attr(cx.llcx));
            }
        }
    } else if cx.tcx.sess.opts.unstable_opts.sanitizer.contains(SanitizerSet::MEMORY) {
        if regular.contains(ArgAttribute::NoUndef) {
            attrs.push(llvm::AttributeKind::NoUndef.create_attr(cx.llcx));
        }
    }

    attrs
}

impl ArgAttributesExt for ArgAttributes {
    fn apply_attrs_to_llfn(&self, idx: AttributePlace, cx: &CodegenCx<'_, '_>, llfn: &Value) {
        let attrs = get_attrs(self, cx);
        attributes::apply_to_llfn(llfn, idx, &attrs);
    }

    fn apply_attrs_to_callsite(
        &self,
        idx: AttributePlace,
        cx: &CodegenCx<'_, '_>,
        callsite: &Value,
    ) {
        let attrs = get_attrs(self, cx);
        attributes::apply_to_callsite(callsite, idx, &attrs);
    }
}

pub(crate) trait LlvmType {
    fn llvm_type<'ll>(&self, cx: &CodegenCx<'ll, '_>) -> &'ll Type;
}

impl LlvmType for Reg {
    fn llvm_type<'ll>(&self, cx: &CodegenCx<'ll, '_>) -> &'ll Type {
        match self.kind {
            RegKind::Integer => cx.type_ix(self.size.bits()),
            RegKind::Float => match self.size.bits() {
                16 => cx.type_f16(),
                32 => cx.type_f32(),
                64 => cx.type_f64(),
                128 => cx.type_f128(),
                _ => bug!("unsupported float: {:?}", self),
            },
            RegKind::Vector => cx.type_vector(cx.type_i8(), self.size.bytes()),
        }
    }
}

impl LlvmType for CastTarget {
    fn llvm_type<'ll>(&self, cx: &CodegenCx<'ll, '_>) -> &'ll Type {
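        // A cast target lowers to either the single `rest` unit, an array of `rest` units,
        // or an LLVM struct of the prefix registers followed by the `rest` units. For
        // example, an empty prefix with `rest` covering 16 bytes in 8-byte integer units
        // lowers to `[2 x i64]`, while adding an `i32` prefix register yields the struct
        // `{ i32, i64, i64 }`.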
        let rest_ll_unit = self.rest.unit.llvm_type(cx);
        let rest_count = if self.rest.total == Size::ZERO {
            0
        } else {
            assert_ne!(
                self.rest.unit.size,
                Size::ZERO,
                "total size {:?} cannot be divided into units of zero size",
                self.rest.total
            );
            if self.rest.total.bytes() % self.rest.unit.size.bytes() != 0 {
                assert_eq!(self.rest.unit.kind, RegKind::Integer, "only int regs can be split");
            }
            self.rest.total.bytes().div_ceil(self.rest.unit.size.bytes())
        };

        if self.prefix.iter().all(|x| x.is_none()) {
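            // Without prefix registers, a single unit is passed as the unit type itself,
            // except for an `i128` that must stay consecutive, which keeps the array form.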
            if rest_count == 1 && (!self.rest.is_consecutive || self.rest.unit != Reg::i128()) {
                return rest_ll_unit;
            }

            return cx.type_array(rest_ll_unit, rest_count);
        }

        let prefix_args =
            self.prefix.iter().flat_map(|option_reg| option_reg.map(|reg| reg.llvm_type(cx)));
        let rest_args = (0..rest_count).map(|_| rest_ll_unit);
        let args: Vec<_> = prefix_args.chain(rest_args).collect();
        cx.type_struct(&args, false)
    }
}

trait ArgAbiExt<'ll, 'tcx> {
    fn store(
        &self,
        bx: &mut Builder<'_, 'll, 'tcx>,
        val: &'ll Value,
        dst: PlaceRef<'tcx, &'ll Value>,
    );
    fn store_fn_arg(
        &self,
        bx: &mut Builder<'_, 'll, 'tcx>,
        idx: &mut usize,
        dst: PlaceRef<'tcx, &'ll Value>,
    );
}

impl<'ll, 'tcx> ArgAbiExt<'ll, 'tcx> for ArgAbi<'tcx, Ty<'tcx>> {
    fn store(
        &self,
        bx: &mut Builder<'_, 'll, 'tcx>,
        val: &'ll Value,
        dst: PlaceRef<'tcx, &'ll Value>,
    ) {
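        // Store `val`, which holds the argument as passed according to `self.mode`, into
        // `dst`, a place with the argument's Rust layout.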
        match &self.mode {
            PassMode::Ignore => {}
            PassMode::Indirect { attrs, meta_attrs: None, on_stack: _ } => {
                let align = attrs.pointee_align.unwrap_or(self.layout.align.abi);
                OperandValue::Ref(PlaceValue::new_sized(val, align)).store(bx, dst);
            }
            PassMode::Indirect { attrs: _, meta_attrs: Some(_), on_stack: _ } => {
                bug!("unsized `ArgAbi` must be handled through `store_fn_arg`");
            }
            PassMode::Cast { cast, pad_i32: _ } => {
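                // The ABI's cast representation can differ in size and alignment from the
                // Rust layout, so spill the value to an adequately sized and aligned scratch
                // slot, then copy the smaller of the two sizes into the destination.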
                let scratch_size = cast.size(bx);
                let scratch_align = cast.align(bx);
                let copy_bytes =
                    cmp::min(cast.unaligned_size(bx).bytes(), self.layout.size.bytes());
                let llscratch = bx.alloca(scratch_size, scratch_align);
                bx.lifetime_start(llscratch, scratch_size);
                rustc_codegen_ssa::mir::store_cast(bx, cast, val, llscratch, scratch_align);
                bx.memcpy(
                    dst.val.llval,
                    self.layout.align.abi,
                    llscratch,
                    scratch_align,
                    bx.const_usize(copy_bytes),
                    MemFlags::empty(),
                );
                bx.lifetime_end(llscratch, scratch_size);
            }
            _ => {
                OperandRef::from_immediate_or_packed_pair(bx, val, self.layout).val.store(bx, dst);
            }
        }
    }

    fn store_fn_arg(
        &self,
        bx: &mut Builder<'_, 'll, 'tcx>,
        idx: &mut usize,
        dst: PlaceRef<'tcx, &'ll Value>,
    ) {
        let mut next = || {
            let val = llvm::get_param(bx.llfn(), *idx as c_uint);
            *idx += 1;
            val
        };
        match self.mode {
            PassMode::Ignore => {}
            PassMode::Pair(..) => {
                OperandValue::Pair(next(), next()).store(bx, dst);
            }
            PassMode::Indirect { attrs: _, meta_attrs: Some(_), on_stack: _ } => {
                let place_val = PlaceValue {
                    llval: next(),
                    llextra: Some(next()),
                    align: self.layout.align.abi,
                };
                OperandValue::Ref(place_val).store(bx, dst);
            }
            PassMode::Direct(_)
            | PassMode::Indirect { attrs: _, meta_attrs: None, on_stack: _ }
            | PassMode::Cast { .. } => {
                let next_arg = next();
                self.store(bx, next_arg, dst);
            }
        }
    }
}

impl<'ll, 'tcx> ArgAbiBuilderMethods<'tcx> for Builder<'_, 'll, 'tcx> {
    fn store_fn_arg(
        &mut self,
        arg_abi: &ArgAbi<'tcx, Ty<'tcx>>,
        idx: &mut usize,
        dst: PlaceRef<'tcx, Self::Value>,
    ) {
        arg_abi.store_fn_arg(self, idx, dst)
    }
    fn store_arg(
        &mut self,
        arg_abi: &ArgAbi<'tcx, Ty<'tcx>>,
        val: &'ll Value,
        dst: PlaceRef<'tcx, &'ll Value>,
    ) {
        arg_abi.store(self, val, dst)
    }
}

pub(crate) trait FnAbiLlvmExt<'ll, 'tcx> {
    fn llvm_type(&self, cx: &CodegenCx<'ll, 'tcx>) -> &'ll Type;
    fn ptr_to_llvm_type(&self, cx: &CodegenCx<'ll, 'tcx>) -> &'ll Type;
    fn llvm_cconv(&self, cx: &CodegenCx<'ll, 'tcx>) -> llvm::CallConv;

    fn apply_attrs_llfn(
        &self,
        cx: &CodegenCx<'ll, 'tcx>,
        llfn: &'ll Value,
        instance: Option<ty::Instance<'tcx>>,
    );

    fn apply_attrs_callsite(&self, bx: &mut Builder<'_, 'll, 'tcx>, callsite: &'ll Value);
}

impl<'ll, 'tcx> FnAbiLlvmExt<'ll, 'tcx> for FnAbi<'tcx, Ty<'tcx>> {
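    /// Builds the LLVM function type for this `FnAbi`. Only the fixed arguments of a
    /// C-variadic function appear in the type; an indirect return becomes a leading pointer
    /// parameter and the LLVM return type becomes `void`.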
    fn llvm_type(&self, cx: &CodegenCx<'ll, 'tcx>) -> &'ll Type {
        let args =
            if self.c_variadic { &self.args[..self.fixed_count as usize] } else { &self.args };

        let mut llargument_tys = Vec::with_capacity(
            self.args.len() + if let PassMode::Indirect { .. } = self.ret.mode { 1 } else { 0 },
        );

        let llreturn_ty = match &self.ret.mode {
            PassMode::Ignore => cx.type_void(),
            PassMode::Direct(_) | PassMode::Pair(..) => self.ret.layout.immediate_llvm_type(cx),
            PassMode::Cast { cast, pad_i32: _ } => cast.llvm_type(cx),
            PassMode::Indirect { .. } => {
                llargument_tys.push(cx.type_ptr());
                cx.type_void()
            }
        };

        for arg in args {
            let llarg_ty = match &arg.mode {
                PassMode::Ignore => continue,
                PassMode::Direct(_) => arg.layout.immediate_llvm_type(cx),
                PassMode::Pair(..) => {
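                    // Scalar pairs are passed as two separate immediate arguments.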
                    llargument_tys.push(arg.layout.scalar_pair_element_llvm_type(cx, 0, true));
                    llargument_tys.push(arg.layout.scalar_pair_element_llvm_type(cx, 1, true));
                    continue;
                }
                PassMode::Indirect { attrs: _, meta_attrs: Some(_), on_stack: _ } => {
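                    // Unsized arguments are passed indirectly as the two components of a wide
                    // pointer: the data pointer and the metadata.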
                    let ptr_ty = Ty::new_mut_ptr(cx.tcx, arg.layout.ty);
                    let ptr_layout = cx.layout_of(ptr_ty);
                    llargument_tys.push(ptr_layout.scalar_pair_element_llvm_type(cx, 0, true));
                    llargument_tys.push(ptr_layout.scalar_pair_element_llvm_type(cx, 1, true));
                    continue;
                }
                PassMode::Indirect { attrs: _, meta_attrs: None, on_stack: _ } => cx.type_ptr(),
                PassMode::Cast { cast, pad_i32 } => {
                    if *pad_i32 {
                        llargument_tys.push(Reg::i32().llvm_type(cx));
                    }
                    cast.llvm_type(cx)
                }
            };
            llargument_tys.push(llarg_ty);
        }

        if self.c_variadic {
            cx.type_variadic_func(&llargument_tys, llreturn_ty)
        } else {
            cx.type_func(&llargument_tys, llreturn_ty)
        }
    }

    fn ptr_to_llvm_type(&self, cx: &CodegenCx<'ll, 'tcx>) -> &'ll Type {
        cx.type_ptr_ext(cx.data_layout().instruction_address_space)
    }

    fn llvm_cconv(&self, cx: &CodegenCx<'ll, 'tcx>) -> llvm::CallConv {
        llvm::CallConv::from_conv(self.conv, cx.tcx.sess.target.arch.borrow())
    }

    fn apply_attrs_llfn(
        &self,
        cx: &CodegenCx<'ll, 'tcx>,
        llfn: &'ll Value,
        instance: Option<ty::Instance<'tcx>>,
    ) {
        let mut func_attrs = SmallVec::<[_; 3]>::new();
        if self.ret.layout.is_uninhabited() {
            func_attrs.push(llvm::AttributeKind::NoReturn.create_attr(cx.llcx));
        }
        if !self.can_unwind {
            func_attrs.push(llvm::AttributeKind::NoUnwind.create_attr(cx.llcx));
        }
        match self.conv {
            CanonAbi::Interrupt(InterruptKind::RiscvMachine) => {
                func_attrs.push(llvm::CreateAttrStringValue(cx.llcx, "interrupt", "machine"))
            }
            CanonAbi::Interrupt(InterruptKind::RiscvSupervisor) => {
                func_attrs.push(llvm::CreateAttrStringValue(cx.llcx, "interrupt", "supervisor"))
            }
            CanonAbi::Arm(ArmCall::CCmseNonSecureEntry) => {
                func_attrs.push(llvm::CreateAttrString(cx.llcx, "cmse_nonsecure_entry"))
            }
            _ => (),
        }
        attributes::apply_to_llfn(llfn, llvm::AttributePlace::Function, &{ func_attrs });

        let mut i = 0;
        let mut apply = |attrs: &ArgAttributes| {
            attrs.apply_attrs_to_llfn(llvm::AttributePlace::Argument(i), cx, llfn);
            i += 1;
            i - 1
        };

        let apply_range_attr = |idx: AttributePlace, scalar: rustc_abi::Scalar| {
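            // Attach an LLVM `range` attribute when the scalar is an integer (other than
            // bool) whose valid range does not cover every value of the type, and only when
            // optimizing.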
            if cx.sess().opts.optimize != config::OptLevel::No
                && matches!(scalar.primitive(), Primitive::Int(..))
                && !scalar.is_bool()
                && !scalar.is_always_valid(cx)
            {
                attributes::apply_to_llfn(
                    llfn,
                    idx,
                    &[llvm::CreateRangeAttr(cx.llcx, scalar.size(cx), scalar.valid_range(cx))],
                );
            }
        };

        match &self.ret.mode {
            PassMode::Direct(attrs) => {
                attrs.apply_attrs_to_llfn(llvm::AttributePlace::ReturnValue, cx, llfn);
                if let BackendRepr::Scalar(scalar) = self.ret.layout.backend_repr {
                    apply_range_attr(llvm::AttributePlace::ReturnValue, scalar);
                }
            }
            PassMode::Indirect { attrs, meta_attrs: _, on_stack } => {
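                // Indirect return: the hidden first parameter is an `sret` pointer to a
                // buffer the size of the return type; when optimizing it is also marked
                // `writable` and `dead_on_unwind`.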
                assert!(!on_stack);
                let i = apply(attrs);
                let sret = llvm::CreateStructRetAttr(
                    cx.llcx,
                    cx.type_array(cx.type_i8(), self.ret.layout.size.bytes()),
                );
                attributes::apply_to_llfn(llfn, llvm::AttributePlace::Argument(i), &[sret]);
                if cx.sess().opts.optimize != config::OptLevel::No {
                    attributes::apply_to_llfn(
                        llfn,
                        llvm::AttributePlace::Argument(i),
                        &[
                            llvm::AttributeKind::Writable.create_attr(cx.llcx),
                            llvm::AttributeKind::DeadOnUnwind.create_attr(cx.llcx),
                        ],
                    );
                }
            }
            PassMode::Cast { cast, pad_i32: _ } => {
                cast.attrs.apply_attrs_to_llfn(llvm::AttributePlace::ReturnValue, cx, llfn);
            }
            _ => {}
        }
        for arg in self.args.iter() {
            match &arg.mode {
                PassMode::Ignore => {}
                PassMode::Indirect { attrs, meta_attrs: None, on_stack: true } => {
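                    // On-stack (`byval`) argument: the parameter is a pointer annotated with
                    // a byte-array type matching the argument's size.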
                    let i = apply(attrs);
                    let byval = llvm::CreateByValAttr(
                        cx.llcx,
                        cx.type_array(cx.type_i8(), arg.layout.size.bytes()),
                    );
                    attributes::apply_to_llfn(llfn, llvm::AttributePlace::Argument(i), &[byval]);
                }
                PassMode::Direct(attrs) => {
                    let i = apply(attrs);
                    if let BackendRepr::Scalar(scalar) = arg.layout.backend_repr {
                        apply_range_attr(llvm::AttributePlace::Argument(i), scalar);
                    }
                }
                PassMode::Indirect { attrs, meta_attrs: None, on_stack: false } => {
                    apply(attrs);
                }
                PassMode::Indirect { attrs, meta_attrs: Some(meta_attrs), on_stack } => {
                    assert!(!on_stack);
                    apply(attrs);
                    apply(meta_attrs);
                }
                PassMode::Pair(a, b) => {
                    let i = apply(a);
                    let ii = apply(b);
                    if let BackendRepr::ScalarPair(scalar_a, scalar_b) = arg.layout.backend_repr {
                        apply_range_attr(llvm::AttributePlace::Argument(i), scalar_a);
                        apply_range_attr(llvm::AttributePlace::Argument(ii), scalar_b);
                    }
                }
                PassMode::Cast { cast, pad_i32 } => {
                    if *pad_i32 {
                        apply(&ArgAttributes::new());
                    }
                    apply(&cast.attrs);
                }
            }
        }

        if let Some(instance) = instance {
            llfn_attrs_from_instance(cx, llfn, instance);
        }
    }

    fn apply_attrs_callsite(&self, bx: &mut Builder<'_, 'll, 'tcx>, callsite: &'ll Value) {
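        // Counterpart to `apply_attrs_llfn`: apply the same per-argument attributes at the
        // call site, along with the calling convention and call-site-only attributes such
        // as `cmse_nonsecure_call` and `elementtype`.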
        let mut func_attrs = SmallVec::<[_; 2]>::new();
        if self.ret.layout.is_uninhabited() {
            func_attrs.push(llvm::AttributeKind::NoReturn.create_attr(bx.cx.llcx));
        }
        if !self.can_unwind {
            func_attrs.push(llvm::AttributeKind::NoUnwind.create_attr(bx.cx.llcx));
        }
        attributes::apply_to_callsite(callsite, llvm::AttributePlace::Function, &{ func_attrs });

        let mut i = 0;
        let mut apply = |cx: &CodegenCx<'_, '_>, attrs: &ArgAttributes| {
            attrs.apply_attrs_to_callsite(llvm::AttributePlace::Argument(i), cx, callsite);
            i += 1;
            i - 1
        };
        match &self.ret.mode {
            PassMode::Direct(attrs) => {
                attrs.apply_attrs_to_callsite(llvm::AttributePlace::ReturnValue, bx.cx, callsite);
            }
            PassMode::Indirect { attrs, meta_attrs: _, on_stack } => {
                assert!(!on_stack);
                let i = apply(bx.cx, attrs);
                let sret = llvm::CreateStructRetAttr(
                    bx.cx.llcx,
                    bx.cx.type_array(bx.cx.type_i8(), self.ret.layout.size.bytes()),
                );
                attributes::apply_to_callsite(callsite, llvm::AttributePlace::Argument(i), &[sret]);
            }
            PassMode::Cast { cast, pad_i32: _ } => {
                cast.attrs.apply_attrs_to_callsite(
                    llvm::AttributePlace::ReturnValue,
                    bx.cx,
                    callsite,
                );
            }
            _ => {}
        }
        for arg in self.args.iter() {
            match &arg.mode {
                PassMode::Ignore => {}
                PassMode::Indirect { attrs, meta_attrs: None, on_stack: true } => {
                    let i = apply(bx.cx, attrs);
                    let byval = llvm::CreateByValAttr(
                        bx.cx.llcx,
                        bx.cx.type_array(bx.cx.type_i8(), arg.layout.size.bytes()),
                    );
                    attributes::apply_to_callsite(
                        callsite,
                        llvm::AttributePlace::Argument(i),
                        &[byval],
                    );
                }
                PassMode::Direct(attrs)
                | PassMode::Indirect { attrs, meta_attrs: None, on_stack: false } => {
                    apply(bx.cx, attrs);
                }
                PassMode::Indirect { attrs, meta_attrs: Some(meta_attrs), on_stack: _ } => {
                    apply(bx.cx, attrs);
                    apply(bx.cx, meta_attrs);
                }
                PassMode::Pair(a, b) => {
                    apply(bx.cx, a);
                    apply(bx.cx, b);
                }
                PassMode::Cast { cast, pad_i32 } => {
                    if *pad_i32 {
                        apply(bx.cx, &ArgAttributes::new());
                    }
                    apply(bx.cx, &cast.attrs);
                }
            }
        }

        let cconv = self.llvm_cconv(&bx.cx);
        if cconv != llvm::CCallConv {
            llvm::SetInstructionCallConv(callsite, cconv);
        }

        if self.conv == CanonAbi::Arm(ArmCall::CCmseNonSecureCall) {
            let cmse_nonsecure_call = llvm::CreateAttrString(bx.cx.llcx, "cmse_nonsecure_call");
            attributes::apply_to_callsite(
                callsite,
                llvm::AttributePlace::Function,
                &[cmse_nonsecure_call],
            );
        }

        let element_type_index = unsafe { llvm::LLVMRustGetElementTypeArgIndex(callsite) };
        if element_type_index >= 0 {
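            // Some callees (certain LLVM intrinsics) require an `elementtype` attribute on
            // one of their pointer arguments; recover the pointee type from the Rust
            // signature and attach it to the call site.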
            let arg_ty = self.args[element_type_index as usize].layout.ty;
            let pointee_ty = arg_ty.builtin_deref(true).expect("Must be pointer argument");
            let element_type_attr = unsafe {
                llvm::LLVMRustCreateElementTypeAttr(bx.llcx, bx.layout_of(pointee_ty).llvm_type(bx))
            };
            attributes::apply_to_callsite(
                callsite,
                llvm::AttributePlace::Argument(element_type_index as u32),
                &[element_type_attr],
            );
        }
    }
}

impl AbiBuilderMethods for Builder<'_, '_, '_> {
    fn get_param(&mut self, index: usize) -> Self::Value {
        llvm::get_param(self.llfn(), index as c_uint)
    }
}

impl llvm::CallConv {
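    /// Maps a canonicalized Rust ABI, together with the target architecture (which matters
    /// for `GpuKernel`), to the LLVM calling convention.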
    pub(crate) fn from_conv(conv: CanonAbi, arch: &str) -> Self {
        match conv {
            CanonAbi::C | CanonAbi::Rust => llvm::CCallConv,
            CanonAbi::RustCold => llvm::PreserveMost,
            CanonAbi::Custom => llvm::CCallConv,
            CanonAbi::GpuKernel => {
                if arch == "amdgpu" {
                    llvm::AmdgpuKernel
                } else if arch == "nvptx64" {
                    llvm::PtxKernel
                } else {
                    panic!("Architecture {arch} does not support GpuKernel calling convention");
                }
            }
            CanonAbi::Interrupt(interrupt_kind) => match interrupt_kind {
                InterruptKind::Avr => llvm::AvrInterrupt,
                InterruptKind::AvrNonBlocking => llvm::AvrNonBlockingInterrupt,
                InterruptKind::Msp430 => llvm::Msp430Intr,
                InterruptKind::RiscvMachine | InterruptKind::RiscvSupervisor => llvm::CCallConv,
                InterruptKind::X86 => llvm::X86_Intr,
            },
            CanonAbi::Arm(arm_call) => match arm_call {
                ArmCall::Aapcs => llvm::ArmAapcsCallConv,
                ArmCall::CCmseNonSecureCall | ArmCall::CCmseNonSecureEntry => llvm::CCallConv,
            },
            CanonAbi::X86(x86_call) => match x86_call {
                X86Call::Fastcall => llvm::X86FastcallCallConv,
                X86Call::Stdcall => llvm::X86StdcallCallConv,
                X86Call::SysV64 => llvm::X86_64_SysV,
                X86Call::Thiscall => llvm::X86_ThisCall,
                X86Call::Vectorcall => llvm::X86_VectorCall,
                X86Call::Win64 => llvm::X86_64_Win64,
            },
        }
    }
}