use std::iter;

use rustc_abi::Primitive::Pointer;
use rustc_abi::{BackendRepr, ExternAbi, PointerKind, Scalar, Size};
use rustc_hir as hir;
use rustc_hir::lang_items::LangItem;
use rustc_middle::bug;
use rustc_middle::query::Providers;
use rustc_middle::ty::layout::{
    FnAbiError, HasTyCtxt, HasTypingEnv, LayoutCx, LayoutOf, TyAndLayout, fn_can_unwind,
};
use rustc_middle::ty::{self, InstanceKind, Ty, TyCtxt};
use rustc_session::config::OptLevel;
use rustc_span::DUMMY_SP;
use rustc_span::def_id::DefId;
use rustc_target::callconv::{
    AbiMap, ArgAbi, ArgAttribute, ArgAttributes, ArgExtension, FnAbi, PassMode,
};
use tracing::debug;

pub(crate) fn provide(providers: &mut Providers) {
    *providers = Providers { fn_abi_of_fn_ptr, fn_abi_of_instance, ..*providers };
}

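/// Computes the `FnSig` used for ABI purposes for `instance`, giving shims, closures,
/// coroutine-closures, and coroutines their actual calling signature (including the
/// environment/receiver parameter) rather than their nominal one.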
#[tracing::instrument(level = "debug", skip(tcx, typing_env))]
fn fn_sig_for_fn_abi<'tcx>(
    tcx: TyCtxt<'tcx>,
    instance: ty::Instance<'tcx>,
    typing_env: ty::TypingEnv<'tcx>,
) -> ty::FnSig<'tcx> {
    if let InstanceKind::ThreadLocalShim(..) = instance.def {
        return tcx.mk_fn_sig(
            [],
            tcx.thread_local_ptr_ty(instance.def_id()),
            false,
            hir::Safety::Safe,
            rustc_abi::ExternAbi::Unadjusted,
        );
    }

    let ty = instance.ty(tcx, typing_env);
    match *ty.kind() {
        ty::FnDef(def_id, args) => {
            let mut sig = tcx
                .instantiate_bound_regions_with_erased(tcx.fn_sig(def_id).instantiate(tcx, args));

            if let ty::InstanceKind::VTableShim(..) = instance.def {
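                // Modify `fn(self, ...)` to `fn(self: *mut Self, ...)`.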
                let mut inputs_and_output = sig.inputs_and_output.to_vec();
                inputs_and_output[0] = Ty::new_mut_ptr(tcx, inputs_and_output[0]);
                sig.inputs_and_output = tcx.mk_type_list(&inputs_and_output);
            }

            sig
        }
        ty::Closure(def_id, args) => {
            let sig = tcx.instantiate_bound_regions_with_erased(args.as_closure().sig());
            let env_ty = tcx.closure_env_ty(
                Ty::new_closure(tcx, def_id, args),
                args.as_closure().kind(),
                tcx.lifetimes.re_erased,
            );

            tcx.mk_fn_sig(
                iter::once(env_ty).chain(sig.inputs().iter().cloned()),
                sig.output(),
                sig.c_variadic,
                sig.safety,
                sig.abi,
            )
        }
        ty::CoroutineClosure(def_id, args) => {
            let coroutine_ty = Ty::new_coroutine_closure(tcx, def_id, args);
            let sig = args.as_coroutine_closure().coroutine_closure_sig();

            let mut coroutine_kind = args.as_coroutine_closure().kind();
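            // (If this instance is a `ConstructCoroutineInClosureShim`, the kind is
            // forced to `FnOnce` below.)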

            let env_ty =
                if let InstanceKind::ConstructCoroutineInClosureShim { receiver_by_ref, .. } =
                    instance.def
                {
                    coroutine_kind = ty::ClosureKind::FnOnce;

                    if receiver_by_ref {
                        Ty::new_imm_ref(tcx, tcx.lifetimes.re_erased, coroutine_ty)
                    } else {
                        coroutine_ty
                    }
                } else {
                    tcx.closure_env_ty(coroutine_ty, coroutine_kind, tcx.lifetimes.re_erased)
                };

            let sig = tcx.instantiate_bound_regions_with_erased(sig);

            tcx.mk_fn_sig(
                iter::once(env_ty).chain([sig.tupled_inputs_ty]),
                sig.to_coroutine_given_kind_and_upvars(
                    tcx,
                    args.as_coroutine_closure().parent_args(),
                    tcx.coroutine_for_closure(def_id),
                    coroutine_kind,
                    tcx.lifetimes.re_erased,
                    args.as_coroutine_closure().tupled_upvars_ty(),
                    args.as_coroutine_closure().coroutine_captures_by_ref_ty(),
                ),
                sig.c_variadic,
                sig.safety,
                sig.abi,
            )
        }
        ty::Coroutine(did, args) => {
            let coroutine_kind = tcx.coroutine_kind(did).unwrap();
            let sig = args.as_coroutine().sig();

            let env_ty = Ty::new_mut_ref(tcx, tcx.lifetimes.re_erased, ty);

            let pin_did = tcx.require_lang_item(LangItem::Pin, DUMMY_SP);
            let pin_adt_ref = tcx.adt_def(pin_did);
            let pin_args = tcx.mk_args(&[env_ty.into()]);
            let env_ty = match coroutine_kind {
                hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::Gen, _) => {
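                    // `Iterator::next` takes `&mut self`, not `Pin<&mut Self>`,
                    // unlike every other coroutine kind below.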
                    env_ty
                }
                hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::Async, _)
                | hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::AsyncGen, _)
                | hir::CoroutineKind::Coroutine(_) => Ty::new_adt(tcx, pin_adt_ref, pin_args),
            };

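            // The signature computed below is that of the coroutine's resume-like method:
            // `Coroutine::resume(...) -> CoroutineState` for a plain coroutine,
            // `Future::poll(...) -> Poll` for an `async` block,
            // `Iterator::next(...) -> Option` for a `gen` block,
            // and `AsyncIterator::poll_next(...) -> Poll<Option<_>>` for an `async gen` block.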
            let (resume_ty, ret_ty) = match coroutine_kind {
                hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::Async, _) => {
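                    // The signature should be `Future::poll(_, &mut Context<'_>) -> Poll<Output>`;
                    // an `async` coroutine's yield type is just `()`.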
                    assert_eq!(sig.yield_ty, tcx.types.unit);

                    let poll_did = tcx.require_lang_item(LangItem::Poll, DUMMY_SP);
                    let poll_adt_ref = tcx.adt_def(poll_did);
                    let poll_args = tcx.mk_args(&[sig.return_ty.into()]);
                    let ret_ty = Ty::new_adt(tcx, poll_adt_ref, poll_args);

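                    // The `ResumeTy` used during type checking is replaced with
                    // `&mut Context<'_>` for codegen; double-check that it really is `ResumeTy`.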
                    #[cfg(debug_assertions)]
                    {
                        if let ty::Adt(resume_ty_adt, _) = sig.resume_ty.kind() {
                            let expected_adt =
                                tcx.adt_def(tcx.require_lang_item(LangItem::ResumeTy, DUMMY_SP));
                            assert_eq!(*resume_ty_adt, expected_adt);
                        } else {
                            panic!("expected `ResumeTy`, found `{:?}`", sig.resume_ty);
                        };
                    }
                    let context_mut_ref = Ty::new_task_context(tcx);

                    (Some(context_mut_ref), ret_ty)
                }
                hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::Gen, _) => {
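                    // The signature should be `Iterator::next(_) -> Option<Yield>`.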
                    let option_did = tcx.require_lang_item(LangItem::Option, DUMMY_SP);
                    let option_adt_ref = tcx.adt_def(option_did);
                    let option_args = tcx.mk_args(&[sig.yield_ty.into()]);
                    let ret_ty = Ty::new_adt(tcx, option_adt_ref, option_args);

                    assert_eq!(sig.return_ty, tcx.types.unit);
                    assert_eq!(sig.resume_ty, tcx.types.unit);

                    (None, ret_ty)
                }
                hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::AsyncGen, _) => {
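                    // The signature should be
                    // `AsyncIterator::poll_next(_, &mut Context<'_>) -> Poll<Option<Yield>>`.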
                    assert_eq!(sig.return_ty, tcx.types.unit);

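                    // The yield type of an `async gen` coroutine is already `Poll<Option<_>>`,
                    // so it can be used as the return type directly.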
                    let ret_ty = sig.yield_ty;

                    #[cfg(debug_assertions)]
                    {
                        if let ty::Adt(resume_ty_adt, _) = sig.resume_ty.kind() {
                            let expected_adt =
                                tcx.adt_def(tcx.require_lang_item(LangItem::ResumeTy, DUMMY_SP));
                            assert_eq!(*resume_ty_adt, expected_adt);
                        } else {
                            panic!("expected `ResumeTy`, found `{:?}`", sig.resume_ty);
                        };
                    }
                    let context_mut_ref = Ty::new_task_context(tcx);

                    (Some(context_mut_ref), ret_ty)
                }
                hir::CoroutineKind::Coroutine(_) => {
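                    // The signature should be
                    // `Coroutine::resume(_, Resume) -> CoroutineState<Yield, Return>`.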
                    let state_did = tcx.require_lang_item(LangItem::CoroutineState, DUMMY_SP);
                    let state_adt_ref = tcx.adt_def(state_did);
                    let state_args = tcx.mk_args(&[sig.yield_ty.into(), sig.return_ty.into()]);
                    let ret_ty = Ty::new_adt(tcx, state_adt_ref, state_args);

                    (Some(sig.resume_ty), ret_ty)
                }
            };

            if let Some(resume_ty) = resume_ty {
                tcx.mk_fn_sig(
                    [env_ty, resume_ty],
                    ret_ty,
                    false,
                    hir::Safety::Safe,
                    rustc_abi::ExternAbi::Rust,
                )
            } else {
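                // `Iterator::next` takes no resume argument.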
                tcx.mk_fn_sig(
                    [env_ty],
                    ret_ty,
                    false,
                    hir::Safety::Safe,
                    rustc_abi::ExternAbi::Rust,
                )
            }
        }
        _ => bug!("unexpected type {:?} in Instance::fn_sig", ty),
    }
}

fn fn_abi_of_fn_ptr<'tcx>(
    tcx: TyCtxt<'tcx>,
    query: ty::PseudoCanonicalInput<'tcx, (ty::PolyFnSig<'tcx>, &'tcx ty::List<Ty<'tcx>>)>,
) -> Result<&'tcx FnAbi<'tcx, Ty<'tcx>>, &'tcx FnAbiError<'tcx>> {
    let ty::PseudoCanonicalInput { typing_env, value: (sig, extra_args) } = query;
    fn_abi_new_uncached(
        &LayoutCx::new(tcx, typing_env),
        tcx.instantiate_bound_regions_with_erased(sig),
        extra_args,
        None,
    )
}

fn fn_abi_of_instance<'tcx>(
    tcx: TyCtxt<'tcx>,
    query: ty::PseudoCanonicalInput<'tcx, (ty::Instance<'tcx>, &'tcx ty::List<Ty<'tcx>>)>,
) -> Result<&'tcx FnAbi<'tcx, Ty<'tcx>>, &'tcx FnAbiError<'tcx>> {
    let ty::PseudoCanonicalInput { typing_env, value: (instance, extra_args) } = query;
    fn_abi_new_uncached(
        &LayoutCx::new(tcx, typing_env),
        fn_sig_for_fn_abi(tcx, instance, typing_env),
        extra_args,
        Some(instance),
    )
}

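// Applies Rust-specific attributes to a scalar (bool or pointer) component of an argument,
// e.g. `zext`/`noundef` for `bool` and `nonnull`/`noalias`/`readonly` for safe pointers.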
fn adjust_for_rust_scalar<'tcx>(
    cx: LayoutCx<'tcx>,
    attrs: &mut ArgAttributes,
    scalar: Scalar,
    layout: TyAndLayout<'tcx>,
    offset: Size,
    is_return: bool,
    drop_target_pointee: Option<Ty<'tcx>>,
) {
    if scalar.is_bool() {
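        // Booleans are always noundef `i1`s that need to be zero-extended.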
        attrs.ext(ArgExtension::Zext);
        attrs.set(ArgAttribute::NoUndef);
        return;
    }

    if !scalar.is_uninit_valid() {
        attrs.set(ArgAttribute::NoUndef);
    }

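    // Only pointer-valued scalars are handled below.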
    let Scalar::Initialized { value: Pointer(_), valid_range } = scalar else { return };

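    // A pointer gets `nonnull` if its validity range excludes zero; the argument to
    // `drop_in_place` must also be non-null per its safety contract.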
    if !valid_range.contains(0) || drop_target_pointee.is_some() {
        attrs.set(ArgAttribute::NonNull);
    }

    let tcx = cx.tcx();

    if let Some(pointee) = layout.pointee_info_at(&cx, offset) {
        let kind = if let Some(kind) = pointee.safe {
            Some(kind)
        } else if let Some(pointee) = drop_target_pointee {
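            // The raw pointer passed to `drop_in_place` behaves like a mutable reference
            // to the pointee for the duration of the call.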
            Some(PointerKind::MutableRef { unpin: pointee.is_unpin(tcx, cx.typing_env) })
        } else {
            None
        };
        if let Some(kind) = kind {
            attrs.pointee_align =
                Some(pointee.align.min(cx.tcx().sess.target.max_reliable_alignment()));

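            // `Box`es and non-frozen shared references may be invalidated (e.g. deallocated)
            // during the call, and mutable references may point to `!Unpin` (potentially
            // self-referential) data, so only frozen `&T` and `&mut T`-to-`Unpin` pointees
            // get a `dereferenceable` size.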
            attrs.pointee_size = match kind {
                PointerKind::Box { .. }
                | PointerKind::SharedRef { frozen: false }
                | PointerKind::MutableRef { unpin: false } => Size::ZERO,
                PointerKind::SharedRef { frozen: true }
                | PointerKind::MutableRef { unpin: true } => pointee.size,
            };

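            // Whether `Box<T>` gets `noalias` is controlled by the unstable `-Zbox-noalias` flag.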
            let noalias_for_box = tcx.sess.opts.unstable_opts.box_noalias;

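            // Likewise, `noalias` on `&mut T` can be disabled with the unstable
            // `-Zmutable-noalias` flag.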
            let noalias_mut_ref = tcx.sess.opts.unstable_opts.mutable_noalias;

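            // A `&mut T` argument never aliases other arguments or global memory, and a frozen
            // `&T` (no interior mutability) is immutable for the duration of the call, so both
            // may be marked `noalias`.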
            let no_alias = match kind {
                PointerKind::SharedRef { frozen } => frozen,
                PointerKind::MutableRef { unpin } => unpin && noalias_mut_ref,
                PointerKind::Box { unpin, global } => unpin && global && noalias_for_box,
            };
            if no_alias && !is_return {
                attrs.set(ArgAttribute::NoAlias);
            }

            if matches!(kind, PointerKind::SharedRef { frozen: true }) && !is_return {
                attrs.set(ArgAttribute::ReadOnly);
            }
        }
    }
}

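// Checks that the computed `FnAbi` is internally consistent, e.g. that every `PassMode`
// matches the backend representation of the corresponding layout.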
fn fn_abi_sanity_check<'tcx>(
    cx: &LayoutCx<'tcx>,
    fn_abi: &FnAbi<'tcx, Ty<'tcx>>,
    spec_abi: ExternAbi,
) {
    fn fn_arg_sanity_check<'tcx>(
        cx: &LayoutCx<'tcx>,
        fn_abi: &FnAbi<'tcx, Ty<'tcx>>,
        spec_abi: ExternAbi,
        arg: &ArgAbi<'tcx, Ty<'tcx>>,
    ) {
        let tcx = cx.tcx();

        if spec_abi.is_rustic_abi() {
            if arg.layout.is_zst() {
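                // For rustic ABIs, zero-sized arguments must be ignored entirely.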
                assert!(arg.is_ignore());
            }
            if let PassMode::Indirect { on_stack, .. } = arg.mode {
                assert!(!on_stack, "rust abi shouldn't use on_stack");
            }
        }

        match &arg.mode {
            PassMode::Ignore => {
                assert!(arg.layout.is_zst());
            }
            PassMode::Direct(_) => {
                match arg.layout.backend_repr {
                    BackendRepr::Scalar(_) | BackendRepr::SimdVector { .. } => {}
                    BackendRepr::ScalarPair(..) => {
                        panic!("`PassMode::Direct` used for ScalarPair type {}", arg.layout.ty)
                    }
                    BackendRepr::Memory { sized } => {
                        assert!(sized, "`PassMode::Direct` for unsized type in ABI: {:#?}", fn_abi);

                        assert!(
                            matches!(spec_abi, ExternAbi::Unadjusted),
                            "`PassMode::Direct` for aggregates only allowed for \"unadjusted\"\n\
                             Problematic type: {:#?}",
                            arg.layout,
                        );
                    }
                }
            }
            PassMode::Pair(_, _) => {
                assert!(
                    matches!(arg.layout.backend_repr, BackendRepr::ScalarPair(..)),
                    "PassMode::Pair for type {}",
                    arg.layout.ty
                );
            }
            PassMode::Cast { .. } => {
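                // `Cast` passes the value as a different (cast) type, which only makes sense
                // for sized types.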
                assert!(arg.layout.is_sized());
            }
            PassMode::Indirect { meta_attrs: None, .. } => {
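                // Without metadata the argument must be sized.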
                assert!(arg.layout.is_sized());
            }
            PassMode::Indirect { meta_attrs: Some(_), on_stack, .. } => {
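                // With metadata: the argument must be unsized and is never passed on the stack.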
                assert!(arg.layout.is_unsized() && !on_stack);
                let tail = tcx.struct_tail_for_codegen(arg.layout.ty, cx.typing_env);
                if matches!(tail.kind(), ty::Foreign(..)) {
                    panic!("unsized arguments must not be `extern` types");
                }
            }
        }
    }

    for arg in fn_abi.args.iter() {
        fn_arg_sanity_check(cx, fn_abi, spec_abi, arg);
    }
    fn_arg_sanity_check(cx, fn_abi, spec_abi, &fn_abi.ret);
}

#[tracing::instrument(level = "debug", skip(cx, instance))]
fn fn_abi_new_uncached<'tcx>(
    cx: &LayoutCx<'tcx>,
    sig: ty::FnSig<'tcx>,
    extra_args: &[Ty<'tcx>],
    instance: Option<ty::Instance<'tcx>>,
) -> Result<&'tcx FnAbi<'tcx, Ty<'tcx>>, &'tcx FnAbiError<'tcx>> {
    let tcx = cx.tcx();
    let (caller_location, determined_fn_def_id, is_virtual_call) = if let Some(instance) = instance
    {
        let is_virtual_call = matches!(instance.def, ty::InstanceKind::Virtual(..));
        (
            instance.def.requires_caller_location(tcx).then(|| tcx.caller_location_ty()),
            if is_virtual_call { None } else { Some(instance.def_id()) },
            is_virtual_call,
        )
    } else {
        (None, None, false)
    };
    let sig = tcx.normalize_erasing_regions(cx.typing_env, sig);

    let abi_map = AbiMap::from_target(&tcx.sess.target);
    let conv = abi_map.canonize_abi(sig.abi, sig.c_variadic).unwrap();

    let mut inputs = sig.inputs();
    let extra_args = if sig.abi == ExternAbi::RustCall {
        assert!(!sig.c_variadic && extra_args.is_empty());

        if let Some(input) = sig.inputs().last()
            && let ty::Tuple(tupled_arguments) = input.kind()
        {
            inputs = &sig.inputs()[0..sig.inputs().len() - 1];
            tupled_arguments
        } else {
            bug!(
                "argument to function with \"rust-call\" ABI \
                 is not a tuple"
            );
        }
    } else {
        assert!(sig.c_variadic || extra_args.is_empty());
        extra_args
    };

    let is_drop_in_place = determined_fn_def_id.is_some_and(|def_id| {
        tcx.is_lang_item(def_id, LangItem::DropInPlace)
            || tcx.is_lang_item(def_id, LangItem::AsyncDropInPlace)
    });

    let arg_of = |ty: Ty<'tcx>, arg_idx: Option<usize>| -> Result<_, &'tcx FnAbiError<'tcx>> {
        let span = tracing::debug_span!("arg_of");
        let _entered = span.enter();
        let is_return = arg_idx.is_none();
        let is_drop_target = is_drop_in_place && arg_idx == Some(0);
        let drop_target_pointee = is_drop_target.then(|| match ty.kind() {
            ty::RawPtr(ty, _) => *ty,
            _ => bug!("argument to drop_in_place is not a raw ptr: {:?}", ty),
        });

        let layout = cx.layout_of(ty).map_err(|err| &*tcx.arena.alloc(FnAbiError::Layout(*err)))?;
        let layout = if is_virtual_call && arg_idx == Some(0) {
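            // For a virtual call, pass `self` as a thin pointer: the vtable is used to look
            // up the method and is not itself an argument of the virtual fn.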
            make_thin_self_ptr(cx, layout)
        } else {
            layout
        };

        let mut arg = ArgAbi::new(cx, layout, |layout, scalar, offset| {
            let mut attrs = ArgAttributes::new();
            adjust_for_rust_scalar(
                *cx,
                &mut attrs,
                scalar,
                *layout,
                offset,
                is_return,
                drop_target_pointee,
            );
            attrs
        });

        if arg.layout.is_zst() {
            arg.mode = PassMode::Ignore;
        }

        Ok(arg)
    };

    let mut fn_abi = FnAbi {
        ret: arg_of(sig.output(), None)?,
        args: inputs
            .iter()
            .copied()
            .chain(extra_args.iter().copied())
            .chain(caller_location)
            .enumerate()
            .map(|(i, ty)| arg_of(ty, Some(i)))
            .collect::<Result<_, _>>()?,
        c_variadic: sig.c_variadic,
        fixed_count: inputs.len() as u32,
        conv,
        can_unwind: fn_can_unwind(tcx, determined_fn_def_id, sig.abi),
    };
    fn_abi_adjust_for_abi(cx, &mut fn_abi, sig.abi, determined_fn_def_id);
    debug!("fn_abi_new_uncached = {:?}", fn_abi);
    fn_abi_sanity_check(cx, &fn_abi, sig.abi);
    Ok(tcx.arena.alloc(fn_abi))
}

#[tracing::instrument(level = "trace", skip(cx))]
fn fn_abi_adjust_for_abi<'tcx>(
    cx: &LayoutCx<'tcx>,
    fn_abi: &mut FnAbi<'tcx, Ty<'tcx>>,
    abi: ExternAbi,
    fn_def_id: Option<DefId>,
) {
    if abi == ExternAbi::Unadjusted {
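        // The "unadjusted" ABI skips the usual target-specific adjustments and forces
        // indirectly-passed arguments back to direct passing.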
        fn unadjust<'tcx>(arg: &mut ArgAbi<'tcx, Ty<'tcx>>) {
            if matches!(arg.layout.backend_repr, BackendRepr::Memory { .. }) {
                assert!(
                    arg.layout.backend_repr.is_sized(),
                    "'unadjusted' ABI does not support unsized arguments"
                );
            }
            arg.make_direct_deprecated();
        }

        unadjust(&mut fn_abi.ret);
        for arg in fn_abi.args.iter_mut() {
            unadjust(arg);
        }
        return;
    }

    let tcx = cx.tcx();

    if abi.is_rustic_abi() {
        fn_abi.adjust_for_rust_abi(cx);

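        // When optimizing non-incrementally, attributes deduced from the callee's body
        // (such as `readonly`) can be applied to its parameters.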
        let deduced_param_attrs =
            if tcx.sess.opts.optimize != OptLevel::No && tcx.sess.opts.incremental.is_none() {
                fn_def_id.map(|fn_def_id| tcx.deduced_param_attrs(fn_def_id)).unwrap_or_default()
            } else {
                &[]
            };

        for (arg_idx, arg) in fn_abi.args.iter_mut().enumerate() {
            if arg.is_ignore() {
                continue;
            }

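            // `readonly` only applies to pointer parameters, i.e. arguments passed indirectly.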
            if let &mut PassMode::Indirect { ref mut attrs, .. } = &mut arg.mode {
                if let Some(deduced_param_attrs) = deduced_param_attrs.get(arg_idx) {
                    if deduced_param_attrs.read_only {
                        attrs.regular.insert(ArgAttribute::ReadOnly);
                        debug!("added deduced read-only attribute");
                    }
                }
            }
        }
    } else {
        fn_abi.adjust_for_foreign_abi(cx, abi);
    }
}

#[tracing::instrument(level = "debug", skip(cx))]
fn make_thin_self_ptr<'tcx>(
    cx: &(impl HasTyCtxt<'tcx> + HasTypingEnv<'tcx>),
    layout: TyAndLayout<'tcx>,
) -> TyAndLayout<'tcx> {
    let tcx = cx.tcx();
    let wide_pointer_ty = if layout.is_unsized() {
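        // An unsized `self` is passed as a pointer to `self`.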
        Ty::new_mut_ptr(tcx, layout.ty)
    } else {
        match layout.backend_repr {
            BackendRepr::ScalarPair(..) | BackendRepr::Scalar(..) => (),
            _ => bug!("receiver type has unsupported layout: {:?}", layout),
        }

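        // For receivers like `Rc<Self>`, keep peeling off newtype wrappers (each step must
        // have exactly one non-1-ZST field) until we reach the underlying wide pointer or
        // reference type.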
        let mut wide_pointer_layout = layout;
        while !wide_pointer_layout.ty.is_raw_ptr() && !wide_pointer_layout.ty.is_ref() {
            wide_pointer_layout = wide_pointer_layout
                .non_1zst_field(cx)
                .expect("not exactly one non-1-ZST field in a `DispatchFromDyn` type")
                .1
        }

        wide_pointer_layout.ty
    };

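    // Keep the wide pointer *type*, but give it the layout of a thin pointer (`*mut ()`).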
    let unit_ptr_ty = Ty::new_mut_ptr(tcx, tcx.types.unit);

    TyAndLayout {
        ty: wide_pointer_ty,
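        // Using a fully monomorphized typing env and `unwrap()` is fine here: the layout of
        // `*mut ()` always computes successfully.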
        ..tcx.layout_of(ty::TypingEnv::fully_monomorphized().as_query_input(unit_ptr_ty)).unwrap()
    }
}