use std::iter;

use ast::visit::Visitor;
use hir::def::{DefKind, PartialRes, Res};
use hir::{BodyId, HirId};
use rustc_abi::ExternAbi;
use rustc_ast::*;
use rustc_errors::ErrorGuaranteed;
use rustc_hir::def_id::DefId;
use rustc_middle::span_bug;
use rustc_middle::ty::{Asyncness, ResolverAstLowering};
use rustc_span::{Ident, Span};
use {rustc_ast as ast, rustc_hir as hir};

use super::{GenericArgsMode, ImplTraitContext, LoweringContext, ParamMode};
use crate::{AllowReturnTypeNotation, ImplTraitPosition, ResolverAstLoweringExt};

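/// The lowered pieces of a delegation item: the generated body, the function
/// signature, and the generics node.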
pub(crate) struct DelegationResults<'hir> {
    pub body_id: hir::BodyId,
    pub sig: hir::FnSig<'hir>,
    pub generics: &'hir hir::Generics<'hir>,
}

impl<'hir> LoweringContext<'_, 'hir> {
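    /// Returns `true` if the delegation target resolves to an associated function
    /// with a `self` parameter (i.e. a method). Resolution failures yield `false`.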
    pub(crate) fn delegatee_is_method(
        &self,
        item_id: NodeId,
        path_id: NodeId,
        span: Span,
        is_in_trait_impl: bool,
    ) -> bool {
        let sig_id = self.get_delegation_sig_id(item_id, path_id, span, is_in_trait_impl);
        let Ok(sig_id) = sig_id else {
            return false;
        };
        self.is_method(sig_id, span)
    }

    fn is_method(&self, def_id: DefId, span: Span) -> bool {
        match self.tcx.def_kind(def_id) {
            DefKind::Fn => false,
            DefKind::AssocFn => match def_id.as_local() {
                Some(local_def_id) => self
                    .resolver
                    .delegation_fn_sigs
                    .get(&local_def_id)
                    .is_some_and(|sig| sig.has_self),
                None => self.tcx.associated_item(def_id).fn_has_self_parameter,
            },
            _ => span_bug!(span, "unexpected DefKind for delegation item"),
        }
    }

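    /// Lowers a delegation item into its HIR parts: a signature, generics, and a
    /// body that forwards to the target. If the target cannot be resolved, an
    /// error stub is produced instead.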
    pub(crate) fn lower_delegation(
        &mut self,
        delegation: &Delegation,
        item_id: NodeId,
        is_in_trait_impl: bool,
    ) -> DelegationResults<'hir> {
        let span = self.lower_span(delegation.path.segments.last().unwrap().ident.span);
        let sig_id = self.get_delegation_sig_id(item_id, delegation.id, span, is_in_trait_impl);
        match sig_id {
            Ok(sig_id) => {
                let (param_count, c_variadic) = self.param_count(sig_id);
                let decl = self.lower_delegation_decl(sig_id, param_count, c_variadic, span);
                let sig = self.lower_delegation_sig(sig_id, decl, span);
                let body_id = self.lower_delegation_body(delegation, param_count, span);

                let generics = self.lower_delegation_generics(span);
                DelegationResults { body_id, sig, generics }
            }
            Err(err) => self.generate_delegation_error(err, span),
        }
    }

    fn get_delegation_sig_id(
        &self,
        item_id: NodeId,
        path_id: NodeId,
        span: Span,
        is_in_trait_impl: bool,
    ) -> Result<DefId, ErrorGuaranteed> {
        let sig_id = if is_in_trait_impl { item_id } else { path_id };
        self.get_resolution_id(sig_id, span)
    }

    fn get_resolution_id(&self, node_id: NodeId, span: Span) -> Result<DefId, ErrorGuaranteed> {
        let def_id =
            self.resolver.get_partial_res(node_id).and_then(|r| r.expect_full_res().opt_def_id());
        def_id.ok_or_else(|| {
            self.tcx.dcx().span_delayed_bug(
                span,
                format!("LoweringContext: couldn't resolve node {:?} in delegation item", node_id),
            )
        })
    }

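    /// Allocates an empty `Generics` node for the delegation item, which carries no
    /// explicit generic parameters or where-clauses of its own.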
    fn lower_delegation_generics(&mut self, span: Span) -> &'hir hir::Generics<'hir> {
        self.arena.alloc(hir::Generics {
            params: &[],
            predicates: &[],
            has_where_clause_predicates: false,
            where_clause_span: span,
            span,
        })
    }

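    /// Returns the number of parameters of the target signature and whether it is
    /// C-variadic. Local targets are looked up in the resolver's delegation table;
    /// external targets are read from the callee's `fn_sig`.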
    fn param_count(&self, sig_id: DefId) -> (usize, bool) {
        if let Some(local_sig_id) = sig_id.as_local() {
            match self.resolver.delegation_fn_sigs.get(&local_sig_id) {
                Some(sig) => (sig.param_count, sig.c_variadic),
                None => (0, false),
            }
        } else {
            let sig = self.tcx.fn_sig(sig_id).skip_binder().skip_binder();
            (sig.inputs().len() + usize::from(sig.c_variadic), sig.c_variadic)
        }
    }

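    /// Builds the `FnDecl` for the delegation item. The actual parameter and return
    /// types are not known during lowering, so each is emitted as an
    /// `InferDelegation` placeholder tied to the target signature `sig_id`.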
    fn lower_delegation_decl(
        &mut self,
        sig_id: DefId,
        param_count: usize,
        c_variadic: bool,
        span: Span,
    ) -> &'hir hir::FnDecl<'hir> {
        let decl_param_count = param_count - c_variadic as usize;
        let inputs = self.arena.alloc_from_iter((0..decl_param_count).map(|arg| hir::Ty {
            hir_id: self.next_id(),
            kind: hir::TyKind::InferDelegation(sig_id, hir::InferDelegationKind::Input(arg)),
            span,
        }));

        let output = self.arena.alloc(hir::Ty {
            hir_id: self.next_id(),
            kind: hir::TyKind::InferDelegation(sig_id, hir::InferDelegationKind::Output),
            span,
        });

        self.arena.alloc(hir::FnDecl {
            inputs,
            output: hir::FnRetTy::Return(output),
            c_variadic,
            lifetime_elision_allowed: true,
            implicit_self: hir::ImplicitSelfKind::None,
        })
    }

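    /// Builds the `FnSig` for the delegation item, deriving the header (safety,
    /// constness, asyncness, ABI) from the target signature.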
    fn lower_delegation_sig(
        &mut self,
        sig_id: DefId,
        decl: &'hir hir::FnDecl<'hir>,
        span: Span,
    ) -> hir::FnSig<'hir> {
        let header = if let Some(local_sig_id) = sig_id.as_local() {
            match self.resolver.delegation_fn_sigs.get(&local_sig_id) {
                Some(sig) => {
                    let parent = self.tcx.parent(sig_id);
                    let default_safety =
                        if sig.target_feature || self.tcx.def_kind(parent) == DefKind::ForeignMod {
                            hir::Safety::Unsafe
                        } else {
                            hir::Safety::Safe
                        };
                    self.lower_fn_header(sig.header, default_safety, &[])
                }
                None => self.generate_header_error(),
            }
        } else {
            let sig = self.tcx.fn_sig(sig_id).skip_binder().skip_binder();
            let asyncness = match self.tcx.asyncness(sig_id) {
                Asyncness::Yes => hir::IsAsync::Async(span),
                Asyncness::No => hir::IsAsync::NotAsync,
            };
            hir::FnHeader {
                safety: if self.tcx.codegen_fn_attrs(sig_id).safe_target_features {
                    hir::HeaderSafety::SafeTargetFeatures
                } else {
                    hir::HeaderSafety::Normal(sig.safety)
                },
                constness: self.tcx.constness(sig_id),
                asyncness,
                abi: sig.abi,
            }
        };
        hir::FnSig { decl, header, span }
    }

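    /// Creates a fresh, unnamed parameter for the generated body and returns the
    /// `NodeId` of its binding pattern, so that `self` inside an explicit delegation
    /// body can later be redirected to it.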
    fn generate_param(&mut self, span: Span) -> (hir::Param<'hir>, NodeId) {
        let pat_node_id = self.next_node_id();
        let pat_id = self.lower_node_id(pat_node_id);
        let pat = self.arena.alloc(hir::Pat {
            hir_id: pat_id,
            kind: hir::PatKind::Binding(hir::BindingMode::NONE, pat_id, Ident::empty(), None),
            span,
            default_binding_modes: false,
        });

        (hir::Param { hir_id: self.next_id(), pat, ty_span: span, span }, pat_node_id)
    }

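    /// Creates a path expression that refers to the generated parameter `param_id`,
    /// to be forwarded as an argument of the delegated call.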
    fn generate_arg(&mut self, param_id: HirId, span: Span) -> hir::Expr<'hir> {
        let segments = self.arena.alloc_from_iter(iter::once(hir::PathSegment {
            ident: Ident::empty(),
            hir_id: self.next_id(),
            res: Res::Local(param_id),
            args: None,
            infer_args: false,
        }));

        let path = self.arena.alloc(hir::Path { span, res: Res::Local(param_id), segments });
        self.mk_expr(hir::ExprKind::Path(hir::QPath::Resolved(None, path)), span)
    }

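    /// Lowers the body of a delegation item. One unnamed parameter is generated per
    /// target parameter. If the delegation has an explicit body block, `self` inside
    /// it is rebound to the first generated parameter and the lowered block becomes
    /// the first argument of the forwarded call; all remaining arguments simply
    /// forward the generated parameters.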
    fn lower_delegation_body(
        &mut self,
        delegation: &Delegation,
        param_count: usize,
        span: Span,
    ) -> BodyId {
        let block = delegation.body.as_deref();

        self.lower_body(|this| {
            let mut parameters: Vec<hir::Param<'_>> = Vec::with_capacity(param_count);
            let mut args: Vec<hir::Expr<'_>> = Vec::with_capacity(param_count);

            for idx in 0..param_count {
                let (param, pat_node_id) = this.generate_param(span);
                parameters.push(param);

                let arg = if let Some(block) = block
                    && idx == 0
                {
                    let mut self_resolver = SelfResolver {
                        resolver: this.resolver,
                        path_id: delegation.id,
                        self_param_id: pat_node_id,
                    };
                    self_resolver.visit_block(block);
                    this.ident_and_label_to_local_id
                        .insert(pat_node_id, param.pat.hir_id.local_id);
                    this.lower_target_expr(&block)
                } else {
                    this.generate_arg(param.pat.hir_id, span)
                };
                args.push(arg);
            }

            let final_expr = this.finalize_body_lowering(delegation, args, span);
            (this.arena.alloc_from_iter(parameters), final_expr)
        })
    }

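    /// Lowers the explicit delegation body. A block consisting of a single expression
    /// statement is lowered to that expression directly; any other block is lowered
    /// as a block expression.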
    fn lower_target_expr(&mut self, block: &Block) -> hir::Expr<'hir> {
        if let [stmt] = block.stmts.as_slice()
            && let StmtKind::Expr(expr) = &stmt.kind
        {
            return self.lower_expr_mut(expr);
        }

        let block = self.lower_block(block, false);
        self.mk_expr(hir::ExprKind::Block(block, None), block.span)
    }

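    /// Generates the final forwarding expression of the delegation body. If the
    /// target is a method and the path has no qualified self, no generic arguments,
    /// and at least one argument, a method call on the first argument is emitted so
    /// that the usual method-call receiver adjustments can apply; otherwise a plain
    /// path call is generated. The resulting call is wrapped in a block expression.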
    fn finalize_body_lowering(
        &mut self,
        delegation: &Delegation,
        args: Vec<hir::Expr<'hir>>,
        span: Span,
    ) -> hir::Expr<'hir> {
        let args = self.arena.alloc_from_iter(args);

        let has_generic_args =
            delegation.path.segments.iter().rev().skip(1).any(|segment| segment.args.is_some());

        let call = if self
            .get_resolution_id(delegation.id, span)
            .map(|def_id| self.is_method(def_id, span))
            .unwrap_or_default()
            && delegation.qself.is_none()
            && !has_generic_args
            && !args.is_empty()
        {
            let ast_segment = delegation.path.segments.last().unwrap();
            let segment = self.lower_path_segment(
                delegation.path.span,
                ast_segment,
                ParamMode::Optional,
                GenericArgsMode::Err,
                ImplTraitContext::Disallowed(ImplTraitPosition::Path),
                None,
            );
            let segment = self.arena.alloc(segment);

            self.arena.alloc(hir::Expr {
                hir_id: self.next_id(),
                kind: hir::ExprKind::MethodCall(segment, &args[0], &args[1..], span),
                span,
            })
        } else {
            let path = self.lower_qpath(
                delegation.id,
                &delegation.qself,
                &delegation.path,
                ParamMode::Optional,
                AllowReturnTypeNotation::No,
                ImplTraitContext::Disallowed(ImplTraitPosition::Path),
                None,
            );

            let callee_path = self.arena.alloc(self.mk_expr(hir::ExprKind::Path(path), span));
            self.arena.alloc(self.mk_expr(hir::ExprKind::Call(callee_path, args), span))
        };
        let block = self.arena.alloc(hir::Block {
            stmts: &[],
            expr: Some(call),
            hir_id: self.next_id(),
            rules: hir::BlockCheckMode::DefaultBlock,
            span,
            targeted_by_break: false,
        });

        self.mk_expr(hir::ExprKind::Block(block, None), span)
    }

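    /// Produces a stub `DelegationResults` (empty generics, default signature, and a
    /// body containing only an error expression) when the delegation target could not
    /// be resolved.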
    fn generate_delegation_error(
        &mut self,
        err: ErrorGuaranteed,
        span: Span,
    ) -> DelegationResults<'hir> {
        let generics = self.lower_delegation_generics(span);

        let decl = self.arena.alloc(hir::FnDecl {
            inputs: &[],
            output: hir::FnRetTy::DefaultReturn(span),
            c_variadic: false,
            lifetime_elision_allowed: true,
            implicit_self: hir::ImplicitSelfKind::None,
        });

        let header = self.generate_header_error();
        let sig = hir::FnSig { decl, header, span };

        let body_id = self.lower_body(|this| (&[], this.mk_expr(hir::ExprKind::Err(err), span)));
        DelegationResults { generics, body_id, sig }
    }

    fn generate_header_error(&self) -> hir::FnHeader {
        hir::FnHeader {
            safety: hir::Safety::Safe.into(),
            constness: hir::Constness::NotConst,
            asyncness: hir::IsAsync::NotAsync,
            abi: ExternAbi::Rust,
        }
    }

    #[inline]
    fn mk_expr(&mut self, kind: hir::ExprKind<'hir>, span: Span) -> hir::Expr<'hir> {
        hir::Expr { hir_id: self.next_id(), kind, span }
    }
}

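/// Rewrites resolutions inside an explicit delegation body: occurrences of `self`
/// whose resolution points at the delegation item's node (`path_id`) are redirected
/// to the generated first parameter of the lowered body (`self_param_id`).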
struct SelfResolver<'a> {
    resolver: &'a mut ResolverAstLowering,
    path_id: NodeId,
    self_param_id: NodeId,
}

impl<'a> SelfResolver<'a> {
    fn try_replace_id(&mut self, id: NodeId) {
        if let Some(res) = self.resolver.partial_res_map.get(&id)
            && let Some(Res::Local(sig_id)) = res.full_res()
            && sig_id == self.path_id
        {
            let new_res = PartialRes::new(Res::Local(self.self_param_id));
            self.resolver.partial_res_map.insert(id, new_res);
        }
    }
}

impl<'ast, 'a> Visitor<'ast> for SelfResolver<'a> {
    fn visit_path(&mut self, path: &'ast Path, id: NodeId) {
        self.try_replace_id(id);
        visit::walk_path(self, path);
    }

    fn visit_path_segment(&mut self, seg: &'ast PathSegment) {
        self.try_replace_id(seg.id);
        visit::walk_path_segment(self, seg);
    }
}