use std::mem;

use rustc_ast::token::{
    self, Delimiter, IdentIsRaw, InvisibleOrigin, Lit, LitKind, MetaVarKind, Token, TokenKind,
};
use rustc_ast::tokenstream::{DelimSpacing, DelimSpan, Spacing, TokenStream, TokenTree};
use rustc_ast::{ExprKind, StmtKind, TyKind, UnOp};
use rustc_data_structures::fx::FxHashMap;
use rustc_errors::{Diag, DiagCtxtHandle, PResult, pluralize};
use rustc_parse::lexer::nfc_normalize;
use rustc_parse::parser::ParseNtResult;
use rustc_session::parse::ParseSess;
use rustc_span::hygiene::{LocalExpnId, Transparency};
use rustc_span::{
    Ident, MacroRulesNormalizedIdent, Span, Symbol, SyntaxContext, sym, with_metavar_spans,
};
use smallvec::{SmallVec, smallvec};

use crate::errors::{
    CountRepetitionMisplaced, MetaVarExprUnrecognizedVar, MetaVarsDifSeqMatchers, MustRepeatOnce,
    NoSyntaxVarsExprRepeat, VarStillRepeating,
};
use crate::mbe::macro_parser::NamedMatch;
use crate::mbe::macro_parser::NamedMatch::*;
use crate::mbe::metavar_expr::{MetaVarExprConcatElem, RAW_IDENT_ERR};
use crate::mbe::{self, KleeneOp, MetaVarExpr};

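/// State threaded through a single transcription of a macro's RHS.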
struct TranscrCtx<'psess, 'itp> {
    psess: &'psess ParseSess,

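    /// Map from metavariable names to the tokens they matched in the invocation.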
    interp: &'itp FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,

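    /// Applies hygiene marks to the spans of transcribed tokens.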
    marker: Marker,

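    /// Frames of the RHS that still need to be transcribed; starts out holding a
    /// single frame for the whole RHS.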
    stack: SmallVec<[Frame<'itp>; 1]>,

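    /// For each enclosing sequence, the current iteration index and the total
    /// number of iterations, innermost last.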
    repeats: Vec<(usize, usize)>,

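    /// Tokens produced for the token tree currently being transcribed; holds the
    /// final output once the outermost frame is finished.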
    result: Vec<TokenTree>,

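    /// Saved `result` buffers for enclosing delimited groups that are still being
    /// transcribed.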
    result_stack: Vec<Vec<TokenTree>>,
}

impl<'psess> TranscrCtx<'psess, '_> {
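    /// The entire span of `dspan`, with the hygiene mark applied.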
    fn visited_dspan(&mut self, dspan: DelimSpan) -> Span {
        let mut span = dspan.entire();
        self.marker.mark_span(&mut span);
        span
    }
}

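/// Applies a hygiene mark (`expand_id` with the given `transparency`) to span syntax contexts,
/// caching marked contexts since most tokens in a macro body share the same context.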
struct Marker {
    expand_id: LocalExpnId,
    transparency: Transparency,
    cache: FxHashMap<SyntaxContext, SyntaxContext>,
}

impl Marker {
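    /// Mark `span`'s syntax context, reusing the cached result for contexts seen before.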
    fn mark_span(&mut self, span: &mut Span) {
        *span = span.map_ctxt(|ctxt| {
            *self
                .cache
                .entry(ctxt)
                .or_insert_with(|| ctxt.apply_mark(self.expand_id.to_expn_id(), self.transparency))
        });
    }
}

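/// An iterator over one level of the macro RHS: either the contents of a delimited group or the
/// body of a repetition sequence.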
struct Frame<'a> {
    tts: &'a [mbe::TokenTree],
    idx: usize,
    kind: FrameKind,
}

enum FrameKind {
    Delimited { delim: Delimiter, span: DelimSpan, spacing: DelimSpacing },
    Sequence { sep: Option<Token>, kleene_op: KleeneOp },
}

impl<'a> Frame<'a> {
    fn new_delimited(src: &'a mbe::Delimited, span: DelimSpan, spacing: DelimSpacing) -> Frame<'a> {
        Frame {
            tts: &src.tts,
            idx: 0,
            kind: FrameKind::Delimited { delim: src.delim, span, spacing },
        }
    }

    fn new_sequence(
        src: &'a mbe::SequenceRepetition,
        sep: Option<Token>,
        kleene_op: KleeneOp,
    ) -> Frame<'a> {
        Frame { tts: &src.tts, idx: 0, kind: FrameKind::Sequence { sep, kleene_op } }
    }
}

impl<'a> Iterator for Frame<'a> {
    type Item = &'a mbe::TokenTree;

    fn next(&mut self) -> Option<&'a mbe::TokenTree> {
        let res = self.tts.get(self.idx);
        self.idx += 1;
        res
    }
}

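/// This can do Macro-By-Example transcription.
///
/// - `interp` is a map of meta-variables to the tokens (non-terminals) they matched in the
///   invocation. We are assuming we already know there is a match.
/// - `src` is the RHS of the MBE, that is, the "example" we are filling in.
///
/// For example, given the rule `($id:ident) => { println!("{}", stringify!($id)); }` and the
/// invocation `foo!(bar)`, `interp` maps `$id` to `bar`, and transcription produces the token
/// stream for `println!("{}", stringify!(bar));`.
///
/// Along the way, some additional error checking is done (e.g. that repetition counts agree).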
pub(super) fn transcribe<'a>(
    psess: &'a ParseSess,
    interp: &FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
    src: &mbe::Delimited,
    src_span: DelimSpan,
    transparency: Transparency,
    expand_id: LocalExpnId,
) -> PResult<'a, TokenStream> {
    if src.tts.is_empty() {
        return Ok(TokenStream::default());
    }

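    // Set up the transcription state, with the whole RHS as the only frame on the stack.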
    let mut tscx = TranscrCtx {
        psess,
        interp,
        marker: Marker { expand_id, transparency, cache: Default::default() },
        repeats: Vec::new(),
        stack: smallvec![Frame::new_delimited(
            src,
            src_span,
            DelimSpacing::new(Spacing::Alone, Spacing::Alone)
        )],
        result: Vec::new(),
        result_stack: Vec::new(),
    };

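    // Walk the RHS, pushing a frame for each sequence or delimited group we descend into and
    // popping it once its tokens are exhausted.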
    loop {
        let Some(tree) = tscx.stack.last_mut().unwrap().next() else {
            let frame = tscx.stack.last_mut().unwrap();
            if let FrameKind::Sequence { sep, .. } = &frame.kind {
                let (repeat_idx, repeat_len) = tscx.repeats.last_mut().unwrap();
                *repeat_idx += 1;
                if repeat_idx < repeat_len {
                    frame.idx = 0;
                    if let Some(sep) = sep {
                        tscx.result.push(TokenTree::Token(*sep, Spacing::Alone));
                    }
                    continue;
                }
            }

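            // The top frame is exhausted: pop it, and either finish the sequence or close the
            // delimited group (returning the final stream at the outermost level).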
            match tscx.stack.pop().unwrap().kind {
                FrameKind::Sequence { .. } => {
                    tscx.repeats.pop();
                }

                FrameKind::Delimited { delim, span, mut spacing, .. } => {
                    if delim == Delimiter::Bracket {
                        spacing.close = Spacing::Alone;
                    }
                    if tscx.result_stack.is_empty() {
                        return Ok(TokenStream::new(tscx.result));
                    }

                    let tree =
                        TokenTree::Delimited(span, spacing, delim, TokenStream::new(tscx.result));
                    tscx.result = tscx.result_stack.pop().unwrap();
                    tscx.result.push(tree);
                }
            }
            continue;
        };

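        // Process the next token tree from the current frame.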
        match tree {
            seq @ mbe::TokenTree::Sequence(_, seq_rep) => {
                transcribe_sequence(&mut tscx, seq, seq_rep)?;
            }

            &mbe::TokenTree::MetaVar(sp, original_ident) => {
                transcribe_metavar(&mut tscx, sp, original_ident)?;
            }

            mbe::TokenTree::MetaVarExpr(dspan, expr) => {
                transcribe_metavar_expr(&mut tscx, *dspan, expr)?;
            }

            &mbe::TokenTree::Delimited(mut span, ref spacing, ref delimited) => {
                tscx.marker.mark_span(&mut span.open);
                tscx.marker.mark_span(&mut span.close);
                tscx.stack.push(Frame::new_delimited(delimited, span, *spacing));
                tscx.result_stack.push(mem::take(&mut tscx.result));
            }

            &mbe::TokenTree::Token(mut token) => {
                tscx.marker.mark_span(&mut token.span);
                if let token::NtIdent(ident, _) | token::NtLifetime(ident, _) = &mut token.kind {
                    tscx.marker.mark_span(&mut ident.span);
                }
                let tt = TokenTree::Token(token, Spacing::Alone);
                tscx.result.push(tt);
            }

            mbe::TokenTree::MetaVarDecl { .. } => panic!("unexpected `TokenTree::MetaVarDecl`"),
        }
    }
}

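/// Turn an `mbe::TokenTree::Sequence` into a new frame on the stack, after checking that all
/// metavariables it uses agree on the number of repetitions.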
fn transcribe_sequence<'tx, 'itp>(
    tscx: &mut TranscrCtx<'tx, 'itp>,
    seq: &mbe::TokenTree,
    seq_rep: &'itp mbe::SequenceRepetition,
) -> PResult<'tx, ()> {
    let dcx = tscx.psess.dcx();

    match lockstep_iter_size(seq, tscx.interp, &tscx.repeats) {
        LockstepIterSize::Unconstrained => {
            return Err(dcx.create_err(NoSyntaxVarsExprRepeat { span: seq.span() }));
        }

        LockstepIterSize::Contradiction(msg) => {
            return Err(dcx.create_err(MetaVarsDifSeqMatchers { span: seq.span(), msg }));
        }

        LockstepIterSize::Constraint(len, _) => {
            let mbe::TokenTree::Sequence(sp, seq) = seq else { unreachable!() };

            if len == 0 {
                if seq.kleene.op == KleeneOp::OneOrMore {
                    return Err(dcx.create_err(MustRepeatOnce { span: sp.entire() }));
                }
            } else {
                tscx.repeats.push((0, len));

                tscx.stack
                    .push(Frame::new_sequence(seq_rep, seq.separator.clone(), seq.kleene.op));
            }
        }
    }

    Ok(())
}

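/// Find the matched fragment for the metavariable `original_ident` and push the corresponding
/// tokens into the result, wrapping most fragment kinds in invisible delimiters. If the
/// metavariable is not bound, emit `$` and the identifier verbatim (with hygiene marks applied).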
fn transcribe_metavar<'tx>(
    tscx: &mut TranscrCtx<'tx, '_>,
    mut sp: Span,
    mut original_ident: Ident,
) -> PResult<'tx, ()> {
    let dcx = tscx.psess.dcx();

    let ident = MacroRulesNormalizedIdent::new(original_ident);
    let Some(cur_matched) = lookup_cur_matched(ident, tscx.interp, &tscx.repeats) else {
        tscx.marker.mark_span(&mut sp);
        tscx.marker.mark_span(&mut original_ident.span);
        tscx.result.push(TokenTree::token_joint_hidden(token::Dollar, sp));
        tscx.result
            .push(TokenTree::Token(Token::from_ast_ident(original_ident), Spacing::Alone));
        return Ok(());
    };

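    // Wrap the fragment's tokens in invisible delimiters tagged with the metavariable kind,
    // unless they are already wrapped in an invisible delimiter of the same kind.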
    let mut mk_delimited = |mk_span, mv_kind, mut stream: TokenStream| {
        if stream.len() == 1 {
            let tree = stream.iter().next().unwrap();
            if let TokenTree::Delimited(_, _, delim, inner) = tree
                && let Delimiter::Invisible(InvisibleOrigin::MetaVar(mvk)) = delim
                && mv_kind == *mvk
            {
                stream = inner.clone();
            }
        }

        tscx.marker.mark_span(&mut sp);
        with_metavar_spans(|mspans| mspans.insert(mk_span, sp));
        TokenTree::Delimited(
            DelimSpan::from_single(sp),
            DelimSpacing::new(Spacing::Alone, Spacing::Alone),
            Delimiter::Invisible(InvisibleOrigin::MetaVar(mv_kind)),
            stream,
        )
    };

    let tt = match cur_matched {
        MatchedSingle(ParseNtResult::Tt(tt)) => {
            maybe_use_metavar_location(tscx.psess, &tscx.stack, sp, tt, &mut tscx.marker)
        }
        MatchedSingle(ParseNtResult::Ident(ident, is_raw)) => {
            tscx.marker.mark_span(&mut sp);
            with_metavar_spans(|mspans| mspans.insert(ident.span, sp));
            let kind = token::NtIdent(*ident, *is_raw);
            TokenTree::token_alone(kind, sp)
        }
        MatchedSingle(ParseNtResult::Lifetime(ident, is_raw)) => {
            tscx.marker.mark_span(&mut sp);
            with_metavar_spans(|mspans| mspans.insert(ident.span, sp));
            let kind = token::NtLifetime(*ident, *is_raw);
            TokenTree::token_alone(kind, sp)
        }
        MatchedSingle(ParseNtResult::Item(item)) => {
            mk_delimited(item.span, MetaVarKind::Item, TokenStream::from_ast(item))
        }
        MatchedSingle(ParseNtResult::Block(block)) => {
            mk_delimited(block.span, MetaVarKind::Block, TokenStream::from_ast(block))
        }
        MatchedSingle(ParseNtResult::Stmt(stmt)) => {
            let stream = if let StmtKind::Empty = stmt.kind {
                TokenStream::token_alone(token::Semi, stmt.span)
            } else {
                TokenStream::from_ast(stmt)
            };
            mk_delimited(stmt.span, MetaVarKind::Stmt, stream)
        }
        MatchedSingle(ParseNtResult::Pat(pat, pat_kind)) => {
            mk_delimited(pat.span, MetaVarKind::Pat(*pat_kind), TokenStream::from_ast(pat))
        }
        MatchedSingle(ParseNtResult::Expr(expr, kind)) => {
            let (can_begin_literal_maybe_minus, can_begin_string_literal) = match &expr.kind {
                ExprKind::Lit(_) => (true, true),
                ExprKind::Unary(UnOp::Neg, e) if matches!(&e.kind, ExprKind::Lit(_)) => {
                    (true, false)
                }
                _ => (false, false),
            };
            mk_delimited(
                expr.span,
                MetaVarKind::Expr {
                    kind: *kind,
                    can_begin_literal_maybe_minus,
                    can_begin_string_literal,
                },
                TokenStream::from_ast(expr),
            )
        }
        MatchedSingle(ParseNtResult::Literal(lit)) => {
            mk_delimited(lit.span, MetaVarKind::Literal, TokenStream::from_ast(lit))
        }
        MatchedSingle(ParseNtResult::Ty(ty)) => {
            let is_path = matches!(&ty.kind, TyKind::Path(None, _path));
            mk_delimited(ty.span, MetaVarKind::Ty { is_path }, TokenStream::from_ast(ty))
        }
        MatchedSingle(ParseNtResult::Meta(attr_item)) => {
            let has_meta_form = attr_item.meta_kind().is_some();
            mk_delimited(
                attr_item.span(),
                MetaVarKind::Meta { has_meta_form },
                TokenStream::from_ast(attr_item),
            )
        }
        MatchedSingle(ParseNtResult::Path(path)) => {
            mk_delimited(path.span, MetaVarKind::Path, TokenStream::from_ast(path))
        }
        MatchedSingle(ParseNtResult::Vis(vis)) => {
            mk_delimited(vis.span, MetaVarKind::Vis, TokenStream::from_ast(vis))
        }
        MatchedSeq(..) => {
            return Err(dcx.create_err(VarStillRepeating { span: sp, ident }));
        }
    };

    tscx.result.push(tt);
    Ok(())
}

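/// Turn a metavariable expression such as `${count(x)}`, `${index()}`, `${len()}`, `${ignore(x)}`
/// or `${concat(..)}` into the token(s) it produces.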
fn transcribe_metavar_expr<'tx>(
    tscx: &mut TranscrCtx<'tx, '_>,
    dspan: DelimSpan,
    expr: &MetaVarExpr,
) -> PResult<'tx, ()> {
    let dcx = tscx.psess.dcx();
    let tt = match *expr {
        MetaVarExpr::Concat(ref elements) => metavar_expr_concat(tscx, dspan, elements)?,
        MetaVarExpr::Count(original_ident, depth) => {
            let matched = matched_from_ident(dcx, original_ident, tscx.interp)?;
            let count = count_repetitions(dcx, depth, matched, &tscx.repeats, &dspan)?;
            TokenTree::token_alone(
                TokenKind::lit(token::Integer, sym::integer(count), None),
                tscx.visited_dspan(dspan),
            )
        }
        MetaVarExpr::Ignore(original_ident) => {
            let _ = matched_from_ident(dcx, original_ident, tscx.interp)?;
            return Ok(());
        }
        MetaVarExpr::Index(depth) => match tscx.repeats.iter().nth_back(depth) {
            Some((index, _)) => TokenTree::token_alone(
                TokenKind::lit(token::Integer, sym::integer(*index), None),
                tscx.visited_dspan(dspan),
            ),
            None => {
                return Err(out_of_bounds_err(dcx, tscx.repeats.len(), dspan.entire(), "index"));
            }
        },
        MetaVarExpr::Len(depth) => match tscx.repeats.iter().nth_back(depth) {
            Some((_, length)) => TokenTree::token_alone(
                TokenKind::lit(token::Integer, sym::integer(*length), None),
                tscx.visited_dspan(dspan),
            ),
            None => {
                return Err(out_of_bounds_err(dcx, tscx.repeats.len(), dspan.entire(), "len"));
            }
        },
    };
    tscx.result.push(tt);
    Ok(())
}

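/// Handle the `${concat(..)}` metavariable expression: concatenate the pieces into a single
/// string, NFC-normalize it, and check that the result is a valid identifier.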
fn metavar_expr_concat<'tx>(
    tscx: &mut TranscrCtx<'tx, '_>,
    dspan: DelimSpan,
    elements: &[MetaVarExprConcatElem],
) -> PResult<'tx, TokenTree> {
    let dcx = tscx.psess.dcx();
    let mut concatenated = String::new();
    for element in elements.into_iter() {
        let symbol = match element {
            MetaVarExprConcatElem::Ident(elem) => elem.name,
            MetaVarExprConcatElem::Literal(elem) => *elem,
            MetaVarExprConcatElem::Var(ident) => {
                match matched_from_ident(dcx, *ident, tscx.interp)? {
                    NamedMatch::MatchedSeq(named_matches) => {
                        let Some((curr_idx, _)) = tscx.repeats.last() else {
                            return Err(dcx.struct_span_err(dspan.entire(), "invalid syntax"));
                        };
                        match &named_matches[*curr_idx] {
                            MatchedSeq(_) => unimplemented!(),
                            MatchedSingle(pnr) => extract_symbol_from_pnr(dcx, pnr, ident.span)?,
                        }
                    }
                    NamedMatch::MatchedSingle(pnr) => {
                        extract_symbol_from_pnr(dcx, pnr, ident.span)?
                    }
                }
            }
        };
        concatenated.push_str(symbol.as_str());
    }
    let symbol = nfc_normalize(&concatenated);
    let concatenated_span = tscx.visited_dspan(dspan);
    if !rustc_lexer::is_ident(symbol.as_str()) {
        return Err(dcx.struct_span_err(
            concatenated_span,
            "`${concat(..)}` is not generating a valid identifier",
        ));
    }
    tscx.psess.symbol_gallery.insert(symbol, concatenated_span);

    Ok(TokenTree::Token(
        Token::from_ast_ident(Ident::new(symbol, concatenated_span)),
        Spacing::Alone,
    ))
}

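/// Decide which spans the transcribed `tt` fragment should carry.
///
/// The span of the metavariable in the macro RHS (`metavar_span`) is recorded against the spans
/// of the matched token so diagnostics can refer to either location. If recording that mapping
/// collides with an existing entry (and the span is not from an external crate), the emitted
/// tokens are given the metavariable span, keeping their original syntax context. Tokens coming
/// from an undelimited sequence such as `$($x)*` are passed through unchanged.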
fn maybe_use_metavar_location(
    psess: &ParseSess,
    stack: &[Frame<'_>],
    mut metavar_span: Span,
    orig_tt: &TokenTree,
    marker: &mut Marker,
) -> TokenTree {
    let undelimited_seq = matches!(
        stack.last(),
        Some(Frame {
            tts: [_],
            kind: FrameKind::Sequence {
                sep: None,
                kleene_op: KleeneOp::ZeroOrMore | KleeneOp::OneOrMore,
                ..
            },
            ..
        })
    );
    if undelimited_seq {
        return orig_tt.clone();
    }

    marker.mark_span(&mut metavar_span);
    let no_collision = match orig_tt {
        TokenTree::Token(token, ..) => {
            with_metavar_spans(|mspans| mspans.insert(token.span, metavar_span))
        }
        TokenTree::Delimited(dspan, ..) => with_metavar_spans(|mspans| {
            mspans.insert(dspan.open, metavar_span)
                && mspans.insert(dspan.close, metavar_span)
                && mspans.insert(dspan.entire(), metavar_span)
        }),
    };
    if no_collision || psess.source_map().is_imported(metavar_span) {
        return orig_tt.clone();
    }

    match orig_tt {
        TokenTree::Token(Token { kind, span }, spacing) => {
            let span = metavar_span.with_ctxt(span.ctxt());
            with_metavar_spans(|mspans| mspans.insert(span, metavar_span));
            TokenTree::Token(Token { kind: kind.clone(), span }, *spacing)
        }
        TokenTree::Delimited(dspan, dspacing, delimiter, tts) => {
            let open = metavar_span.with_ctxt(dspan.open.ctxt());
            let close = metavar_span.with_ctxt(dspan.close.ctxt());
            with_metavar_spans(|mspans| {
                mspans.insert(open, metavar_span) && mspans.insert(close, metavar_span)
            });
            let dspan = DelimSpan::from_pair(open, close);
            TokenTree::Delimited(dspan, *dspacing, *delimiter, tts.clone())
        }
    }
}

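/// Look up the matched tokens for metavariable `ident`, descending into nested `MatchedSeq`s
/// according to the current repetition indices in `repeats`.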
fn lookup_cur_matched<'a>(
    ident: MacroRulesNormalizedIdent,
    interpolations: &'a FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
    repeats: &[(usize, usize)],
) -> Option<&'a NamedMatch> {
    interpolations.get(&ident).map(|mut matched| {
        for &(idx, _) in repeats {
            match matched {
                MatchedSingle(_) => break,
                MatchedSeq(ads) => matched = ads.get(idx).unwrap(),
            }
        }

        matched
    })
}

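/// The number of times a sequence should be repeated, as determined by the metavariables it
/// contains: either no constraint, a single agreed-upon length, or a contradiction between two
/// metavariables that matched different numbers of repetitions.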
#[derive(Clone)]
enum LockstepIterSize {
    Unconstrained,

    Constraint(usize, MacroRulesNormalizedIdent),

    Contradiction(String),
}

impl LockstepIterSize {
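    /// Combine two constraints, producing a `Contradiction` if they specify different lengths.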
    fn with(self, other: LockstepIterSize) -> LockstepIterSize {
        match self {
            LockstepIterSize::Unconstrained => other,
            LockstepIterSize::Contradiction(_) => self,
            LockstepIterSize::Constraint(l_len, l_id) => match other {
                LockstepIterSize::Unconstrained => self,
                LockstepIterSize::Contradiction(_) => other,
                LockstepIterSize::Constraint(r_len, _) if l_len == r_len => self,
                LockstepIterSize::Constraint(r_len, r_id) => {
                    let msg = format!(
                        "meta-variable `{}` repeats {} time{}, but `{}` repeats {} time{}",
                        l_id,
                        l_len,
                        pluralize!(l_len),
                        r_id,
                        r_len,
                        pluralize!(r_len),
                    );
                    LockstepIterSize::Contradiction(msg)
                }
            },
        }
    }
}

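/// Determine how many times the sequence `tree` should repeat by combining, in lockstep, the
/// repetition counts of every metavariable mentioned inside it.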
fn lockstep_iter_size(
    tree: &mbe::TokenTree,
    interpolations: &FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
    repeats: &[(usize, usize)],
) -> LockstepIterSize {
    use mbe::TokenTree;
    match tree {
        TokenTree::Delimited(.., delimited) => {
            delimited.tts.iter().fold(LockstepIterSize::Unconstrained, |size, tt| {
                size.with(lockstep_iter_size(tt, interpolations, repeats))
            })
        }
        TokenTree::Sequence(_, seq) => {
            seq.tts.iter().fold(LockstepIterSize::Unconstrained, |size, tt| {
                size.with(lockstep_iter_size(tt, interpolations, repeats))
            })
        }
        TokenTree::MetaVar(_, name) | TokenTree::MetaVarDecl { name, .. } => {
            let name = MacroRulesNormalizedIdent::new(*name);
            match lookup_cur_matched(name, interpolations, repeats) {
                Some(matched) => match matched {
                    MatchedSingle(_) => LockstepIterSize::Unconstrained,
                    MatchedSeq(ads) => LockstepIterSize::Constraint(ads.len(), name),
                },
                _ => LockstepIterSize::Unconstrained,
            }
        }
        TokenTree::MetaVarExpr(_, expr) => {
            expr.for_each_metavar(LockstepIterSize::Unconstrained, |lis, ident| {
                lis.with(lockstep_iter_size(
                    &TokenTree::MetaVar(ident.span, *ident),
                    interpolations,
                    repeats,
                ))
            })
        }
        TokenTree::Token(..) => LockstepIterSize::Unconstrained,
    }
}

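/// Used by `${count(x, depth)}`: count how many times the metavariable bound to `matched`
/// repeats at the requested depth, relative to the repetition the expression currently sits in.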
fn count_repetitions<'dx>(
    dcx: DiagCtxtHandle<'dx>,
    depth_user: usize,
    mut matched: &NamedMatch,
    repeats: &[(usize, usize)],
    sp: &DelimSpan,
) -> PResult<'dx, usize> {
    fn count<'a>(depth_curr: usize, depth_max: usize, matched: &NamedMatch) -> PResult<'a, usize> {
        match matched {
            MatchedSingle(_) => Ok(1),
            MatchedSeq(named_matches) => {
                if depth_curr == depth_max {
                    Ok(named_matches.len())
                } else {
                    named_matches.iter().map(|elem| count(depth_curr + 1, depth_max, elem)).sum()
                }
            }
        }
    }

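    /// Nesting depth of `MatchedSeq` levels, following the first element at each level.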
    fn depth(counter: usize, matched: &NamedMatch) -> usize {
        match matched {
            MatchedSingle(_) => counter,
            MatchedSeq(named_matches) => {
                let rslt = counter + 1;
                if let Some(elem) = named_matches.first() { depth(rslt, elem) } else { rslt }
            }
        }
    }

    let depth_max = depth(0, matched)
        .checked_sub(1)
        .and_then(|el| el.checked_sub(repeats.len()))
        .unwrap_or_default();
    if depth_user > depth_max {
        return Err(out_of_bounds_err(dcx, depth_max + 1, sp.entire(), "count"));
    }

    for &(idx, _) in repeats {
        if let MatchedSeq(ads) = matched {
            matched = &ads[idx];
        }
    }

    if let MatchedSingle(_) = matched {
        return Err(dcx.create_err(CountRepetitionMisplaced { span: sp.entire() }));
    }

    count(depth_user, depth_max, matched)
}

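/// Look up the `NamedMatch` bound to `ident` in the invocation, or emit an error if the
/// metavariable is not recognized.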
fn matched_from_ident<'ctx, 'interp, 'rslt>(
    dcx: DiagCtxtHandle<'ctx>,
    ident: Ident,
    interp: &'interp FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
) -> PResult<'ctx, &'rslt NamedMatch>
where
    'interp: 'rslt,
{
    let span = ident.span;
    let key = MacroRulesNormalizedIdent::new(ident);
    interp.get(&key).ok_or_else(|| dcx.create_err(MetaVarExprUnrecognizedVar { span, key }))
}

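/// Error emitted when a `count`, `index` or `len` depth argument exceeds the available
/// repetition depth.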
fn out_of_bounds_err<'a>(dcx: DiagCtxtHandle<'a>, max: usize, span: Span, ty: &str) -> Diag<'a> {
    let msg = if max == 0 {
        format!(
            "meta-variable expression `{ty}` with depth parameter \
             must be called inside of a macro repetition"
        )
    } else {
        format!(
            "depth parameter of meta-variable expression `{ty}` \
             must be less than {max}"
        )
    };
    dcx.struct_span_err(span, msg)
}

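/// Extract the symbol for `${concat(..)}` from a matched fragment: a non-raw identifier, a `tt`
/// that is a non-raw identifier or unsuffixed string literal, or an unsuffixed string literal.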
fn extract_symbol_from_pnr<'a>(
    dcx: DiagCtxtHandle<'a>,
    pnr: &ParseNtResult,
    span_err: Span,
) -> PResult<'a, Symbol> {
    match pnr {
        ParseNtResult::Ident(nt_ident, is_raw) => {
            if let IdentIsRaw::Yes = is_raw {
                Err(dcx.struct_span_err(span_err, RAW_IDENT_ERR))
            } else {
                Ok(nt_ident.name)
            }
        }
        ParseNtResult::Tt(TokenTree::Token(
            Token { kind: TokenKind::Ident(symbol, is_raw), .. },
            _,
        )) => {
            if let IdentIsRaw::Yes = is_raw {
                Err(dcx.struct_span_err(span_err, RAW_IDENT_ERR))
            } else {
                Ok(*symbol)
            }
        }
        ParseNtResult::Tt(TokenTree::Token(
            Token {
                kind: TokenKind::Literal(Lit { kind: LitKind::Str, symbol, suffix: None }),
                ..
            },
            _,
        )) => Ok(*symbol),
        ParseNtResult::Literal(expr)
            if let ExprKind::Lit(Lit { kind: LitKind::Str, symbol, suffix: None }) = &expr.kind =>
        {
            Ok(*symbol)
        }
        _ => Err(dcx
            .struct_err(
                "metavariables of `${concat(..)}` must be of type `ident`, `literal` or `tt`",
            )
            .with_note("currently only string literals are supported")
            .with_span(span_err)),
    }
}