rustc_expand/mbe/macro_rules.rs

1use std::borrow::Cow;
2use std::collections::hash_map::Entry;
3use std::sync::Arc;
4use std::{mem, slice};
5
6use ast::token::IdentIsRaw;
7use rustc_ast::token::NtPatKind::*;
8use rustc_ast::token::TokenKind::*;
9use rustc_ast::token::{self, NonterminalKind, Token, TokenKind};
10use rustc_ast::tokenstream::{DelimSpan, TokenStream};
11use rustc_ast::{self as ast, DUMMY_NODE_ID, NodeId};
12use rustc_ast_pretty::pprust;
13use rustc_attr_data_structures::{AttributeKind, find_attr};
14use rustc_data_structures::fx::{FxHashMap, FxIndexMap};
15use rustc_errors::{Applicability, Diag, ErrorGuaranteed};
16use rustc_feature::Features;
17use rustc_hir as hir;
18use rustc_lint_defs::BuiltinLintDiag;
19use rustc_lint_defs::builtin::{
20    RUST_2021_INCOMPATIBLE_OR_PATTERNS, SEMICOLON_IN_EXPRESSIONS_FROM_MACROS,
21};
22use rustc_parse::exp;
23use rustc_parse::parser::{Parser, Recovery};
24use rustc_session::Session;
25use rustc_session::parse::ParseSess;
26use rustc_span::edition::Edition;
27use rustc_span::hygiene::Transparency;
28use rustc_span::{Ident, Span, kw, sym};
29use tracing::{debug, instrument, trace, trace_span};
30
31use super::macro_parser::{NamedMatches, NamedParseResult};
32use super::{SequenceRepetition, diagnostics};
33use crate::base::{
34    DummyResult, ExpandResult, ExtCtxt, MacResult, MacroExpanderResult, SyntaxExtension,
35    SyntaxExtensionKind, TTMacroExpander,
36};
37use crate::expand::{AstFragment, AstFragmentKind, ensure_complete_parse, parse_ast_fragment};
38use crate::mbe::macro_parser::{Error, ErrorReported, Failure, MatcherLoc, Success, TtParser};
39use crate::mbe::quoted::{RulePart, parse_one_tt};
40use crate::mbe::transcribe::transcribe;
41use crate::mbe::{self, KleeneOp, macro_check};
42
/// Holds the parser positioned at the output of a single macro expansion,
/// ready to be turned into an AST fragment of the appropriate kind.
pub(crate) struct ParserAnyMacro<'a> {
    /// Parser over the (already transcribed) token stream of the expansion.
    parser: Parser<'a>,

    /// Span of the expansion site of the macro this parser is for
    site_span: Span,
    /// The ident of the macro we're parsing
    macro_ident: Ident,
    /// Node id used to attribute lints buffered while parsing the expansion.
    lint_node_id: NodeId,
    /// Passed through to the `SEMICOLON_IN_EXPRESSIONS_FROM_MACROS` lint diagnostic.
    is_trailing_mac: bool,
    /// Span of the macro arm (rhs) that produced these tokens; used in parse-error reporting.
    arm_span: Span,
    /// Whether or not this macro is defined in the current crate
    is_local: bool,
}
56
impl<'a> ParserAnyMacro<'a> {
    /// Parses the expansion held in `self.parser` as an AST fragment of the
    /// requested `kind`. On a parse error, emits a diagnostic (pointing at both
    /// the expansion site and the macro arm) and returns a dummy fragment.
    pub(crate) fn make(mut self: Box<ParserAnyMacro<'a>>, kind: AstFragmentKind) -> AstFragment {
        let ParserAnyMacro {
            site_span,
            macro_ident,
            ref mut parser,
            lint_node_id,
            arm_span,
            is_trailing_mac,
            is_local,
        } = *self;
        // Snapshot taken up front so the error path can diff against the
        // pre-parse state when building its diagnostic.
        let snapshot = &mut parser.create_snapshot_for_diagnostic();
        let fragment = match parse_ast_fragment(parser, kind) {
            Ok(f) => f,
            Err(err) => {
                let guar = diagnostics::emit_frag_parse_err(
                    err, parser, snapshot, site_span, arm_span, kind,
                );
                return kind.dummy(site_span, guar);
            }
        };

        // We allow semicolons at the end of expressions -- e.g., the semicolon in
        // `macro_rules! m { () => { panic!(); } }` isn't parsed by `.parse_expr()`,
        // but `m!()` is allowed in expression positions (cf. issue #34706).
        if kind == AstFragmentKind::Expr && parser.token == token::Semi {
            // Only lint macros defined in the current crate; foreign macros
            // can't be fixed by this crate's author.
            if is_local {
                parser.psess.buffer_lint(
                    SEMICOLON_IN_EXPRESSIONS_FROM_MACROS,
                    parser.token.span,
                    lint_node_id,
                    BuiltinLintDiag::TrailingMacro(is_trailing_mac, macro_ident),
                );
            }
            parser.bump();
        }

        // Make sure we don't have any tokens left to parse so we don't silently drop anything.
        let path = ast::Path::from_ident(macro_ident.with_span_pos(site_span));
        ensure_complete_parse(parser, &path, kind.name(), site_span);
        fragment
    }

    /// Builds a `ParserAnyMacro` over the transcribed tokens `tts` of a macro
    /// expansion, capturing the lint/diagnostic context from `cx`.
    #[instrument(skip(cx, tts))]
    pub(crate) fn from_tts<'cx>(
        cx: &'cx mut ExtCtxt<'a>,
        tts: TokenStream,
        site_span: Span,
        arm_span: Span,
        is_local: bool,
        macro_ident: Ident,
    ) -> Self {
        Self {
            parser: Parser::new(&cx.sess.psess, tts, None),

            // Pass along the original expansion site and the name of the macro
            // so we can print a useful error message if the parse of the expanded
            // macro leaves unparsed tokens.
            site_span,
            macro_ident,
            lint_node_id: cx.current_expansion.lint_node_id,
            is_trailing_mac: cx.current_expansion.is_trailing_mac,
            arm_span,
            is_local,
        }
    }
}
124
/// A single `lhs => rhs` rule of a declarative macro.
pub(super) struct MacroRule {
    /// The matcher, precompiled into `MatcherLoc` form for fast matching.
    pub(super) lhs: Vec<MatcherLoc>,
    /// Span of the original lhs token tree; used when reporting unused rules.
    lhs_span: Span,
    /// The transcriber (right-hand side), kept as an `mbe` token tree.
    rhs: mbe::TokenTree,
}
130
/// Expander for a compiled `macro_rules!` macro: the rules plus the metadata
/// needed to match an invocation and report usage/unused-rule information.
struct MacroRulesMacroExpander {
    /// Node id of the macro definition; used for locality checks and rule-usage tracking.
    node_id: NodeId,
    /// Name of the macro.
    name: Ident,
    /// Span of the macro definition.
    span: Span,
    /// Hygiene transparency applied when transcribing.
    transparency: Transparency,
    /// The macro's rules, in definition order.
    rules: Vec<MacroRule>,
}
138
impl TTMacroExpander for MacroRulesMacroExpander {
    /// Expands an invocation of this macro at `sp` with `input` by delegating
    /// to `expand_macro`, which matches the rules and transcribes the winner.
    fn expand<'cx>(
        &self,
        cx: &'cx mut ExtCtxt<'_>,
        sp: Span,
        input: TokenStream,
    ) -> MacroExpanderResult<'cx> {
        ExpandResult::Ready(expand_macro(
            cx,
            sp,
            self.span,
            self.node_id,
            self.name,
            self.transparency,
            input,
            &self.rules,
        ))
    }

    /// Returns the name/span to report for an unused rule, or `None` if the
    /// rule should not be reported.
    fn get_unused_rule(&self, rule_i: usize) -> Option<(&Ident, Span)> {
        // If the rhs contains an invocation like `compile_error!`, don't report it as unused.
        let rule = &self.rules[rule_i];
        if has_compile_error_macro(&rule.rhs) { None } else { Some((&self.name, rule.lhs_span)) }
    }
}
164
/// Placeholder expander installed when a macro definition was malformed;
/// it carries the guarantee that an error was already emitted.
struct DummyExpander(ErrorGuaranteed);

impl TTMacroExpander for DummyExpander {
    fn expand<'cx>(
        &self,
        _: &'cx mut ExtCtxt<'_>,
        span: Span,
        _: TokenStream,
    ) -> ExpandResult<Box<dyn MacResult + 'cx>, ()> {
        // Expand to a dummy result that records the already-emitted error.
        ExpandResult::Ready(DummyResult::any(span, self.0))
    }
}
177
178fn trace_macros_note(cx_expansions: &mut FxIndexMap<Span, Vec<String>>, sp: Span, message: String) {
179    let sp = sp.macro_backtrace().last().map_or(sp, |trace| trace.call_site);
180    cx_expansions.entry(sp).or_default().push(message);
181}
182
/// Hooks for observing the macro-matching process; implemented by `NoopTracker`
/// for the hot path and by diagnostics trackers for error reporting.
pub(super) trait Tracker<'matcher> {
    /// The contents of `ParseResult::Failure`.
    type Failure;

    /// Arm failed to match. If the token is `token::Eof`, it indicates an unexpected
    /// end of macro invocation. Otherwise, it indicates that no rules expected the given token.
    /// The usize is the approximate position of the token in the input token stream.
    fn build_failure(tok: Token, position: u32, msg: &'static str) -> Self::Failure;

    /// This is called before trying to match next MatcherLoc on the current token.
    fn before_match_loc(&mut self, _parser: &TtParser, _matcher: &'matcher MatcherLoc) {}

    /// This is called after an arm has been parsed, either successfully or unsuccessfully. When
    /// this is called, `before_match_loc` was called at least once (with a `MatcherLoc::Eof`).
    fn after_arm(&mut self, _result: &NamedParseResult<Self::Failure>) {}

    /// For tracing.
    fn description() -> &'static str;

    /// Parser recovery mode to use while matching; forbidden by default.
    fn recovery() -> Recovery {
        Recovery::Forbidden
    }
}
206
/// A noop tracker that is used in the hot path of the expansion, has zero overhead thanks to
/// monomorphization.
pub(super) struct NoopTracker;

impl<'matcher> Tracker<'matcher> for NoopTracker {
    // Failures carry no payload on the fast path.
    type Failure = ();

    fn build_failure(_tok: Token, _position: u32, _msg: &'static str) -> Self::Failure {}

    fn description() -> &'static str {
        "none"
    }
}
220
/// Expands the rules based macro defined by `rules` for a given input `arg`.
///
/// On a successful match, transcribes the winning rule's rhs and returns a
/// parser over the result; on failure, retries with a diagnostics tracker
/// (via `diagnostics::failed_to_match_macro`) to produce a good error.
#[instrument(skip(cx, transparency, arg, rules))]
fn expand_macro<'cx>(
    cx: &'cx mut ExtCtxt<'_>,
    sp: Span,
    def_span: Span,
    node_id: NodeId,
    name: Ident,
    transparency: Transparency,
    arg: TokenStream,
    rules: &[MacroRule],
) -> Box<dyn MacResult + 'cx> {
    let psess = &cx.sess.psess;

    if cx.trace_macros() {
        let msg = format!("expanding `{}! {{ {} }}`", name, pprust::tts_to_string(&arg));
        trace_macros_note(&mut cx.expansions, sp, msg);
    }

    // Track nothing for the best performance.
    let try_success_result = try_match_macro(psess, name, &arg, rules, &mut NoopTracker);

    match try_success_result {
        Ok((rule_index, rule, named_matches)) => {
            // The rhs is guaranteed delimited by `check_rhs` at definition time.
            let mbe::TokenTree::Delimited(rhs_span, _, ref rhs) = rule.rhs else {
                cx.dcx().span_bug(sp, "malformed macro rhs");
            };
            let arm_span = rule.rhs.span();

            // rhs has holes ( `$id` and `$(...)` that need filled)
            let id = cx.current_expansion.id;
            let tts = match transcribe(psess, &named_matches, rhs, rhs_span, transparency, id) {
                Ok(tts) => tts,
                Err(err) => {
                    let guar = err.emit();
                    return DummyResult::any(arm_span, guar);
                }
            };

            if cx.trace_macros() {
                let msg = format!("to `{}`", pprust::tts_to_string(&tts));
                trace_macros_note(&mut cx.expansions, sp, msg);
            }

            // Record which rule fired so unused rules can be linted later.
            let is_local = is_defined_in_current_crate(node_id);
            if is_local {
                cx.resolver.record_macro_rule_usage(node_id, rule_index);
            }

            // Let the context choose how to interpret the result. Weird, but useful for X-macros.
            Box::new(ParserAnyMacro::from_tts(cx, tts, sp, arm_span, is_local, name))
        }
        Err(CanRetry::No(guar)) => {
            debug!("Will not retry matching as an error was emitted already");
            DummyResult::any(sp, guar)
        }
        Err(CanRetry::Yes) => {
            // Retry and emit a better error.
            let (span, guar) =
                diagnostics::failed_to_match_macro(cx.psess(), sp, def_span, name, arg, rules);
            cx.trace_macros_diag();
            DummyResult::any(span, guar)
        }
    }
}
286
/// Whether a failed match attempt may be re-run (e.g. with a diagnostics tracker).
pub(super) enum CanRetry {
    /// No error was emitted; the match may be retried to produce a better diagnostic.
    Yes,
    /// We are not allowed to retry macro expansion as a fatal error has been emitted already.
    No(ErrorGuaranteed),
}
292
/// Try expanding the macro. Returns the index of the successful arm and its named_matches if it was successful,
/// and nothing if it failed. On failure, it's the callers job to use `track` accordingly to record all errors
/// correctly.
#[instrument(level = "debug", skip(psess, arg, rules, track), fields(tracking = %T::description()))]
pub(super) fn try_match_macro<'matcher, T: Tracker<'matcher>>(
    psess: &ParseSess,
    name: Ident,
    arg: &TokenStream,
    rules: &'matcher [MacroRule],
    track: &mut T,
) -> Result<(usize, &'matcher MacroRule, NamedMatches), CanRetry> {
    // We create a base parser that can be used for the "black box" parts.
    // Every iteration needs a fresh copy of that parser. However, the parser
    // is not mutated on many of the iterations, particularly when dealing with
    // macros like this:
    //
    // macro_rules! foo {
    //     ("a") => (A);
    //     ("b") => (B);
    //     ("c") => (C);
    //     // ... etc. (maybe hundreds more)
    // }
    //
    // as seen in the `html5ever` benchmark. We use a `Cow` so that the base
    // parser is only cloned when necessary (upon mutation). Furthermore, we
    // reinitialize the `Cow` with the base parser at the start of every
    // iteration, so that any mutated parsers are not reused. This is all quite
    // hacky, but speeds up the `html5ever` benchmark significantly. (Issue
    // 68836 suggests a more comprehensive but more complex change to deal with
    // this situation.)
    let parser = parser_from_cx(psess, arg.clone(), T::recovery());
    // Try each arm's matchers.
    let mut tt_parser = TtParser::new(name);
    for (i, rule) in rules.iter().enumerate() {
        let _tracing_span = trace_span!("Matching arm", %i);

        // Take a snapshot of the state of pre-expansion gating at this point.
        // This is used so that if a matcher is not `Success(..)`ful,
        // then the spans which became gated when parsing the unsuccessful matcher
        // are not recorded. On the first `Success(..)`ful matcher, the spans are merged.
        let mut gated_spans_snapshot = mem::take(&mut *psess.gated_spans.spans.borrow_mut());

        let result = tt_parser.parse_tt(&mut Cow::Borrowed(&parser), &rule.lhs, track);

        track.after_arm(&result);

        match result {
            Success(named_matches) => {
                debug!("Parsed arm successfully");
                // The matcher was `Success(..)`ful.
                // Merge the gated spans from parsing the matcher with the preexisting ones.
                psess.gated_spans.merge(gated_spans_snapshot);

                return Ok((i, rule, named_matches));
            }
            Failure(_) => {
                trace!("Failed to match arm, trying the next one");
                // Try the next arm. Falls through to the snapshot restore below.
            }
            Error(_, _) => {
                debug!("Fatal error occurred during matching");
                // We haven't emitted an error yet, so we can retry.
                return Err(CanRetry::Yes);
            }
            ErrorReported(guarantee) => {
                debug!("Fatal error occurred and was reported during matching");
                // An error has been reported already, we cannot retry as that would cause duplicate errors.
                return Err(CanRetry::No(guarantee));
            }
        }

        // The matcher was not `Success(..)`ful.
        // Restore to the state before snapshotting and maybe try again.
        mem::swap(&mut gated_spans_snapshot, &mut psess.gated_spans.spans.borrow_mut());
    }

    // No rule matched and no error was emitted: let the caller retry for diagnostics.
    Err(CanRetry::Yes)
}
371
/// Converts a macro item into a syntax extension.
///
/// Parses the macro body as a sequence of `lhs => rhs` rules, validating each
/// side, and returns the extension plus the rule count (for unused-rule linting).
pub fn compile_declarative_macro(
    sess: &Session,
    features: &Features,
    macro_def: &ast::MacroDef,
    ident: Ident,
    attrs: &[hir::Attribute],
    span: Span,
    node_id: NodeId,
    edition: Edition,
) -> (SyntaxExtension, usize) {
    // Wrap an expander into a full `SyntaxExtension`.
    let mk_syn_ext = |expander| {
        let kind = SyntaxExtensionKind::LegacyBang(expander);
        let is_local = is_defined_in_current_crate(node_id);
        SyntaxExtension::new(sess, kind, span, Vec::new(), edition, ident.name, attrs, is_local)
    };
    // Unrecoverable-error path: a dummy extension with zero rules.
    let dummy_syn_ext = |guar| (mk_syn_ext(Arc::new(DummyExpander(guar))), 0);

    let macro_rules = macro_def.macro_rules;
    // `macro_rules!` separates rules with `;`; otherwise `,` is expected.
    let exp_sep = if macro_rules { exp!(Semi) } else { exp!(Comma) };

    let body = macro_def.body.tokens.clone();
    let mut p = Parser::new(&sess.psess, body, rustc_parse::MACRO_ARGUMENTS);

    // Don't abort iteration early, so that multiple errors can be reported. We only abort early on
    // parse failures we can't recover from.
    let mut guar = None;
    let mut check_emission = |ret: Result<(), ErrorGuaranteed>| guar = guar.or(ret.err());

    let mut rules = Vec::new();

    while p.token != token::Eof {
        let lhs_tt = p.parse_token_tree();
        let lhs_tt = parse_one_tt(lhs_tt, RulePart::Pattern, sess, node_id, features, edition);
        check_emission(check_lhs(sess, node_id, &lhs_tt));
        if let Err(e) = p.expect(exp!(FatArrow)) {
            return dummy_syn_ext(e.emit());
        }
        if let Some(guar) = check_no_eof(sess, &p, "expected right-hand side of macro rule") {
            return dummy_syn_ext(guar);
        }
        let rhs_tt = p.parse_token_tree();
        let rhs_tt = parse_one_tt(rhs_tt, RulePart::Body, sess, node_id, features, edition);
        check_emission(check_rhs(sess, &rhs_tt));
        check_emission(macro_check::check_meta_variables(&sess.psess, node_id, &lhs_tt, &rhs_tt));
        let lhs_span = lhs_tt.span();
        // Convert the lhs into `MatcherLoc` form, which is better for doing the
        // actual matching.
        let lhs = if let mbe::TokenTree::Delimited(.., delimited) = lhs_tt {
            mbe::macro_parser::compute_locs(&delimited.tts)
        } else {
            // NOTE(review): a non-delimited lhs should have been rejected by
            // `check_lhs` above, so `guar` is presumably `Some` here — confirm,
            // since `unwrap` would otherwise panic.
            return dummy_syn_ext(guar.unwrap());
        };
        rules.push(MacroRule { lhs, lhs_span, rhs: rhs_tt });
        if p.token == token::Eof {
            break;
        }
        if let Err(e) = p.expect(exp_sep) {
            return dummy_syn_ext(e.emit());
        }
    }

    if rules.is_empty() {
        let guar = sess.dcx().span_err(span, "macros must contain at least one rule");
        return dummy_syn_ext(guar);
    }

    // Transparency from an explicit attribute, else the default for this macro kind.
    let transparency = find_attr!(attrs, AttributeKind::MacroTransparency(x) => *x)
        .unwrap_or(Transparency::fallback(macro_rules));

    if let Some(guar) = guar {
        // To avoid warning noise, only consider the rules of this
        // macro for the lint, if all rules are valid.
        return dummy_syn_ext(guar);
    }

    // Return the number of rules for unused rule linting, if this is a local macro.
    let nrules = if is_defined_in_current_crate(node_id) { rules.len() } else { 0 };

    let expander =
        Arc::new(MacroRulesMacroExpander { name: ident, span, node_id, transparency, rules });
    (mk_syn_ext(expander), nrules)
}
455
456fn check_no_eof(sess: &Session, p: &Parser<'_>, msg: &'static str) -> Option<ErrorGuaranteed> {
457    if p.token == token::Eof {
458        let err_sp = p.token.span.shrink_to_hi();
459        let guar = sess
460            .dcx()
461            .struct_span_err(err_sp, "macro definition ended unexpectedly")
462            .with_span_label(err_sp, msg)
463            .emit();
464        return Some(guar);
465    }
466    None
467}
468
469fn check_lhs(sess: &Session, node_id: NodeId, lhs: &mbe::TokenTree) -> Result<(), ErrorGuaranteed> {
470    let e1 = check_lhs_nt_follows(sess, node_id, lhs);
471    let e2 = check_lhs_no_empty_seq(sess, slice::from_ref(lhs));
472    e1.and(e2)
473}
474
475fn check_lhs_nt_follows(
476    sess: &Session,
477    node_id: NodeId,
478    lhs: &mbe::TokenTree,
479) -> Result<(), ErrorGuaranteed> {
480    // lhs is going to be like TokenTree::Delimited(...), where the
481    // entire lhs is those tts. Or, it can be a "bare sequence", not wrapped in parens.
482    if let mbe::TokenTree::Delimited(.., delimited) = lhs {
483        check_matcher(sess, node_id, &delimited.tts)
484    } else {
485        let msg = "invalid macro matcher; matchers must be contained in balanced delimiters";
486        Err(sess.dcx().span_err(lhs.span(), msg))
487    }
488}
489
/// Returns whether the repetition `seq` could match an empty token stream.
/// As a side effect, emits a note for doc comments found in matcher position.
fn is_empty_token_tree(sess: &Session, seq: &mbe::SequenceRepetition) -> bool {
    // NOTE(review): a separator is taken here to rule out the empty match — confirm.
    if seq.separator.is_some() {
        false
    } else {
        let mut is_empty = true;
        let mut iter = seq.tts.iter().peekable();
        while let Some(tt) = iter.next() {
            match tt {
                // `$x:vis` may match nothing, so it does not disqualify emptiness.
                mbe::TokenTree::MetaVarDecl { kind: NonterminalKind::Vis, .. } => {}
                mbe::TokenTree::Token(t @ Token { kind: DocComment(..), .. }) => {
                    // Coalesce a run of consecutive doc comments into a single
                    // span so only one note is emitted for the whole run.
                    let mut now = t;
                    while let Some(&mbe::TokenTree::Token(
                        next @ Token { kind: DocComment(..), .. },
                    )) = iter.peek()
                    {
                        now = next;
                        iter.next();
                    }
                    let span = t.span.to(now.span);
                    sess.dcx().span_note(span, "doc comments are ignored in matcher position");
                }
                // `$(...)*` and `$(...)?` sub-sequences may also match nothing.
                mbe::TokenTree::Sequence(_, sub_seq)
                    if (sub_seq.kleene.op == mbe::KleeneOp::ZeroOrMore
                        || sub_seq.kleene.op == mbe::KleeneOp::ZeroOrOne) => {}
                _ => is_empty = false,
            }
        }
        is_empty
    }
}
520
521/// Checks if a `vis` nonterminal fragment is unnecessarily wrapped in an optional repetition.
522///
523/// When a `vis` fragment (which can already be empty) is wrapped in `$(...)?`,
524/// this suggests removing the redundant repetition syntax since it provides no additional benefit.
525fn check_redundant_vis_repetition(
526    err: &mut Diag<'_>,
527    sess: &Session,
528    seq: &SequenceRepetition,
529    span: &DelimSpan,
530) {
531    let is_zero_or_one: bool = seq.kleene.op == KleeneOp::ZeroOrOne;
532    let is_vis = seq.tts.first().map_or(false, |tt| {
533        matches!(tt, mbe::TokenTree::MetaVarDecl { kind: NonterminalKind::Vis, .. })
534    });
535
536    if is_vis && is_zero_or_one {
537        err.note("a `vis` fragment can already be empty");
538        err.multipart_suggestion(
539            "remove the `$(` and `)?`",
540            vec![
541                (
542                    sess.source_map().span_extend_to_prev_char_before(span.open, '$', true),
543                    "".to_string(),
544                ),
545                (span.close.with_hi(seq.kleene.span.hi()), "".to_string()),
546            ],
547            Applicability::MaybeIncorrect,
548        );
549    }
550}
551
552/// Checks that the lhs contains no repetition which could match an empty token
553/// tree, because then the matcher would hang indefinitely.
554fn check_lhs_no_empty_seq(sess: &Session, tts: &[mbe::TokenTree]) -> Result<(), ErrorGuaranteed> {
555    use mbe::TokenTree;
556    for tt in tts {
557        match tt {
558            TokenTree::Token(..)
559            | TokenTree::MetaVar(..)
560            | TokenTree::MetaVarDecl { .. }
561            | TokenTree::MetaVarExpr(..) => (),
562            TokenTree::Delimited(.., del) => check_lhs_no_empty_seq(sess, &del.tts)?,
563            TokenTree::Sequence(span, seq) => {
564                if is_empty_token_tree(sess, seq) {
565                    let sp = span.entire();
566                    let mut err =
567                        sess.dcx().struct_span_err(sp, "repetition matches empty token tree");
568                    check_redundant_vis_repetition(&mut err, sess, seq, span);
569                    return Err(err.emit());
570                }
571                check_lhs_no_empty_seq(sess, &seq.tts)?
572            }
573        }
574    }
575
576    Ok(())
577}
578
579fn check_rhs(sess: &Session, rhs: &mbe::TokenTree) -> Result<(), ErrorGuaranteed> {
580    match *rhs {
581        mbe::TokenTree::Delimited(..) => Ok(()),
582        _ => Err(sess.dcx().span_err(rhs.span(), "macro rhs must be delimited")),
583    }
584}
585
586fn check_matcher(
587    sess: &Session,
588    node_id: NodeId,
589    matcher: &[mbe::TokenTree],
590) -> Result<(), ErrorGuaranteed> {
591    let first_sets = FirstSets::new(matcher);
592    let empty_suffix = TokenSet::empty();
593    check_matcher_core(sess, node_id, &first_sets, matcher, &empty_suffix)?;
594    Ok(())
595}
596
597fn has_compile_error_macro(rhs: &mbe::TokenTree) -> bool {
598    match rhs {
599        mbe::TokenTree::Delimited(.., d) => {
600            let has_compile_error = d.tts.array_windows::<3>().any(|[ident, bang, args]| {
601                if let mbe::TokenTree::Token(ident) = ident
602                    && let TokenKind::Ident(ident, _) = ident.kind
603                    && ident == sym::compile_error
604                    && let mbe::TokenTree::Token(bang) = bang
605                    && let TokenKind::Bang = bang.kind
606                    && let mbe::TokenTree::Delimited(.., del) = args
607                    && !del.delim.skip()
608                {
609                    true
610                } else {
611                    false
612                }
613            });
614            if has_compile_error { true } else { d.tts.iter().any(has_compile_error_macro) }
615        }
616        _ => false,
617    }
618}
619
// `The FirstSets` for a matcher is a mapping from subsequences in the
// matcher to the FIRST set for that subsequence.
//
// This mapping is partially precomputed via a backwards scan over the
// token trees of the matcher, which provides a mapping from each
// repetition sequence to its *first* set.
//
// (Hypothetically, sequences should be uniquely identifiable via their
// spans, though perhaps that is false, e.g., for macro-generated macros
// that do not try to inject artificial span information. My plan is
// to try to catch such cases ahead of time and not include them in
// the precomputed mapping.)
struct FirstSets<'tt> {
    // this maps each TokenTree::Sequence `$(tt ...) SEP OP` that is uniquely identified by its
    // span in the original matcher to the First set for the inner sequence `tt ...`.
    //
    // If two sequences have the same span in a matcher, then map that
    // span to None (invalidating the mapping here and forcing the code to
    // use a slow path).
    //
    // A `None` entry is handled by `FirstSets::first`, which recomputes the
    // set on the fly for that sequence.
    first: FxHashMap<Span, Option<TokenSet<'tt>>>,
}
641
impl<'tt> FirstSets<'tt> {
    /// Precomputes the FIRST set of every repetition sequence in `tts` via a
    /// single backwards scan (see `build_recur`).
    fn new(tts: &'tt [mbe::TokenTree]) -> FirstSets<'tt> {
        use mbe::TokenTree;

        let mut sets = FirstSets { first: FxHashMap::default() };
        build_recur(&mut sets, tts);
        return sets;

        // walks backward over `tts`, returning the FIRST for `tts`
        // and updating `sets` at the same time for all sequence
        // substructure we find within `tts`.
        fn build_recur<'tt>(sets: &mut FirstSets<'tt>, tts: &'tt [TokenTree]) -> TokenSet<'tt> {
            let mut first = TokenSet::empty();
            for tt in tts.iter().rev() {
                match tt {
                    TokenTree::Token(..)
                    | TokenTree::MetaVar(..)
                    | TokenTree::MetaVarDecl { .. }
                    | TokenTree::MetaVarExpr(..) => {
                        // A leaf always starts the suffix: it fully replaces FIRST.
                        first.replace_with(TtHandle::TtRef(tt));
                    }
                    TokenTree::Delimited(span, _, delimited) => {
                        // Recurse for the side effect of recording inner sequences.
                        build_recur(sets, &delimited.tts);
                        // A delimited group always begins with its open delimiter.
                        first.replace_with(TtHandle::from_token_kind(
                            delimited.delim.as_open_token_kind(),
                            span.open,
                        ));
                    }
                    TokenTree::Sequence(sp, seq_rep) => {
                        let subfirst = build_recur(sets, &seq_rep.tts);

                        match sets.first.entry(sp.entire()) {
                            Entry::Vacant(vac) => {
                                vac.insert(Some(subfirst.clone()));
                            }
                            Entry::Occupied(mut occ) => {
                                // if there is already an entry, then a span must have collided.
                                // This should not happen with typical macro_rules macros,
                                // but syntax extensions need not maintain distinct spans,
                                // so distinct syntax trees can be assigned the same span.
                                // In such a case, the map cannot be trusted; so mark this
                                // entry as unusable.
                                occ.insert(None);
                            }
                        }

                        // If the sequence contents can be empty, then the first
                        // token could be the separator token itself.

                        if let (Some(sep), true) = (&seq_rep.separator, subfirst.maybe_empty) {
                            first.add_one_maybe(TtHandle::from_token(*sep));
                        }

                        // Reverse scan: Sequence comes before `first`.
                        if subfirst.maybe_empty
                            || seq_rep.kleene.op == mbe::KleeneOp::ZeroOrMore
                            || seq_rep.kleene.op == mbe::KleeneOp::ZeroOrOne
                        {
                            // If sequence is potentially empty, then
                            // union them (preserving first emptiness).
                            first.add_all(&TokenSet { maybe_empty: true, ..subfirst });
                        } else {
                            // Otherwise, sequence guaranteed
                            // non-empty; replace first.
                            first = subfirst;
                        }
                    }
                }
            }

            first
        }
    }

    // walks forward over `tts` until all potential FIRST tokens are
    // identified.
    fn first(&self, tts: &'tt [mbe::TokenTree]) -> TokenSet<'tt> {
        use mbe::TokenTree;

        let mut first = TokenSet::empty();
        for tt in tts.iter() {
            // Invariant: we only keep scanning while the prefix so far could be empty.
            assert!(first.maybe_empty);
            match tt {
                TokenTree::Token(..)
                | TokenTree::MetaVar(..)
                | TokenTree::MetaVarDecl { .. }
                | TokenTree::MetaVarExpr(..) => {
                    first.add_one(TtHandle::TtRef(tt));
                    return first;
                }
                TokenTree::Delimited(span, _, delimited) => {
                    first.add_one(TtHandle::from_token_kind(
                        delimited.delim.as_open_token_kind(),
                        span.open,
                    ));
                    return first;
                }
                TokenTree::Sequence(sp, seq_rep) => {
                    let subfirst_owned;
                    let subfirst = match self.first.get(&sp.entire()) {
                        Some(Some(subfirst)) => subfirst,
                        Some(&None) => {
                            // Span collision invalidated the precomputed entry
                            // (see `FirstSets::first` field docs): recompute.
                            subfirst_owned = self.first(&seq_rep.tts);
                            &subfirst_owned
                        }
                        None => {
                            panic!("We missed a sequence during FirstSets construction");
                        }
                    };

                    // If the sequence contents can be empty, then the first
                    // token could be the separator token itself.
                    if let (Some(sep), true) = (&seq_rep.separator, subfirst.maybe_empty) {
                        first.add_one_maybe(TtHandle::from_token(*sep));
                    }

                    assert!(first.maybe_empty);
                    first.add_all(subfirst);
                    if subfirst.maybe_empty
                        || seq_rep.kleene.op == mbe::KleeneOp::ZeroOrMore
                        || seq_rep.kleene.op == mbe::KleeneOp::ZeroOrOne
                    {
                        // Continue scanning for more first
                        // tokens, but also make sure we
                        // restore empty-tracking state.
                        first.maybe_empty = true;
                        continue;
                    } else {
                        return first;
                    }
                }
            }
        }

        // we only exit the loop if `tts` was empty or if every
        // element of `tts` matches the empty sequence.
        assert!(first.maybe_empty);
        first
    }
}
782
// Most `mbe::TokenTree`s are preexisting in the matcher, but some are defined
// implicitly, such as opening/closing delimiters and sequence repetition ops.
// This type encapsulates both kinds. It implements `Clone` while avoiding the
// need for `mbe::TokenTree` to implement `Clone`.
#[derive(Debug)]
enum TtHandle<'tt> {
    /// This is used in most cases.
    TtRef(&'tt mbe::TokenTree),

    /// This is only used for implicit token trees. The `mbe::TokenTree` *must*
    /// be `mbe::TokenTree::Token`. No other variants are allowed. We store an
    /// `mbe::TokenTree` rather than a `Token` so that `get()` can return a
    /// `&mbe::TokenTree`. (The hand-written `Clone` impl for this type relies
    /// on this invariant and panics on any other variant.)
    Token(mbe::TokenTree),
}
798
799impl<'tt> TtHandle<'tt> {
800    fn from_token(tok: Token) -> Self {
801        TtHandle::Token(mbe::TokenTree::Token(tok))
802    }
803
804    fn from_token_kind(kind: TokenKind, span: Span) -> Self {
805        TtHandle::from_token(Token::new(kind, span))
806    }
807
808    // Get a reference to a token tree.
809    fn get(&'tt self) -> &'tt mbe::TokenTree {
810        match self {
811            TtHandle::TtRef(tt) => tt,
812            TtHandle::Token(token_tt) => token_tt,
813        }
814    }
815}
816
817impl<'tt> PartialEq for TtHandle<'tt> {
818    fn eq(&self, other: &TtHandle<'tt>) -> bool {
819        self.get() == other.get()
820    }
821}
822
823impl<'tt> Clone for TtHandle<'tt> {
824    fn clone(&self) -> Self {
825        match self {
826            TtHandle::TtRef(tt) => TtHandle::TtRef(tt),
827
828            // This variant *must* contain a `mbe::TokenTree::Token`, and not
829            // any other variant of `mbe::TokenTree`.
830            TtHandle::Token(mbe::TokenTree::Token(tok)) => {
831                TtHandle::Token(mbe::TokenTree::Token(*tok))
832            }
833
834            _ => unreachable!(),
835        }
836    }
837}
838
// A set of `mbe::TokenTree`s, which may include `TokenTree::Match`s
// (for macro-by-example syntactic variables). It also carries the
// `maybe_empty` flag; that is true if and only if the matcher can
// match an empty token sequence.
//
// The First set is computed on submatchers like `$($a:expr b),* $(c)* d`,
// which has corresponding FIRST = {$a:expr, c, d}.
// Likewise, `$($a:expr b),* $(c)+ d` has FIRST = {$a:expr, c}.
//
// (Notably, we must allow for *-op to occur zero times.)
#[derive(Clone, Debug)]
struct TokenSet<'tt> {
    // The members of the set; deduplicated on insertion (this is a set,
    // not a multiset).
    tokens: Vec<TtHandle<'tt>>,
    // True iff the matcher this set was computed for can match the empty
    // token sequence.
    maybe_empty: bool,
}
854
855impl<'tt> TokenSet<'tt> {
856    // Returns a set for the empty sequence.
857    fn empty() -> Self {
858        TokenSet { tokens: Vec::new(), maybe_empty: true }
859    }
860
861    // Returns the set `{ tok }` for the single-token (and thus
862    // non-empty) sequence [tok].
863    fn singleton(tt: TtHandle<'tt>) -> Self {
864        TokenSet { tokens: vec![tt], maybe_empty: false }
865    }
866
867    // Changes self to be the set `{ tok }`.
868    // Since `tok` is always present, marks self as non-empty.
869    fn replace_with(&mut self, tt: TtHandle<'tt>) {
870        self.tokens.clear();
871        self.tokens.push(tt);
872        self.maybe_empty = false;
873    }
874
875    // Changes self to be the empty set `{}`; meant for use when
876    // the particular token does not matter, but we want to
877    // record that it occurs.
878    fn replace_with_irrelevant(&mut self) {
879        self.tokens.clear();
880        self.maybe_empty = false;
881    }
882
883    // Adds `tok` to the set for `self`, marking sequence as non-empty.
884    fn add_one(&mut self, tt: TtHandle<'tt>) {
885        if !self.tokens.contains(&tt) {
886            self.tokens.push(tt);
887        }
888        self.maybe_empty = false;
889    }
890
891    // Adds `tok` to the set for `self`. (Leaves `maybe_empty` flag alone.)
892    fn add_one_maybe(&mut self, tt: TtHandle<'tt>) {
893        if !self.tokens.contains(&tt) {
894            self.tokens.push(tt);
895        }
896    }
897
898    // Adds all elements of `other` to this.
899    //
900    // (Since this is a set, we filter out duplicates.)
901    //
902    // If `other` is potentially empty, then preserves the previous
903    // setting of the empty flag of `self`. If `other` is guaranteed
904    // non-empty, then `self` is marked non-empty.
905    fn add_all(&mut self, other: &Self) {
906        for tt in &other.tokens {
907            if !self.tokens.contains(tt) {
908                self.tokens.push(tt.clone());
909            }
910        }
911        if !other.maybe_empty {
912            self.maybe_empty = false;
913        }
914    }
915}
916
917// Checks that `matcher` is internally consistent and that it
918// can legally be followed by a token `N`, for all `N` in `follow`.
919// (If `follow` is empty, then it imposes no constraint on
920// the `matcher`.)
921//
922// Returns the set of NT tokens that could possibly come last in
923// `matcher`. (If `matcher` matches the empty sequence, then
924// `maybe_empty` will be set to true.)
925//
926// Requires that `first_sets` is pre-computed for `matcher`;
927// see `FirstSets::new`.
928fn check_matcher_core<'tt>(
929    sess: &Session,
930    node_id: NodeId,
931    first_sets: &FirstSets<'tt>,
932    matcher: &'tt [mbe::TokenTree],
933    follow: &TokenSet<'tt>,
934) -> Result<TokenSet<'tt>, ErrorGuaranteed> {
935    use mbe::TokenTree;
936
937    let mut last = TokenSet::empty();
938
939    let mut errored = Ok(());
940
941    // 2. For each token and suffix  [T, SUFFIX] in M:
942    // ensure that T can be followed by SUFFIX, and if SUFFIX may be empty,
943    // then ensure T can also be followed by any element of FOLLOW.
944    'each_token: for i in 0..matcher.len() {
945        let token = &matcher[i];
946        let suffix = &matcher[i + 1..];
947
948        let build_suffix_first = || {
949            let mut s = first_sets.first(suffix);
950            if s.maybe_empty {
951                s.add_all(follow);
952            }
953            s
954        };
955
956        // (we build `suffix_first` on demand below; you can tell
957        // which cases are supposed to fall through by looking for the
958        // initialization of this variable.)
959        let suffix_first;
960
961        // First, update `last` so that it corresponds to the set
962        // of NT tokens that might end the sequence `... token`.
963        match token {
964            TokenTree::Token(..)
965            | TokenTree::MetaVar(..)
966            | TokenTree::MetaVarDecl { .. }
967            | TokenTree::MetaVarExpr(..) => {
968                if token_can_be_followed_by_any(token) {
969                    // don't need to track tokens that work with any,
970                    last.replace_with_irrelevant();
971                    // ... and don't need to check tokens that can be
972                    // followed by anything against SUFFIX.
973                    continue 'each_token;
974                } else {
975                    last.replace_with(TtHandle::TtRef(token));
976                    suffix_first = build_suffix_first();
977                }
978            }
979            TokenTree::Delimited(span, _, d) => {
980                let my_suffix = TokenSet::singleton(TtHandle::from_token_kind(
981                    d.delim.as_close_token_kind(),
982                    span.close,
983                ));
984                check_matcher_core(sess, node_id, first_sets, &d.tts, &my_suffix)?;
985                // don't track non NT tokens
986                last.replace_with_irrelevant();
987
988                // also, we don't need to check delimited sequences
989                // against SUFFIX
990                continue 'each_token;
991            }
992            TokenTree::Sequence(_, seq_rep) => {
993                suffix_first = build_suffix_first();
994                // The trick here: when we check the interior, we want
995                // to include the separator (if any) as a potential
996                // (but not guaranteed) element of FOLLOW. So in that
997                // case, we make a temp copy of suffix and stuff
998                // delimiter in there.
999                //
1000                // FIXME: Should I first scan suffix_first to see if
1001                // delimiter is already in it before I go through the
1002                // work of cloning it? But then again, this way I may
1003                // get a "tighter" span?
1004                let mut new;
1005                let my_suffix = if let Some(sep) = &seq_rep.separator {
1006                    new = suffix_first.clone();
1007                    new.add_one_maybe(TtHandle::from_token(*sep));
1008                    &new
1009                } else {
1010                    &suffix_first
1011                };
1012
1013                // At this point, `suffix_first` is built, and
1014                // `my_suffix` is some TokenSet that we can use
1015                // for checking the interior of `seq_rep`.
1016                let next = check_matcher_core(sess, node_id, first_sets, &seq_rep.tts, my_suffix)?;
1017                if next.maybe_empty {
1018                    last.add_all(&next);
1019                } else {
1020                    last = next;
1021                }
1022
1023                // the recursive call to check_matcher_core already ran the 'each_last
1024                // check below, so we can just keep going forward here.
1025                continue 'each_token;
1026            }
1027        }
1028
1029        // (`suffix_first` guaranteed initialized once reaching here.)
1030
1031        // Now `last` holds the complete set of NT tokens that could
1032        // end the sequence before SUFFIX. Check that every one works with `suffix`.
1033        for tt in &last.tokens {
1034            if let &TokenTree::MetaVarDecl { span, name, kind } = tt.get() {
1035                for next_token in &suffix_first.tokens {
1036                    let next_token = next_token.get();
1037
1038                    // Check if the old pat is used and the next token is `|`
1039                    // to warn about incompatibility with Rust 2021.
1040                    // We only emit this lint if we're parsing the original
1041                    // definition of this macro_rules, not while (re)parsing
1042                    // the macro when compiling another crate that is using the
1043                    // macro. (See #86567.)
1044                    if is_defined_in_current_crate(node_id)
1045                        && matches!(kind, NonterminalKind::Pat(PatParam { inferred: true }))
1046                        && matches!(
1047                            next_token,
1048                            TokenTree::Token(token) if *token == token::Or
1049                        )
1050                    {
1051                        // It is suggestion to use pat_param, for example: $x:pat -> $x:pat_param.
1052                        let suggestion = quoted_tt_to_string(&TokenTree::MetaVarDecl {
1053                            span,
1054                            name,
1055                            kind: NonterminalKind::Pat(PatParam { inferred: false }),
1056                        });
1057                        sess.psess.buffer_lint(
1058                            RUST_2021_INCOMPATIBLE_OR_PATTERNS,
1059                            span,
1060                            ast::CRATE_NODE_ID,
1061                            BuiltinLintDiag::OrPatternsBackCompat(span, suggestion),
1062                        );
1063                    }
1064                    match is_in_follow(next_token, kind) {
1065                        IsInFollow::Yes => {}
1066                        IsInFollow::No(possible) => {
1067                            let may_be = if last.tokens.len() == 1 && suffix_first.tokens.len() == 1
1068                            {
1069                                "is"
1070                            } else {
1071                                "may be"
1072                            };
1073
1074                            let sp = next_token.span();
1075                            let mut err = sess.dcx().struct_span_err(
1076                                sp,
1077                                format!(
1078                                    "`${name}:{frag}` {may_be} followed by `{next}`, which \
1079                                     is not allowed for `{frag}` fragments",
1080                                    name = name,
1081                                    frag = kind,
1082                                    next = quoted_tt_to_string(next_token),
1083                                    may_be = may_be
1084                                ),
1085                            );
1086                            err.span_label(sp, format!("not allowed after `{kind}` fragments"));
1087
1088                            if kind == NonterminalKind::Pat(PatWithOr)
1089                                && sess.psess.edition.at_least_rust_2021()
1090                                && next_token.is_token(&token::Or)
1091                            {
1092                                let suggestion = quoted_tt_to_string(&TokenTree::MetaVarDecl {
1093                                    span,
1094                                    name,
1095                                    kind: NonterminalKind::Pat(PatParam { inferred: false }),
1096                                });
1097                                err.span_suggestion(
1098                                    span,
1099                                    "try a `pat_param` fragment specifier instead",
1100                                    suggestion,
1101                                    Applicability::MaybeIncorrect,
1102                                );
1103                            }
1104
1105                            let msg = "allowed there are: ";
1106                            match possible {
1107                                &[] => {}
1108                                &[t] => {
1109                                    err.note(format!(
1110                                        "only {t} is allowed after `{kind}` fragments",
1111                                    ));
1112                                }
1113                                ts => {
1114                                    err.note(format!(
1115                                        "{}{} or {}",
1116                                        msg,
1117                                        ts[..ts.len() - 1].to_vec().join(", "),
1118                                        ts[ts.len() - 1],
1119                                    ));
1120                                }
1121                            }
1122                            errored = Err(err.emit());
1123                        }
1124                    }
1125                }
1126            }
1127        }
1128    }
1129    errored?;
1130    Ok(last)
1131}
1132
1133fn token_can_be_followed_by_any(tok: &mbe::TokenTree) -> bool {
1134    if let mbe::TokenTree::MetaVarDecl { kind, .. } = *tok {
1135        frag_can_be_followed_by_any(kind)
1136    } else {
1137        // (Non NT's can always be followed by anything in matchers.)
1138        true
1139    }
1140}
1141
1142/// Returns `true` if a fragment of type `frag` can be followed by any sort of
1143/// token. We use this (among other things) as a useful approximation
1144/// for when `frag` can be followed by a repetition like `$(...)*` or
1145/// `$(...)+`. In general, these can be a bit tricky to reason about,
1146/// so we adopt a conservative position that says that any fragment
1147/// specifier which consumes at most one token tree can be followed by
1148/// a fragment specifier (indeed, these fragments can be followed by
1149/// ANYTHING without fear of future compatibility hazards).
1150fn frag_can_be_followed_by_any(kind: NonterminalKind) -> bool {
1151    matches!(
1152        kind,
1153        NonterminalKind::Item           // always terminated by `}` or `;`
1154        | NonterminalKind::Block        // exactly one token tree
1155        | NonterminalKind::Ident        // exactly one token tree
1156        | NonterminalKind::Literal      // exactly one token tree
1157        | NonterminalKind::Meta         // exactly one token tree
1158        | NonterminalKind::Lifetime     // exactly one token tree
1159        | NonterminalKind::TT // exactly one token tree
1160    )
1161}
1162
/// Outcome of a FOLLOW-set check for a single (fragment, next-token) pair.
enum IsInFollow {
    /// The token may legally follow the fragment.
    Yes,
    /// The token may not follow the fragment; carries the list of allowed
    /// tokens (as user-facing strings) for the error message.
    No(&'static [&'static str]),
}
1167
/// Returns whether `frag` can legally be followed by the token `tok`. For
/// fragments that can consume an unbounded number of tokens, `tok`
/// must be within a well-defined follow set. This is intended to
/// guarantee future compatibility: for example, without this rule, if
/// we expanded `expr` to include a new binary operator, we might
/// break macros that were relying on that binary operator as a
/// separator.
// when changing this do not forget to update doc/book/macros.md!
fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow {
    use mbe::TokenTree;

    if let TokenTree::Token(Token { kind, .. }) = tok
        && kind.close_delim().is_some()
    {
        // closing a token tree can never be matched by any fragment;
        // iow, we always require that `(` and `)` match, etc.
        IsInFollow::Yes
    } else {
        match kind {
            NonterminalKind::Item => {
                // since items *must* be followed by either a `;` or a `}`, we can
                // accept anything after them
                IsInFollow::Yes
            }
            NonterminalKind::Block => {
                // anything can follow block, the braces provide an easy boundary to
                // maintain
                IsInFollow::Yes
            }
            NonterminalKind::Stmt | NonterminalKind::Expr(_) => {
                const TOKENS: &[&str] = &["`=>`", "`,`", "`;`"];
                match tok {
                    TokenTree::Token(token) => match token.kind {
                        FatArrow | Comma | Semi => IsInFollow::Yes,
                        _ => IsInFollow::No(TOKENS),
                    },
                    _ => IsInFollow::No(TOKENS),
                }
            }
            NonterminalKind::Pat(PatParam { .. }) => {
                const TOKENS: &[&str] = &["`=>`", "`,`", "`=`", "`|`", "`if`", "`in`"];
                match tok {
                    TokenTree::Token(token) => match token.kind {
                        FatArrow | Comma | Eq | Or => IsInFollow::Yes,
                        Ident(name, IdentIsRaw::No) if name == kw::If || name == kw::In => {
                            IsInFollow::Yes
                        }
                        _ => IsInFollow::No(TOKENS),
                    },
                    _ => IsInFollow::No(TOKENS),
                }
            }
            NonterminalKind::Pat(PatWithOr) => {
                // Unlike `pat_param`, `|` is absent here: it is part of the
                // or-pattern itself rather than a legal follower.
                const TOKENS: &[&str] = &["`=>`", "`,`", "`=`", "`if`", "`in`"];
                match tok {
                    TokenTree::Token(token) => match token.kind {
                        FatArrow | Comma | Eq => IsInFollow::Yes,
                        Ident(name, IdentIsRaw::No) if name == kw::If || name == kw::In => {
                            IsInFollow::Yes
                        }
                        _ => IsInFollow::No(TOKENS),
                    },
                    _ => IsInFollow::No(TOKENS),
                }
            }
            NonterminalKind::Path | NonterminalKind::Ty => {
                const TOKENS: &[&str] = &[
                    "`{`", "`[`", "`=>`", "`,`", "`>`", "`=`", "`:`", "`;`", "`|`", "`as`",
                    "`where`",
                ];
                match tok {
                    TokenTree::Token(token) => match token.kind {
                        // NOTE(review): `Shr` appears to be accepted so that a
                        // `>>` following the fragment (e.g. in nested generics)
                        // is treated like `>` — confirm against the parser.
                        OpenBrace | OpenBracket | Comma | FatArrow | Colon | Eq | Gt | Shr
                        | Semi | Or => IsInFollow::Yes,
                        Ident(name, IdentIsRaw::No) if name == kw::As || name == kw::Where => {
                            IsInFollow::Yes
                        }
                        _ => IsInFollow::No(TOKENS),
                    },
                    TokenTree::MetaVarDecl { kind: NonterminalKind::Block, .. } => IsInFollow::Yes,
                    _ => IsInFollow::No(TOKENS),
                }
            }
            NonterminalKind::Ident | NonterminalKind::Lifetime => {
                // being a single token, idents and lifetimes are harmless
                IsInFollow::Yes
            }
            NonterminalKind::Literal => {
                // literals may be of a single token, or two tokens (negative numbers)
                IsInFollow::Yes
            }
            NonterminalKind::Meta | NonterminalKind::TT => {
                // being either a single token or a delimited sequence, tt is
                // harmless
                IsInFollow::Yes
            }
            NonterminalKind::Vis => {
                // Explicitly disallow `priv`, on the off chance it comes back.
                const TOKENS: &[&str] = &["`,`", "an ident", "a type"];
                match tok {
                    TokenTree::Token(token) => match token.kind {
                        Comma => IsInFollow::Yes,
                        // Raw identifiers are always allowed: `r#priv` is an
                        // ordinary ident, not the `priv` keyword.
                        Ident(_, IdentIsRaw::Yes) => IsInFollow::Yes,
                        Ident(name, _) if name != kw::Priv => IsInFollow::Yes,
                        _ => {
                            if token.can_begin_type() {
                                IsInFollow::Yes
                            } else {
                                IsInFollow::No(TOKENS)
                            }
                        }
                    },
                    TokenTree::MetaVarDecl {
                        kind: NonterminalKind::Ident | NonterminalKind::Ty | NonterminalKind::Path,
                        ..
                    } => IsInFollow::Yes,
                    _ => IsInFollow::No(TOKENS),
                }
            }
        }
    }
}
1290
1291fn quoted_tt_to_string(tt: &mbe::TokenTree) -> String {
1292    match tt {
1293        mbe::TokenTree::Token(token) => pprust::token_to_string(token).into(),
1294        mbe::TokenTree::MetaVar(_, name) => format!("${name}"),
1295        mbe::TokenTree::MetaVarDecl { name, kind, .. } => format!("${name}:{kind}"),
1296        _ => panic!(
1297            "{}",
1298            "unexpected mbe::TokenTree::{Sequence or Delimited} \
1299             in follow set checker"
1300        ),
1301    }
1302}
1303
/// Returns `true` if the macro with this `node_id` was defined in the crate
/// currently being compiled (as opposed to being loaded from a dependency).
fn is_defined_in_current_crate(node_id: NodeId) -> bool {
    // Macros defined in the current crate have a real node id,
    // whereas macros from an external crate have a dummy id.
    node_id != DUMMY_NODE_ID
}
1309
1310pub(super) fn parser_from_cx(
1311    psess: &ParseSess,
1312    mut tts: TokenStream,
1313    recovery: Recovery,
1314) -> Parser<'_> {
1315    tts.desugar_doc_comments();
1316    Parser::new(psess, tts, rustc_parse::MACRO_ARGUMENTS).recovery(recovery)
1317}