// rustc_expand/mbe/macro_rules.rs

1use std::borrow::Cow;
2use std::collections::hash_map::Entry;
3use std::sync::Arc;
4use std::{mem, slice};
5
6use ast::token::IdentIsRaw;
7use rustc_ast::token::NtPatKind::*;
8use rustc_ast::token::TokenKind::*;
9use rustc_ast::token::{self, NonterminalKind, Token, TokenKind};
10use rustc_ast::tokenstream::{DelimSpan, TokenStream};
11use rustc_ast::{self as ast, DUMMY_NODE_ID, NodeId};
12use rustc_ast_pretty::pprust;
13use rustc_attr_data_structures::{AttributeKind, find_attr};
14use rustc_data_structures::fx::{FxHashMap, FxIndexMap};
15use rustc_errors::{Applicability, Diag, ErrorGuaranteed};
16use rustc_feature::Features;
17use rustc_hir as hir;
18use rustc_lint_defs::BuiltinLintDiag;
19use rustc_lint_defs::builtin::{
20    RUST_2021_INCOMPATIBLE_OR_PATTERNS, SEMICOLON_IN_EXPRESSIONS_FROM_MACROS,
21};
22use rustc_parse::exp;
23use rustc_parse::parser::{Parser, Recovery};
24use rustc_session::Session;
25use rustc_session::parse::ParseSess;
26use rustc_span::edition::Edition;
27use rustc_span::hygiene::Transparency;
28use rustc_span::{Ident, Span, kw, sym};
29use tracing::{debug, instrument, trace, trace_span};
30
31use super::macro_parser::{NamedMatches, NamedParseResult};
32use super::{SequenceRepetition, diagnostics};
33use crate::base::{
34    DummyResult, ExpandResult, ExtCtxt, MacResult, MacroExpanderResult, SyntaxExtension,
35    SyntaxExtensionKind, TTMacroExpander,
36};
37use crate::expand::{AstFragment, AstFragmentKind, ensure_complete_parse, parse_ast_fragment};
38use crate::mbe::macro_parser::{Error, ErrorReported, Failure, MatcherLoc, Success, TtParser};
39use crate::mbe::quoted::{RulePart, parse_one_tt};
40use crate::mbe::transcribe::transcribe;
41use crate::mbe::{self, KleeneOp, macro_check};
42
/// Holds a parser positioned at the start of a macro's expanded token stream,
/// together with the context needed to report good errors if parsing the
/// expansion as the requested AST fragment fails.
pub(crate) struct ParserAnyMacro<'a> {
    parser: Parser<'a>,

    /// Span of the expansion site of the macro this parser is for
    site_span: Span,
    /// The ident of the macro we're parsing
    macro_ident: Ident,
    /// Node id used when buffering lints against this expansion.
    lint_node_id: NodeId,
    // Forwarded to the `TrailingMacro` lint diagnostic; presumably true when
    // the invocation is the trailing statement of a block — TODO confirm.
    is_trailing_mac: bool,
    /// Span of the rule's right-hand side that produced this token stream.
    arm_span: Span,
    /// Whether or not this macro is defined in the current crate
    is_local: bool,
}
56
impl<'a> ParserAnyMacro<'a> {
    /// Parses the macro's expanded tokens as the AST fragment `kind` requested
    /// by the expansion site. On a parse error, emits a macro-aware diagnostic
    /// and returns a dummy fragment so expansion can continue.
    pub(crate) fn make(mut self: Box<ParserAnyMacro<'a>>, kind: AstFragmentKind) -> AstFragment {
        let ParserAnyMacro {
            site_span,
            macro_ident,
            ref mut parser,
            lint_node_id,
            arm_span,
            is_trailing_mac,
            is_local,
        } = *self;
        // Snapshot the parser up front so the error path can replay the parse
        // for a better diagnostic.
        let snapshot = &mut parser.create_snapshot_for_diagnostic();
        let fragment = match parse_ast_fragment(parser, kind) {
            Ok(f) => f,
            Err(err) => {
                let guar = diagnostics::emit_frag_parse_err(
                    err, parser, snapshot, site_span, arm_span, kind,
                );
                return kind.dummy(site_span, guar);
            }
        };

        // We allow semicolons at the end of expressions -- e.g., the semicolon in
        // `macro_rules! m { () => { panic!(); } }` isn't parsed by `.parse_expr()`,
        // but `m!()` is allowed in expression positions (cf. issue #34706).
        if kind == AstFragmentKind::Expr && parser.token == token::Semi {
            if is_local {
                // Only macros defined in the current crate get the lint;
                // external macros have no usable lint node id.
                parser.psess.buffer_lint(
                    SEMICOLON_IN_EXPRESSIONS_FROM_MACROS,
                    parser.token.span,
                    lint_node_id,
                    BuiltinLintDiag::TrailingMacro(is_trailing_mac, macro_ident),
                );
            }
            parser.bump();
        }

        // Make sure we don't have any tokens left to parse so we don't silently drop anything.
        let path = ast::Path::from_ident(macro_ident.with_span_pos(site_span));
        ensure_complete_parse(parser, &path, kind.name(), site_span);
        fragment
    }
}
100
/// A single `lhs => rhs` rule of a declarative macro.
pub(super) struct MacroRule {
    /// The left-hand side, pre-lowered to `MatcherLoc` form for fast matching.
    pub(super) lhs: Vec<MatcherLoc>,
    /// Span of the original lhs token tree, used for unused-rule reporting.
    lhs_span: Span,
    /// The right-hand side (transcriber) of the rule.
    rhs: mbe::TokenTree,
}
106
/// The expander behind a compiled `macro_rules!` (or `macro`) definition:
/// its rules plus the metadata needed for diagnostics and hygiene.
struct MacroRulesMacroExpander {
    /// `DUMMY_NODE_ID` for macros from external crates; a real id otherwise.
    node_id: NodeId,
    name: Ident,
    span: Span,
    transparency: Transparency,
    rules: Vec<MacroRule>,
}
114
impl TTMacroExpander for MacroRulesMacroExpander {
    /// Expands an invocation of this macro by delegating to `expand_macro`.
    fn expand<'cx>(
        &self,
        cx: &'cx mut ExtCtxt<'_>,
        sp: Span,
        input: TokenStream,
    ) -> MacroExpanderResult<'cx> {
        ExpandResult::Ready(expand_macro(
            cx,
            sp,
            self.span,
            self.node_id,
            self.name,
            self.transparency,
            input,
            &self.rules,
        ))
    }

    /// Returns the name and lhs span of rule `rule_i` for the unused-rule
    /// lint, or `None` if the rule should not be reported.
    fn get_unused_rule(&self, rule_i: usize) -> Option<(&Ident, Span)> {
        // If the rhs contains an invocation like `compile_error!`, don't report it as unused.
        let rule = &self.rules[rule_i];
        if has_compile_error_macro(&rule.rhs) { None } else { Some((&self.name, rule.lhs_span)) }
    }
}
140
141struct DummyExpander(ErrorGuaranteed);
142
impl TTMacroExpander for DummyExpander {
    /// Always expands to a dummy result; the underlying definition error was
    /// already reported when the macro was compiled.
    fn expand<'cx>(
        &self,
        _: &'cx mut ExtCtxt<'_>,
        span: Span,
        _: TokenStream,
    ) -> ExpandResult<Box<dyn MacResult + 'cx>, ()> {
        ExpandResult::Ready(DummyResult::any(span, self.0))
    }
}
153
154fn trace_macros_note(cx_expansions: &mut FxIndexMap<Span, Vec<String>>, sp: Span, message: String) {
155    let sp = sp.macro_backtrace().last().map_or(sp, |trace| trace.call_site);
156    cx_expansions.entry(sp).or_default().push(message);
157}
158
/// Hooks that observe macro matching, used both for the zero-cost hot path
/// (`NoopTracker`) and for diagnostics-gathering retries.
pub(super) trait Tracker<'matcher> {
    /// The contents of `ParseResult::Failure`.
    type Failure;

    /// Arm failed to match. If the token is `token::Eof`, it indicates an unexpected
    /// end of macro invocation. Otherwise, it indicates that no rules expected the given token.
    /// The usize is the approximate position of the token in the input token stream.
    fn build_failure(tok: Token, position: u32, msg: &'static str) -> Self::Failure;

    /// This is called before trying to match next MatcherLoc on the current token.
    fn before_match_loc(&mut self, _parser: &TtParser, _matcher: &'matcher MatcherLoc) {}

    /// This is called after an arm has been parsed, either successfully or unsuccessfully. When
    /// this is called, `before_match_loc` was called at least once (with a `MatcherLoc::Eof`).
    fn after_arm(&mut self, _result: &NamedParseResult<Self::Failure>) {}

    /// For tracing.
    fn description() -> &'static str;

    /// Parser recovery mode to use while matching; forbidden by default so the
    /// hot path never attempts recovery.
    fn recovery() -> Recovery {
        Recovery::Forbidden
    }
}
182
/// A noop tracker that is used in the hot path of the expansion, has zero overhead thanks to
/// monomorphization.
pub(super) struct NoopTracker;
186
impl<'matcher> Tracker<'matcher> for NoopTracker {
    // Failures carry no payload; all diagnostics work happens on the retry
    // path with a real tracker.
    type Failure = ();

    fn build_failure(_tok: Token, _position: u32, _msg: &'static str) -> Self::Failure {}

    fn description() -> &'static str {
        "none"
    }
}
196
/// Expands the rules based macro defined by `rules` for a given input `arg`.
///
/// On a match, transcribes the winning rule's rhs and returns a parser over
/// the result; on failure, retries matching with a diagnostics tracker (via
/// `diagnostics::failed_to_match_macro`) to produce a good error.
#[instrument(skip(cx, transparency, arg, rules))]
fn expand_macro<'cx>(
    cx: &'cx mut ExtCtxt<'_>,
    sp: Span,
    def_span: Span,
    node_id: NodeId,
    name: Ident,
    transparency: Transparency,
    arg: TokenStream,
    rules: &[MacroRule],
) -> Box<dyn MacResult + 'cx> {
    let psess = &cx.sess.psess;
    // Macros defined in the current crate have a real node id,
    // whereas macros from an external crate have a dummy id.
    let is_local = node_id != DUMMY_NODE_ID;

    if cx.trace_macros() {
        let msg = format!("expanding `{}! {{ {} }}`", name, pprust::tts_to_string(&arg));
        trace_macros_note(&mut cx.expansions, sp, msg);
    }

    // Track nothing for the best performance.
    let try_success_result = try_match_macro(psess, name, &arg, rules, &mut NoopTracker);

    match try_success_result {
        Ok((i, rule, named_matches)) => {
            // `compile_declarative_macro` only accepts delimited rhs, so any
            // other shape here is a compiler bug.
            let mbe::TokenTree::Delimited(rhs_span, _, ref rhs) = rule.rhs else {
                cx.dcx().span_bug(sp, "malformed macro rhs");
            };
            let arm_span = rule.rhs.span();

            // rhs has holes ( `$id` and `$(...)` ) that need to be filled in.
            let id = cx.current_expansion.id;
            let tts = match transcribe(psess, &named_matches, rhs, rhs_span, transparency, id) {
                Ok(tts) => tts,
                Err(err) => {
                    let guar = err.emit();
                    return DummyResult::any(arm_span, guar);
                }
            };

            if cx.trace_macros() {
                let msg = format!("to `{}`", pprust::tts_to_string(&tts));
                trace_macros_note(&mut cx.expansions, sp, msg);
            }

            let p = Parser::new(psess, tts, None);

            if is_local {
                // Record the matched arm index for the unused-rule lint.
                cx.resolver.record_macro_rule_usage(node_id, i);
            }

            // Let the context choose how to interpret the result.
            // Weird, but useful for X-macros.
            Box::new(ParserAnyMacro {
                parser: p,

                // Pass along the original expansion site and the name of the macro
                // so we can print a useful error message if the parse of the expanded
                // macro leaves unparsed tokens.
                site_span: sp,
                macro_ident: name,
                lint_node_id: cx.current_expansion.lint_node_id,
                is_trailing_mac: cx.current_expansion.is_trailing_mac,
                arm_span,
                is_local,
            })
        }
        Err(CanRetry::No(guar)) => {
            debug!("Will not retry matching as an error was emitted already");
            DummyResult::any(sp, guar)
        }
        Err(CanRetry::Yes) => {
            // Retry and emit a better error.
            let (span, guar) =
                diagnostics::failed_to_match_macro(cx.psess(), sp, def_span, name, arg, rules);
            cx.trace_macros_diag();
            DummyResult::any(span, guar)
        }
    }
}
279
/// Whether a failed match attempt may be retried (with a diagnostics tracker)
/// to produce a better error message.
pub(super) enum CanRetry {
    Yes,
    /// We are not allowed to retry macro expansion as a fatal error has been emitted already.
    No(ErrorGuaranteed),
}
285
/// Try expanding the macro. Returns the index of the successful arm and its named_matches if it was successful,
/// and nothing if it failed. On failure, it's the callers job to use `track` accordingly to record all errors
/// correctly.
#[instrument(level = "debug", skip(psess, arg, rules, track), fields(tracking = %T::description()))]
pub(super) fn try_match_macro<'matcher, T: Tracker<'matcher>>(
    psess: &ParseSess,
    name: Ident,
    arg: &TokenStream,
    rules: &'matcher [MacroRule],
    track: &mut T,
) -> Result<(usize, &'matcher MacroRule, NamedMatches), CanRetry> {
    // We create a base parser that can be used for the "black box" parts.
    // Every iteration needs a fresh copy of that parser. However, the parser
    // is not mutated on many of the iterations, particularly when dealing with
    // macros like this:
    //
    // macro_rules! foo {
    //     ("a") => (A);
    //     ("b") => (B);
    //     ("c") => (C);
    //     // ... etc. (maybe hundreds more)
    // }
    //
    // as seen in the `html5ever` benchmark. We use a `Cow` so that the base
    // parser is only cloned when necessary (upon mutation). Furthermore, we
    // reinitialize the `Cow` with the base parser at the start of every
    // iteration, so that any mutated parsers are not reused. This is all quite
    // hacky, but speeds up the `html5ever` benchmark significantly. (Issue
    // 68836 suggests a more comprehensive but more complex change to deal with
    // this situation.)
    let parser = parser_from_cx(psess, arg.clone(), T::recovery());
    // Try each arm's matchers.
    let mut tt_parser = TtParser::new(name);
    for (i, rule) in rules.iter().enumerate() {
        let _tracing_span = trace_span!("Matching arm", %i);

        // Take a snapshot of the state of pre-expansion gating at this point.
        // This is used so that if a matcher is not `Success(..)`ful,
        // then the spans which became gated when parsing the unsuccessful matcher
        // are not recorded. On the first `Success(..)`ful matcher, the spans are merged.
        let mut gated_spans_snapshot = mem::take(&mut *psess.gated_spans.spans.borrow_mut());

        let result = tt_parser.parse_tt(&mut Cow::Borrowed(&parser), &rule.lhs, track);

        track.after_arm(&result);

        match result {
            Success(named_matches) => {
                debug!("Parsed arm successfully");
                // The matcher was `Success(..)`ful.
                // Merge the gated spans from parsing the matcher with the preexisting ones.
                psess.gated_spans.merge(gated_spans_snapshot);

                return Ok((i, rule, named_matches));
            }
            Failure(_) => {
                trace!("Failed to match arm, trying the next one");
                // Try the next arm.
            }
            Error(_, _) => {
                debug!("Fatal error occurred during matching");
                // We haven't emitted an error yet, so we can retry.
                return Err(CanRetry::Yes);
            }
            ErrorReported(guarantee) => {
                debug!("Fatal error occurred and was reported during matching");
                // An error has been reported already, we cannot retry as that would cause duplicate errors.
                return Err(CanRetry::No(guarantee));
            }
        }

        // The matcher was not `Success(..)`ful.
        // Restore to the state before snapshotting and maybe try again.
        // (Only reached on `Failure`; the other arms return above.)
        mem::swap(&mut gated_spans_snapshot, &mut psess.gated_spans.spans.borrow_mut());
    }

    // No arm matched; let the caller retry with a tracker for diagnostics.
    Err(CanRetry::Yes)
}
364
/// Converts a macro item into a syntax extension.
///
/// Parses the macro body into `lhs => rhs` rules, validates each side, and
/// returns the extension together with the number of rules (for unused-rule
/// linting; 0 for non-local macros or on error).
pub fn compile_declarative_macro(
    sess: &Session,
    features: &Features,
    macro_def: &ast::MacroDef,
    ident: Ident,
    attrs: &[hir::Attribute],
    span: Span,
    node_id: NodeId,
    edition: Edition,
) -> (SyntaxExtension, usize) {
    // Macros from an external crate carry a dummy node id.
    let is_local = node_id != DUMMY_NODE_ID;
    let mk_syn_ext = |expander| {
        let kind = SyntaxExtensionKind::LegacyBang(expander);
        SyntaxExtension::new(sess, kind, span, Vec::new(), edition, ident.name, attrs, is_local)
    };
    // Fallback used when the definition is invalid: a dummy expander and a
    // rule count of 0 so nothing is tracked for linting.
    let dummy_syn_ext = |guar| (mk_syn_ext(Arc::new(DummyExpander(guar))), 0);

    // `macro_rules!` separates rules with `;`, `macro` with `,`.
    let macro_rules = macro_def.macro_rules;
    let exp_sep = if macro_rules { exp!(Semi) } else { exp!(Comma) };

    let body = macro_def.body.tokens.clone();
    let mut p = Parser::new(&sess.psess, body, rustc_parse::MACRO_ARGUMENTS);

    // Don't abort iteration early, so that multiple errors can be reported. We only abort early on
    // parse failures we can't recover from.
    let mut guar = None;
    let mut check_emission = |ret: Result<(), ErrorGuaranteed>| guar = guar.or(ret.err());

    let mut rules = Vec::new();

    while p.token != token::Eof {
        let lhs_tt = p.parse_token_tree();
        let lhs_tt = parse_one_tt(lhs_tt, RulePart::Pattern, sess, node_id, features, edition);
        check_emission(check_lhs(sess, node_id, &lhs_tt));
        if let Err(e) = p.expect(exp!(FatArrow)) {
            return dummy_syn_ext(e.emit());
        }
        if let Some(guar) = check_no_eof(sess, &p, "expected right-hand side of macro rule") {
            return dummy_syn_ext(guar);
        }
        let rhs_tt = p.parse_token_tree();
        let rhs_tt = parse_one_tt(rhs_tt, RulePart::Body, sess, node_id, features, edition);
        check_emission(check_rhs(sess, &rhs_tt));
        check_emission(macro_check::check_meta_variables(&sess.psess, node_id, &lhs_tt, &rhs_tt));
        let lhs_span = lhs_tt.span();
        // Convert the lhs into `MatcherLoc` form, which is better for doing the
        // actual matching.
        let lhs = if let mbe::TokenTree::Delimited(.., delimited) = lhs_tt {
            mbe::macro_parser::compute_locs(&delimited.tts)
        } else {
            // A non-delimited lhs was already rejected by `check_lhs` above,
            // so `guar` is necessarily `Some` here.
            return dummy_syn_ext(guar.unwrap());
        };
        rules.push(MacroRule { lhs, lhs_span, rhs: rhs_tt });
        if p.token == token::Eof {
            break;
        }
        if let Err(e) = p.expect(exp_sep) {
            return dummy_syn_ext(e.emit());
        }
    }

    if rules.is_empty() {
        let guar = sess.dcx().span_err(span, "macros must contain at least one rule");
        return dummy_syn_ext(guar);
    }

    let transparency = find_attr!(attrs, AttributeKind::MacroTransparency(x) => *x)
        .unwrap_or(Transparency::fallback(macro_rules));

    if let Some(guar) = guar {
        // To avoid warning noise, only consider the rules of this
        // macro for the lint, if all rules are valid.
        return dummy_syn_ext(guar);
    }

    // Return the number of rules for unused rule linting, if this is a local macro.
    let nrules = if is_local { rules.len() } else { 0 };

    let expander =
        Arc::new(MacroRulesMacroExpander { name: ident, span, node_id, transparency, rules });
    (mk_syn_ext(expander), nrules)
}
448
449fn check_no_eof(sess: &Session, p: &Parser<'_>, msg: &'static str) -> Option<ErrorGuaranteed> {
450    if p.token == token::Eof {
451        let err_sp = p.token.span.shrink_to_hi();
452        let guar = sess
453            .dcx()
454            .struct_span_err(err_sp, "macro definition ended unexpectedly")
455            .with_span_label(err_sp, msg)
456            .emit();
457        return Some(guar);
458    }
459    None
460}
461
462fn check_lhs(sess: &Session, node_id: NodeId, lhs: &mbe::TokenTree) -> Result<(), ErrorGuaranteed> {
463    let e1 = check_lhs_nt_follows(sess, node_id, lhs);
464    let e2 = check_lhs_no_empty_seq(sess, slice::from_ref(lhs));
465    e1.and(e2)
466}
467
468fn check_lhs_nt_follows(
469    sess: &Session,
470    node_id: NodeId,
471    lhs: &mbe::TokenTree,
472) -> Result<(), ErrorGuaranteed> {
473    // lhs is going to be like TokenTree::Delimited(...), where the
474    // entire lhs is those tts. Or, it can be a "bare sequence", not wrapped in parens.
475    if let mbe::TokenTree::Delimited(.., delimited) = lhs {
476        check_matcher(sess, node_id, &delimited.tts)
477    } else {
478        let msg = "invalid macro matcher; matchers must be contained in balanced delimiters";
479        Err(sess.dcx().span_err(lhs.span(), msg))
480    }
481}
482
/// Returns whether the separator-less sequence `seq` can match the empty
/// token stream, which would make the matcher loop forever. Doc comments are
/// noted (they are ignored in matcher position) but do not make a sequence
/// non-empty.
fn is_empty_token_tree(sess: &Session, seq: &mbe::SequenceRepetition) -> bool {
    if seq.separator.is_some() {
        // A separator forces at least one token between repetitions.
        false
    } else {
        let mut is_empty = true;
        let mut iter = seq.tts.iter().peekable();
        while let Some(tt) = iter.next() {
            match tt {
                // `$x:vis` can match nothing.
                mbe::TokenTree::MetaVarDecl { kind: NonterminalKind::Vis, .. } => {}
                mbe::TokenTree::Token(t @ Token { kind: DocComment(..), .. }) => {
                    // Coalesce a run of consecutive doc comments into a single
                    // note spanning all of them.
                    let mut now = t;
                    while let Some(&mbe::TokenTree::Token(
                        next @ Token { kind: DocComment(..), .. },
                    )) = iter.peek()
                    {
                        now = next;
                        iter.next();
                    }
                    let span = t.span.to(now.span);
                    sess.dcx().span_note(span, "doc comments are ignored in matcher position");
                }
                // A `*` or `?` inner repetition can also match nothing.
                mbe::TokenTree::Sequence(_, sub_seq)
                    if (sub_seq.kleene.op == mbe::KleeneOp::ZeroOrMore
                        || sub_seq.kleene.op == mbe::KleeneOp::ZeroOrOne) => {}
                // Anything else requires at least one token.
                _ => is_empty = false,
            }
        }
        is_empty
    }
}
513
514/// Checks if a `vis` nonterminal fragment is unnecessarily wrapped in an optional repetition.
515///
516/// When a `vis` fragment (which can already be empty) is wrapped in `$(...)?`,
517/// this suggests removing the redundant repetition syntax since it provides no additional benefit.
518fn check_redundant_vis_repetition(
519    err: &mut Diag<'_>,
520    sess: &Session,
521    seq: &SequenceRepetition,
522    span: &DelimSpan,
523) {
524    let is_zero_or_one: bool = seq.kleene.op == KleeneOp::ZeroOrOne;
525    let is_vis = seq.tts.first().map_or(false, |tt| {
526        matches!(tt, mbe::TokenTree::MetaVarDecl { kind: NonterminalKind::Vis, .. })
527    });
528
529    if is_vis && is_zero_or_one {
530        err.note("a `vis` fragment can already be empty");
531        err.multipart_suggestion(
532            "remove the `$(` and `)?`",
533            vec![
534                (
535                    sess.source_map().span_extend_to_prev_char_before(span.open, '$', true),
536                    "".to_string(),
537                ),
538                (span.close.with_hi(seq.kleene.span.hi()), "".to_string()),
539            ],
540            Applicability::MaybeIncorrect,
541        );
542    }
543}
544
545/// Checks that the lhs contains no repetition which could match an empty token
546/// tree, because then the matcher would hang indefinitely.
547fn check_lhs_no_empty_seq(sess: &Session, tts: &[mbe::TokenTree]) -> Result<(), ErrorGuaranteed> {
548    use mbe::TokenTree;
549    for tt in tts {
550        match tt {
551            TokenTree::Token(..)
552            | TokenTree::MetaVar(..)
553            | TokenTree::MetaVarDecl { .. }
554            | TokenTree::MetaVarExpr(..) => (),
555            TokenTree::Delimited(.., del) => check_lhs_no_empty_seq(sess, &del.tts)?,
556            TokenTree::Sequence(span, seq) => {
557                if is_empty_token_tree(sess, seq) {
558                    let sp = span.entire();
559                    let mut err =
560                        sess.dcx().struct_span_err(sp, "repetition matches empty token tree");
561                    check_redundant_vis_repetition(&mut err, sess, seq, span);
562                    return Err(err.emit());
563                }
564                check_lhs_no_empty_seq(sess, &seq.tts)?
565            }
566        }
567    }
568
569    Ok(())
570}
571
572fn check_rhs(sess: &Session, rhs: &mbe::TokenTree) -> Result<(), ErrorGuaranteed> {
573    match *rhs {
574        mbe::TokenTree::Delimited(..) => Ok(()),
575        _ => Err(sess.dcx().span_err(rhs.span(), "macro rhs must be delimited")),
576    }
577}
578
579fn check_matcher(
580    sess: &Session,
581    node_id: NodeId,
582    matcher: &[mbe::TokenTree],
583) -> Result<(), ErrorGuaranteed> {
584    let first_sets = FirstSets::new(matcher);
585    let empty_suffix = TokenSet::empty();
586    check_matcher_core(sess, node_id, &first_sets, matcher, &empty_suffix)?;
587    Ok(())
588}
589
/// Returns whether `rhs` contains what looks like a `compile_error!(...)`
/// invocation at any nesting depth. Such arms are intentionally unmatched and
/// should not be flagged by the unused-rule lint.
fn has_compile_error_macro(rhs: &mbe::TokenTree) -> bool {
    match rhs {
        mbe::TokenTree::Delimited(.., d) => {
            // Scan for the three-token pattern `compile_error` `!` `( ... )`.
            let has_compile_error = d.tts.array_windows::<3>().any(|[ident, bang, args]| {
                if let mbe::TokenTree::Token(ident) = ident
                    && let TokenKind::Ident(ident, _) = ident.kind
                    && ident == sym::compile_error
                    && let mbe::TokenTree::Token(bang) = bang
                    && let TokenKind::Bang = bang.kind
                    && let mbe::TokenTree::Delimited(.., del) = args
                    && !del.delim.skip()
                {
                    true
                } else {
                    false
                }
            });
            // Not found at this level: recurse into nested delimited groups.
            if has_compile_error { true } else { d.tts.iter().any(has_compile_error_macro) }
        }
        // Leaf token trees cannot contain an invocation.
        _ => false,
    }
}
612
// The `FirstSets` for a matcher is a mapping from subsequences in the
// matcher to the FIRST set for that subsequence.
//
// This mapping is partially precomputed via a backwards scan over the
// token trees of the matcher, which provides a mapping from each
// repetition sequence to its *first* set.
//
// (Hypothetically, sequences should be uniquely identifiable via their
// spans, though perhaps that is false, e.g., for macro-generated macros
// that do not try to inject artificial span information. My plan is
// to try to catch such cases ahead of time and not include them in
// the precomputed mapping.)
struct FirstSets<'tt> {
    // this maps each TokenTree::Sequence `$(tt ...) SEP OP` that is uniquely identified by its
    // span in the original matcher to the First set for the inner sequence `tt ...`.
    //
    // If two sequences have the same span in a matcher, then map that
    // span to None (invalidating the mapping here and forcing the code to
    // use a slow path).
    first: FxHashMap<Span, Option<TokenSet<'tt>>>,
}
634
impl<'tt> FirstSets<'tt> {
    /// Precomputes the FIRST set of every repetition sequence in `tts` via a
    /// single backwards pass.
    fn new(tts: &'tt [mbe::TokenTree]) -> FirstSets<'tt> {
        use mbe::TokenTree;

        let mut sets = FirstSets { first: FxHashMap::default() };
        build_recur(&mut sets, tts);
        // Explicit `return` so the nested item below can follow it.
        return sets;

        // walks backward over `tts`, returning the FIRST for `tts`
        // and updating `sets` at the same time for all sequence
        // substructure we find within `tts`.
        fn build_recur<'tt>(sets: &mut FirstSets<'tt>, tts: &'tt [TokenTree]) -> TokenSet<'tt> {
            let mut first = TokenSet::empty();
            for tt in tts.iter().rev() {
                match tt {
                    TokenTree::Token(..)
                    | TokenTree::MetaVar(..)
                    | TokenTree::MetaVarDecl { .. }
                    | TokenTree::MetaVarExpr(..) => {
                        // A single token: it alone is the FIRST of the suffix.
                        first.replace_with(TtHandle::TtRef(tt));
                    }
                    TokenTree::Delimited(span, _, delimited) => {
                        // Still recurse to record inner sequences, but the
                        // FIRST token is the opening delimiter itself.
                        build_recur(sets, &delimited.tts);
                        first.replace_with(TtHandle::from_token_kind(
                            delimited.delim.as_open_token_kind(),
                            span.open,
                        ));
                    }
                    TokenTree::Sequence(sp, seq_rep) => {
                        let subfirst = build_recur(sets, &seq_rep.tts);

                        match sets.first.entry(sp.entire()) {
                            Entry::Vacant(vac) => {
                                vac.insert(Some(subfirst.clone()));
                            }
                            Entry::Occupied(mut occ) => {
                                // if there is already an entry, then a span must have collided.
                                // This should not happen with typical macro_rules macros,
                                // but syntax extensions need not maintain distinct spans,
                                // so distinct syntax trees can be assigned the same span.
                                // In such a case, the map cannot be trusted; so mark this
                                // entry as unusable.
                                occ.insert(None);
                            }
                        }

                        // If the sequence contents can be empty, then the first
                        // token could be the separator token itself.

                        if let (Some(sep), true) = (&seq_rep.separator, subfirst.maybe_empty) {
                            first.add_one_maybe(TtHandle::from_token(*sep));
                        }

                        // Reverse scan: Sequence comes before `first`.
                        if subfirst.maybe_empty
                            || seq_rep.kleene.op == mbe::KleeneOp::ZeroOrMore
                            || seq_rep.kleene.op == mbe::KleeneOp::ZeroOrOne
                        {
                            // If sequence is potentially empty, then
                            // union them (preserving first emptiness).
                            first.add_all(&TokenSet { maybe_empty: true, ..subfirst });
                        } else {
                            // Otherwise, sequence guaranteed
                            // non-empty; replace first.
                            first = subfirst;
                        }
                    }
                }
            }

            first
        }
    }

    // walks forward over `tts` until all potential FIRST tokens are
    // identified.
    fn first(&self, tts: &'tt [mbe::TokenTree]) -> TokenSet<'tt> {
        use mbe::TokenTree;

        let mut first = TokenSet::empty();
        for tt in tts.iter() {
            // Invariant: we only keep scanning while everything seen so far
            // could match the empty sequence.
            assert!(first.maybe_empty);
            match tt {
                TokenTree::Token(..)
                | TokenTree::MetaVar(..)
                | TokenTree::MetaVarDecl { .. }
                | TokenTree::MetaVarExpr(..) => {
                    first.add_one(TtHandle::TtRef(tt));
                    return first;
                }
                TokenTree::Delimited(span, _, delimited) => {
                    first.add_one(TtHandle::from_token_kind(
                        delimited.delim.as_open_token_kind(),
                        span.open,
                    ));
                    return first;
                }
                TokenTree::Sequence(sp, seq_rep) => {
                    let subfirst_owned;
                    let subfirst = match self.first.get(&sp.entire()) {
                        Some(Some(subfirst)) => subfirst,
                        Some(&None) => {
                            // Span collision during precomputation (see `new`):
                            // fall back to computing the set on the fly.
                            subfirst_owned = self.first(&seq_rep.tts);
                            &subfirst_owned
                        }
                        None => {
                            panic!("We missed a sequence during FirstSets construction");
                        }
                    };

                    // If the sequence contents can be empty, then the first
                    // token could be the separator token itself.
                    if let (Some(sep), true) = (&seq_rep.separator, subfirst.maybe_empty) {
                        first.add_one_maybe(TtHandle::from_token(*sep));
                    }

                    assert!(first.maybe_empty);
                    first.add_all(subfirst);
                    if subfirst.maybe_empty
                        || seq_rep.kleene.op == mbe::KleeneOp::ZeroOrMore
                        || seq_rep.kleene.op == mbe::KleeneOp::ZeroOrOne
                    {
                        // Continue scanning for more first
                        // tokens, but also make sure we
                        // restore empty-tracking state.
                        first.maybe_empty = true;
                        continue;
                    } else {
                        return first;
                    }
                }
            }
        }

        // we only exit the loop if `tts` was empty or if every
        // element of `tts` matches the empty sequence.
        assert!(first.maybe_empty);
        first
    }
}
775
/// Most `mbe::TokenTree`s are preexisting in the matcher, but some are defined
/// implicitly, such as opening/closing delimiters and sequence repetition ops.
/// This type encapsulates both kinds. It implements `Clone` while avoiding the
/// need for `mbe::TokenTree` to implement `Clone`.
#[derive(Debug)]
enum TtHandle<'tt> {
    /// This is used in most cases: a borrow of a token tree that already
    /// exists in the matcher.
    TtRef(&'tt mbe::TokenTree),

    /// This is only used for implicit token trees. The `mbe::TokenTree` *must*
    /// be `mbe::TokenTree::Token`. No other variants are allowed. We store an
    /// `mbe::TokenTree` rather than a `Token` so that `get()` can return a
    /// `&mbe::TokenTree`.
    Token(mbe::TokenTree),
}
791
792impl<'tt> TtHandle<'tt> {
793    fn from_token(tok: Token) -> Self {
794        TtHandle::Token(mbe::TokenTree::Token(tok))
795    }
796
797    fn from_token_kind(kind: TokenKind, span: Span) -> Self {
798        TtHandle::from_token(Token::new(kind, span))
799    }
800
801    // Get a reference to a token tree.
802    fn get(&'tt self) -> &'tt mbe::TokenTree {
803        match self {
804            TtHandle::TtRef(tt) => tt,
805            TtHandle::Token(token_tt) => token_tt,
806        }
807    }
808}
809
810impl<'tt> PartialEq for TtHandle<'tt> {
811    fn eq(&self, other: &TtHandle<'tt>) -> bool {
812        self.get() == other.get()
813    }
814}
815
816impl<'tt> Clone for TtHandle<'tt> {
817    fn clone(&self) -> Self {
818        match self {
819            TtHandle::TtRef(tt) => TtHandle::TtRef(tt),
820
821            // This variant *must* contain a `mbe::TokenTree::Token`, and not
822            // any other variant of `mbe::TokenTree`.
823            TtHandle::Token(mbe::TokenTree::Token(tok)) => {
824                TtHandle::Token(mbe::TokenTree::Token(*tok))
825            }
826
827            _ => unreachable!(),
828        }
829    }
830}
831
/// A set of `mbe::TokenTree`s, which may include `TokenTree::Match`s
/// (for macro-by-example syntactic variables). It also carries the
/// `maybe_empty` flag; that is true if and only if the matcher can
/// match an empty token sequence.
///
/// The First set is computed on submatchers like `$($a:expr b),* $(c)* d`,
/// which has corresponding FIRST = {$a:expr, c, d}.
/// Likewise, `$($a:expr b),* $(c)+ d` has FIRST = {$a:expr, c}.
///
/// (Notably, we must allow for *-op to occur zero times.)
#[derive(Clone, Debug)]
struct TokenSet<'tt> {
    // The members of the set, kept free of duplicates by the `add_*` methods.
    tokens: Vec<TtHandle<'tt>>,
    // True iff the matcher this set was computed for can match the empty
    // token sequence.
    maybe_empty: bool,
}
847
848impl<'tt> TokenSet<'tt> {
849    // Returns a set for the empty sequence.
850    fn empty() -> Self {
851        TokenSet { tokens: Vec::new(), maybe_empty: true }
852    }
853
854    // Returns the set `{ tok }` for the single-token (and thus
855    // non-empty) sequence [tok].
856    fn singleton(tt: TtHandle<'tt>) -> Self {
857        TokenSet { tokens: vec![tt], maybe_empty: false }
858    }
859
860    // Changes self to be the set `{ tok }`.
861    // Since `tok` is always present, marks self as non-empty.
862    fn replace_with(&mut self, tt: TtHandle<'tt>) {
863        self.tokens.clear();
864        self.tokens.push(tt);
865        self.maybe_empty = false;
866    }
867
868    // Changes self to be the empty set `{}`; meant for use when
869    // the particular token does not matter, but we want to
870    // record that it occurs.
871    fn replace_with_irrelevant(&mut self) {
872        self.tokens.clear();
873        self.maybe_empty = false;
874    }
875
876    // Adds `tok` to the set for `self`, marking sequence as non-empty.
877    fn add_one(&mut self, tt: TtHandle<'tt>) {
878        if !self.tokens.contains(&tt) {
879            self.tokens.push(tt);
880        }
881        self.maybe_empty = false;
882    }
883
884    // Adds `tok` to the set for `self`. (Leaves `maybe_empty` flag alone.)
885    fn add_one_maybe(&mut self, tt: TtHandle<'tt>) {
886        if !self.tokens.contains(&tt) {
887            self.tokens.push(tt);
888        }
889    }
890
891    // Adds all elements of `other` to this.
892    //
893    // (Since this is a set, we filter out duplicates.)
894    //
895    // If `other` is potentially empty, then preserves the previous
896    // setting of the empty flag of `self`. If `other` is guaranteed
897    // non-empty, then `self` is marked non-empty.
898    fn add_all(&mut self, other: &Self) {
899        for tt in &other.tokens {
900            if !self.tokens.contains(tt) {
901                self.tokens.push(tt.clone());
902            }
903        }
904        if !other.maybe_empty {
905            self.maybe_empty = false;
906        }
907    }
908}
909
/// Checks that `matcher` is internally consistent and that it
/// can legally be followed by a token `N`, for all `N` in `follow`.
/// (If `follow` is empty, then it imposes no constraint on
/// the `matcher`.)
///
/// Returns the set of NT tokens that could possibly come last in
/// `matcher`. (If `matcher` matches the empty sequence, then
/// `maybe_empty` will be set to true.)
///
/// On failure, emits one diagnostic per offending fragment/follow-token pair
/// and returns the last `ErrorGuaranteed`; note that checking continues after
/// the first error so all problems are reported in one pass.
///
/// Requires that `first_sets` is pre-computed for `matcher`;
/// see `FirstSets::new`.
fn check_matcher_core<'tt>(
    sess: &Session,
    node_id: NodeId,
    first_sets: &FirstSets<'tt>,
    matcher: &'tt [mbe::TokenTree],
    follow: &TokenSet<'tt>,
) -> Result<TokenSet<'tt>, ErrorGuaranteed> {
    use mbe::TokenTree;

    let mut last = TokenSet::empty();

    // Accumulates the most recent error so we can keep checking after a
    // failure and still return `Err` at the end.
    let mut errored = Ok(());

    // 2. For each token and suffix  [T, SUFFIX] in M:
    // ensure that T can be followed by SUFFIX, and if SUFFIX may be empty,
    // then ensure T can also be followed by any element of FOLLOW.
    'each_token: for i in 0..matcher.len() {
        let token = &matcher[i];
        let suffix = &matcher[i + 1..];

        // FIRST(SUFFIX), extended with FOLLOW when the suffix can be empty.
        let build_suffix_first = || {
            let mut s = first_sets.first(suffix);
            if s.maybe_empty {
                s.add_all(follow);
            }
            s
        };

        // (we build `suffix_first` on demand below; you can tell
        // which cases are supposed to fall through by looking for the
        // initialization of this variable.)
        let suffix_first;

        // First, update `last` so that it corresponds to the set
        // of NT tokens that might end the sequence `... token`.
        match token {
            TokenTree::Token(..)
            | TokenTree::MetaVar(..)
            | TokenTree::MetaVarDecl { .. }
            | TokenTree::MetaVarExpr(..) => {
                if token_can_be_followed_by_any(token) {
                    // don't need to track tokens that work with any,
                    last.replace_with_irrelevant();
                    // ... and don't need to check tokens that can be
                    // followed by anything against SUFFIX.
                    continue 'each_token;
                } else {
                    last.replace_with(TtHandle::TtRef(token));
                    suffix_first = build_suffix_first();
                }
            }
            TokenTree::Delimited(span, _, d) => {
                // The interior of a delimited group is always followed by the
                // matching close delimiter, so check it against exactly that.
                let my_suffix = TokenSet::singleton(TtHandle::from_token_kind(
                    d.delim.as_close_token_kind(),
                    span.close,
                ));
                check_matcher_core(sess, node_id, first_sets, &d.tts, &my_suffix)?;
                // don't track non NT tokens
                last.replace_with_irrelevant();

                // also, we don't need to check delimited sequences
                // against SUFFIX
                continue 'each_token;
            }
            TokenTree::Sequence(_, seq_rep) => {
                suffix_first = build_suffix_first();
                // The trick here: when we check the interior, we want
                // to include the separator (if any) as a potential
                // (but not guaranteed) element of FOLLOW. So in that
                // case, we make a temp copy of suffix and stuff
                // delimiter in there.
                //
                // FIXME: Should I first scan suffix_first to see if
                // delimiter is already in it before I go through the
                // work of cloning it? But then again, this way I may
                // get a "tighter" span?
                let mut new;
                let my_suffix = if let Some(sep) = &seq_rep.separator {
                    new = suffix_first.clone();
                    new.add_one_maybe(TtHandle::from_token(*sep));
                    &new
                } else {
                    &suffix_first
                };

                // At this point, `suffix_first` is built, and
                // `my_suffix` is some TokenSet that we can use
                // for checking the interior of `seq_rep`.
                let next = check_matcher_core(sess, node_id, first_sets, &seq_rep.tts, my_suffix)?;
                if next.maybe_empty {
                    // The sequence may match nothing, so whatever could end
                    // the preceding tokens can still end here too.
                    last.add_all(&next);
                } else {
                    last = next;
                }

                // the recursive call to check_matcher_core already ran the 'each_last
                // check below, so we can just keep going forward here.
                continue 'each_token;
            }
        }

        // (`suffix_first` guaranteed initialized once reaching here.)

        // Now `last` holds the complete set of NT tokens that could
        // end the sequence before SUFFIX. Check that every one works with `suffix`.
        for tt in &last.tokens {
            if let &TokenTree::MetaVarDecl { span, name, kind } = tt.get() {
                for next_token in &suffix_first.tokens {
                    let next_token = next_token.get();

                    // Check if the old pat is used and the next token is `|`
                    // to warn about incompatibility with Rust 2021.
                    // We only emit this lint if we're parsing the original
                    // definition of this macro_rules, not while (re)parsing
                    // the macro when compiling another crate that is using the
                    // macro. (See #86567.)
                    // Macros defined in the current crate have a real node id,
                    // whereas macros from an external crate have a dummy id.
                    if node_id != DUMMY_NODE_ID
                        && matches!(kind, NonterminalKind::Pat(PatParam { inferred: true }))
                        && matches!(
                            next_token,
                            TokenTree::Token(token) if *token == token::Or
                        )
                    {
                        // It is suggestion to use pat_param, for example: $x:pat -> $x:pat_param.
                        let suggestion = quoted_tt_to_string(&TokenTree::MetaVarDecl {
                            span,
                            name,
                            kind: NonterminalKind::Pat(PatParam { inferred: false }),
                        });
                        sess.psess.buffer_lint(
                            RUST_2021_INCOMPATIBLE_OR_PATTERNS,
                            span,
                            ast::CRATE_NODE_ID,
                            BuiltinLintDiag::OrPatternsBackCompat(span, suggestion),
                        );
                    }
                    match is_in_follow(next_token, kind) {
                        IsInFollow::Yes => {}
                        IsInFollow::No(possible) => {
                            // "is" only when the pairing is unambiguous:
                            // exactly one candidate on each side.
                            let may_be = if last.tokens.len() == 1 && suffix_first.tokens.len() == 1
                            {
                                "is"
                            } else {
                                "may be"
                            };

                            let sp = next_token.span();
                            let mut err = sess.dcx().struct_span_err(
                                sp,
                                format!(
                                    "`${name}:{frag}` {may_be} followed by `{next}`, which \
                                     is not allowed for `{frag}` fragments",
                                    name = name,
                                    frag = kind,
                                    next = quoted_tt_to_string(next_token),
                                    may_be = may_be
                                ),
                            );
                            err.span_label(sp, format!("not allowed after `{kind}` fragments"));

                            if kind == NonterminalKind::Pat(PatWithOr)
                                && sess.psess.edition.at_least_rust_2021()
                                && next_token.is_token(&token::Or)
                            {
                                let suggestion = quoted_tt_to_string(&TokenTree::MetaVarDecl {
                                    span,
                                    name,
                                    kind: NonterminalKind::Pat(PatParam { inferred: false }),
                                });
                                err.span_suggestion(
                                    span,
                                    "try a `pat_param` fragment specifier instead",
                                    suggestion,
                                    Applicability::MaybeIncorrect,
                                );
                            }

                            let msg = "allowed there are: ";
                            match possible {
                                &[] => {}
                                &[t] => {
                                    err.note(format!(
                                        "only {t} is allowed after `{kind}` fragments",
                                    ));
                                }
                                ts => {
                                    err.note(format!(
                                        "{}{} or {}",
                                        msg,
                                        ts[..ts.len() - 1].to_vec().join(", "),
                                        ts[ts.len() - 1],
                                    ));
                                }
                            }
                            errored = Err(err.emit());
                        }
                    }
                }
            }
        }
    }
    errored?;
    Ok(last)
}
1127
1128fn token_can_be_followed_by_any(tok: &mbe::TokenTree) -> bool {
1129    if let mbe::TokenTree::MetaVarDecl { kind, .. } = *tok {
1130        frag_can_be_followed_by_any(kind)
1131    } else {
1132        // (Non NT's can always be followed by anything in matchers.)
1133        true
1134    }
1135}
1136
1137/// Returns `true` if a fragment of type `frag` can be followed by any sort of
1138/// token. We use this (among other things) as a useful approximation
1139/// for when `frag` can be followed by a repetition like `$(...)*` or
1140/// `$(...)+`. In general, these can be a bit tricky to reason about,
1141/// so we adopt a conservative position that says that any fragment
1142/// specifier which consumes at most one token tree can be followed by
1143/// a fragment specifier (indeed, these fragments can be followed by
1144/// ANYTHING without fear of future compatibility hazards).
1145fn frag_can_be_followed_by_any(kind: NonterminalKind) -> bool {
1146    matches!(
1147        kind,
1148        NonterminalKind::Item           // always terminated by `}` or `;`
1149        | NonterminalKind::Block        // exactly one token tree
1150        | NonterminalKind::Ident        // exactly one token tree
1151        | NonterminalKind::Literal      // exactly one token tree
1152        | NonterminalKind::Meta         // exactly one token tree
1153        | NonterminalKind::Lifetime     // exactly one token tree
1154        | NonterminalKind::TT // exactly one token tree
1155    )
1156}
1157
/// Result of a FOLLOW-set membership test (see `is_in_follow`).
enum IsInFollow {
    /// The token may legally follow the fragment.
    Yes,
    /// The token may not follow the fragment; the payload lists the allowed
    /// tokens as user-facing strings, for use in the error message.
    No(&'static [&'static str]),
}
1162
/// Returns `IsInFollow::Yes` if a fragment of kind `kind` can legally be
/// followed by the token `tok`. For
/// fragments that can consume an unbounded number of tokens, `tok`
/// must be within a well-defined follow set. This is intended to
/// guarantee future compatibility: for example, without this rule, if
/// we expanded `expr` to include a new binary operator, we might
/// break macros that were relying on that binary operator as a
/// separator.
// when changing this do not forget to update doc/book/macros.md!
fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow {
    use mbe::TokenTree;

    if let TokenTree::Token(Token { kind, .. }) = tok
        && kind.close_delim().is_some()
    {
        // closing a token tree can never be matched by any fragment;
        // iow, we always require that `(` and `)` match, etc.
        IsInFollow::Yes
    } else {
        match kind {
            NonterminalKind::Item => {
                // since items *must* be followed by either a `;` or a `}`, we can
                // accept anything after them
                IsInFollow::Yes
            }
            NonterminalKind::Block => {
                // anything can follow block, the braces provide an easy boundary to
                // maintain
                IsInFollow::Yes
            }
            NonterminalKind::Stmt | NonterminalKind::Expr(_) => {
                const TOKENS: &[&str] = &["`=>`", "`,`", "`;`"];
                match tok {
                    TokenTree::Token(token) => match token.kind {
                        FatArrow | Comma | Semi => IsInFollow::Yes,
                        _ => IsInFollow::No(TOKENS),
                    },
                    _ => IsInFollow::No(TOKENS),
                }
            }
            NonterminalKind::Pat(PatParam { .. }) => {
                const TOKENS: &[&str] = &["`=>`", "`,`", "`=`", "`|`", "`if`", "`in`"];
                match tok {
                    TokenTree::Token(token) => match token.kind {
                        FatArrow | Comma | Eq | Or => IsInFollow::Yes,
                        // `if` and `in` are keyword idents, matched by name.
                        Ident(name, IdentIsRaw::No) if name == kw::If || name == kw::In => {
                            IsInFollow::Yes
                        }
                        _ => IsInFollow::No(TOKENS),
                    },
                    _ => IsInFollow::No(TOKENS),
                }
            }
            NonterminalKind::Pat(PatWithOr) => {
                // Same as `PatParam` above, minus `|`: or-patterns consume `|`
                // themselves, so it cannot be a follow token.
                const TOKENS: &[&str] = &["`=>`", "`,`", "`=`", "`if`", "`in`"];
                match tok {
                    TokenTree::Token(token) => match token.kind {
                        FatArrow | Comma | Eq => IsInFollow::Yes,
                        Ident(name, IdentIsRaw::No) if name == kw::If || name == kw::In => {
                            IsInFollow::Yes
                        }
                        _ => IsInFollow::No(TOKENS),
                    },
                    _ => IsInFollow::No(TOKENS),
                }
            }
            NonterminalKind::Path | NonterminalKind::Ty => {
                const TOKENS: &[&str] = &[
                    "`{`", "`[`", "`=>`", "`,`", "`>`", "`=`", "`:`", "`;`", "`|`", "`as`",
                    "`where`",
                ];
                match tok {
                    TokenTree::Token(token) => match token.kind {
                        // `Shr` (`>>`) is allowed because it splits into two `>`s.
                        OpenBrace | OpenBracket | Comma | FatArrow | Colon | Eq | Gt | Shr
                        | Semi | Or => IsInFollow::Yes,
                        Ident(name, IdentIsRaw::No) if name == kw::As || name == kw::Where => {
                            IsInFollow::Yes
                        }
                        _ => IsInFollow::No(TOKENS),
                    },
                    TokenTree::MetaVarDecl { kind: NonterminalKind::Block, .. } => IsInFollow::Yes,
                    _ => IsInFollow::No(TOKENS),
                }
            }
            NonterminalKind::Ident | NonterminalKind::Lifetime => {
                // being a single token, idents and lifetimes are harmless
                IsInFollow::Yes
            }
            NonterminalKind::Literal => {
                // literals may be of a single token, or two tokens (negative numbers)
                IsInFollow::Yes
            }
            NonterminalKind::Meta | NonterminalKind::TT => {
                // being either a single token or a delimited sequence, tt is
                // harmless
                IsInFollow::Yes
            }
            NonterminalKind::Vis => {
                // Explicitly disallow `priv`, on the off chance it comes back.
                const TOKENS: &[&str] = &["`,`", "an ident", "a type"];
                match tok {
                    TokenTree::Token(token) => match token.kind {
                        Comma => IsInFollow::Yes,
                        Ident(_, IdentIsRaw::Yes) => IsInFollow::Yes,
                        Ident(name, _) if name != kw::Priv => IsInFollow::Yes,
                        _ => {
                            if token.can_begin_type() {
                                IsInFollow::Yes
                            } else {
                                IsInFollow::No(TOKENS)
                            }
                        }
                    },
                    TokenTree::MetaVarDecl {
                        kind: NonterminalKind::Ident | NonterminalKind::Ty | NonterminalKind::Path,
                        ..
                    } => IsInFollow::Yes,
                    _ => IsInFollow::No(TOKENS),
                }
            }
        }
    }
}
1285
1286fn quoted_tt_to_string(tt: &mbe::TokenTree) -> String {
1287    match tt {
1288        mbe::TokenTree::Token(token) => pprust::token_to_string(token).into(),
1289        mbe::TokenTree::MetaVar(_, name) => format!("${name}"),
1290        mbe::TokenTree::MetaVarDecl { name, kind, .. } => format!("${name}:{kind}"),
1291        _ => panic!(
1292            "{}",
1293            "unexpected mbe::TokenTree::{Sequence or Delimited} \
1294             in follow set checker"
1295        ),
1296    }
1297}
1298
1299pub(super) fn parser_from_cx(
1300    psess: &ParseSess,
1301    mut tts: TokenStream,
1302    recovery: Recovery,
1303) -> Parser<'_> {
1304    tts.desugar_doc_comments();
1305    Parser::new(psess, tts, rustc_parse::MACRO_ARGUMENTS).recovery(recovery)
1306}