use std::borrow::Cow;
use std::collections::hash_map::Entry;
use std::sync::Arc;
use std::{mem, slice};

use ast::token::IdentIsRaw;
use rustc_ast::token::NtPatKind::*;
use rustc_ast::token::TokenKind::*;
use rustc_ast::token::{self, NonterminalKind, Token, TokenKind};
use rustc_ast::tokenstream::{DelimSpan, TokenStream};
use rustc_ast::{self as ast, DUMMY_NODE_ID, NodeId};
use rustc_ast_pretty::pprust;
use rustc_attr_data_structures::{AttributeKind, find_attr};
use rustc_data_structures::fx::{FxHashMap, FxIndexMap};
use rustc_errors::{Applicability, Diag, ErrorGuaranteed};
use rustc_feature::Features;
use rustc_hir as hir;
use rustc_lint_defs::BuiltinLintDiag;
use rustc_lint_defs::builtin::{
    RUST_2021_INCOMPATIBLE_OR_PATTERNS, SEMICOLON_IN_EXPRESSIONS_FROM_MACROS,
};
use rustc_parse::exp;
use rustc_parse::parser::{Parser, Recovery};
use rustc_session::Session;
use rustc_session::parse::ParseSess;
use rustc_span::edition::Edition;
use rustc_span::hygiene::Transparency;
use rustc_span::{Ident, Span, kw, sym};
use tracing::{debug, instrument, trace, trace_span};

use super::macro_parser::{NamedMatches, NamedParseResult};
use super::{SequenceRepetition, diagnostics};
use crate::base::{
    DummyResult, ExpandResult, ExtCtxt, MacResult, MacroExpanderResult, SyntaxExtension,
    SyntaxExtensionKind, TTMacroExpander,
};
use crate::expand::{AstFragment, AstFragmentKind, ensure_complete_parse, parse_ast_fragment};
use crate::mbe::macro_parser::{Error, ErrorReported, Failure, MatcherLoc, Success, TtParser};
use crate::mbe::quoted::{RulePart, parse_one_tt};
use crate::mbe::transcribe::transcribe;
use crate::mbe::{self, KleeneOp, macro_check};

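/// A parser primed with the token stream produced by a successful macro
/// expansion, ready to parse it as an AST fragment of the requested kind.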
pub(crate) struct ParserAnyMacro<'a> {
    parser: Parser<'a>,

    /// Span of the expansion site of the macro this parser is for.
    site_span: Span,
    /// The ident of the macro we're parsing.
    macro_ident: Ident,
    lint_node_id: NodeId,
    is_trailing_mac: bool,
    arm_span: Span,
    /// Whether or not this macro is defined in the current crate.
    is_local: bool,
}

impl<'a> ParserAnyMacro<'a> {
    pub(crate) fn make(mut self: Box<ParserAnyMacro<'a>>, kind: AstFragmentKind) -> AstFragment {
        let ParserAnyMacro {
            site_span,
            macro_ident,
            ref mut parser,
            lint_node_id,
            arm_span,
            is_trailing_mac,
            is_local,
        } = *self;
        let snapshot = &mut parser.create_snapshot_for_diagnostic();
        let fragment = match parse_ast_fragment(parser, kind) {
            Ok(f) => f,
            Err(err) => {
                let guar = diagnostics::emit_frag_parse_err(
                    err, parser, snapshot, site_span, arm_span, kind,
                );
                return kind.dummy(site_span, guar);
            }
        };

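        // A trailing semicolon after an expression fragment is tolerated for
        // backwards compatibility: e.g. with `macro_rules! m { () => { 0; } }`,
        // the `;` is not consumed by expression parsing, yet `m!()` is still
        // accepted in expression position. Uses in the current crate get the
        // `SEMICOLON_IN_EXPRESSIONS_FROM_MACROS` compatibility lint instead of
        // an error.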
        if kind == AstFragmentKind::Expr && parser.token == token::Semi {
            if is_local {
                parser.psess.buffer_lint(
                    SEMICOLON_IN_EXPRESSIONS_FROM_MACROS,
                    parser.token.span,
                    lint_node_id,
                    BuiltinLintDiag::TrailingMacro(is_trailing_mac, macro_ident),
                );
            }
            parser.bump();
        }

        // Make sure we don't have any tokens left to parse so we don't silently drop anything.
        let path = ast::Path::from_ident(macro_ident.with_span_pos(site_span));
        ensure_complete_parse(parser, &path, kind.name(), site_span);
        fragment
    }

    #[instrument(skip(cx, tts))]
    pub(crate) fn from_tts<'cx>(
        cx: &'cx mut ExtCtxt<'a>,
        tts: TokenStream,
        site_span: Span,
        arm_span: Span,
        is_local: bool,
        macro_ident: Ident,
    ) -> Self {
        Self {
            parser: Parser::new(&cx.sess.psess, tts, None),

            // Pass along the original expansion site and the name of the macro,
            // so we can print a useful error message if the parse of the expanded
            // macro leaves unparsed tokens.
            site_span,
            macro_ident,
            lint_node_id: cx.current_expansion.lint_node_id,
            is_trailing_mac: cx.current_expansion.is_trailing_mac,
            arm_span,
            is_local,
        }
    }
}

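/// A single `lhs => rhs` rule of a `macro_rules!` definition, with the lhs
/// already lowered into `MatcherLoc`s for the matcher.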
pub(super) struct MacroRule {
    pub(super) lhs: Vec<MatcherLoc>,
    lhs_span: Span,
    rhs: mbe::TokenTree,
}

struct MacroRulesMacroExpander {
    node_id: NodeId,
    name: Ident,
    span: Span,
    transparency: Transparency,
    rules: Vec<MacroRule>,
}

impl TTMacroExpander for MacroRulesMacroExpander {
    fn expand<'cx>(
        &self,
        cx: &'cx mut ExtCtxt<'_>,
        sp: Span,
        input: TokenStream,
    ) -> MacroExpanderResult<'cx> {
        ExpandResult::Ready(expand_macro(
            cx,
            sp,
            self.span,
            self.node_id,
            self.name,
            self.transparency,
            input,
            &self.rules,
        ))
    }

    fn get_unused_rule(&self, rule_i: usize) -> Option<(&Ident, Span)> {
        // If the rhs contains an invocation like `compile_error!`, don't report
        // the rule as unused: it exists only to emit an error message.
        let rule = &self.rules[rule_i];
        if has_compile_error_macro(&rule.rhs) { None } else { Some((&self.name, rule.lhs_span)) }
    }
}

struct DummyExpander(ErrorGuaranteed);

impl TTMacroExpander for DummyExpander {
    fn expand<'cx>(
        &self,
        _: &'cx mut ExtCtxt<'_>,
        span: Span,
        _: TokenStream,
    ) -> ExpandResult<Box<dyn MacResult + 'cx>, ()> {
        ExpandResult::Ready(DummyResult::any(span, self.0))
    }
}

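/// Records one step for the `trace_macros!` feature, keyed by the outermost
/// call site. With tracing enabled, expanding e.g. `m!(x)` produces notes
/// roughly like (illustrative):
///
/// ```text
/// expanding `m! { x }`
/// to `x + 1`
/// ```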
fn trace_macros_note(cx_expansions: &mut FxIndexMap<Span, Vec<String>>, sp: Span, message: String) {
    let sp = sp.macro_backtrace().last().map_or(sp, |trace| trace.call_site);
    cx_expansions.entry(sp).or_default().push(message);
}

pub(super) trait Tracker<'matcher> {
    /// The contents of `ParseResult::Failure`.
    type Failure;

    /// Arm failed to match. If the token is `token::Eof`, it indicates an unexpected
    /// end of macro invocation. Otherwise, it indicates that no rules expected the given token.
    /// The `position` is the approximate position of the token in the input token stream.
    fn build_failure(tok: Token, position: u32, msg: &'static str) -> Self::Failure;

    /// This is called before trying to match the next `MatcherLoc` on the current token.
    fn before_match_loc(&mut self, _parser: &TtParser, _matcher: &'matcher MatcherLoc) {}

    /// This is called after an arm has been parsed, either successfully or unsuccessfully.
    fn after_arm(&mut self, _result: &NamedParseResult<Self::Failure>) {}

    /// For tracing.
    fn description() -> &'static str;

    fn recovery() -> Recovery {
        Recovery::Forbidden
    }
}

/// A noop tracker that is used in the hot path of the expansion; it has zero
/// overhead thanks to monomorphization.
pub(super) struct NoopTracker;

impl<'matcher> Tracker<'matcher> for NoopTracker {
    type Failure = ();

    fn build_failure(_tok: Token, _position: u32, _msg: &'static str) -> Self::Failure {}

    fn description() -> &'static str {
        "none"
    }
}

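/// Expands the rules-based macro defined by `rules` for a given input `arg`.
///
/// For example, given the (illustrative, not from this crate) definition
///
/// ```rust,ignore (illustrative)
/// macro_rules! m {
///     ($e:expr) => { $e * 2 };
/// }
/// ```
///
/// a call `m!(3)` matches the single rule, binds `$e` to `3`, and transcribes
/// the right-hand side to the token stream `3 * 2`, which is then parsed as an
/// expression at the call site.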
#[instrument(skip(cx, transparency, arg, rules))]
fn expand_macro<'cx>(
    cx: &'cx mut ExtCtxt<'_>,
    sp: Span,
    def_span: Span,
    node_id: NodeId,
    name: Ident,
    transparency: Transparency,
    arg: TokenStream,
    rules: &[MacroRule],
) -> Box<dyn MacResult + 'cx> {
    let psess = &cx.sess.psess;

    if cx.trace_macros() {
        let msg = format!("expanding `{}! {{ {} }}`", name, pprust::tts_to_string(&arg));
        trace_macros_note(&mut cx.expansions, sp, msg);
    }

    // Track nothing for the best performance.
    let try_success_result = try_match_macro(psess, name, &arg, rules, &mut NoopTracker);

    match try_success_result {
        Ok((rule_index, rule, named_matches)) => {
            let mbe::TokenTree::Delimited(rhs_span, _, ref rhs) = rule.rhs else {
                cx.dcx().span_bug(sp, "malformed macro rhs");
            };
            let arm_span = rule.rhs.span();

            // The rhs has "holes" (`$id` and `$(...)`) that need to be filled in.
            let id = cx.current_expansion.id;
            let tts = match transcribe(psess, &named_matches, rhs, rhs_span, transparency, id) {
                Ok(tts) => tts,
                Err(err) => {
                    let guar = err.emit();
                    return DummyResult::any(arm_span, guar);
                }
            };

            if cx.trace_macros() {
                let msg = format!("to `{}`", pprust::tts_to_string(&tts));
                trace_macros_note(&mut cx.expansions, sp, msg);
            }

            let is_local = is_defined_in_current_crate(node_id);
            if is_local {
                cx.resolver.record_macro_rule_usage(node_id, rule_index);
            }

            // Let the context choose how to interpret the result. Weird, but
            // useful for X-macros.
            Box::new(ParserAnyMacro::from_tts(cx, tts, sp, arm_span, is_local, name))
        }
        Err(CanRetry::No(guar)) => {
            debug!("Will not retry matching as an error was emitted already");
            DummyResult::any(sp, guar)
        }
        Err(CanRetry::Yes) => {
            // Retry the matching to emit a better error.
            let (span, guar) =
                diagnostics::failed_to_match_macro(cx.psess(), sp, def_span, name, arg, rules);
            cx.trace_macros_diag();
            DummyResult::any(span, guar)
        }
    }
}

pub(super) enum CanRetry {
    Yes,
    /// We are not allowed to retry macro expansion as a fatal error has been emitted already.
    No(ErrorGuaranteed),
}

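/// Tries to match the macro input `arg` against `rules`. On success, returns
/// the index of the matching rule, the rule itself, and the resulting
/// metavariable bindings. On failure, returns whether matching may be retried
/// (`CanRetry`); it is the caller's job to use `track` to record errors.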
#[instrument(level = "debug", skip(psess, arg, rules, track), fields(tracking = %T::description()))]
pub(super) fn try_match_macro<'matcher, T: Tracker<'matcher>>(
    psess: &ParseSess,
    name: Ident,
    arg: &TokenStream,
    rules: &'matcher [MacroRule],
    track: &mut T,
) -> Result<(usize, &'matcher MacroRule, NamedMatches), CanRetry> {
    let parser = parser_from_cx(psess, arg.clone(), T::recovery());
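    // Each arm gets a fresh, lazily-cloned view of this parser (via the
    // `Cow::Borrowed` below), so a failed match cannot leak parser state into
    // the next arm. Try each arm's matchers in definition order.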
    let mut tt_parser = TtParser::new(name);
    for (i, rule) in rules.iter().enumerate() {
        let _tracing_span = trace_span!("Matching arm", %i);

        // Take a snapshot of the state of pre-expansion gating at this point.
        // This is used so that if a matcher is not `Success(..)`ful,
        // then the spans which became gated when parsing the unsuccessful matcher
        // are not recorded.
        let mut gated_spans_snapshot = mem::take(&mut *psess.gated_spans.spans.borrow_mut());

        let result = tt_parser.parse_tt(&mut Cow::Borrowed(&parser), &rule.lhs, track);

        track.after_arm(&result);

        match result {
            Success(named_matches) => {
                debug!("Parsed arm successfully");
                // The matcher was `Success(..)`ful: merge the gated spans from
                // parsing the matcher with the pre-existing ones.
                psess.gated_spans.merge(gated_spans_snapshot);

                return Ok((i, rule, named_matches));
            }
            Failure(_) => {
                trace!("Failed to match arm, trying the next one");
                // Try the next arm.
            }
            Error(_, _) => {
                debug!("Fatal error occurred during matching");
                // We haven't emitted an error yet, so we can retry.
                return Err(CanRetry::Yes);
            }
            ErrorReported(guarantee) => {
                debug!("Fatal error occurred and was reported during matching");
                // An error has been reported already, we cannot retry as this
                // would cause duplicate errors.
                return Err(CanRetry::No(guarantee));
            }
        }

        // The matcher was not `Success(..)`ful: restore the gated spans to the
        // state before the snapshot and maybe try the next arm.
        mem::swap(&mut gated_spans_snapshot, &mut psess.gated_spans.spans.borrow_mut());
    }

    Err(CanRetry::Yes)
}

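/// Converts a `macro_rules!` definition into a `SyntaxExtension`, returning it
/// together with the number of rules (used by the unused-rule lint). Each
/// `lhs => rhs` arm is parsed and validated in turn, e.g. (illustrative):
///
/// ```rust,ignore (illustrative)
/// macro_rules! two_rules {
///     () => { 0 };          // rule 0: empty matcher
///     ($x:ident) => { $x }; // rule 1: a single `ident` fragment
/// }
/// ```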
pub fn compile_declarative_macro(
    sess: &Session,
    features: &Features,
    macro_def: &ast::MacroDef,
    ident: Ident,
    attrs: &[hir::Attribute],
    span: Span,
    node_id: NodeId,
    edition: Edition,
) -> (SyntaxExtension, usize) {
    let mk_syn_ext = |expander| {
        let kind = SyntaxExtensionKind::LegacyBang(expander);
        let is_local = is_defined_in_current_crate(node_id);
        SyntaxExtension::new(sess, kind, span, Vec::new(), edition, ident.name, attrs, is_local)
    };
    let dummy_syn_ext = |guar| (mk_syn_ext(Arc::new(DummyExpander(guar))), 0);

    let macro_rules = macro_def.macro_rules;
    let exp_sep = if macro_rules { exp!(Semi) } else { exp!(Comma) };

    let body = macro_def.body.tokens.clone();
    let mut p = Parser::new(&sess.psess, body, rustc_parse::MACRO_ARGUMENTS);

    // Don't abort iteration early, so that multiple errors can be reported.
    let mut guar = None;
    let mut check_emission = |ret: Result<(), ErrorGuaranteed>| guar = guar.or(ret.err());

    let mut rules = Vec::new();

    while p.token != token::Eof {
        let lhs_tt = p.parse_token_tree();
        let lhs_tt = parse_one_tt(lhs_tt, RulePart::Pattern, sess, node_id, features, edition);
        check_emission(check_lhs(sess, node_id, &lhs_tt));
        if let Err(e) = p.expect(exp!(FatArrow)) {
            return dummy_syn_ext(e.emit());
        }
        if let Some(guar) = check_no_eof(sess, &p, "expected right-hand side of macro rule") {
            return dummy_syn_ext(guar);
        }
        let rhs_tt = p.parse_token_tree();
        let rhs_tt = parse_one_tt(rhs_tt, RulePart::Body, sess, node_id, features, edition);
        check_emission(check_rhs(sess, &rhs_tt));
        check_emission(macro_check::check_meta_variables(&sess.psess, node_id, &lhs_tt, &rhs_tt));
        let lhs_span = lhs_tt.span();
        // Convert the lhs into `MatcherLoc` form, which is better for doing the
        // actual matching.
        let lhs = if let mbe::TokenTree::Delimited(.., delimited) = lhs_tt {
            mbe::macro_parser::compute_locs(&delimited.tts)
        } else {
            return dummy_syn_ext(guar.unwrap());
        };
        rules.push(MacroRule { lhs, lhs_span, rhs: rhs_tt });
        if p.token == token::Eof {
            break;
        }
        if let Err(e) = p.expect(exp_sep) {
            return dummy_syn_ext(e.emit());
        }
    }

    if rules.is_empty() {
        let guar = sess.dcx().span_err(span, "macros must contain at least one rule");
        return dummy_syn_ext(guar);
    }

    let transparency = find_attr!(attrs, AttributeKind::MacroTransparency(x) => *x)
        .unwrap_or(Transparency::fallback(macro_rules));

    if let Some(guar) = guar {
        // If any rule was invalid, don't install a real expander (and don't
        // track rule usage for the unused-rule lint).
        return dummy_syn_ext(guar);
    }

    // Only count rules for the unused-rule lint when the macro is defined in
    // the current crate.
    let nrules = if is_defined_in_current_crate(node_id) { rules.len() } else { 0 };

    let expander =
        Arc::new(MacroRulesMacroExpander { name: ident, span, node_id, transparency, rules });
    (mk_syn_ext(expander), nrules)
}

fn check_no_eof(sess: &Session, p: &Parser<'_>, msg: &'static str) -> Option<ErrorGuaranteed> {
    if p.token == token::Eof {
        let err_sp = p.token.span.shrink_to_hi();
        let guar = sess
            .dcx()
            .struct_span_err(err_sp, "macro definition ended unexpectedly")
            .with_span_label(err_sp, msg)
            .emit();
        return Some(guar);
    }
    None
}

fn check_lhs(sess: &Session, node_id: NodeId, lhs: &mbe::TokenTree) -> Result<(), ErrorGuaranteed> {
    let e1 = check_lhs_nt_follows(sess, node_id, lhs);
    let e2 = check_lhs_no_empty_seq(sess, slice::from_ref(lhs));
    e1.and(e2)
}

fn check_lhs_nt_follows(
    sess: &Session,
    node_id: NodeId,
    lhs: &mbe::TokenTree,
) -> Result<(), ErrorGuaranteed> {
    // The lhs is going to be like `TokenTree::Delimited(...)`, where the
    // entire lhs is those tts.
    if let mbe::TokenTree::Delimited(.., delimited) = lhs {
        check_matcher(sess, node_id, &delimited.tts)
    } else {
        let msg = "invalid macro matcher; matchers must be contained in balanced delimiters";
        Err(sess.dcx().span_err(lhs.span(), msg))
    }
}

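/// Checks whether a sequence's body could match the empty token stream, in
/// which case the repetition could loop forever. Used by
/// `check_lhs_no_empty_seq` to report "repetition matches empty token tree":
/// e.g. (illustrative) `$($v:vis)*` is rejected because `vis` itself can match
/// emptiness, and doc comments and nested `?`/`*` repetitions are likewise
/// treated as possibly empty.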
fn is_empty_token_tree(sess: &Session, seq: &mbe::SequenceRepetition) -> bool {
    if seq.separator.is_some() {
        false
    } else {
        let mut is_empty = true;
        let mut iter = seq.tts.iter().peekable();
        while let Some(tt) = iter.next() {
            match tt {
                mbe::TokenTree::MetaVarDecl { kind: NonterminalKind::Vis, .. } => {}
                mbe::TokenTree::Token(t @ Token { kind: DocComment(..), .. }) => {
                    let mut now = t;
                    while let Some(&mbe::TokenTree::Token(
                        next @ Token { kind: DocComment(..), .. },
                    )) = iter.peek()
                    {
                        now = next;
                        iter.next();
                    }
                    let span = t.span.to(now.span);
                    sess.dcx().span_note(span, "doc comments are ignored in matcher position");
                }
                mbe::TokenTree::Sequence(_, sub_seq)
                    if (sub_seq.kleene.op == mbe::KleeneOp::ZeroOrMore
                        || sub_seq.kleene.op == mbe::KleeneOp::ZeroOrOne) => {}
                _ => is_empty = false,
            }
        }
        is_empty
    }
}

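/// Adds a suggestion to an empty-repetition error when the repetition is a
/// redundant `$(...)?` around a `vis` fragment: since `vis` can already match
/// the empty token stream, (illustrative) `$($v:vis)?` can be written plainly
/// as `$v:vis`.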
fn check_redundant_vis_repetition(
    err: &mut Diag<'_>,
    sess: &Session,
    seq: &SequenceRepetition,
    span: &DelimSpan,
) {
    let is_zero_or_one = seq.kleene.op == KleeneOp::ZeroOrOne;
    let is_vis = seq.tts.first().map_or(false, |tt| {
        matches!(tt, mbe::TokenTree::MetaVarDecl { kind: NonterminalKind::Vis, .. })
    });

    if is_vis && is_zero_or_one {
        err.note("a `vis` fragment can already be empty");
        err.multipart_suggestion(
            "remove the `$(` and `)?`",
            vec![
                (
                    sess.source_map().span_extend_to_prev_char_before(span.open, '$', true),
                    "".to_string(),
                ),
                (span.close.with_hi(seq.kleene.span.hi()), "".to_string()),
            ],
            Applicability::MaybeIncorrect,
        );
    }
}

/// Checks that the lhs contains no repetition which could match an empty token
/// tree, because then the matcher would hang indefinitely.
fn check_lhs_no_empty_seq(sess: &Session, tts: &[mbe::TokenTree]) -> Result<(), ErrorGuaranteed> {
    use mbe::TokenTree;
    for tt in tts {
        match tt {
            TokenTree::Token(..)
            | TokenTree::MetaVar(..)
            | TokenTree::MetaVarDecl { .. }
            | TokenTree::MetaVarExpr(..) => (),
            TokenTree::Delimited(.., del) => check_lhs_no_empty_seq(sess, &del.tts)?,
            TokenTree::Sequence(span, seq) => {
                if is_empty_token_tree(sess, seq) {
                    let sp = span.entire();
                    let mut err =
                        sess.dcx().struct_span_err(sp, "repetition matches empty token tree");
                    check_redundant_vis_repetition(&mut err, sess, seq, span);
                    return Err(err.emit());
                }
                check_lhs_no_empty_seq(sess, &seq.tts)?
            }
        }
    }

    Ok(())
}

fn check_rhs(sess: &Session, rhs: &mbe::TokenTree) -> Result<(), ErrorGuaranteed> {
    match *rhs {
        mbe::TokenTree::Delimited(..) => Ok(()),
        _ => Err(sess.dcx().span_err(rhs.span(), "macro rhs must be delimited")),
    }
}

fn check_matcher(
    sess: &Session,
    node_id: NodeId,
    matcher: &[mbe::TokenTree],
) -> Result<(), ErrorGuaranteed> {
    let first_sets = FirstSets::new(matcher);
    let empty_suffix = TokenSet::empty();
    check_matcher_core(sess, node_id, &first_sets, matcher, &empty_suffix)?;
    Ok(())
}

fn has_compile_error_macro(rhs: &mbe::TokenTree) -> bool {
    match rhs {
        mbe::TokenTree::Delimited(.., d) => {
            let has_compile_error = d.tts.array_windows::<3>().any(|[ident, bang, args]| {
                if let mbe::TokenTree::Token(ident) = ident
                    && let TokenKind::Ident(ident, _) = ident.kind
                    && ident == sym::compile_error
                    && let mbe::TokenTree::Token(bang) = bang
                    && let TokenKind::Bang = bang.kind
                    && let mbe::TokenTree::Delimited(.., del) = args
                    && !del.delim.skip()
                {
                    true
                } else {
                    false
                }
            });
            if has_compile_error { true } else { d.tts.iter().any(has_compile_error_macro) }
        }
        _ => false,
    }
}

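// `FirstSets` caches, for every `TokenTree::Sequence` in a matcher (keyed by
// the span of its brackets), the FIRST set of its body: the tokens that could
// begin a match of that sequence, plus a flag recording whether the body can
// match the empty token stream. It is built with a backwards scan in `new` and
// consulted by `first` when computing the FIRST set of a matcher suffix.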
struct FirstSets<'tt> {
    // Maps the span of each `TokenTree::Sequence` to the `TokenSet` that could
    // begin the sequence's body. A `None` entry means two sequences shared a
    // span, so the cached set cannot be trusted and callers must recompute it
    // via the slow path in `first`.
    first: FxHashMap<Span, Option<TokenSet<'tt>>>,
}

impl<'tt> FirstSets<'tt> {
    fn new(tts: &'tt [mbe::TokenTree]) -> FirstSets<'tt> {
        use mbe::TokenTree;

        let mut sets = FirstSets { first: FxHashMap::default() };
        build_recur(&mut sets, tts);
        return sets;

        // Walks backwards over `tts`, returning the FIRST set for the overall
        // sequence and populating `sets` for all contained sequences.
        fn build_recur<'tt>(sets: &mut FirstSets<'tt>, tts: &'tt [TokenTree]) -> TokenSet<'tt> {
            let mut first = TokenSet::empty();
            for tt in tts.iter().rev() {
                match tt {
                    TokenTree::Token(..)
                    | TokenTree::MetaVar(..)
                    | TokenTree::MetaVarDecl { .. }
                    | TokenTree::MetaVarExpr(..) => {
                        first.replace_with(TtHandle::TtRef(tt));
                    }
                    TokenTree::Delimited(span, _, delimited) => {
                        build_recur(sets, &delimited.tts);
                        first.replace_with(TtHandle::from_token_kind(
                            delimited.delim.as_open_token_kind(),
                            span.open,
                        ));
                    }
                    TokenTree::Sequence(sp, seq_rep) => {
                        let subfirst = build_recur(sets, &seq_rep.tts);

                        match sets.first.entry(sp.entire()) {
                            Entry::Vacant(vac) => {
                                vac.insert(Some(subfirst.clone()));
                            }
                            Entry::Occupied(mut occ) => {
                                // If there is already an entry, then a span must
                                // have collided. This should not happen with
                                // typical macro_rules macros, but syntax
                                // extensions need not maintain distinct spans,
                                // so mark this entry as unreliable.
                                occ.insert(None);
                            }
                        }

                        // If the sequence contents can be empty, then the first
                        // token could be the separator token itself.
                        if let (Some(sep), true) = (&seq_rep.separator, subfirst.maybe_empty) {
                            first.add_one_maybe(TtHandle::from_token(*sep));
                        }

                        if subfirst.maybe_empty
                            || seq_rep.kleene.op == mbe::KleeneOp::ZeroOrMore
                            || seq_rep.kleene.op == mbe::KleeneOp::ZeroOrOne
                        {
                            // If the sequence is potentially empty, then union
                            // its FIRST set with the one accumulated so far
                            // (preserving emptiness).
                            first.add_all(&TokenSet { maybe_empty: true, ..subfirst });
                        } else {
                            // Otherwise, the sequence is guaranteed non-empty;
                            // it replaces the accumulated FIRST set.
                            first = subfirst;
                        }
                    }
                }
            }

            first
        }
    }

    // Walks forward over `tts` until all potential FIRST tokens are identified.
    fn first(&self, tts: &'tt [mbe::TokenTree]) -> TokenSet<'tt> {
        use mbe::TokenTree;

        let mut first = TokenSet::empty();
        for tt in tts.iter() {
            assert!(first.maybe_empty);
            match tt {
                TokenTree::Token(..)
                | TokenTree::MetaVar(..)
                | TokenTree::MetaVarDecl { .. }
                | TokenTree::MetaVarExpr(..) => {
                    first.add_one(TtHandle::TtRef(tt));
                    return first;
                }
                TokenTree::Delimited(span, _, delimited) => {
                    first.add_one(TtHandle::from_token_kind(
                        delimited.delim.as_open_token_kind(),
                        span.open,
                    ));
                    return first;
                }
                TokenTree::Sequence(sp, seq_rep) => {
                    let subfirst_owned;
                    let subfirst = match self.first.get(&sp.entire()) {
                        Some(Some(subfirst)) => subfirst,
                        Some(&None) => {
                            subfirst_owned = self.first(&seq_rep.tts);
                            &subfirst_owned
                        }
                        None => {
                            panic!("We missed a sequence during FirstSets construction");
                        }
                    };

                    // If the sequence contents can be empty, then the first
                    // token could be the separator token itself.
                    if let (Some(sep), true) = (&seq_rep.separator, subfirst.maybe_empty) {
                        first.add_one_maybe(TtHandle::from_token(*sep));
                    }

                    assert!(first.maybe_empty);
                    first.add_all(subfirst);
                    if subfirst.maybe_empty
                        || seq_rep.kleene.op == mbe::KleeneOp::ZeroOrMore
                        || seq_rep.kleene.op == mbe::KleeneOp::ZeroOrOne
                    {
                        // Continue scanning for more FIRST tokens, but make
                        // sure we restore the empty-tracking state first.
                        first.maybe_empty = true;
                        continue;
                    } else {
                        return first;
                    }
                }
            }
        }

        // We only exit the loop if `tts` was empty or if every element of
        // `tts` matches the empty sequence.
        assert!(first.maybe_empty);
        first
    }
}

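// A token tree handle that is either borrowed from the matcher itself, or
// owned because the token was synthesized on the fly (e.g. a separator or a
// close delimiter). This lets `TokenSet` hold both without cloning whole
// token trees.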
#[derive(Debug)]
enum TtHandle<'tt> {
    /// This is used in most cases.
    TtRef(&'tt mbe::TokenTree),

    /// This is only used for a few cases of synthesized tokens, e.g. separators
    /// and close delimiters, which don't exist in the matcher itself.
    Token(mbe::TokenTree),
}

impl<'tt> TtHandle<'tt> {
    fn from_token(tok: Token) -> Self {
        TtHandle::Token(mbe::TokenTree::Token(tok))
    }

    fn from_token_kind(kind: TokenKind, span: Span) -> Self {
        TtHandle::from_token(Token::new(kind, span))
    }

    // Get a reference to a token tree.
    fn get(&'tt self) -> &'tt mbe::TokenTree {
        match self {
            TtHandle::TtRef(tt) => tt,
            TtHandle::Token(token_tt) => token_tt,
        }
    }
}

impl<'tt> PartialEq for TtHandle<'tt> {
    fn eq(&self, other: &TtHandle<'tt>) -> bool {
        self.get() == other.get()
    }
}

impl<'tt> Clone for TtHandle<'tt> {
    fn clone(&self) -> Self {
        match self {
            TtHandle::TtRef(tt) => TtHandle::TtRef(tt),

            // The `Token` variant only ever contains a `mbe::TokenTree::Token`,
            // never any other variant of `mbe::TokenTree`.
            TtHandle::Token(mbe::TokenTree::Token(tok)) => {
                TtHandle::Token(mbe::TokenTree::Token(*tok))
            }

            _ => unreachable!(),
        }
    }
}

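// A set of `mbe::TokenTree`s, which may include metavariable declarations. It
// also carries the `maybe_empty` flag, which is true if and only if the
// matcher can match the empty token sequence.
//
// The FIRST set is computed on submatchers like `$($a:expr b),* $(c)* d`,
// which has the corresponding FIRST = {$a:expr, c, d}.
// Likewise, `$($a:expr b),* $(c)+ d` has FIRST = {$a:expr, c}.
//
// (Notably, we must allow for a `*`-op to occur zero times.)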
#[derive(Clone, Debug)]
struct TokenSet<'tt> {
    tokens: Vec<TtHandle<'tt>>,
    maybe_empty: bool,
}

impl<'tt> TokenSet<'tt> {
    // Returns a set for the empty sequence.
    fn empty() -> Self {
        TokenSet { tokens: Vec::new(), maybe_empty: true }
    }

    // Returns the set `{ tok }` for the single-token (and thus non-empty)
    // sequence `[tok]`.
    fn singleton(tt: TtHandle<'tt>) -> Self {
        TokenSet { tokens: vec![tt], maybe_empty: false }
    }

    // Changes self to be the set `{ tok }`. Since `tok` is always present,
    // marks self as non-empty.
    fn replace_with(&mut self, tt: TtHandle<'tt>) {
        self.tokens.clear();
        self.tokens.push(tt);
        self.maybe_empty = false;
    }

    // Changes self to be the empty set `{}`; meant for use when the particular
    // token does not matter, but we want to record that it occurs.
    fn replace_with_irrelevant(&mut self) {
        self.tokens.clear();
        self.maybe_empty = false;
    }

    // Adds `tok` to the set, marking the sequence as non-empty.
    fn add_one(&mut self, tt: TtHandle<'tt>) {
        if !self.tokens.contains(&tt) {
            self.tokens.push(tt);
        }
        self.maybe_empty = false;
    }

    // Adds `tok` to the set, leaving the `maybe_empty` flag alone.
    fn add_one_maybe(&mut self, tt: TtHandle<'tt>) {
        if !self.tokens.contains(&tt) {
            self.tokens.push(tt);
        }
    }

    // Adds all elements of `other` to self, filtering out duplicates. If
    // `other` is potentially empty, the previous `maybe_empty` setting of self
    // is preserved.
    fn add_all(&mut self, other: &Self) {
        for tt in &other.tokens {
            if !self.tokens.contains(tt) {
                self.tokens.push(tt.clone());
            }
        }
        if !other.maybe_empty {
            self.maybe_empty = false;
        }
    }
}

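// Checks that `matcher` is internally consistent and that it can legally be
// followed by a token `N`, for all `N` in `follow`. Returns the set of tokens
// that could possibly come last in `matcher` (with `maybe_empty` set if the
// whole matcher can match the empty token stream).
//
// Illustrative failure: in `macro_rules! m { ($e:expr + $f:expr) => { 0 } }`,
// the matcher places `+` directly after an `expr` fragment. Since `+` is not
// in the follow set of `expr` (only `=>`, `,` and `;` are, per `is_in_follow`
// below), this check rejects the definition.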
fn check_matcher_core<'tt>(
    sess: &Session,
    node_id: NodeId,
    first_sets: &FirstSets<'tt>,
    matcher: &'tt [mbe::TokenTree],
    follow: &TokenSet<'tt>,
) -> Result<TokenSet<'tt>, ErrorGuaranteed> {
    use mbe::TokenTree;

    let mut last = TokenSet::empty();

    let mut errored = Ok(());

    // For each token `T` and suffix `SUFFIX` in `matcher`:
    'each_token: for i in 0..matcher.len() {
        let token = &matcher[i];
        let suffix = &matcher[i + 1..];

        let build_suffix_first = || {
            let mut s = first_sets.first(suffix);
            if s.maybe_empty {
                s.add_all(follow);
            }
            s
        };

        // `suffix_first` is built on demand below; not every kind of matcher
        // element needs it.
        let suffix_first;

        // First, update `last` so that it corresponds to the set of tokens
        // that might end the sequence `... token`.
        match token {
            TokenTree::Token(..)
            | TokenTree::MetaVar(..)
            | TokenTree::MetaVarDecl { .. }
            | TokenTree::MetaVarExpr(..) => {
                if token_can_be_followed_by_any(token) {
                    // Don't need to track tokens that work with any fragment.
                    last.replace_with_irrelevant();
                    // ... and don't need to check tokens that can be followed
                    // by anything against SUFFIX.
                    continue 'each_token;
                } else {
                    last.replace_with(TtHandle::TtRef(token));
                    suffix_first = build_suffix_first();
                }
            }
            TokenTree::Delimited(span, _, d) => {
                let my_suffix = TokenSet::singleton(TtHandle::from_token_kind(
                    d.delim.as_close_token_kind(),
                    span.close,
                ));
                check_matcher_core(sess, node_id, first_sets, &d.tts, &my_suffix)?;
                // Don't track non-fragment tokens.
                last.replace_with_irrelevant();

                // Also, we don't need to check delimited sequences against
                // SUFFIX.
                continue 'each_token;
            }
            TokenTree::Sequence(_, seq_rep) => {
                suffix_first = build_suffix_first();
                // The trick here: when checking the interior of the sequence,
                // the "proper" follow set includes the separator (if any) as
                // well as the FIRST set of the suffix.
                let mut new;
                let my_suffix = if let Some(sep) = &seq_rep.separator {
                    new = suffix_first.clone();
                    new.add_one_maybe(TtHandle::from_token(*sep));
                    &new
                } else {
                    &suffix_first
                };

                let next = check_matcher_core(sess, node_id, first_sets, &seq_rep.tts, my_suffix)?;
                if next.maybe_empty {
                    last.add_all(&next);
                } else {
                    last = next;
                }

                // The recursive call to `check_matcher_core` already ran the
                // check against SUFFIX below, so we can keep going forward.
                continue 'each_token;
            }
        }

        // Now `last` holds the set of fragment specifiers that could end here;
        // check that every token that could follow is in the fragment's follow
        // set.
        for tt in &last.tokens {
            if let &TokenTree::MetaVarDecl { span, name, kind } = tt.get() {
                for next_token in &suffix_first.tokens {
                    let next_token = next_token.get();

                    // Check if the old `pat` is used while the next token is `|`,
                    // to warn about incompatibility with Rust 2021. We only emit
                    // this lint if the macro_rules itself is unmodified.
                    if is_defined_in_current_crate(node_id)
                        && matches!(kind, NonterminalKind::Pat(PatParam { inferred: true }))
                        && matches!(
                            next_token,
                            TokenTree::Token(token) if *token == token::Or
                        )
                    {
                        // Suggest `pat_param`, e.g. `$x:pat` -> `$x:pat_param`.
                        let suggestion = quoted_tt_to_string(&TokenTree::MetaVarDecl {
                            span,
                            name,
                            kind: NonterminalKind::Pat(PatParam { inferred: false }),
                        });
                        sess.psess.buffer_lint(
                            RUST_2021_INCOMPATIBLE_OR_PATTERNS,
                            span,
                            ast::CRATE_NODE_ID,
                            BuiltinLintDiag::OrPatternsBackCompat(span, suggestion),
                        );
                    }
                    match is_in_follow(next_token, kind) {
                        IsInFollow::Yes => {}
                        IsInFollow::No(possible) => {
                            let may_be = if last.tokens.len() == 1 && suffix_first.tokens.len() == 1
                            {
                                "is"
                            } else {
                                "may be"
                            };

                            let sp = next_token.span();
                            let mut err = sess.dcx().struct_span_err(
                                sp,
                                format!(
                                    "`${name}:{frag}` {may_be} followed by `{next}`, which \
                                     is not allowed for `{frag}` fragments",
                                    name = name,
                                    frag = kind,
                                    next = quoted_tt_to_string(next_token),
                                    may_be = may_be
                                ),
                            );
                            err.span_label(sp, format!("not allowed after `{kind}` fragments"));

                            if kind == NonterminalKind::Pat(PatWithOr)
                                && sess.psess.edition.at_least_rust_2021()
                                && next_token.is_token(&token::Or)
                            {
                                let suggestion = quoted_tt_to_string(&TokenTree::MetaVarDecl {
                                    span,
                                    name,
                                    kind: NonterminalKind::Pat(PatParam { inferred: false }),
                                });
                                err.span_suggestion(
                                    span,
                                    "try a `pat_param` fragment specifier instead",
                                    suggestion,
                                    Applicability::MaybeIncorrect,
                                );
                            }

                            let msg = "allowed there are: ";
                            match possible {
                                &[] => {}
                                &[t] => {
                                    err.note(format!(
                                        "only {t} is allowed after `{kind}` fragments",
                                    ));
                                }
                                ts => {
                                    err.note(format!(
                                        "{}{} or {}",
                                        msg,
                                        ts[..ts.len() - 1].to_vec().join(", "),
                                        ts[ts.len() - 1],
                                    ));
                                }
                            }
                            errored = Err(err.emit());
                        }
                    }
                }
            }
        }
    }
    errored?;
    Ok(last)
}

fn token_can_be_followed_by_any(tok: &mbe::TokenTree) -> bool {
    if let mbe::TokenTree::MetaVarDecl { kind, .. } = *tok {
        frag_can_be_followed_by_any(kind)
    } else {
        // (Non-fragment tokens can always be followed by anything in matchers.)
        true
    }
}

/// Returns `true` if `kind` can legally be followed by any sort of
/// token. We use this (among other things) as a useful approximation
/// for when `kind` can be followed by a repetition like `$(...)*` or
/// `$(...)+`.
fn frag_can_be_followed_by_any(kind: NonterminalKind) -> bool {
    matches!(
        kind,
        NonterminalKind::Item           // always terminated by `}` or `;`
        | NonterminalKind::Block        // exactly one token tree
        | NonterminalKind::Ident        // a single token
        | NonterminalKind::Literal      // a single token
        | NonterminalKind::Meta         // wrapped in `#[]`
        | NonterminalKind::Lifetime     // a single token
        | NonterminalKind::TT           // exactly one token tree
    )
}

enum IsInFollow {
    Yes,
    No(&'static [&'static str]),
}

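/// Returns `IsInFollow::Yes` if `tok` may legally appear immediately after a
/// fragment of kind `kind`, and the list of expected tokens otherwise. This
/// encodes the follow-set rules for `macro_rules!` fragments: e.g.
/// (illustrative) `($e:expr ; $f:expr)` is accepted because `;` is in the
/// follow set of `expr`, while `($e:expr + $f:expr)` is not.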
fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow {
    use mbe::TokenTree;

    if let TokenTree::Token(Token { kind, .. }) = tok
        && kind.close_delim().is_some()
    {
        // Closing a token tree can never be matched by any fragment;
        // iow, we always require that `(` and `)` match, etc.
        IsInFollow::Yes
    } else {
        match kind {
            NonterminalKind::Item => {
                // Since items *must* be followed by either a `;` or a `}`, we
                // can accept anything after them.
                IsInFollow::Yes
            }
            NonterminalKind::Block => {
                // Anything can follow a block; the braces provide an easy
                // boundary to maintain.
                IsInFollow::Yes
            }
            NonterminalKind::Stmt | NonterminalKind::Expr(_) => {
                const TOKENS: &[&str] = &["`=>`", "`,`", "`;`"];
                match tok {
                    TokenTree::Token(token) => match token.kind {
                        FatArrow | Comma | Semi => IsInFollow::Yes,
                        _ => IsInFollow::No(TOKENS),
                    },
                    _ => IsInFollow::No(TOKENS),
                }
            }
            NonterminalKind::Pat(PatParam { .. }) => {
                const TOKENS: &[&str] = &["`=>`", "`,`", "`=`", "`|`", "`if`", "`in`"];
                match tok {
                    TokenTree::Token(token) => match token.kind {
                        FatArrow | Comma | Eq | Or => IsInFollow::Yes,
                        Ident(name, IdentIsRaw::No) if name == kw::If || name == kw::In => {
                            IsInFollow::Yes
                        }
                        _ => IsInFollow::No(TOKENS),
                    },
                    _ => IsInFollow::No(TOKENS),
                }
            }
            NonterminalKind::Pat(PatWithOr) => {
                const TOKENS: &[&str] = &["`=>`", "`,`", "`=`", "`if`", "`in`"];
                match tok {
                    TokenTree::Token(token) => match token.kind {
                        FatArrow | Comma | Eq => IsInFollow::Yes,
                        Ident(name, IdentIsRaw::No) if name == kw::If || name == kw::In => {
                            IsInFollow::Yes
                        }
                        _ => IsInFollow::No(TOKENS),
                    },
                    _ => IsInFollow::No(TOKENS),
                }
            }
            NonterminalKind::Path | NonterminalKind::Ty => {
                const TOKENS: &[&str] = &[
                    "`{`", "`[`", "`=>`", "`,`", "`>`", "`=`", "`:`", "`;`", "`|`", "`as`",
                    "`where`",
                ];
                match tok {
                    TokenTree::Token(token) => match token.kind {
                        OpenBrace | OpenBracket | Comma | FatArrow | Colon | Eq | Gt | Shr
                        | Semi | Or => IsInFollow::Yes,
                        Ident(name, IdentIsRaw::No) if name == kw::As || name == kw::Where => {
                            IsInFollow::Yes
                        }
                        _ => IsInFollow::No(TOKENS),
                    },
                    TokenTree::MetaVarDecl { kind: NonterminalKind::Block, .. } => IsInFollow::Yes,
                    _ => IsInFollow::No(TOKENS),
                }
            }
            NonterminalKind::Ident | NonterminalKind::Lifetime => {
                // Being single tokens, idents and lifetimes are harmless.
                IsInFollow::Yes
            }
            NonterminalKind::Literal => {
                // Literals may be of a single token, or two tokens (negative
                // numbers).
                IsInFollow::Yes
            }
            NonterminalKind::Meta | NonterminalKind::TT => {
                // Being either a single token or a delimited sequence, `tt` is
                // harmless.
                IsInFollow::Yes
            }
            NonterminalKind::Vis => {
                // Explicitly disallow `priv`, on the off chance it comes back.
                const TOKENS: &[&str] = &["`,`", "an ident", "a type"];
                match tok {
                    TokenTree::Token(token) => match token.kind {
                        Comma => IsInFollow::Yes,
                        Ident(_, IdentIsRaw::Yes) => IsInFollow::Yes,
                        Ident(name, _) if name != kw::Priv => IsInFollow::Yes,
                        _ => {
                            if token.can_begin_type() {
                                IsInFollow::Yes
                            } else {
                                IsInFollow::No(TOKENS)
                            }
                        }
                    },
                    TokenTree::MetaVarDecl {
                        kind: NonterminalKind::Ident | NonterminalKind::Ty | NonterminalKind::Path,
                        ..
                    } => IsInFollow::Yes,
                    _ => IsInFollow::No(TOKENS),
                }
            }
        }
    }
}

fn quoted_tt_to_string(tt: &mbe::TokenTree) -> String {
    match tt {
        mbe::TokenTree::Token(token) => pprust::token_to_string(token).into(),
        mbe::TokenTree::MetaVar(_, name) => format!("${name}"),
        mbe::TokenTree::MetaVarDecl { name, kind, .. } => format!("${name}:{kind}"),
        _ => panic!(
            "{}",
            "unexpected mbe::TokenTree::{Sequence or Delimited} \
             in follow set checker"
        ),
    }
}

/// A macro's `NodeId` is `DUMMY_NODE_ID` if and only if the macro was loaded
/// from an upstream crate.
fn is_defined_in_current_crate(node_id: NodeId) -> bool {
    node_id != DUMMY_NODE_ID
}

pub(super) fn parser_from_cx(
    psess: &ParseSess,
    mut tts: TokenStream,
    recovery: Recovery,
) -> Parser<'_> {
    tts.desugar_doc_comments();
    Parser::new(psess, tts, rustc_parse::MACRO_ARGUMENTS).recovery(recovery)
}