1use std::borrow::Cow;
2use std::collections::hash_map::Entry;
3use std::sync::Arc;
4use std::{mem, slice};
5
6use ast::token::IdentIsRaw;
7use rustc_ast::token::NtPatKind::*;
8use rustc_ast::token::TokenKind::*;
9use rustc_ast::token::{self, NonterminalKind, Token, TokenKind};
10use rustc_ast::tokenstream::{DelimSpan, TokenStream};
11use rustc_ast::{self as ast, DUMMY_NODE_ID, NodeId};
12use rustc_ast_pretty::pprust;
13use rustc_attr_data_structures::{AttributeKind, find_attr};
14use rustc_data_structures::fx::{FxHashMap, FxIndexMap};
15use rustc_errors::{Applicability, Diag, ErrorGuaranteed};
16use rustc_feature::Features;
17use rustc_hir as hir;
18use rustc_lint_defs::BuiltinLintDiag;
19use rustc_lint_defs::builtin::{
20 RUST_2021_INCOMPATIBLE_OR_PATTERNS, SEMICOLON_IN_EXPRESSIONS_FROM_MACROS,
21};
22use rustc_parse::exp;
23use rustc_parse::parser::{Parser, Recovery};
24use rustc_session::Session;
25use rustc_session::parse::ParseSess;
26use rustc_span::edition::Edition;
27use rustc_span::hygiene::Transparency;
28use rustc_span::{Ident, Span, kw, sym};
29use tracing::{debug, instrument, trace, trace_span};
30
31use super::macro_parser::{NamedMatches, NamedParseResult};
32use super::{SequenceRepetition, diagnostics};
33use crate::base::{
34 DummyResult, ExpandResult, ExtCtxt, MacResult, MacroExpanderResult, SyntaxExtension,
35 SyntaxExtensionKind, TTMacroExpander,
36};
37use crate::expand::{AstFragment, AstFragmentKind, ensure_complete_parse, parse_ast_fragment};
38use crate::mbe::macro_parser::{Error, ErrorReported, Failure, MatcherLoc, Success, TtParser};
39use crate::mbe::quoted::{RulePart, parse_one_tt};
40use crate::mbe::transcribe::transcribe;
41use crate::mbe::{self, KleeneOp, macro_check};
42
/// Carries the parser state produced by one successful `macro_rules!`
/// expansion until the caller decides what AST fragment kind to parse it as
/// (see [`ParserAnyMacro::make`]).
pub(crate) struct ParserAnyMacro<'a> {
    parser: Parser<'a>,

    /// Span of the expansion site of the macro this parser is for.
    site_span: Span,
    /// The ident of the macro we're parsing.
    macro_ident: Ident,
    /// Node id used when buffering lints against the expansion.
    lint_node_id: NodeId,
    /// Whether the macro call sits in trailing-expression position.
    is_trailing_mac: bool,
    /// Span of the macro rule (arm) that was matched.
    arm_span: Span,
    /// Whether the macro is defined in the current crate
    /// (`node_id != DUMMY_NODE_ID`); lints are only buffered for local macros.
    is_local: bool,
}
56
impl<'a> ParserAnyMacro<'a> {
    /// Parses the expanded token stream as an AST fragment of the requested
    /// `kind`, emitting diagnostics on failure and lints for trailing
    /// semicolons after expression-position expansions.
    pub(crate) fn make(mut self: Box<ParserAnyMacro<'a>>, kind: AstFragmentKind) -> AstFragment {
        let ParserAnyMacro {
            site_span,
            macro_ident,
            ref mut parser,
            lint_node_id,
            arm_span,
            is_trailing_mac,
            is_local,
        } = *self;
        // Snapshot the parser *before* the fragment parse so the error path
        // can diagnose against the pre-parse state.
        let snapshot = &mut parser.create_snapshot_for_diagnostic();
        let fragment = match parse_ast_fragment(parser, kind) {
            Ok(f) => f,
            Err(err) => {
                let guar = diagnostics::emit_frag_parse_err(
                    err, parser, snapshot, site_span, arm_span, kind,
                );
                // Parsing failed: return a dummy fragment carrying the error.
                return kind.dummy(site_span, guar);
            }
        };

        // An expression-position expansion followed by a stray `;`: consume it,
        // and (for local macros only) buffer the compatibility lint.
        if kind == AstFragmentKind::Expr && parser.token == token::Semi {
            if is_local {
                parser.psess.buffer_lint(
                    SEMICOLON_IN_EXPRESSIONS_FROM_MACROS,
                    parser.token.span,
                    lint_node_id,
                    BuiltinLintDiag::TrailingMacro(is_trailing_mac, macro_ident),
                );
            }
            parser.bump();
        }

        // Make sure no tokens are left over, so nothing is silently dropped.
        let path = ast::Path::from_ident(macro_ident.with_span_pos(site_span));
        ensure_complete_parse(parser, &path, kind.name(), site_span);
        fragment
    }
}
100
/// One `lhs => rhs` rule of a `macro_rules!` definition, with the lhs already
/// lowered to `MatcherLoc` form for efficient matching.
pub(super) struct MacroRule {
    pub(super) lhs: Vec<MatcherLoc>,
    /// Span of the rule's left-hand side (used for unused-rule reporting).
    lhs_span: Span,
    /// The rule's right-hand side; always a delimited token tree.
    rhs: mbe::TokenTree,
}
106
/// Expander for an ordinary `macro_rules!` macro: its parsed rules plus the
/// metadata needed for expansion, hygiene, and diagnostics.
struct MacroRulesMacroExpander {
    /// Definition's node id; `DUMMY_NODE_ID` when the macro is not local
    /// (cf. the `node_id != DUMMY_NODE_ID` checks elsewhere in this file).
    node_id: NodeId,
    name: Ident,
    span: Span,
    transparency: Transparency,
    rules: Vec<MacroRule>,
}
114
impl TTMacroExpander for MacroRulesMacroExpander {
    fn expand<'cx>(
        &self,
        cx: &'cx mut ExtCtxt<'_>,
        sp: Span,
        input: TokenStream,
    ) -> MacroExpanderResult<'cx> {
        // Matching and transcription happen eagerly, so the result is always
        // `Ready` (never deferred).
        ExpandResult::Ready(expand_macro(
            cx,
            sp,
            self.span,
            self.node_id,
            self.name,
            self.transparency,
            input,
            &self.rules,
        ))
    }

    fn get_unused_rule(&self, rule_i: usize) -> Option<(&Ident, Span)> {
        // If the rhs contains an invocation like `compile_error!`, don't
        // report the rule as unused: it presumably exists to emit that error.
        let rule = &self.rules[rule_i];
        if has_compile_error_macro(&rule.rhs) { None } else { Some((&self.name, rule.lhs_span)) }
    }
}
140
/// Stand-in expander for a macro whose definition failed to compile; always
/// expands to a dummy result carrying the already-emitted error.
struct DummyExpander(ErrorGuaranteed);
142
impl TTMacroExpander for DummyExpander {
    fn expand<'cx>(
        &self,
        _: &'cx mut ExtCtxt<'_>,
        span: Span,
        _: TokenStream,
    ) -> ExpandResult<Box<dyn MacResult + 'cx>, ()> {
        // Ignore the input entirely; surface the stored error as a dummy result.
        ExpandResult::Ready(DummyResult::any(span, self.0))
    }
}
153
154fn trace_macros_note(cx_expansions: &mut FxIndexMap<Span, Vec<String>>, sp: Span, message: String) {
155 let sp = sp.macro_backtrace().last().map_or(sp, |trace| trace.call_site);
156 cx_expansions.entry(sp).or_default().push(message);
157}
158
/// Observer hooks for the macro matcher. The default implementations do
/// nothing, so the common fast path (`NoopTracker`) pays no cost; diagnostic
/// passes implement these to record what happened during matching.
pub(super) trait Tracker<'matcher> {
    /// Payload stored in a `Failure` parse result.
    type Failure;

    /// Builds the failure value for an arm that did not match: the token at
    /// which matching stopped, its position, and a static description.
    fn build_failure(tok: Token, position: u32, msg: &'static str) -> Self::Failure;

    /// Called before each matcher location is processed.
    fn before_match_loc(&mut self, _parser: &TtParser, _matcher: &'matcher MatcherLoc) {}

    /// Called after an arm has been matched, with the result of the attempt.
    fn after_arm(&mut self, _result: &NamedParseResult<Self::Failure>) {}

    /// Short label for tracing/instrumentation output.
    fn description() -> &'static str;

    /// Whether the parser built for matching may attempt error recovery.
    /// Forbidden by default so failed arms don't emit recovery artifacts.
    fn recovery() -> Recovery {
        Recovery::Forbidden
    }
}
182
/// Tracker that records nothing; used for the fast first matching attempt.
pub(super) struct NoopTracker;
186
187impl<'matcher> Tracker<'matcher> for NoopTracker {
188 type Failure = ();
189
190 fn build_failure(_tok: Token, _position: u32, _msg: &'static str) -> Self::Failure {}
191
192 fn description() -> &'static str {
193 "none"
194 }
195}
196
/// Expands one invocation of a `macro_rules!` macro: finds the first matching
/// rule, transcribes its rhs with the matched bindings, and hands back a
/// parser over the result. Falls back to diagnostics on failure.
#[instrument(skip(cx, transparency, arg, rules))]
fn expand_macro<'cx>(
    cx: &'cx mut ExtCtxt<'_>,
    sp: Span,
    def_span: Span,
    node_id: NodeId,
    name: Ident,
    transparency: Transparency,
    arg: TokenStream,
    rules: &[MacroRule],
) -> Box<dyn MacResult + 'cx> {
    let psess = &cx.sess.psess;
    // Local macros have a real node id; external ones use the dummy id.
    let is_local = node_id != DUMMY_NODE_ID;

    if cx.trace_macros() {
        let msg = format!("expanding `{}! {{ {} }}`", name, pprust::tts_to_string(&arg));
        trace_macros_note(&mut cx.expansions, sp, msg);
    }

    // Fast pass with no diagnostic tracking; on a retryable failure we rerun
    // matching with tracking enabled (in `failed_to_match_macro`) for errors.
    let try_success_result = try_match_macro(psess, name, &arg, rules, &mut NoopTracker);

    match try_success_result {
        Ok((i, rule, named_matches)) => {
            // The rhs is guaranteed delimited by `check_rhs` at definition time.
            let mbe::TokenTree::Delimited(rhs_span, _, ref rhs) = rule.rhs else {
                cx.dcx().span_bug(sp, "malformed macro rhs");
            };
            let arm_span = rule.rhs.span();

            // Fill the rhs's holes (`$id`, `$(...)`) with the matched tokens.
            let id = cx.current_expansion.id;
            let tts = match transcribe(psess, &named_matches, rhs, rhs_span, transparency, id) {
                Ok(tts) => tts,
                Err(err) => {
                    let guar = err.emit();
                    return DummyResult::any(arm_span, guar);
                }
            };

            if cx.trace_macros() {
                let msg = format!("to `{}`", pprust::tts_to_string(&tts));
                trace_macros_note(&mut cx.expansions, sp, msg);
            }

            let p = Parser::new(psess, tts, None);

            // Rule usage is tracked (for the unused-rule lint) only for
            // macros defined in the current crate.
            if is_local {
                cx.resolver.record_macro_rule_usage(node_id, i);
            }

            // Defer the actual fragment parse to the caller, which knows what
            // AST fragment kind it expects at this position.
            Box::new(ParserAnyMacro {
                parser: p,

                // Keep the expansion site and macro name so a useful error can
                // be printed if the parse leaves tokens behind.
                site_span: sp,
                macro_ident: name,
                lint_node_id: cx.current_expansion.lint_node_id,
                is_trailing_mac: cx.current_expansion.is_trailing_mac,
                arm_span,
                is_local,
            })
        }
        Err(CanRetry::No(guar)) => {
            debug!("Will not retry matching as an error was emitted already");
            DummyResult::any(sp, guar)
        }
        Err(CanRetry::Yes) => {
            // Retry the match with tracking to build a good error message.
            let (span, guar) =
                diagnostics::failed_to_match_macro(cx.psess(), sp, def_span, name, arg, rules);
            cx.trace_macros_diag();
            DummyResult::any(span, guar)
        }
    }
}
279
/// Outcome of a failed match attempt: whether matching may be re-run (e.g.
/// with a diagnostic tracker) or an error was already emitted.
pub(super) enum CanRetry {
    Yes,
    /// Retrying is not allowed: a fatal error has already been reported, and
    /// re-running matching would duplicate it.
    No(ErrorGuaranteed),
}
285
/// Tries each rule of the macro in order against `arg`. On success returns
/// the index of the winning rule, the rule itself, and its named matches.
/// On failure returns whether the caller may retry (with a tracker) or an
/// error has already been emitted.
#[instrument(level = "debug", skip(psess, arg, rules, track), fields(tracking = %T::description()))]
pub(super) fn try_match_macro<'matcher, T: Tracker<'matcher>>(
    psess: &ParseSess,
    name: Ident,
    arg: &TokenStream,
    rules: &'matcher [MacroRule],
    track: &mut T,
) -> Result<(usize, &'matcher MacroRule, NamedMatches), CanRetry> {
    // One base parser over the invocation's tokens is shared (via `Cow`)
    // across all arm attempts.
    let parser = parser_from_cx(psess, arg.clone(), T::recovery());
    let mut tt_parser = TtParser::new(name);
    for (i, rule) in rules.iter().enumerate() {
        let _tracing_span = trace_span!("Matching arm", %i);

        // Take a snapshot of the pre-expansion gated-feature spans so that
        // spans gated while parsing a *failed* arm are not kept. On success
        // the snapshot is merged back in below.
        let mut gated_spans_snapshot = mem::take(&mut *psess.gated_spans.spans.borrow_mut());

        let result = tt_parser.parse_tt(&mut Cow::Borrowed(&parser), &rule.lhs, track);

        track.after_arm(&result);

        match result {
            Success(named_matches) => {
                debug!("Parsed arm successfully");
                // Merge the snapshot with spans gated during this arm's parse.
                psess.gated_spans.merge(gated_spans_snapshot);

                return Ok((i, rule, named_matches));
            }
            Failure(_) => {
                // This arm didn't match; fall through to the next one.
                trace!("Failed to match arm, trying the next one");
            }
            Error(_, _) => {
                debug!("Fatal error occurred during matching");
                // No error was emitted yet, so the caller may retry.
                return Err(CanRetry::Yes);
            }
            ErrorReported(guarantee) => {
                debug!("Fatal error occurred and was reported during matching");
                // An error was already emitted; retrying would duplicate it.
                return Err(CanRetry::No(guarantee));
            }
        }

        // Restore the snapshot for the next arm, discarding anything gated
        // while matching this (failed) arm.
        mem::swap(&mut gated_spans_snapshot, &mut psess.gated_spans.spans.borrow_mut());
    }

    // No arm matched.
    Err(CanRetry::Yes)
}
364
/// Compiles a `macro_rules!` (or attribute-style `macro`) definition into a
/// `SyntaxExtension`, validating each rule's lhs and rhs along the way.
/// Returns the extension plus the number of rules (0 for non-local macros),
/// which is used for unused-rule tracking.
pub fn compile_declarative_macro(
    sess: &Session,
    features: &Features,
    macro_def: &ast::MacroDef,
    ident: Ident,
    attrs: &[hir::Attribute],
    span: Span,
    node_id: NodeId,
    edition: Edition,
) -> (SyntaxExtension, usize) {
    let is_local = node_id != DUMMY_NODE_ID;
    let mk_syn_ext = |expander| {
        let kind = SyntaxExtensionKind::LegacyBang(expander);
        SyntaxExtension::new(sess, kind, span, Vec::new(), edition, ident.name, attrs, is_local)
    };
    // On any fatal definition error, fall back to a dummy expander (0 rules).
    let dummy_syn_ext = |guar| (mk_syn_ext(Arc::new(DummyExpander(guar))), 0);

    let macro_rules = macro_def.macro_rules;
    // `macro_rules!` separates rules with `;`, `macro` with `,`.
    let exp_sep = if macro_rules { exp!(Semi) } else { exp!(Comma) };

    let body = macro_def.body.tokens.clone();
    let mut p = Parser::new(&sess.psess, body, rustc_parse::MACRO_ARGUMENTS);

    // Don't abort on the first check failure; collect the first guarantee and
    // keep parsing so multiple errors can be reported in one pass.
    let mut guar = None;
    let mut check_emission = |ret: Result<(), ErrorGuaranteed>| guar = guar.or(ret.err());

    let mut rules = Vec::new();

    while p.token != token::Eof {
        let lhs_tt = p.parse_token_tree();
        let lhs_tt = parse_one_tt(lhs_tt, RulePart::Pattern, sess, node_id, features, edition);
        check_emission(check_lhs(sess, node_id, &lhs_tt));
        if let Err(e) = p.expect(exp!(FatArrow)) {
            return dummy_syn_ext(e.emit());
        }
        if let Some(guar) = check_no_eof(sess, &p, "expected right-hand side of macro rule") {
            return dummy_syn_ext(guar);
        }
        let rhs_tt = p.parse_token_tree();
        let rhs_tt = parse_one_tt(rhs_tt, RulePart::Body, sess, node_id, features, edition);
        check_emission(check_rhs(sess, &rhs_tt));
        check_emission(macro_check::check_meta_variables(&sess.psess, node_id, &lhs_tt, &rhs_tt));
        let lhs_span = lhs_tt.span();
        // Lower the lhs to `MatcherLoc` form, which is what matching operates on.
        let lhs = if let mbe::TokenTree::Delimited(.., delimited) = lhs_tt {
            mbe::macro_parser::compute_locs(&delimited.tts)
        } else {
            // A non-delimited lhs already produced an error in `check_lhs`
            // above, so `guar` is necessarily `Some` here.
            return dummy_syn_ext(guar.unwrap());
        };
        rules.push(MacroRule { lhs, lhs_span, rhs: rhs_tt });
        if p.token == token::Eof {
            break;
        }
        if let Err(e) = p.expect(exp_sep) {
            return dummy_syn_ext(e.emit());
        }
    }

    if rules.is_empty() {
        let guar = sess.dcx().span_err(span, "macros must contain at least one rule");
        return dummy_syn_ext(guar);
    }

    let transparency = find_attr!(attrs, AttributeKind::MacroTransparency(x) => *x)
        .unwrap_or(Transparency::fallback(macro_rules));

    // Any check failure collected above invalidates the whole definition.
    if let Some(guar) = guar {
        return dummy_syn_ext(guar);
    }

    // Rule-usage tracking only applies to local macros.
    let nrules = if is_local { rules.len() } else { 0 };

    let expander =
        Arc::new(MacroRulesMacroExpander { name: ident, span, node_id, transparency, rules });
    (mk_syn_ext(expander), nrules)
}
448
449fn check_no_eof(sess: &Session, p: &Parser<'_>, msg: &'static str) -> Option<ErrorGuaranteed> {
450 if p.token == token::Eof {
451 let err_sp = p.token.span.shrink_to_hi();
452 let guar = sess
453 .dcx()
454 .struct_span_err(err_sp, "macro definition ended unexpectedly")
455 .with_span_label(err_sp, msg)
456 .emit();
457 return Some(guar);
458 }
459 None
460}
461
462fn check_lhs(sess: &Session, node_id: NodeId, lhs: &mbe::TokenTree) -> Result<(), ErrorGuaranteed> {
463 let e1 = check_lhs_nt_follows(sess, node_id, lhs);
464 let e2 = check_lhs_no_empty_seq(sess, slice::from_ref(lhs));
465 e1.and(e2)
466}
467
468fn check_lhs_nt_follows(
469 sess: &Session,
470 node_id: NodeId,
471 lhs: &mbe::TokenTree,
472) -> Result<(), ErrorGuaranteed> {
473 if let mbe::TokenTree::Delimited(.., delimited) = lhs {
476 check_matcher(sess, node_id, &delimited.tts)
477 } else {
478 let msg = "invalid macro matcher; matchers must be contained in balanced delimiters";
479 Err(sess.dcx().span_err(lhs.span(), msg))
480 }
481}
482
/// Returns whether the contents of a repetition can match the empty token
/// stream. A separator makes it non-empty; `vis` fragments, runs of doc
/// comments, and nested zero-minimum repetitions all count as "can be empty".
/// As a side effect, emits a note for doc comments (which are ignored in
/// matcher position).
fn is_empty_token_tree(sess: &Session, seq: &mbe::SequenceRepetition) -> bool {
    if seq.separator.is_some() {
        false
    } else {
        let mut is_empty = true;
        let mut iter = seq.tts.iter().peekable();
        while let Some(tt) = iter.next() {
            match tt {
                // A `vis` fragment can match nothing.
                mbe::TokenTree::MetaVarDecl { kind: NonterminalKind::Vis, .. } => {}
                mbe::TokenTree::Token(t @ Token { kind: DocComment(..), .. }) => {
                    // Coalesce an entire run of adjacent doc comments into a
                    // single note spanning from the first to the last.
                    let mut now = t;
                    while let Some(&mbe::TokenTree::Token(
                        next @ Token { kind: DocComment(..), .. },
                    )) = iter.peek()
                    {
                        now = next;
                        iter.next();
                    }
                    let span = t.span.to(now.span);
                    sess.dcx().span_note(span, "doc comments are ignored in matcher position");
                }
                // A nested `*` or `?` repetition can itself match nothing.
                mbe::TokenTree::Sequence(_, sub_seq)
                    if (sub_seq.kleene.op == mbe::KleeneOp::ZeroOrMore
                        || sub_seq.kleene.op == mbe::KleeneOp::ZeroOrOne) => {}
                // Any other token tree forces at least one token to match.
                _ => is_empty = false,
            }
        }
        is_empty
    }
}
513
514fn check_redundant_vis_repetition(
519 err: &mut Diag<'_>,
520 sess: &Session,
521 seq: &SequenceRepetition,
522 span: &DelimSpan,
523) {
524 let is_zero_or_one: bool = seq.kleene.op == KleeneOp::ZeroOrOne;
525 let is_vis = seq.tts.first().map_or(false, |tt| {
526 matches!(tt, mbe::TokenTree::MetaVarDecl { kind: NonterminalKind::Vis, .. })
527 });
528
529 if is_vis && is_zero_or_one {
530 err.note("a `vis` fragment can already be empty");
531 err.multipart_suggestion(
532 "remove the `$(` and `)?`",
533 vec![
534 (
535 sess.source_map().span_extend_to_prev_char_before(span.open, '$', true),
536 "".to_string(),
537 ),
538 (span.close.with_hi(seq.kleene.span.hi()), "".to_string()),
539 ],
540 Applicability::MaybeIncorrect,
541 );
542 }
543}
544
545fn check_lhs_no_empty_seq(sess: &Session, tts: &[mbe::TokenTree]) -> Result<(), ErrorGuaranteed> {
548 use mbe::TokenTree;
549 for tt in tts {
550 match tt {
551 TokenTree::Token(..)
552 | TokenTree::MetaVar(..)
553 | TokenTree::MetaVarDecl { .. }
554 | TokenTree::MetaVarExpr(..) => (),
555 TokenTree::Delimited(.., del) => check_lhs_no_empty_seq(sess, &del.tts)?,
556 TokenTree::Sequence(span, seq) => {
557 if is_empty_token_tree(sess, seq) {
558 let sp = span.entire();
559 let mut err =
560 sess.dcx().struct_span_err(sp, "repetition matches empty token tree");
561 check_redundant_vis_repetition(&mut err, sess, seq, span);
562 return Err(err.emit());
563 }
564 check_lhs_no_empty_seq(sess, &seq.tts)?
565 }
566 }
567 }
568
569 Ok(())
570}
571
572fn check_rhs(sess: &Session, rhs: &mbe::TokenTree) -> Result<(), ErrorGuaranteed> {
573 match *rhs {
574 mbe::TokenTree::Delimited(..) => Ok(()),
575 _ => Err(sess.dcx().span_err(rhs.span(), "macro rhs must be delimited")),
576 }
577}
578
579fn check_matcher(
580 sess: &Session,
581 node_id: NodeId,
582 matcher: &[mbe::TokenTree],
583) -> Result<(), ErrorGuaranteed> {
584 let first_sets = FirstSets::new(matcher);
585 let empty_suffix = TokenSet::empty();
586 check_matcher_core(sess, node_id, &first_sets, matcher, &empty_suffix)?;
587 Ok(())
588}
589
/// Detects an invocation of `compile_error!` anywhere in a rule's rhs: an
/// identifier `compile_error`, followed by `!`, followed by a (non-invisible)
/// delimited group. Used to suppress the unused-rule report for rules that
/// exist only to emit an error (see `get_unused_rule`).
fn has_compile_error_macro(rhs: &mbe::TokenTree) -> bool {
    match rhs {
        mbe::TokenTree::Delimited(.., d) => {
            // Scan every window of three adjacent token trees at this level.
            let has_compile_error = d.tts.array_windows::<3>().any(|[ident, bang, args]| {
                if let mbe::TokenTree::Token(ident) = ident
                    && let TokenKind::Ident(ident, _) = ident.kind
                    && ident == sym::compile_error
                    && let mbe::TokenTree::Token(bang) = bang
                    && let TokenKind::Bang = bang.kind
                    && let mbe::TokenTree::Delimited(.., del) = args
                    && !del.delim.skip()
                {
                    true
                } else {
                    false
                }
            });
            // Not found at this level: recurse into nested groups.
            if has_compile_error { true } else { d.tts.iter().any(has_compile_error_macro) }
        }
        // Leaf trees cannot contain an invocation.
        _ => false,
    }
}
612
/// Cache of FIRST sets for the sequences inside one matcher.
///
/// `first` maps the span of each `TokenTree::Sequence` to the FIRST set of
/// that sequence's contents. A value of `None` marks a span that was inserted
/// more than once during construction; `FirstSets::first` recomputes such a
/// sequence's set on demand instead of trusting the poisoned cache entry.
struct FirstSets<'tt> {
    first: FxHashMap<Span, Option<TokenSet<'tt>>>,
}
634
impl<'tt> FirstSets<'tt> {
    /// Builds the FIRST-set cache for all sequences in `tts`.
    fn new(tts: &'tt [mbe::TokenTree]) -> FirstSets<'tt> {
        use mbe::TokenTree;

        let mut sets = FirstSets { first: FxHashMap::default() };
        build_recur(&mut sets, tts);
        return sets;

        // Walks one matcher level right-to-left, recording the FIRST set of
        // every sequence encountered, and returns the FIRST set of `tts`
        // itself. (Right-to-left so that `first` always describes the suffix
        // already processed.)
        fn build_recur<'tt>(sets: &mut FirstSets<'tt>, tts: &'tt [TokenTree]) -> TokenSet<'tt> {
            let mut first = TokenSet::empty();
            for tt in tts.iter().rev() {
                match tt {
                    TokenTree::Token(..)
                    | TokenTree::MetaVar(..)
                    | TokenTree::MetaVarDecl { .. }
                    | TokenTree::MetaVarExpr(..) => {
                        // A leaf always starts the remaining input by itself.
                        first.replace_with(TtHandle::TtRef(tt));
                    }
                    TokenTree::Delimited(span, _, delimited) => {
                        // Recurse for the cache's sake; the group itself
                        // always starts with its opening delimiter.
                        build_recur(sets, &delimited.tts);
                        first.replace_with(TtHandle::from_token_kind(
                            delimited.delim.as_open_token_kind(),
                            span.open,
                        ));
                    }
                    TokenTree::Sequence(sp, seq_rep) => {
                        let subfirst = build_recur(sets, &seq_rep.tts);

                        match sets.first.entry(sp.entire()) {
                            Entry::Vacant(vac) => {
                                vac.insert(Some(subfirst.clone()));
                            }
                            Entry::Occupied(mut occ) => {
                                // Same span seen twice: poison the entry so
                                // `first` recomputes it on demand.
                                occ.insert(None);
                            }
                        }

                        // If the sequence body can be empty, a repetition may
                        // begin with the separator itself.
                        if let (Some(sep), true) = (&seq_rep.separator, subfirst.maybe_empty) {
                            first.add_one_maybe(TtHandle::from_token(*sep));
                        }

                        // A sequence that can match zero repetitions (or whose
                        // body can be empty) also exposes whatever follows it.
                        if subfirst.maybe_empty
                            || seq_rep.kleene.op == mbe::KleeneOp::ZeroOrMore
                            || seq_rep.kleene.op == mbe::KleeneOp::ZeroOrOne
                        {
                            first.add_all(&TokenSet { maybe_empty: true, ..subfirst });
                        } else {
                            // Mandatory sequence: it fully determines FIRST.
                            first = subfirst;
                        }
                    }
                }
            }

            first
        }
    }

    /// Computes the FIRST set of `tts` (typically a matcher suffix), walking
    /// left-to-right and stopping at the first element that cannot be empty.
    fn first(&self, tts: &'tt [mbe::TokenTree]) -> TokenSet<'tt> {
        use mbe::TokenTree;

        let mut first = TokenSet::empty();
        for tt in tts.iter() {
            // Invariant: we only keep scanning while everything so far could
            // have matched the empty stream.
            assert!(first.maybe_empty);
            match tt {
                TokenTree::Token(..)
                | TokenTree::MetaVar(..)
                | TokenTree::MetaVarDecl { .. }
                | TokenTree::MetaVarExpr(..) => {
                    first.add_one(TtHandle::TtRef(tt));
                    return first;
                }
                TokenTree::Delimited(span, _, delimited) => {
                    first.add_one(TtHandle::from_token_kind(
                        delimited.delim.as_open_token_kind(),
                        span.open,
                    ));
                    return first;
                }
                TokenTree::Sequence(sp, seq_rep) => {
                    let subfirst_owned;
                    let subfirst = match self.first.get(&sp.entire()) {
                        Some(Some(subfirst)) => subfirst,
                        Some(&None) => {
                            // Poisoned cache entry (duplicate span): recompute.
                            subfirst_owned = self.first(&seq_rep.tts);
                            &subfirst_owned
                        }
                        None => {
                            panic!("We missed a sequence during FirstSets construction");
                        }
                    };

                    // An empty-capable body may start with the separator.
                    if let (Some(sep), true) = (&seq_rep.separator, subfirst.maybe_empty) {
                        first.add_one_maybe(TtHandle::from_token(*sep));
                    }

                    assert!(first.maybe_empty);
                    first.add_all(subfirst);
                    if subfirst.maybe_empty
                        || seq_rep.kleene.op == mbe::KleeneOp::ZeroOrMore
                        || seq_rep.kleene.op == mbe::KleeneOp::ZeroOrOne
                    {
                        // The sequence may match nothing: keep scanning what
                        // follows it.
                        first.maybe_empty = true;
                        continue;
                    } else {
                        return first;
                    }
                }
            }
        }

        // Reached the end without a mandatory element; the set stays
        // maybe-empty.
        assert!(first.maybe_empty);
        first
    }
}
775
/// A token tree that is either borrowed from the matcher under analysis or
/// synthesized on the fly. Lets `TokenSet` hold both without cloning whole
/// matchers.
#[derive(Debug)]
enum TtHandle<'tt> {
    /// A borrowed token tree from the matcher.
    TtRef(&'tt mbe::TokenTree),

    /// An owned, synthesized token tree; by construction this only ever holds
    /// the `mbe::TokenTree::Token` variant (see the `Clone` impl below).
    Token(mbe::TokenTree),
}
791
792impl<'tt> TtHandle<'tt> {
793 fn from_token(tok: Token) -> Self {
794 TtHandle::Token(mbe::TokenTree::Token(tok))
795 }
796
797 fn from_token_kind(kind: TokenKind, span: Span) -> Self {
798 TtHandle::from_token(Token::new(kind, span))
799 }
800
801 fn get(&'tt self) -> &'tt mbe::TokenTree {
803 match self {
804 TtHandle::TtRef(tt) => tt,
805 TtHandle::Token(token_tt) => token_tt,
806 }
807 }
808}
809
810impl<'tt> PartialEq for TtHandle<'tt> {
811 fn eq(&self, other: &TtHandle<'tt>) -> bool {
812 self.get() == other.get()
813 }
814}
815
impl<'tt> Clone for TtHandle<'tt> {
    fn clone(&self) -> Self {
        match self {
            // Borrowed handles just copy the reference.
            TtHandle::TtRef(tt) => TtHandle::TtRef(tt),

            // The `Token` variant *must* contain `mbe::TokenTree::Token` (it
            // is only ever built via `from_token`), so copying the inner
            // token suffices.
            TtHandle::Token(mbe::TokenTree::Token(tok)) => {
                TtHandle::Token(mbe::TokenTree::Token(*tok))
            }

            // Unreachable by the invariant above.
            _ => unreachable!(),
        }
    }
}
831
/// A deduplicated set of token trees used by the FIRST/FOLLOW analysis.
///
/// `maybe_empty` records whether the token sequence this set was computed
/// from can match the empty stream; the `add_*` methods maintain both the
/// deduplication and this flag.
#[derive(Clone, Debug)]
struct TokenSet<'tt> {
    tokens: Vec<TtHandle<'tt>>,
    maybe_empty: bool,
}
847
848impl<'tt> TokenSet<'tt> {
849 fn empty() -> Self {
851 TokenSet { tokens: Vec::new(), maybe_empty: true }
852 }
853
854 fn singleton(tt: TtHandle<'tt>) -> Self {
857 TokenSet { tokens: vec![tt], maybe_empty: false }
858 }
859
860 fn replace_with(&mut self, tt: TtHandle<'tt>) {
863 self.tokens.clear();
864 self.tokens.push(tt);
865 self.maybe_empty = false;
866 }
867
868 fn replace_with_irrelevant(&mut self) {
872 self.tokens.clear();
873 self.maybe_empty = false;
874 }
875
876 fn add_one(&mut self, tt: TtHandle<'tt>) {
878 if !self.tokens.contains(&tt) {
879 self.tokens.push(tt);
880 }
881 self.maybe_empty = false;
882 }
883
884 fn add_one_maybe(&mut self, tt: TtHandle<'tt>) {
886 if !self.tokens.contains(&tt) {
887 self.tokens.push(tt);
888 }
889 }
890
891 fn add_all(&mut self, other: &Self) {
899 for tt in &other.tokens {
900 if !self.tokens.contains(tt) {
901 self.tokens.push(tt.clone());
902 }
903 }
904 if !other.maybe_empty {
905 self.maybe_empty = false;
906 }
907 }
908}
909
/// Core of the follow-set analysis. For each fragment specifier in `matcher`,
/// checks that every token which may come next (from the suffix's FIRST set,
/// extended with `follow` when the suffix may be empty) is legal after that
/// fragment kind. Recurses into delimited groups and repetitions. Returns the
/// set of tokens that can end `matcher` (its "LAST" set) for use by callers.
fn check_matcher_core<'tt>(
    sess: &Session,
    node_id: NodeId,
    first_sets: &FirstSets<'tt>,
    matcher: &'tt [mbe::TokenTree],
    follow: &TokenSet<'tt>,
) -> Result<TokenSet<'tt>, ErrorGuaranteed> {
    use mbe::TokenTree;

    // Tokens that the element just processed may end with.
    let mut last = TokenSet::empty();

    // Errors are accumulated; only the last guarantee is returned.
    let mut errored = Ok(());

    'each_token: for i in 0..matcher.len() {
        let token = &matcher[i];
        let suffix = &matcher[i + 1..];

        // FIRST(suffix), падded with `follow` when the suffix can be empty —
        // i.e. everything that may come immediately after `token`.
        let build_suffix_first = || {
            let mut s = first_sets.first(suffix);
            if s.maybe_empty {
                s.add_all(follow);
            }
            s
        };

        // Only computed on the paths that actually need it.
        let suffix_first;

        match token {
            TokenTree::Token(..)
            | TokenTree::MetaVar(..)
            | TokenTree::MetaVarDecl { .. }
            | TokenTree::MetaVarExpr(..) => {
                if token_can_be_followed_by_any(token) {
                    // Nothing restricts what follows; skip the check below.
                    last.replace_with_irrelevant();
                    continue 'each_token;
                } else {
                    last.replace_with(TtHandle::TtRef(token));
                    suffix_first = build_suffix_first();
                }
            }
            TokenTree::Delimited(span, _, d) => {
                // Inside a group, the only follow token is its own closer.
                let my_suffix = TokenSet::singleton(TtHandle::from_token_kind(
                    d.delim.as_close_token_kind(),
                    span.close,
                ));
                check_matcher_core(sess, node_id, first_sets, &d.tts, &my_suffix)?;
                // A group ends with its closing delimiter, which never
                // conflicts with follow rules.
                last.replace_with_irrelevant();

                continue 'each_token;
            }
            TokenTree::Sequence(_, seq_rep) => {
                suffix_first = build_suffix_first();
                // Inside a repetition, the next repetition may start again, so
                // the separator (if any) joins the follow set.
                let mut new;
                let my_suffix = if let Some(sep) = &seq_rep.separator {
                    new = suffix_first.clone();
                    new.add_one_maybe(TtHandle::from_token(*sep));
                    &new
                } else {
                    &suffix_first
                };

                let next = check_matcher_core(sess, node_id, first_sets, &seq_rep.tts, my_suffix)?;
                if next.maybe_empty {
                    // The repetition may match nothing: whatever could end the
                    // previous element may still be "last".
                    last.add_all(&next);
                } else {
                    last = next;
                }

                continue 'each_token;
            }
        }

        // `last` now holds fragment specifiers whose follow rules must be
        // validated against every token in `suffix_first`.
        for tt in &last.tokens {
            if let &TokenTree::MetaVarDecl { span, name, kind } = tt.get() {
                for next_token in &suffix_first.tokens {
                    let next_token = next_token.get();

                    // Local macros only: lint `$x:pat` (inferred) followed by
                    // `|`, which changes meaning with 2021 or-patterns.
                    if node_id != DUMMY_NODE_ID
                        && matches!(kind, NonterminalKind::Pat(PatParam { inferred: true }))
                        && matches!(
                            next_token,
                            TokenTree::Token(token) if *token == token::Or
                        )
                    {
                        // Suggest writing `pat_param` explicitly.
                        let suggestion = quoted_tt_to_string(&TokenTree::MetaVarDecl {
                            span,
                            name,
                            kind: NonterminalKind::Pat(PatParam { inferred: false }),
                        });
                        sess.psess.buffer_lint(
                            RUST_2021_INCOMPATIBLE_OR_PATTERNS,
                            span,
                            ast::CRATE_NODE_ID,
                            BuiltinLintDiag::OrPatternsBackCompat(span, suggestion),
                        );
                    }
                    match is_in_follow(next_token, kind) {
                        IsInFollow::Yes => {}
                        IsInFollow::No(possible) => {
                            // "is" when the pair is unambiguous, "may be"
                            // when several endings/followers are possible.
                            let may_be = if last.tokens.len() == 1 && suffix_first.tokens.len() == 1
                            {
                                "is"
                            } else {
                                "may be"
                            };

                            let sp = next_token.span();
                            let mut err = sess.dcx().struct_span_err(
                                sp,
                                format!(
                                    "`${name}:{frag}` {may_be} followed by `{next}`, which \
                                     is not allowed for `{frag}` fragments",
                                    name = name,
                                    frag = kind,
                                    next = quoted_tt_to_string(next_token),
                                    may_be = may_be
                                ),
                            );
                            err.span_label(sp, format!("not allowed after `{kind}` fragments"));

                            // 2021+: `$x:pat` followed by `|` can be fixed by
                            // switching to the `pat_param` specifier.
                            if kind == NonterminalKind::Pat(PatWithOr)
                                && sess.psess.edition.at_least_rust_2021()
                                && next_token.is_token(&token::Or)
                            {
                                let suggestion = quoted_tt_to_string(&TokenTree::MetaVarDecl {
                                    span,
                                    name,
                                    kind: NonterminalKind::Pat(PatParam { inferred: false }),
                                });
                                err.span_suggestion(
                                    span,
                                    "try a `pat_param` fragment specifier instead",
                                    suggestion,
                                    Applicability::MaybeIncorrect,
                                );
                            }

                            // List the tokens that *would* have been allowed.
                            let msg = "allowed there are: ";
                            match possible {
                                &[] => {}
                                &[t] => {
                                    err.note(format!(
                                        "only {t} is allowed after `{kind}` fragments",
                                    ));
                                }
                                ts => {
                                    err.note(format!(
                                        "{}{} or {}",
                                        msg,
                                        ts[..ts.len() - 1].to_vec().join(", "),
                                        ts[ts.len() - 1],
                                    ));
                                }
                            }
                            errored = Err(err.emit());
                        }
                    }
                }
            }
        }
    }
    errored?;
    Ok(last)
}
1127
1128fn token_can_be_followed_by_any(tok: &mbe::TokenTree) -> bool {
1129 if let mbe::TokenTree::MetaVarDecl { kind, .. } = *tok {
1130 frag_can_be_followed_by_any(kind)
1131 } else {
1132 true
1134 }
1135}
1136
1137fn frag_can_be_followed_by_any(kind: NonterminalKind) -> bool {
1146 matches!(
1147 kind,
1148 NonterminalKind::Item | NonterminalKind::Block | NonterminalKind::Ident | NonterminalKind::Literal | NonterminalKind::Meta | NonterminalKind::Lifetime | NonterminalKind::TT )
1156}
1157
/// Result of a follow-set query: either the token is allowed after the
/// fragment, or it is not — in which case the printable descriptions of the
/// tokens that *would* be allowed are carried for the diagnostic note.
enum IsInFollow {
    Yes,
    No(&'static [&'static str]),
}
1162
/// Decides whether `tok` may legally follow a fragment of kind `kind` in a
/// matcher, per the macro-by-example follow-set rules.
fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow {
    use mbe::TokenTree;

    if let TokenTree::Token(Token { kind, .. }) = tok
        && kind.close_delim().is_some()
    {
        // Closing delimiters are accepted after every fragment kind: they
        // close an enclosing group rather than continue the fragment.
        IsInFollow::Yes
    } else {
        match kind {
            NonterminalKind::Item => {
                // Items are self-terminating, so anything may follow.
                IsInFollow::Yes
            }
            NonterminalKind::Block => {
                // A block ends with its own `}`; anything may follow.
                IsInFollow::Yes
            }
            NonterminalKind::Stmt | NonterminalKind::Expr(_) => {
                const TOKENS: &[&str] = &["`=>`", "`,`", "`;`"];
                match tok {
                    TokenTree::Token(token) => match token.kind {
                        FatArrow | Comma | Semi => IsInFollow::Yes,
                        _ => IsInFollow::No(TOKENS),
                    },
                    _ => IsInFollow::No(TOKENS),
                }
            }
            NonterminalKind::Pat(PatParam { .. }) => {
                const TOKENS: &[&str] = &["`=>`", "`,`", "`=`", "`|`", "`if`", "`in`"];
                match tok {
                    TokenTree::Token(token) => match token.kind {
                        FatArrow | Comma | Eq | Or => IsInFollow::Yes,
                        Ident(name, IdentIsRaw::No) if name == kw::If || name == kw::In => {
                            IsInFollow::Yes
                        }
                        _ => IsInFollow::No(TOKENS),
                    },
                    _ => IsInFollow::No(TOKENS),
                }
            }
            NonterminalKind::Pat(PatWithOr) => {
                // Unlike `pat_param`, `|` is *not* in this list: it would be
                // ambiguous with an or-pattern continuation.
                const TOKENS: &[&str] = &["`=>`", "`,`", "`=`", "`if`", "`in`"];
                match tok {
                    TokenTree::Token(token) => match token.kind {
                        FatArrow | Comma | Eq => IsInFollow::Yes,
                        Ident(name, IdentIsRaw::No) if name == kw::If || name == kw::In => {
                            IsInFollow::Yes
                        }
                        _ => IsInFollow::No(TOKENS),
                    },
                    _ => IsInFollow::No(TOKENS),
                }
            }
            NonterminalKind::Path | NonterminalKind::Ty => {
                const TOKENS: &[&str] = &[
                    "`{`", "`[`", "`=>`", "`,`", "`>`", "`=`", "`:`", "`;`", "`|`", "`as`",
                    "`where`",
                ];
                match tok {
                    TokenTree::Token(token) => match token.kind {
                        OpenBrace | OpenBracket | Comma | FatArrow | Colon | Eq | Gt | Shr
                        | Semi | Or => IsInFollow::Yes,
                        Ident(name, IdentIsRaw::No) if name == kw::As || name == kw::Where => {
                            IsInFollow::Yes
                        }
                        _ => IsInFollow::No(TOKENS),
                    },
                    // A `block` fragment may directly follow a type or path.
                    TokenTree::MetaVarDecl { kind: NonterminalKind::Block, .. } => IsInFollow::Yes,
                    _ => IsInFollow::No(TOKENS),
                }
            }
            NonterminalKind::Ident | NonterminalKind::Lifetime => {
                // Single tokens; anything may follow.
                IsInFollow::Yes
            }
            NonterminalKind::Literal => {
                // A literal is a single token; anything may follow.
                IsInFollow::Yes
            }
            NonterminalKind::Meta | NonterminalKind::TT => {
                // These fragments are delimited or single token trees.
                IsInFollow::Yes
            }
            NonterminalKind::Vis => {
                // A visibility may match nothing, so the follower must be
                // something that can legitimately *begin* what follows a
                // visibility (an ident, a type start, or a comma).
                const TOKENS: &[&str] = &["`,`", "an ident", "a type"];
                match tok {
                    TokenTree::Token(token) => match token.kind {
                        Comma => IsInFollow::Yes,
                        Ident(_, IdentIsRaw::Yes) => IsInFollow::Yes,
                        // Any identifier except `priv` is acceptable.
                        Ident(name, _) if name != kw::Priv => IsInFollow::Yes,
                        _ => {
                            if token.can_begin_type() {
                                IsInFollow::Yes
                            } else {
                                IsInFollow::No(TOKENS)
                            }
                        }
                    },
                    TokenTree::MetaVarDecl {
                        kind: NonterminalKind::Ident | NonterminalKind::Ty | NonterminalKind::Path,
                        ..
                    } => IsInFollow::Yes,
                    _ => IsInFollow::No(TOKENS),
                }
            }
        }
    }
}
1285
1286fn quoted_tt_to_string(tt: &mbe::TokenTree) -> String {
1287 match tt {
1288 mbe::TokenTree::Token(token) => pprust::token_to_string(token).into(),
1289 mbe::TokenTree::MetaVar(_, name) => format!("${name}"),
1290 mbe::TokenTree::MetaVarDecl { name, kind, .. } => format!("${name}:{kind}"),
1291 _ => panic!(
1292 "{}",
1293 "unexpected mbe::TokenTree::{Sequence or Delimited} \
1294 in follow set checker"
1295 ),
1296 }
1297}
1298
1299pub(super) fn parser_from_cx(
1300 psess: &ParseSess,
1301 mut tts: TokenStream,
1302 recovery: Recovery,
1303) -> Parser<'_> {
1304 tts.desugar_doc_comments();
1305 Parser::new(psess, tts, rustc_parse::MACRO_ARGUMENTS).recovery(recovery)
1306}