rustc_expand/mbe/quoted.rs

use rustc_ast::token::{self, Delimiter, IdentIsRaw, NonterminalKind, Token};
use rustc_ast::tokenstream::TokenStreamIter;
use rustc_ast::{NodeId, tokenstream};
use rustc_ast_pretty::pprust;
use rustc_feature::Features;
use rustc_session::Session;
use rustc_session::parse::feature_err;
use rustc_span::edition::Edition;
use rustc_span::{Ident, Span, kw, sym};

use crate::errors;
use crate::mbe::macro_parser::count_metavar_decls;
use crate::mbe::{Delimited, KleeneOp, KleeneToken, MetaVarExpr, SequenceRepetition, TokenTree};

pub(crate) const VALID_FRAGMENT_NAMES_MSG: &str = "valid fragment specifiers are \
    `ident`, `block`, `stmt`, `expr`, `pat`, `ty`, `lifetime`, `literal`, `path`, \
    `meta`, `tt`, `item` and `vis`, along with `expr_2021` and `pat_param` for edition compatibility";

/// Which part of a macro rule we're parsing
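///
/// As an illustrative (non-normative) example: in `macro_rules! m { ($x:expr) => { $x + 1 } }`,
/// the matcher `($x:expr)` is parsed as the `Pattern` part and the transcriber `{ $x + 1 }` as
/// the `Body` part.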
#[derive(Copy, Clone)]
pub(crate) enum RulePart {
    /// The left-hand side, with patterns and metavar definitions with types
    Pattern,
    /// The right-hand side body, with metavar references and metavar expressions
    Body,
}

impl RulePart {
    #[inline(always)]
    fn is_pattern(&self) -> bool {
        matches!(self, Self::Pattern)
    }

    #[inline(always)]
    fn is_body(&self) -> bool {
        matches!(self, Self::Body)
    }
}

/// Takes a `tokenstream::TokenStream` and returns a `Vec<self::TokenTree>`. Specifically, this
/// takes a generic `TokenStream`, such as is used in the rest of the compiler, and returns a
/// collection of `TokenTree` for use in parsing a macro.
///
/// # Parameters
///
/// - `input`: a token stream to read from, the contents of which we are parsing.
/// - `part`: whether we're parsing the patterns or the body of a macro. Both take roughly the same
///   form _except_ that:
///   - In a pattern, metavars are declared with their "matcher" type. For example `$var:expr` or
///     `$id:ident`. In this example, `expr` and `ident` are "matchers". They are not present in the
///     body of a macro rule -- just in the pattern.
///   - Metavariable expressions are only valid in the "body", not the "pattern".
/// - `sess`: the parsing session. Any errors will be emitted to this session.
/// - `node_id`: the `NodeId` of the macro we are parsing.
/// - `features`: language features so we can do feature gating.
///
/// # Returns
///
/// A collection of `self::TokenTree`. There may also be some errors emitted to `sess`.
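///
/// # Example
///
/// A rough sketch (not a doctest): parsing the matcher `$x:expr` with `RulePart::Pattern`
/// yields approximately `[MetaVarDecl { name: x, kind: expr, .. }]`, whereas parsing the
/// transcriber `$x` with `RulePart::Body` yields approximately `[MetaVar(.., x)]`; plain
/// tokens and delimited groups are carried through as `Token` and `Delimited` trees.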
fn parse(
    input: &tokenstream::TokenStream,
    part: RulePart,
    sess: &Session,
    node_id: NodeId,
    features: &Features,
    edition: Edition,
) -> Vec<TokenTree> {
    // Will contain the final collection of `self::TokenTree`
    let mut result = Vec::new();

    // For each token tree in `input`, parse the token into a `self::TokenTree`, consuming
    // additional trees if need be.
    let mut iter = input.iter();
    while let Some(tree) = iter.next() {
        // Given the parsed tree, if there is a metavar and we are expecting matchers, actually
        // parse out the matcher (i.e., in `$id:ident` this would parse the `:` and `ident`).
        let tree = parse_tree(tree, &mut iter, part, sess, node_id, features, edition);

        if part.is_body() {
            // No matchers allowed, nothing to process here
            result.push(tree);
            continue;
        }

        let TokenTree::MetaVar(start_sp, ident) = tree else {
            // Not a metavariable, so just push the tree and move on.
            result.push(tree);
            continue;
        };

        // Report a missing fragment specifier at the given span and push a fallback declaration.
        let mut missing_fragment_specifier = |span| {
            sess.dcx().emit_err(errors::MissingFragmentSpecifier {
                span,
                add_span: span.shrink_to_hi(),
                valid: VALID_FRAGMENT_NAMES_MSG,
            });

            // Fall back to the `tt` fragment kind, since that will match anything if we
            // continue expanding.
            result.push(TokenTree::MetaVarDecl { span, name: ident, kind: NonterminalKind::TT });
        };

        // Not consuming the next token immediately, as it may not be a colon
        if let Some(peek) = iter.peek()
            && let tokenstream::TokenTree::Token(token, _spacing) = peek
            && let Token { kind: token::Colon, span: colon_span } = token
        {
            // Next token is a colon; consume it
            iter.next();

            // It's ok to consume the next tree unconditionally: if it isn't a token, the
            // declaration is invalid anyway.
            let Some(tokenstream::TokenTree::Token(token, _)) = iter.next() else {
                // Invalid; report the error with a span covering `$var:`.
                missing_fragment_specifier(colon_span.with_lo(start_sp.lo()));
                continue;
            };

            let Some((fragment, _)) = token.ident() else {
                // No identifier for the fragment specifier.
                missing_fragment_specifier(token.span);
                continue;
            };

            let span = token.span.with_lo(start_sp.lo());
            let edition = || {
                // FIXME(#85708) - once we properly decode a foreign
                // crate's `SyntaxContext::root`, then we can replace
                // this with just `span.edition()`. A
                // `SyntaxContext::root()` from the current crate will
                // have the edition of the current crate, and a
                // `SyntaxContext::root()` from a foreign crate will
                // have the edition of that crate (which we manually
                // retrieve via the `edition` parameter).
                if !span.from_expansion() { edition } else { span.edition() }
            };
            let kind = NonterminalKind::from_symbol(fragment.name, edition).unwrap_or_else(|| {
                sess.dcx().emit_err(errors::InvalidFragmentSpecifier {
                    span,
                    fragment,
                    help: VALID_FRAGMENT_NAMES_MSG,
                });
                NonterminalKind::TT
            });
            result.push(TokenTree::MetaVarDecl { span, name: ident, kind });
        } else {
            // Whether it's `None` or some other tree, it doesn't belong to the current
            // meta-variable, so report the error at the original span.
            missing_fragment_specifier(start_sp);
        }
    }
    result
}

/// Takes a `tokenstream::TokenTree` and returns a `self::TokenTree`. Like `parse`, but for a
/// single token tree. Emits errors to `sess` if needed.
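///
/// As a rough illustration (not a doctest): calling this on the delimited tree `($x:expr)` with
/// `RulePart::Pattern` returns a `TokenTree::Delimited` whose contents hold the `MetaVarDecl`
/// for `x`.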
#[inline]
pub(super) fn parse_one_tt(
    input: tokenstream::TokenTree,
    part: RulePart,
    sess: &Session,
    node_id: NodeId,
    features: &Features,
    edition: Edition,
) -> TokenTree {
    parse(&tokenstream::TokenStream::new(vec![input]), part, sess, node_id, features, edition)
        .pop()
        .unwrap()
}

/// Asks for the `macro_metavar_expr` feature if it is not enabled
fn maybe_emit_macro_metavar_expr_feature(features: &Features, sess: &Session, span: Span) {
    if !features.macro_metavar_expr() {
        let msg = "meta-variable expressions are unstable";
        feature_err(sess, sym::macro_metavar_expr, span, msg).emit();
    }
}

fn maybe_emit_macro_metavar_expr_concat_feature(features: &Features, sess: &Session, span: Span) {
    if !features.macro_metavar_expr_concat() {
        let msg = "the `concat` meta-variable expression is unstable";
        feature_err(sess, sym::macro_metavar_expr_concat, span, msg).emit();
    }
}

/// Takes a `tokenstream::TokenTree` and returns a `self::TokenTree`. Specifically, this takes a
/// generic `TokenTree`, such as is used in the rest of the compiler, and returns a `TokenTree`
/// for use in parsing a macro.
///
/// Converting the given tree may involve reading more tokens.
///
/// # Parameters
///
/// - `tree`: the tree we wish to convert.
/// - `outer_iter`: an iterator over trees. We may need to read more tokens from it in order to
///   finish converting `tree`.
/// - `part`: same as [parse].
/// - `sess`: the parsing session. Any errors will be emitted to this session.
/// - `features`: language features so we can do feature gating.
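///
/// # Example
///
/// A rough sketch (not a doctest): given the `$` token followed by `( $x:ident ),*` in a
/// pattern, this returns approximately a `TokenTree::Sequence` with separator `,`, Kleene
/// operator `*`, and one counted metavariable declaration; given `$crate`, it returns an
/// identifier token for `kw::DollarCrate`; any other plain token is kept as a `TokenTree::Token`.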
fn parse_tree<'a>(
    tree: &'a tokenstream::TokenTree,
    outer_iter: &mut TokenStreamIter<'a>,
    part: RulePart,
    sess: &Session,
    node_id: NodeId,
    features: &Features,
    edition: Edition,
) -> TokenTree {
    // Depending on what `tree` is, we could be parsing different parts of a macro
    match tree {
        // `tree` is a `$` token. Look at the next token in `outer_iter`.
        &tokenstream::TokenTree::Token(Token { kind: token::Dollar, span: dollar_span }, _) => {
            // FIXME: Handle `Invisible`-delimited groups in a more systematic way
            // during parsing.
            let mut next = outer_iter.next();
            let mut iter_storage;
            let mut iter: &mut TokenStreamIter<'_> = match next {
                Some(tokenstream::TokenTree::Delimited(.., delim, tts)) if delim.skip() => {
                    iter_storage = tts.iter();
                    next = iter_storage.next();
                    &mut iter_storage
                }
                _ => outer_iter,
            };

            match next {
                // `tree` is followed by a delimited set of token trees.
                Some(&tokenstream::TokenTree::Delimited(delim_span, _, delim, ref tts)) => {
                    if part.is_pattern() {
                        if delim != Delimiter::Parenthesis {
                            span_dollar_dollar_or_metavar_in_the_lhs_err(
                                sess,
                                &Token {
                                    kind: delim.as_open_token_kind(),
                                    span: delim_span.entire(),
                                },
                            );
                        }
                    } else {
                        match delim {
                            Delimiter::Brace => {
                                // The delimiter is `{`. This indicates the beginning
                                // of a meta-variable expression (e.g. `${count(ident)}`).
                                // Try to parse the meta-variable expression.
                                match MetaVarExpr::parse(tts, delim_span.entire(), &sess.psess) {
                                    Err(err) => {
                                        err.emit();
                                        // Return the `$` that was already read early, to avoid
                                        // emitting unrelated diagnostics afterwards.
                                        return TokenTree::token(token::Dollar, dollar_span);
                                    }
                                    Ok(elem) => {
                                        if let MetaVarExpr::Concat(_) = elem {
                                            maybe_emit_macro_metavar_expr_concat_feature(
                                                features,
                                                sess,
                                                delim_span.entire(),
                                            );
                                        } else {
                                            maybe_emit_macro_metavar_expr_feature(
                                                features,
                                                sess,
                                                delim_span.entire(),
                                            );
                                        }
                                        return TokenTree::MetaVarExpr(delim_span, elem);
                                    }
                                }
                            }
                            Delimiter::Parenthesis => {}
                            _ => {
                                let token =
                                    pprust::token_kind_to_string(&delim.as_open_token_kind());
                                sess.dcx().emit_err(errors::ExpectedParenOrBrace {
                                    span: delim_span.entire(),
                                    token,
                                });
                            }
                        }
                    }
                    // If we didn't find a metavar expression above, then we must have a
                    // repetition sequence in the macro (e.g. `$(pat)*`). Parse the
                    // contents of the sequence itself
                    let sequence = parse(tts, part, sess, node_id, features, edition);
                    // Get the Kleene operator and optional separator
                    let (separator, kleene) =
                        parse_sep_and_kleene_op(&mut iter, delim_span.entire(), sess);
                    // Count the number of captured "names" (i.e., named metavars)
                    let num_captures =
                        if part.is_pattern() { count_metavar_decls(&sequence) } else { 0 };
                    TokenTree::Sequence(
                        delim_span,
                        SequenceRepetition { tts: sequence, separator, kleene, num_captures },
                    )
                }

                // `tree` is followed by an `ident`. This could be `$meta_var` or the `$crate`
                // special metavariable that names the crate of the invocation.
                Some(tokenstream::TokenTree::Token(token, _)) if token.is_ident() => {
                    let (ident, is_raw) = token.ident().unwrap();
                    let span = ident.span.with_lo(dollar_span.lo());
                    if ident.name == kw::Crate && matches!(is_raw, IdentIsRaw::No) {
                        TokenTree::token(token::Ident(kw::DollarCrate, is_raw), span)
                    } else {
                        TokenTree::MetaVar(span, ident)
                    }
                }

                // `tree` is followed by another `$`. This is an escaped `$`.
                Some(&tokenstream::TokenTree::Token(
                    Token { kind: token::Dollar, span: dollar_span2 },
                    _,
                )) => {
                    if part.is_pattern() {
                        span_dollar_dollar_or_metavar_in_the_lhs_err(
                            sess,
                            &Token { kind: token::Dollar, span: dollar_span2 },
                        );
                    } else {
                        maybe_emit_macro_metavar_expr_feature(features, sess, dollar_span2);
                    }
                    TokenTree::token(token::Dollar, dollar_span2)
                }

                // `tree` is followed by some other token. This is an error.
                Some(tokenstream::TokenTree::Token(token, _)) => {
                    let msg =
                        format!("expected identifier, found `{}`", pprust::token_to_string(token),);
                    sess.dcx().span_err(token.span, msg);
                    TokenTree::MetaVar(token.span, Ident::dummy())
                }

                // There are no more tokens. Just return the `$` we already have.
                None => TokenTree::token(token::Dollar, dollar_span),
            }
        }

        // `tree` is an arbitrary token. Keep it.
        tokenstream::TokenTree::Token(token, _) => TokenTree::Token(*token),

        // `tree` is the beginning of a delimited set of tokens (e.g., `(` or `{`). We need to
        // descend into the delimited set and further parse it.
        &tokenstream::TokenTree::Delimited(span, spacing, delim, ref tts) => TokenTree::Delimited(
            span,
            spacing,
            Delimited { delim, tts: parse(tts, part, sess, node_id, features, edition) },
        ),
    }
}

/// Takes a token and returns `Some(KleeneOp)` if the token is `+`, `*`, or `?`. Otherwise, returns
/// `None`.
fn kleene_op(token: &Token) -> Option<KleeneOp> {
    match token.kind {
        token::Star => Some(KleeneOp::ZeroOrMore),
        token::Plus => Some(KleeneOp::OneOrMore),
        token::Question => Some(KleeneOp::ZeroOrOne),
        _ => None,
    }
}

/// Parse the next token tree of the input looking for a KleeneOp. Returns
///
/// - `Ok(Ok((op, span)))` if the next token tree is a KleeneOp
/// - `Ok(Err(token))` if the next token tree is a token but not a KleeneOp
/// - `Err(span)` if the next token tree is not a token
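///
/// For example (illustratively), on `*` this yields `Ok(Ok((KleeneOp::ZeroOrMore, ..)))`, on `,`
/// it yields `Ok(Err(..))` carrying the comma token, and on a delimited group it yields
/// `Err(..)` with that group's span.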
fn parse_kleene_op(
    iter: &mut TokenStreamIter<'_>,
    span: Span,
) -> Result<Result<(KleeneOp, Span), Token>, Span> {
    match iter.next() {
        Some(tokenstream::TokenTree::Token(token, _)) => match kleene_op(token) {
            Some(op) => Ok(Ok((op, token.span))),
            None => Ok(Err(*token)),
        },
        tree => Err(tree.map_or(span, tokenstream::TokenTree::span)),
    }
}

/// Attempt to parse a single Kleene operator, possibly with a separator.
///
/// For example, in a pattern such as `$(a),*`, `a` is the pattern to be repeated, `,` is the
/// separator, and `*` is the Kleene operator. This function is specifically concerned with parsing
/// the last two tokens of such a pattern: namely, the optional separator and the Kleene operator
/// itself. Note that here we are parsing the _macro_ itself, rather than trying to match some
/// stream of tokens in an invocation of a macro.
///
/// This function will take some input iterator `iter` corresponding to `span` and a parsing
/// session `sess`. If the next one (or possibly two) tokens in `iter` correspond to a Kleene
/// operator and separator, then a tuple with `(separator, KleeneOp)` is returned. Otherwise, an
/// error with the appropriate span is emitted to `sess` and a dummy value is returned.
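///
/// # Example
///
/// A rough sketch (not a doctest): for the repetition `$(a),*`, the two trees seen here are `,`
/// and `*`, so this returns approximately
/// `(Some(comma_token), KleeneToken::new(KleeneOp::ZeroOrMore, span))`; for `$(a)?` it returns
/// `(None, KleeneToken::new(KleeneOp::ZeroOrOne, span))`.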
fn parse_sep_and_kleene_op(
    iter: &mut TokenStreamIter<'_>,
    span: Span,
    sess: &Session,
) -> (Option<Token>, KleeneToken) {
    // We basically look at two token trees here, denoted as #1 and #2 below
    let span = match parse_kleene_op(iter, span) {
        // #1 is a `?`, `+`, or `*` KleeneOp
        Ok(Ok((op, span))) => return (None, KleeneToken::new(op, span)),

        // #1 is a separator followed by #2, a KleeneOp
        Ok(Err(token)) => match parse_kleene_op(iter, token.span) {
            // #2 is the `?` Kleene op, which does not take a separator (error)
            Ok(Ok((KleeneOp::ZeroOrOne, span))) => {
                // Error!
                sess.dcx().span_err(
                    token.span,
                    "the `?` macro repetition operator does not take a separator",
                );

                // Return a dummy
                return (None, KleeneToken::new(KleeneOp::ZeroOrMore, span));
            }

            // #2 is a KleeneOp :D
            Ok(Ok((op, span))) => return (Some(token), KleeneToken::new(op, span)),

            // #2 is a random token or not a token at all :(
            Ok(Err(Token { span, .. })) | Err(span) => span,
        },

        // #1 is not a token
        Err(span) => span,
    };

    // If we ever get to this point, we have experienced an "unexpected token" error
    sess.dcx().span_err(span, "expected one of: `*`, `+`, or `?`");

    // Return a dummy
    (None, KleeneToken::new(KleeneOp::ZeroOrMore, span))
}

// `$$` or a meta-variable expression appears in the lhs of a macro, but shouldn't.
//
// For example, `macro_rules! foo { ( ${len()} ) => {} }`
fn span_dollar_dollar_or_metavar_in_the_lhs_err(sess: &Session, token: &Token) {
    sess.dcx()
        .span_err(token.span, format!("unexpected token: {}", pprust::token_to_string(token)));
    sess.dcx().span_note(
        token.span,
        "`$$` and meta-variable expressions are not allowed inside macro parameter definitions",
    );
}