rustc_expand/proc_macro_server.rs

use std::ops::{Bound, Range};

use ast::token::IdentIsRaw;
use pm::bridge::{
    DelimSpan, Diagnostic, ExpnGlobals, Group, Ident, LitKind, Literal, Punct, TokenTree, server,
};
use pm::{Delimiter, Level};
use rustc_ast as ast;
use rustc_ast::token;
use rustc_ast::tokenstream::{self, DelimSpacing, Spacing, TokenStream};
use rustc_ast::util::literal::escape_byte_str_symbol;
use rustc_ast_pretty::pprust;
use rustc_data_structures::fx::FxHashMap;
use rustc_errors::{Diag, ErrorGuaranteed, MultiSpan, PResult};
use rustc_parse::lexer::nfc_normalize;
use rustc_parse::parser::Parser;
use rustc_parse::{exp, new_parser_from_source_str, source_str_to_stream, unwrap_or_emit_fatal};
use rustc_session::parse::ParseSess;
use rustc_span::def_id::CrateNum;
use rustc_span::{BytePos, FileName, Pos, Span, Symbol, sym};
use smallvec::{SmallVec, smallvec};

use crate::base::ExtCtxt;

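// Conversions between rustc's internal token/AST types and the corresponding
// `proc_macro::bridge` types. `FromInternal` maps rustc values out to the
// bridge representation handed to the proc macro client; `ToInternal` maps
// bridge values back into rustc types.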
trait FromInternal<T> {
    fn from_internal(x: T) -> Self;
}

trait ToInternal<T> {
    fn to_internal(self) -> T;
}

impl FromInternal<token::Delimiter> for Delimiter {
    fn from_internal(delim: token::Delimiter) -> Delimiter {
        match delim {
            token::Delimiter::Parenthesis => Delimiter::Parenthesis,
            token::Delimiter::Brace => Delimiter::Brace,
            token::Delimiter::Bracket => Delimiter::Bracket,
            token::Delimiter::Invisible(_) => Delimiter::None,
        }
    }
}

impl ToInternal<token::Delimiter> for Delimiter {
    fn to_internal(self) -> token::Delimiter {
        match self {
            Delimiter::Parenthesis => token::Delimiter::Parenthesis,
            Delimiter::Brace => token::Delimiter::Brace,
            Delimiter::Bracket => token::Delimiter::Bracket,
            Delimiter::None => token::Delimiter::Invisible(token::InvisibleOrigin::ProcMacro),
        }
    }
}

impl FromInternal<token::LitKind> for LitKind {
    fn from_internal(kind: token::LitKind) -> Self {
        match kind {
            token::Byte => LitKind::Byte,
            token::Char => LitKind::Char,
            token::Integer => LitKind::Integer,
            token::Float => LitKind::Float,
            token::Str => LitKind::Str,
            token::StrRaw(n) => LitKind::StrRaw(n),
            token::ByteStr => LitKind::ByteStr,
            token::ByteStrRaw(n) => LitKind::ByteStrRaw(n),
            token::CStr => LitKind::CStr,
            token::CStrRaw(n) => LitKind::CStrRaw(n),
            token::Err(_guar) => {
                // This is the only place a `pm::bridge::LitKind::ErrWithGuar`
                // is constructed. Note that an `ErrorGuaranteed` is available,
                // as required. See the comment in `to_internal`.
                LitKind::ErrWithGuar
            }
            token::Bool => unreachable!(),
        }
    }
}

impl ToInternal<token::LitKind> for LitKind {
    fn to_internal(self) -> token::LitKind {
        match self {
            LitKind::Byte => token::Byte,
            LitKind::Char => token::Char,
            LitKind::Integer => token::Integer,
            LitKind::Float => token::Float,
            LitKind::Str => token::Str,
            LitKind::StrRaw(n) => token::StrRaw(n),
            LitKind::ByteStr => token::ByteStr,
            LitKind::ByteStrRaw(n) => token::ByteStrRaw(n),
            LitKind::CStr => token::CStr,
            LitKind::CStrRaw(n) => token::CStrRaw(n),
            LitKind::ErrWithGuar => {
                // This is annoying but valid. `LitKind::ErrWithGuar` would
                // have an `ErrorGuaranteed` except that type isn't available
                // in that crate. So we have to fake one. And we don't want to
                // use a delayed bug because there might be lots of these,
                // which would be expensive.
                #[allow(deprecated)]
                let guar = ErrorGuaranteed::unchecked_error_guaranteed();
                token::Err(guar)
            }
        }
    }
}

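// Converts a rustc `TokenStream` into the flat list of bridge `TokenTree`s
// that is handed to the proc macro client. Delimited groups keep their inner
// stream, while multi-character operator tokens are split into
// single-character `Punct`s by the `op` closure below.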
impl FromInternal<(TokenStream, &mut Rustc<'_, '_>)> for Vec<TokenTree<TokenStream, Span, Symbol>> {
    fn from_internal((stream, rustc): (TokenStream, &mut Rustc<'_, '_>)) -> Self {
        use rustc_ast::token::*;

        // Estimate the capacity as `stream.len()` rounded up to the next power
        // of two to limit the number of required reallocations.
        let mut trees = Vec::with_capacity(stream.len().next_power_of_two());
        let mut iter = stream.iter();

        while let Some(tree) = iter.next() {
            let (Token { kind, span }, joint) = match tree.clone() {
                tokenstream::TokenTree::Delimited(span, _, mut delim, mut stream) => {
                    // We used to have an alternative behaviour for crates that
                    // needed it: a hack used to pass AST fragments to
                    // attribute and derive macros as a single nonterminal
                    // token instead of a token stream. Such a token needs to
                    // be "unwrapped" and not represented as a delimited group.
                    // We had a lint for a long time, but now we just emit a
                    // hard error. Eventually we might remove the special-case
                    // hard error check altogether. See #73345.
                    if let Delimiter::Invisible(InvisibleOrigin::MetaVar(kind)) = delim {
                        crate::base::stream_pretty_printing_compatibility_hack(
                            kind,
                            &stream,
                            rustc.psess(),
                        );
                    }

                    // In `mk_delimited` we avoid nesting invisible delimiters
                    // of the same `MetaVarKind`. Here we do the same, but
                    // ignore the `MetaVarKind` because it is discarded when we
                    // convert it to a `Group`.
                    while let Delimiter::Invisible(InvisibleOrigin::MetaVar(_)) = delim {
                        if stream.len() == 1
                            && let tree = stream.iter().next().unwrap()
                            && let tokenstream::TokenTree::Delimited(_, _, delim2, stream2) = tree
                            && let Delimiter::Invisible(InvisibleOrigin::MetaVar(_)) = delim2
                        {
                            delim = *delim2;
                            stream = stream2.clone();
                        } else {
                            break;
                        }
                    }

                    trees.push(TokenTree::Group(Group {
                        delimiter: pm::Delimiter::from_internal(delim),
                        stream: Some(stream),
                        span: DelimSpan {
                            open: span.open,
                            close: span.close,
                            entire: span.entire(),
                        },
                    }));
                    continue;
                }
                tokenstream::TokenTree::Token(token, spacing) => {
                    // Do not be tempted to check here that the `spacing`
                    // values are "correct" w.r.t. the token stream (e.g. that
                    // `Spacing::Joint` is actually followed by a `Punct` token
                    // tree); that is how the problem in #76399 was introduced.
                    //
                    // This is where the `Hidden` in `JointHidden` applies,
                    // because the jointness is effectively hidden from proc
                    // macros.
                    let joint = match spacing {
                        Spacing::Alone | Spacing::JointHidden => false,
                        Spacing::Joint => true,
                    };
                    (token, joint)
                }
            };

            // Split the operator into one or more `Punct`s, one per character.
            // The final one inherits the jointness of the original token. Any
            // before that get `joint = true`.
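            // For example, `>>=` becomes three `Punct`s: `>` and `>` with
            // `joint = true`, then `=` with the original token's jointness.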
            let mut op = |s: &str| {
                assert!(s.is_ascii());
                trees.extend(s.bytes().enumerate().map(|(i, ch)| {
                    let is_final = i == s.len() - 1;
                    // Split the token span into single chars. Unless the span
                    // is an unusual one, e.g. due to proc macro expansion. We
                    // determine this by assuming any span with a length that
                    // matches the operator length is a normal one, and any
                    // span with a different length is an unusual one.
                    let span = if (span.hi() - span.lo()).to_usize() == s.len() {
                        let lo = span.lo() + BytePos::from_usize(i);
                        let hi = lo + BytePos::from_usize(1);
                        span.with_lo(lo).with_hi(hi)
                    } else {
                        span
                    };
                    let joint = if is_final { joint } else { true };
                    TokenTree::Punct(Punct { ch, joint, span })
                }));
            };

            match kind {
                Eq => op("="),
                Lt => op("<"),
                Le => op("<="),
                EqEq => op("=="),
                Ne => op("!="),
                Ge => op(">="),
                Gt => op(">"),
                AndAnd => op("&&"),
                OrOr => op("||"),
                Bang => op("!"),
                Tilde => op("~"),
                Plus => op("+"),
                Minus => op("-"),
                Star => op("*"),
                Slash => op("/"),
                Percent => op("%"),
                Caret => op("^"),
                And => op("&"),
                Or => op("|"),
                Shl => op("<<"),
                Shr => op(">>"),
                PlusEq => op("+="),
                MinusEq => op("-="),
                StarEq => op("*="),
                SlashEq => op("/="),
                PercentEq => op("%="),
                CaretEq => op("^="),
                AndEq => op("&="),
                OrEq => op("|="),
                ShlEq => op("<<="),
                ShrEq => op(">>="),
                At => op("@"),
                Dot => op("."),
                DotDot => op(".."),
                DotDotDot => op("..."),
                DotDotEq => op("..="),
                Comma => op(","),
                Semi => op(";"),
                Colon => op(":"),
                PathSep => op("::"),
                RArrow => op("->"),
                LArrow => op("<-"),
                FatArrow => op("=>"),
                Pound => op("#"),
                Dollar => op("$"),
                Question => op("?"),
                SingleQuote => op("'"),

                Ident(sym, is_raw) => {
                    trees.push(TokenTree::Ident(Ident { sym, is_raw: is_raw.into(), span }))
                }
                NtIdent(ident, is_raw) => trees.push(TokenTree::Ident(Ident {
                    sym: ident.name,
                    is_raw: is_raw.into(),
                    span: ident.span,
                })),

                Lifetime(name, is_raw) => {
                    let ident = rustc_span::Ident::new(name, span).without_first_quote();
                    trees.extend([
                        TokenTree::Punct(Punct { ch: b'\'', joint: true, span }),
                        TokenTree::Ident(Ident { sym: ident.name, is_raw: is_raw.into(), span }),
                    ]);
                }
                NtLifetime(ident, is_raw) => {
                    let stream =
                        TokenStream::token_alone(token::Lifetime(ident.name, is_raw), ident.span);
                    trees.push(TokenTree::Group(Group {
                        delimiter: pm::Delimiter::None,
                        stream: Some(stream),
                        span: DelimSpan::from_single(span),
                    }))
                }

                Literal(token::Lit { kind, symbol, suffix }) => {
                    trees.push(TokenTree::Literal(self::Literal {
                        kind: FromInternal::from_internal(kind),
                        symbol,
                        suffix,
                        span,
                    }));
                }
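                // A doc comment is lowered to the token form of a `doc`
                // attribute: `#`, an optional `!` for inner comments, and a
                // bracketed `doc = "..."` group with the text debug-escaped.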
                DocComment(_, attr_style, data) => {
                    let mut escaped = String::new();
                    for ch in data.as_str().chars() {
                        escaped.extend(ch.escape_debug());
                    }
                    let stream = [
                        Ident(sym::doc, IdentIsRaw::No),
                        Eq,
                        TokenKind::lit(token::Str, Symbol::intern(&escaped), None),
                    ]
                    .into_iter()
                    .map(|kind| tokenstream::TokenTree::token_alone(kind, span))
                    .collect();
                    trees.push(TokenTree::Punct(Punct { ch: b'#', joint: false, span }));
                    if attr_style == ast::AttrStyle::Inner {
                        trees.push(TokenTree::Punct(Punct { ch: b'!', joint: false, span }));
                    }
                    trees.push(TokenTree::Group(Group {
                        delimiter: pm::Delimiter::Bracket,
                        stream: Some(stream),
                        span: DelimSpan::from_single(span),
                    }));
                }

                OpenParen | CloseParen | OpenBrace | CloseBrace | OpenBracket | CloseBracket
                | OpenInvisible(_) | CloseInvisible(_) | Eof => unreachable!(),
            }
        }
        trees
    }
}

// We use a `SmallVec` because the output size is always one or two `TokenTree`s.
impl ToInternal<SmallVec<[tokenstream::TokenTree; 2]>>
    for (TokenTree<TokenStream, Span, Symbol>, &mut Rustc<'_, '_>)
{
    fn to_internal(self) -> SmallVec<[tokenstream::TokenTree; 2]> {
        use rustc_ast::token::*;

        // The code below is conservative, using `token_alone`/`Spacing::Alone`
        // in most places. It's hard in general to do better when working at
        // the token level. When the resulting code is pretty-printed by
        // `print_tts` the `space_between` function helps avoid a lot of
        // unnecessary whitespace, so the results aren't too bad.
        let (tree, rustc) = self;
        match tree {
            TokenTree::Punct(Punct { ch, joint, span }) => {
                let kind = match ch {
                    b'=' => Eq,
                    b'<' => Lt,
                    b'>' => Gt,
                    b'!' => Bang,
                    b'~' => Tilde,
                    b'+' => Plus,
                    b'-' => Minus,
                    b'*' => Star,
                    b'/' => Slash,
                    b'%' => Percent,
                    b'^' => Caret,
                    b'&' => And,
                    b'|' => Or,
                    b'@' => At,
                    b'.' => Dot,
                    b',' => Comma,
                    b';' => Semi,
                    b':' => Colon,
                    b'#' => Pound,
                    b'$' => Dollar,
                    b'?' => Question,
                    b'\'' => SingleQuote,
                    _ => unreachable!(),
                };
                // We never produce `token::Spacing::JointHidden` here, which
                // means the pretty-printing of code produced by proc macros is
                // ugly, with lots of whitespace between tokens. This is
                // unavoidable because `proc_macro::Spacing` only applies to
                // `Punct` token trees.
                smallvec![if joint {
                    tokenstream::TokenTree::token_joint(kind, span)
                } else {
                    tokenstream::TokenTree::token_alone(kind, span)
                }]
            }
            TokenTree::Group(Group { delimiter, stream, span: DelimSpan { open, close, .. } }) => {
                smallvec![tokenstream::TokenTree::Delimited(
                    tokenstream::DelimSpan { open, close },
                    DelimSpacing::new(Spacing::Alone, Spacing::Alone),
                    delimiter.to_internal(),
                    stream.unwrap_or_default(),
                )]
            }
            TokenTree::Ident(self::Ident { sym, is_raw, span }) => {
                rustc.psess().symbol_gallery.insert(sym, span);
                smallvec![tokenstream::TokenTree::token_alone(Ident(sym, is_raw.into()), span)]
            }
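            // The bridge allows an integer or float `Literal` to carry a
            // leading `-` in its symbol. rustc has no such token, so it is
            // split into a `-` token followed by the unsigned literal.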
            TokenTree::Literal(self::Literal {
                kind: self::LitKind::Integer,
                symbol,
                suffix,
                span,
            }) if let Some(symbol) = symbol.as_str().strip_prefix('-') => {
                let symbol = Symbol::intern(symbol);
                let integer = TokenKind::lit(token::Integer, symbol, suffix);
                let a = tokenstream::TokenTree::token_joint_hidden(Minus, span);
                let b = tokenstream::TokenTree::token_alone(integer, span);
                smallvec![a, b]
            }
            TokenTree::Literal(self::Literal {
                kind: self::LitKind::Float,
                symbol,
                suffix,
                span,
            }) if let Some(symbol) = symbol.as_str().strip_prefix('-') => {
                let symbol = Symbol::intern(symbol);
                let float = TokenKind::lit(token::Float, symbol, suffix);
                let a = tokenstream::TokenTree::token_joint_hidden(Minus, span);
                let b = tokenstream::TokenTree::token_alone(float, span);
                smallvec![a, b]
            }
            TokenTree::Literal(self::Literal { kind, symbol, suffix, span }) => {
                smallvec![tokenstream::TokenTree::token_alone(
                    TokenKind::lit(kind.to_internal(), symbol, suffix),
                    span,
                )]
            }
        }
    }
}

impl ToInternal<rustc_errors::Level> for Level {
    fn to_internal(self) -> rustc_errors::Level {
        match self {
            Level::Error => rustc_errors::Level::Error,
            Level::Warning => rustc_errors::Level::Warning,
            Level::Note => rustc_errors::Level::Note,
            Level::Help => rustc_errors::Level::Help,
            _ => unreachable!("unknown proc_macro::Level variant: {:?}", self),
        }
    }
}

pub(crate) struct FreeFunctions;

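/// Server-side state for one macro expansion: the expansion context, the three
/// hygiene spans handed to the client as `ExpnGlobals`, the `CrateNum` of the
/// crate defining the macro, and a cache of spans recovered from metadata via
/// `recover_proc_macro_span`.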
pub(crate) struct Rustc<'a, 'b> {
    ecx: &'a mut ExtCtxt<'b>,
    def_site: Span,
    call_site: Span,
    mixed_site: Span,
    krate: CrateNum,
    rebased_spans: FxHashMap<usize, Span>,
}

impl<'a, 'b> Rustc<'a, 'b> {
    pub(crate) fn new(ecx: &'a mut ExtCtxt<'b>) -> Self {
        let expn_data = ecx.current_expansion.id.expn_data();
        Rustc {
            def_site: ecx.with_def_site_ctxt(expn_data.def_site),
            call_site: ecx.with_call_site_ctxt(expn_data.call_site),
            mixed_site: ecx.with_mixed_site_ctxt(expn_data.call_site),
            krate: expn_data.macro_def_id.unwrap().krate,
            rebased_spans: FxHashMap::default(),
            ecx,
        }
    }

    fn psess(&self) -> &ParseSess {
        self.ecx.psess()
    }
}

impl server::Types for Rustc<'_, '_> {
    type FreeFunctions = FreeFunctions;
    type TokenStream = TokenStream;
    type Span = Span;
    type Symbol = Symbol;
}

impl server::FreeFunctions for Rustc<'_, '_> {
    fn injected_env_var(&mut self, var: &str) -> Option<String> {
        self.ecx.sess.opts.logical_env.get(var).cloned()
    }

    fn track_env_var(&mut self, var: &str, value: Option<&str>) {
        self.psess()
            .env_depinfo
            .borrow_mut()
            .insert((Symbol::intern(var), value.map(Symbol::intern)));
    }

    fn track_path(&mut self, path: &str) {
        self.psess().file_depinfo.borrow_mut().insert(Symbol::intern(path));
    }

    fn literal_from_str(&mut self, s: &str) -> Result<Literal<Self::Span, Self::Symbol>, ()> {
        let name = FileName::proc_macro_source_code(s);
        let mut parser =
            unwrap_or_emit_fatal(new_parser_from_source_str(self.psess(), name, s.to_owned()));

        let first_span = parser.token.span.data();
        let minus_present = parser.eat(exp!(Minus));

        let lit_span = parser.token.span.data();
        let token::Literal(mut lit) = parser.token.kind else {
            return Err(());
        };

        // Check that there is no comment or whitespace surrounding the
        // (possibly negative) literal, and no extra tokens after it.
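        // For example, `literal_from_str(" 1")` and `literal_from_str("1 + 1")`
        // both fail this check.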
        if (lit_span.hi.0 - first_span.lo.0) as usize != s.len() {
            return Err(());
        }

        if minus_present {
            // If the minus is present, check that there is no comment or
            // whitespace between it and the literal token.
            if first_span.hi.0 != lit_span.lo.0 {
                return Err(());
            }

            // Check that the literal is of a kind we allow to be negated in a
            // proc macro token.
            match lit.kind {
                token::LitKind::Bool
                | token::LitKind::Byte
                | token::LitKind::Char
                | token::LitKind::Str
                | token::LitKind::StrRaw(_)
                | token::LitKind::ByteStr
                | token::LitKind::ByteStrRaw(_)
                | token::LitKind::CStr
                | token::LitKind::CStrRaw(_)
                | token::LitKind::Err(_) => return Err(()),
                token::LitKind::Integer | token::LitKind::Float => {}
            }

            // Synthesize a new symbol that includes the minus sign.
            let symbol = Symbol::intern(&s[..1 + lit.symbol.as_str().len()]);
            lit = token::Lit::new(lit.kind, symbol, lit.suffix);
        }
        let token::Lit { kind, symbol, suffix } = lit;
        Ok(Literal {
            kind: FromInternal::from_internal(kind),
            symbol,
            suffix,
            span: self.call_site,
        })
    }

    fn emit_diagnostic(&mut self, diagnostic: Diagnostic<Self::Span>) {
        let message = rustc_errors::DiagMessage::from(diagnostic.message);
        let mut diag: Diag<'_, ()> =
            Diag::new(self.psess().dcx(), diagnostic.level.to_internal(), message);
        diag.span(MultiSpan::from_spans(diagnostic.spans));
        for child in diagnostic.children {
            // This message comes from another diagnostic, and we are just reconstructing the
            // diagnostic, so there's no need for translation.
            #[allow(rustc::untranslatable_diagnostic)]
            diag.sub(child.level.to_internal(), child.message, MultiSpan::from_spans(child.spans));
        }
        diag.emit();
    }
}

impl server::TokenStream for Rustc<'_, '_> {
    fn is_empty(&mut self, stream: &Self::TokenStream) -> bool {
        stream.is_empty()
    }

    fn from_str(&mut self, src: &str) -> Self::TokenStream {
        unwrap_or_emit_fatal(source_str_to_stream(
            self.psess(),
            FileName::proc_macro_source_code(src),
            src.to_string(),
            Some(self.call_site),
        ))
    }

    fn to_string(&mut self, stream: &Self::TokenStream) -> String {
        pprust::tts_to_string(stream)
    }

    fn expand_expr(&mut self, stream: &Self::TokenStream) -> Result<Self::TokenStream, ()> {
        // Parse the expression from our tokenstream.
        let expr: PResult<'_, _> = try {
            let mut p = Parser::new(self.psess(), stream.clone(), Some("proc_macro expand expr"));
            let expr = p.parse_expr()?;
            if p.token != token::Eof {
                p.unexpected()?;
            }
            expr
        };
        let expr = expr.map_err(|err| {
            err.emit();
        })?;

        // Perform eager expansion on the expression.
        let expr = self
            .ecx
            .expander()
            .fully_expand_fragment(crate::expand::AstFragment::Expr(expr))
            .make_expr();

        // NOTE: For now, limit `expand_expr` to exclusively expand to literals.
        // This may be relaxed in the future.
        // We don't use `TokenStream::from_ast` as the tokenstream currently cannot
        // be recovered in the general case.
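        // The match below accepts: bool literals (re-emitted as `true`/`false`
        // identifier tokens), other literals, `include_bytes!` output, and
        // negated integer/float literals. Anything else is rejected.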
        match &expr.kind {
            ast::ExprKind::Lit(token_lit) if token_lit.kind == token::Bool => {
                Ok(tokenstream::TokenStream::token_alone(
                    token::Ident(token_lit.symbol, IdentIsRaw::No),
                    expr.span,
                ))
            }
            ast::ExprKind::Lit(token_lit) => {
                Ok(tokenstream::TokenStream::token_alone(token::Literal(*token_lit), expr.span))
            }
            ast::ExprKind::IncludedBytes(bytes) => {
                let lit = token::Lit::new(token::ByteStr, escape_byte_str_symbol(bytes), None);
                Ok(tokenstream::TokenStream::token_alone(token::TokenKind::Literal(lit), expr.span))
            }
            ast::ExprKind::Unary(ast::UnOp::Neg, e) => match &e.kind {
                ast::ExprKind::Lit(token_lit) => match token_lit {
                    token::Lit { kind: token::Integer | token::Float, .. } => {
                        Ok(Self::TokenStream::from_iter([
                            // FIXME: The span of the `-` token is lost when
                            // parsing, so we cannot faithfully recover it here.
                            tokenstream::TokenTree::token_joint_hidden(token::Minus, e.span),
                            tokenstream::TokenTree::token_alone(token::Literal(*token_lit), e.span),
                        ]))
                    }
                    _ => Err(()),
                },
                _ => Err(()),
            },
            _ => Err(()),
        }
    }

    fn from_token_tree(
        &mut self,
        tree: TokenTree<Self::TokenStream, Self::Span, Self::Symbol>,
    ) -> Self::TokenStream {
        Self::TokenStream::new((tree, &mut *self).to_internal().into_iter().collect::<Vec<_>>())
    }

    fn concat_trees(
        &mut self,
        base: Option<Self::TokenStream>,
        trees: Vec<TokenTree<Self::TokenStream, Self::Span, Self::Symbol>>,
    ) -> Self::TokenStream {
        let mut stream = base.unwrap_or_default();
        for tree in trees {
            for tt in (tree, &mut *self).to_internal() {
                stream.push_tree(tt);
            }
        }
        stream
    }

    fn concat_streams(
        &mut self,
        base: Option<Self::TokenStream>,
        streams: Vec<Self::TokenStream>,
    ) -> Self::TokenStream {
        let mut stream = base.unwrap_or_default();
        for s in streams {
            stream.push_stream(s);
        }
        stream
    }

    fn into_trees(
        &mut self,
        stream: Self::TokenStream,
    ) -> Vec<TokenTree<Self::TokenStream, Self::Span, Self::Symbol>> {
        FromInternal::from_internal((stream, self))
    }
}

impl server::Span for Rustc<'_, '_> {
    fn debug(&mut self, span: Self::Span) -> String {
        if self.ecx.ecfg.span_debug {
            format!("{span:?}")
        } else {
            format!("{:?} bytes({}..{})", span.ctxt(), span.lo().0, span.hi().0)
        }
    }

    fn file(&mut self, span: Self::Span) -> String {
        self.psess()
            .source_map()
            .lookup_char_pos(span.lo())
            .file
            .name
            .prefer_remapped_unconditionaly()
            .to_string()
    }

    fn local_file(&mut self, span: Self::Span) -> Option<String> {
        self.psess()
            .source_map()
            .lookup_char_pos(span.lo())
            .file
            .name
            .clone()
            .into_local_path()
            .map(|p| {
                p.to_str()
                    .expect("non-UTF8 file path in `proc_macro::SourceFile::path`")
                    .to_string()
            })
    }

    fn parent(&mut self, span: Self::Span) -> Option<Self::Span> {
        span.parent_callsite()
    }

    fn source(&mut self, span: Self::Span) -> Self::Span {
        span.source_callsite()
    }

    fn byte_range(&mut self, span: Self::Span) -> Range<usize> {
        let source_map = self.psess().source_map();

        let relative_start_pos = source_map.lookup_byte_offset(span.lo()).pos;
        let relative_end_pos = source_map.lookup_byte_offset(span.hi()).pos;

        Range { start: relative_start_pos.0 as usize, end: relative_end_pos.0 as usize }
    }

    fn start(&mut self, span: Self::Span) -> Self::Span {
        span.shrink_to_lo()
    }

    fn end(&mut self, span: Self::Span) -> Self::Span {
        span.shrink_to_hi()
    }

    fn line(&mut self, span: Self::Span) -> usize {
        let loc = self.psess().source_map().lookup_char_pos(span.lo());
        loc.line
    }

    fn column(&mut self, span: Self::Span) -> usize {
        let loc = self.psess().source_map().lookup_char_pos(span.lo());
        loc.col.to_usize() + 1
    }

    fn join(&mut self, first: Self::Span, second: Self::Span) -> Option<Self::Span> {
        let self_loc = self.psess().source_map().lookup_char_pos(first.lo());
        let other_loc = self.psess().source_map().lookup_char_pos(second.lo());

        if self_loc.file.name != other_loc.file.name {
            return None;
        }

        Some(first.to(second))
    }

    fn subspan(
        &mut self,
        span: Self::Span,
        start: Bound<usize>,
        end: Bound<usize>,
    ) -> Option<Self::Span> {
        let length = span.hi().to_usize() - span.lo().to_usize();

        let start = match start {
            Bound::Included(lo) => lo,
            Bound::Excluded(lo) => lo.checked_add(1)?,
            Bound::Unbounded => 0,
        };

        let end = match end {
            Bound::Included(hi) => hi.checked_add(1)?,
            Bound::Excluded(hi) => hi,
            Bound::Unbounded => length,
        };
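        // `start..end` is now a half-open byte range relative to `span.lo()`;
        // the checks below reject empty, reversed, or out-of-bounds ranges and
        // guard the additions against overflow.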

        // Bounds check the values, preventing addition overflow and OOB spans.
        if start > u32::MAX as usize
            || end > u32::MAX as usize
            || (u32::MAX - start as u32) < span.lo().to_u32()
            || (u32::MAX - end as u32) < span.lo().to_u32()
            || start >= end
            || end > length
        {
            return None;
        }

        let new_lo = span.lo() + BytePos::from_usize(start);
        let new_hi = span.lo() + BytePos::from_usize(end);
        Some(span.with_lo(new_lo).with_hi(new_hi))
    }

    fn resolved_at(&mut self, span: Self::Span, at: Self::Span) -> Self::Span {
        span.with_ctxt(at.ctxt())
    }

    fn source_text(&mut self, span: Self::Span) -> Option<String> {
        self.psess().source_map().span_to_snippet(span).ok()
    }

    /// Saves the provided span into the metadata of
    /// *the crate we are currently compiling*, which must
    /// be a proc-macro crate. This id can be passed to
    /// `recover_proc_macro_span` when our current crate
    /// is *run* as a proc-macro.
    ///
    /// Let's suppose that we have two crates - `my_client`
    /// and `my_proc_macro`. The `my_proc_macro` crate
    /// contains a procedural macro `my_macro`, which
    /// is implemented as: `quote! { "hello" }`
    ///
    /// When we *compile* `my_proc_macro`, we will execute
    /// the `quote` proc-macro. This will save the span of
    /// "hello" into the metadata of `my_proc_macro`. As a result,
    /// the body of `my_proc_macro` (after expansion) will end
    /// up containing a call that looks like this:
    /// `proc_macro::Ident::new("hello", proc_macro::Span::recover_proc_macro_span(0))`
    ///
    /// where `0` is the id returned by this function.
    /// When `my_proc_macro` *executes* (during the compilation of `my_client`),
    /// the call to `recover_proc_macro_span` will load the corresponding
    /// span from the metadata of `my_proc_macro` (which we have access to,
    /// since we've loaded `my_proc_macro` from disk in order to execute it).
    /// In this way, we have obtained a span pointing into `my_proc_macro`.
    fn save_span(&mut self, span: Self::Span) -> usize {
        self.psess().save_proc_macro_span(span)
    }

    fn recover_proc_macro_span(&mut self, id: usize) -> Self::Span {
        let (resolver, krate, def_site) = (&*self.ecx.resolver, self.krate, self.def_site);
        *self.rebased_spans.entry(id).or_insert_with(|| {
            // FIXME: `SyntaxContext` for spans from proc macro crates is lost during encoding,
            // so replace it with a def-site context until we encode it properly.
            resolver.get_proc_macro_quoted_span(krate, id).with_ctxt(def_site.ctxt())
        })
    }
}

impl server::Symbol for Rustc<'_, '_> {
    fn normalize_and_validate_ident(&mut self, string: &str) -> Result<Self::Symbol, ()> {
        let sym = nfc_normalize(string);
        if rustc_lexer::is_ident(sym.as_str()) { Ok(sym) } else { Err(()) }
    }
}

impl server::Server for Rustc<'_, '_> {
    fn globals(&mut self) -> ExpnGlobals<Self::Span> {
        ExpnGlobals {
            def_site: self.def_site,
            call_site: self.call_site,
            mixed_site: self.mixed_site,
        }
    }

    fn intern_symbol(string: &str) -> Self::Symbol {
        Symbol::intern(string)
    }

    fn with_symbol_string(symbol: &Self::Symbol, f: impl FnOnce(&str)) {
        f(symbol.as_str())
    }
}