@@ -252,9 +252,7 @@ pub fn nt_to_tokenstream(nt: &Nonterminal, sess: &ParseSess, span: Span) -> Toke
     let convert_tokens = |tokens: Option<LazyTokenStream>| tokens.map(|t| t.into_token_stream());
 
     let tokens = match *nt {
-        Nonterminal::NtItem(ref item) => {
-            prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span)
-        }
+        Nonterminal::NtItem(ref item) => prepend_attrs(&item.attrs, item.tokens.as_ref()),
         Nonterminal::NtBlock(ref block) => convert_tokens(block.tokens.clone()),
         Nonterminal::NtStmt(ref stmt) => {
             // FIXME: We currently only collect tokens for `:stmt`
@@ -279,7 +277,7 @@ pub fn nt_to_tokenstream(nt: &Nonterminal, sess: &ParseSess, span: Span) -> Toke
             if expr.tokens.is_none() {
                 debug!("missing tokens for expr {:?}", expr);
             }
-            prepend_attrs(sess, &expr.attrs, expr.tokens.as_ref(), span)
+            prepend_attrs(&expr.attrs, expr.tokens.as_ref())
         }
     };
 
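(Both call sites above now pass only the attribute list and the cached token stream; the `sess` and `span` arguments go away because, as the remaining hunks show, `prepend_attrs` no longer needs to re-parse attribute source text.)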
@@ -603,10 +601,8 @@ fn token_probably_equal_for_proc_macro(first: &Token, other: &Token) -> bool {
 }
 
 fn prepend_attrs(
-    sess: &ParseSess,
     attrs: &[ast::Attribute],
     tokens: Option<&tokenstream::LazyTokenStream>,
-    span: rustc_span::Span,
 ) -> Option<tokenstream::TokenStream> {
     let tokens = tokens?.clone().into_token_stream();
     if attrs.is_empty() {
@@ -619,47 +615,12 @@ fn prepend_attrs(
             ast::AttrStyle::Outer,
             "inner attributes should prevent cached tokens from existing"
         );
-
-        let source = pprust::attribute_to_string(attr);
-        let macro_filename = FileName::macro_expansion_source_code(&source);
-
-        let item = match attr.kind {
-            ast::AttrKind::Normal(ref item) => item,
-            ast::AttrKind::DocComment(..) => {
-                let stream = parse_stream_from_source_str(macro_filename, source, sess, Some(span));
-                builder.push(stream);
-                continue;
-            }
-        };
-
-        // synthesize # [ $path $tokens ] manually here
-        let mut brackets = tokenstream::TokenStreamBuilder::new();
-
-        // For simple paths, push the identifier directly
-        if item.path.segments.len() == 1 && item.path.segments[0].args.is_none() {
-            let ident = item.path.segments[0].ident;
-            let token = token::Ident(ident.name, ident.as_str().starts_with("r#"));
-            brackets.push(tokenstream::TokenTree::token(token, ident.span));
-
-        // ... and for more complicated paths, fall back to a reparse hack that
-        // should eventually be removed.
-        } else {
-            let stream = parse_stream_from_source_str(macro_filename, source, sess, Some(span));
-            brackets.push(stream);
-        }
-
-        brackets.push(item.args.outer_tokens());
-
-        // The span we list here for `#` and for `[ ... ]` are both wrong in
-        // that it encompasses more than each token, but it hopefully is "good
-        // enough" for now at least.
-        builder.push(tokenstream::TokenTree::token(token::Pound, attr.span));
-        let delim_span = tokenstream::DelimSpan::from_single(attr.span);
-        builder.push(tokenstream::TokenTree::Delimited(
-            delim_span,
-            token::DelimToken::Bracket,
-            brackets.build(),
-        ));
+        builder.push(
+            attr.tokens
+                .clone()
+                .unwrap_or_else(|| panic!("Attribute {:?} is missing tokens!", attr))
+                .into_token_stream(),
+        );
     }
     builder.push(tokens.clone());
     Some(builder.build())
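
For readers skimming the patch, here is a minimal, self-contained sketch of the rewritten `prepend_attrs` logic. The types below (`TokenStream`, `LazyTokenStream`, `Attribute`, `TokenStreamBuilder`) are hypothetical stand-ins for the rustc-internal ones, reduced to just what this function touches; only the body of `prepend_attrs` mirrors the actual `+` lines above.

    // Stand-in types (NOT the real rustc definitions), just enough to
    // demonstrate the concatenation-of-cached-streams approach.
    #[derive(Clone, Debug)]
    struct TokenStream(Vec<String>); // stand-in: a flat list of rendered tokens

    #[derive(Clone, Debug)]
    struct LazyTokenStream(TokenStream);

    impl LazyTokenStream {
        fn into_token_stream(self) -> TokenStream {
            self.0
        }
    }

    #[derive(Debug)]
    struct Attribute {
        // Under the new scheme, every outer attribute is expected to carry
        // the tokens it was parsed from; `None` is treated as a compiler bug.
        tokens: Option<LazyTokenStream>,
    }

    struct TokenStreamBuilder(Vec<String>);

    impl TokenStreamBuilder {
        fn new() -> Self {
            TokenStreamBuilder(Vec::new())
        }
        fn push(&mut self, stream: TokenStream) {
            self.0.extend(stream.0);
        }
        fn build(self) -> TokenStream {
            TokenStream(self.0)
        }
    }

    // Mirrors the post-patch body: no `ParseSess`, no `Span`, no re-parsing.
    // (The real function also asserts each attribute is an outer attribute.)
    fn prepend_attrs(
        attrs: &[Attribute],
        tokens: Option<&LazyTokenStream>,
    ) -> Option<TokenStream> {
        let tokens = tokens?.clone().into_token_stream();
        if attrs.is_empty() {
            return Some(tokens);
        }
        let mut builder = TokenStreamBuilder::new();
        for attr in attrs {
            builder.push(
                attr.tokens
                    .clone()
                    .unwrap_or_else(|| panic!("Attribute {:?} is missing tokens!", attr))
                    .into_token_stream(),
            );
        }
        builder.push(tokens.clone());
        Some(builder.build())
    }

    fn main() {
        let attr = Attribute {
            tokens: Some(LazyTokenStream(TokenStream(vec!["#[inline]".to_string()]))),
        };
        let item = LazyTokenStream(TokenStream(vec!["fn f() {}".to_string()]));
        let out = prepend_attrs(&[attr], Some(&item)).unwrap();
        assert_eq!(out.0, ["#[inline]", "fn f() {}"]); // attributes first, then the item
    }

The design point of the patch: each attribute now carries the exact tokens it was parsed from, so prepending attributes becomes pure concatenation of cached streams. The deleted path, which pretty-printed each attribute and re-parsed it (producing only approximate spans, per the removed comment), is gone, and a missing token stream panics instead of being silently re-parsed.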