@@ -43,6 +43,8 @@ pub struct TtReader<'a> {
     /* cached: */
     pub cur_tok: Token,
     pub cur_span: Span,
+    /// Transform doc comments. Only useful in macro invocations
+    pub desugar_doc_comments: bool,
 }

 /// This can do Macro-By-Example transcription. On the other hand, if
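The new `desugar_doc_comments` flag controls whether the reader rewrites doc-comment tokens into their `#[doc = "..."]` attribute form as they are read back out; it only matters for readers that feed a macro invocation's tokens to the matcher. A minimal sketch of a call site that opts in (hypothetical — the caller is not shown in this diff, and `sp_diag`, `interp`, `invocation_tts` are placeholder names):

```rust
// Hypothetical caller: a reader built over a macro invocation's argument
// token trees opts in to desugaring; every other reader keeps the default
// `false` assigned in `new_tt_reader` below.
let mut arg_reader = new_tt_reader(sp_diag, interp, invocation_tts);
arg_reader.desugar_doc_comments = true;
```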
@@ -66,6 +68,7 @@ pub fn new_tt_reader<'a>(sp_diag: &'a SpanHandler,
         },
         repeat_idx: Vec::new(),
         repeat_len: Vec::new(),
+        desugar_doc_comments: false,
         /* dummy values, never read: */
         cur_tok: token::Eof,
         cur_span: DUMMY_SP,
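`new_tt_reader` defaults the flag to `false`, so only call sites that explicitly opt in see the new behaviour. The user-visible effect of opting in is roughly the following (a sketch written against current `macro_rules!` syntax, not code from this change): with desugaring on, a `///` line in an invocation reaches the matcher as a `#[doc = "..."]` meta item, so it can match a `meta` fragment.

```rust
macro_rules! declare {
    // The doc comment arrives as `#[doc = " A documented unit struct."]`,
    // which matches the `meta` fragment like any other attribute.
    ($(#[$attr:meta])* struct $name:ident;) => {
        $(#[$attr])*
        struct $name;
    }
}

declare! {
    /// A documented unit struct.
    struct Foo;
}
```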
@@ -279,8 +282,7 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
                 }
             }
             // TtDelimited or any token that can be unzipped
-            seq @ TtDelimited(..) | seq @ TtToken(_, DocComment(..))
-                | seq @ TtToken(_, MatchNt(..)) => {
+            seq @ TtDelimited(..) | seq @ TtToken(_, MatchNt(..)) => {
                 // do not advance the idx yet
                 r.stack.push(TtFrame {
                     forest: seq.expand_into_tts(),
@@ -290,6 +292,14 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
                 });
                 // if this could be 0-length, we'd need to potentially recur here
             }
+            TtToken(sp, DocComment(name)) if r.desugar_doc_comments => {
+                r.stack.push(TtFrame {
+                    forest: TtToken(sp, DocComment(name)).expand_into_tts(),
+                    idx: 0,
+                    dotdotdoted: false,
+                    sep: None
+                });
+            }
             TtToken(sp, tok) => {
                 r.cur_span = sp;
                 r.cur_tok = tok;
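With the guard in place, a `DocComment` token is no longer unzipped unconditionally: when `desugar_doc_comments` is set, the token is expanded via `expand_into_tts()` and pushed as a new frame, so subsequent calls yield the equivalent attribute tokens; when it is not set, the token falls through to the plain `TtToken(sp, tok)` arm and is returned verbatim. The equivalence this relies on is the usual doc-comment sugar (sketch, not part of the diff):

```rust
/// adds one
fn documented(x: u32) -> u32 { x + 1 }

// What the desugared token stream spells out explicitly: the same item
// carrying the attribute form of the comment (note the leading space).
#[doc = " adds one"]
fn desugared(x: u32) -> u32 { x + 1 }
```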