Skip to content

Commit 1ae7de9

Browse files
committed
Revert "Recursively expand nonterminals"
This reverts commit 2af0218.
1 parent 4d4facb commit 1ae7de9

File tree

2 files changed

+16
-95
lines changed

2 files changed

+16
-95
lines changed

src/librustc_ast/tokenstream.rs

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -290,6 +290,8 @@ impl TokenStream {
290290
t1.next().is_none() && t2.next().is_none()
291291
}
292292

293+
294+
293295
pub fn map_enumerated<F: FnMut(usize, TokenTree) -> TokenTree>(self, mut f: F) -> TokenStream {
294296
TokenStream(Lrc::new(
295297
self.0

src/librustc_parse/lib.rs

Lines changed: 14 additions & 95 deletions
Original file line numberDiff line numberDiff line change
@@ -7,20 +7,20 @@
77
#![feature(or_patterns)]
88

99
use rustc_ast::ast;
10-
use rustc_ast::token::{self, DelimToken, Nonterminal, Token, TokenKind};
11-
use rustc_ast::tokenstream::{self, IsJoint, TokenStream, TokenTree};
10+
use rustc_ast::token::{self, Nonterminal, Token, TokenKind, DelimToken};
11+
use rustc_ast::tokenstream::{self, TokenStream, TokenTree};
1212
use rustc_ast_pretty::pprust;
1313
use rustc_data_structures::sync::Lrc;
1414
use rustc_errors::{Diagnostic, FatalError, Level, PResult};
1515
use rustc_session::parse::ParseSess;
16-
use rustc_span::symbol::kw;
1716
use rustc_span::{FileName, SourceFile, Span};
17+
use rustc_span::symbol::kw;
1818

19-
use std::mem;
2019
use std::path::Path;
2120
use std::str;
21+
use std::mem;
2222

23-
use log::{debug, info};
23+
use log::info;
2424

2525
pub const MACRO_ARGUMENTS: Option<&'static str> = Some("macro arguments");
2626

@@ -308,7 +308,7 @@ pub fn nt_to_tokenstream(nt: &Nonterminal, sess: &ParseSess, span: Span) -> Toke
308308
// modifications, including adding/removing typically non-semantic
309309
// tokens such as extra braces and commas, don't happen.
310310
if let Some(tokens) = tokens {
311-
if tokenstream_probably_equal_for_proc_macro(&tokens, &tokens_for_real, sess) {
311+
if tokenstream_probably_equal_for_proc_macro(&tokens, &tokens_for_real) {
312312
return tokens;
313313
}
314314
info!(
@@ -389,11 +389,7 @@ fn prepend_attrs(
389389
//
390390
// This is otherwise the same as `eq_unspanned`, only recursing with a
391391
// different method.
392-
pub fn tokenstream_probably_equal_for_proc_macro(
393-
first: &TokenStream,
394-
other: &TokenStream,
395-
sess: &ParseSess,
396-
) -> bool {
392+
pub fn tokenstream_probably_equal_for_proc_macro(first: &TokenStream, other: &TokenStream) -> bool {
397393
// When checking for `probably_eq`, we ignore certain tokens that aren't
398394
// preserved in the AST. Because they are not preserved, the pretty
399395
// printer arbitrarily adds or removes them when printing as token
@@ -421,83 +417,10 @@ pub fn tokenstream_probably_equal_for_proc_macro(
421417
true
422418
}
423419

424-
// When comparing two `TokenStream`s, we ignore the `IsJoint` information.
425-
//
426-
// However, `rustc_parse::lexer::tokentrees::TokenStreamBuilder` will
427-
// use `Token.glue` on adjacent tokens with the proper `IsJoint`.
428-
// Since we are ignoring `IsJoint`, a 'glued' token (e.g. `BinOp(Shr)`)
429-
// and its 'split'/'unglued' components (e.g. `Gt, Gt`) are equivalent
430-
// when determining if two `TokenStream`s are 'probably equal'.
431-
//
432-
// Therefore, we use `break_two_token_op` to convert all tokens
433-
// to the 'unglued' form (if it exists). This ensures that two
434-
// `TokenStream`s which differ only in how their tokens are glued
435-
// will be considered 'probably equal', which allows us to keep spans.
436-
//
437-
// This is important when the original `TokenStream` contained
438-
// extra spaces (e.g. `f :: < Vec < _ > > ( ) ;`). These extra spaces
439-
// will be omitted when we pretty-print, which can cause the original
440-
// and reparsed `TokenStream`s to differ in the assignment of `IsJoint`,
441-
// leading to some tokens being 'glued' together in one stream but not
442-
// the other. See #68489 for more details.
443-
fn break_tokens(tree: TokenTree) -> impl Iterator<Item = TokenTree> {
444-
// In almost all cases, we should have either zero or one levels
445-
// of 'unglueing'. However, in some unusual cases, we may need
446-
// to iterate breaking tokens multiple times. For example:
447-
// '[BinOpEq(Shr)] => [Gt, Ge] -> [Gt, Gt, Eq]'
448-
let mut token_trees: SmallVec<[_; 2]>;
449-
if let TokenTree::Token(token) = &tree {
450-
let mut out = SmallVec::<[_; 2]>::new();
451-
out.push(token.clone());
452-
// Iterate to fixpoint:
453-
// * We start off with 'out' containing our initial token, and `temp` empty
454-
// * If we are able to break any tokens in `out`, then `out` will have
455-
// at least one more element than 'temp', so we will try to break tokens
456-
// again.
457-
// * If we cannot break any tokens in 'out', we are done
458-
loop {
459-
let mut temp = SmallVec::<[_; 2]>::new();
460-
let mut changed = false;
461-
462-
for token in out.into_iter() {
463-
if let Some((first, second)) = token.kind.break_two_token_op() {
464-
temp.push(Token::new(first, DUMMY_SP));
465-
temp.push(Token::new(second, DUMMY_SP));
466-
changed = true;
467-
} else {
468-
temp.push(token);
469-
}
470-
}
471-
out = temp;
472-
if !changed {
473-
break;
474-
}
475-
}
476-
token_trees = out.into_iter().map(|t| TokenTree::Token(t)).collect();
477-
if token_trees.len() != 1 {
478-
debug!("break_tokens: broke {:?} to {:?}", tree, token_trees);
479-
}
480-
} else {
481-
token_trees = SmallVec::new();
482-
token_trees.push(tree);
483-
}
484-
token_trees.into_iter()
485-
}
486-
487-
let expand_nt = |tree: TokenTree| {
488-
if let TokenTree::Token(Token { kind: TokenKind::Interpolated(nt), span }) = &tree {
489-
nt_to_tokenstream(nt, sess, *span).into_trees()
490-
} else {
491-
TokenStream::new(vec![(tree, IsJoint::NonJoint)]).into_trees()
492-
}
493-
};
494-
495-
// Break tokens after we expand any nonterminals, so that we break tokens
496-
// that are produced as a result of nonterminal expansion.
497-
let mut t1 = first.trees().filter(semantic_tree).flat_map(expand_nt).flat_map(break_tokens);
498-
let mut t2 = other.trees().filter(semantic_tree).flat_map(expand_nt).flat_map(break_tokens);
420+
let mut t1 = first.trees().filter(semantic_tree);
421+
let mut t2 = other.trees().filter(semantic_tree);
499422
for (t1, t2) in t1.by_ref().zip(t2.by_ref()) {
500-
if !tokentree_probably_equal_for_proc_macro(&t1, &t2, sess) {
423+
if !tokentree_probably_equal_for_proc_macro(&t1, &t2) {
501424
return false;
502425
}
503426
}
@@ -556,29 +479,25 @@ crate fn token_probably_equal_for_proc_macro(first: &Token, other: &Token) -> bo
556479
b == d && (a == c || a == kw::DollarCrate || c == kw::DollarCrate)
557480
}
558481

559-
// Expanded by `tokenstream_probably_equal_for_proc_macro`
560-
(&Interpolated(_), &Interpolated(_)) => unreachable!(),
482+
(&Interpolated(_), &Interpolated(_)) => false,
561483

562484
_ => panic!("forgot to add a token?"),
563485
}
564486
}
565487

488+
566489
// See comments in `Nonterminal::to_tokenstream` for why we care about
567490
// *probably* equal here rather than actual equality
568491
//
569492
// This is otherwise the same as `eq_unspanned`, only recursing with a
570493
// different method.
571-
pub fn tokentree_probably_equal_for_proc_macro(
572-
first: &TokenTree,
573-
other: &TokenTree,
574-
sess: &ParseSess,
575-
) -> bool {
494+
pub fn tokentree_probably_equal_for_proc_macro(first: &TokenTree, other: &TokenTree) -> bool {
576495
match (first, other) {
577496
(TokenTree::Token(token), TokenTree::Token(token2)) => {
578497
token_probably_equal_for_proc_macro(token, token2)
579498
}
580499
(TokenTree::Delimited(_, delim, tts), TokenTree::Delimited(_, delim2, tts2)) => {
581-
delim == delim2 && tokenstream_probably_equal_for_proc_macro(&tts, &tts2, sess)
500+
delim == delim2 && tokenstream_probably_equal_for_proc_macro(&tts, &tts2)
582501
}
583502
_ => false,
584503
}

0 commit comments

Comments
 (0)