Cleanup: Rename ModSep to PathSep #123462

Merged (1 commit, Apr 16, 2024)
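
The token being renamed is `::`. It separates the segments of any Rust path (enum variants, associated items, turbofish type arguments, qualified paths), not only module paths, which is what makes `PathSep` a more accurate name than `ModSep`. A minimal illustration in ordinary Rust, not compiler code; every `::` below is one PathSep token, and none of them sits between module names:

    fn main() {
        let v = Vec::<u8>::new();               // turbofish + associated function
        let none: Option<u8> = Option::None;    // enum variant path
        let two = <u8 as Ord>::max(1, 2);       // fully qualified (QSelf) path
        println!("{} {:?} {}", v.len(), none, two);
    }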
6 changes: 3 additions & 3 deletions compiler/rustc_ast/src/attr/mod.rs
@@ -308,11 +308,11 @@ impl MetaItem {
// FIXME: Share code with `parse_path`.
let path = match tokens.next().map(|tt| TokenTree::uninterpolate(tt)).as_deref() {
Some(&TokenTree::Token(
- Token { kind: ref kind @ (token::Ident(..) | token::ModSep), span },
+ Token { kind: ref kind @ (token::Ident(..) | token::PathSep), span },
_,
)) => 'arm: {
let mut segments = if let &token::Ident(name, _) = kind {
- if let Some(TokenTree::Token(Token { kind: token::ModSep, .. }, _)) =
+ if let Some(TokenTree::Token(Token { kind: token::PathSep, .. }, _)) =
tokens.peek()
{
tokens.next();
@@ -331,7 +331,7 @@ impl MetaItem {
} else {
return None;
}
- if let Some(TokenTree::Token(Token { kind: token::ModSep, .. }, _)) =
+ if let Some(TokenTree::Token(Token { kind: token::PathSep, .. }, _)) =
tokens.peek()
{
tokens.next();
20 changes: 11 additions & 9 deletions compiler/rustc_ast/src/token.rs
@@ -290,7 +290,7 @@ pub enum TokenKind {
/// `:`
Colon,
/// `::`
- ModSep,
+ PathSep,
/// `->`
RArrow,
/// `<-`
@@ -393,7 +393,7 @@ impl TokenKind {
BinOpEq(Shr) => (Gt, Ge),
DotDot => (Dot, Dot),
DotDotDot => (Dot, DotDot),
- ModSep => (Colon, Colon),
+ PathSep => (Colon, Colon),
RArrow => (BinOp(Minus), Gt),
LArrow => (Lt, BinOp(Minus)),
FatArrow => (Eq, Gt),
@@ -454,7 +454,9 @@ impl Token {
match self.kind {
Eq | Lt | Le | EqEq | Ne | Ge | Gt | AndAnd | OrOr | Not | Tilde | BinOp(_)
| BinOpEq(_) | At | Dot | DotDot | DotDotDot | DotDotEq | Comma | Semi | Colon
- | ModSep | RArrow | LArrow | FatArrow | Pound | Dollar | Question | SingleQuote => true,
+ | PathSep | RArrow | LArrow | FatArrow | Pound | Dollar | Question | SingleQuote => {
+ true
+ }

OpenDelim(..) | CloseDelim(..) | Literal(..) | DocComment(..) | Ident(..)
| Lifetime(..) | Interpolated(..) | Eof => false,
@@ -481,7 +483,7 @@ impl Token {
// DotDotDot is no longer supported, but we need some way to display the error
DotDot | DotDotDot | DotDotEq | // range notation
Lt | BinOp(Shl) | // associated path
- ModSep | // global path
+ PathSep | // global path
Lifetime(..) | // labeled loop
Pound => true, // expression attributes
Interpolated(ref nt) => matches!(&nt.0, NtLiteral(..) |
@@ -507,7 +509,7 @@ impl Token {
// DotDotDot is no longer supported
| DotDot | DotDotDot | DotDotEq // ranges
| Lt | BinOp(Shl) // associated path
- | ModSep => true, // global path
+ | PathSep => true, // global path
Interpolated(ref nt) => matches!(&nt.0, NtLiteral(..) |
NtPat(..) |
NtBlock(..) |
@@ -530,7 +532,7 @@ impl Token {
Question | // maybe bound in trait object
Lifetime(..) | // lifetime bound in trait object
Lt | BinOp(Shl) | // associated path
- ModSep => true, // global path
+ PathSep => true, // global path
Interpolated(ref nt) => matches!(&nt.0, NtTy(..) | NtPath(..)),
// For anonymous structs or unions, which only appear in specific positions
// (type of struct fields or union fields), we don't consider them as regular types
@@ -708,7 +710,7 @@ impl Token {
}

pub fn is_path_start(&self) -> bool {
- self == &ModSep
+ self == &PathSep
|| self.is_qpath_start()
|| self.is_whole_path()
|| self.is_path_segment_keyword()
@@ -821,7 +823,7 @@ impl Token {
_ => return None,
},
Colon => match joint.kind {
- Colon => ModSep,
+ Colon => PathSep,
_ => return None,
},
SingleQuote => match joint.kind {
@@ -830,7 +832,7 @@ impl Token {
},

Le | EqEq | Ne | Ge | AndAnd | OrOr | Tilde | BinOpEq(..) | At | DotDotDot
- | DotDotEq | Comma | Semi | ModSep | RArrow | LArrow | FatArrow | Pound | Dollar
+ | DotDotEq | Comma | Semi | PathSep | RArrow | LArrow | FatArrow | Pound | Dollar
| Question | OpenDelim(..) | CloseDelim(..) | Literal(..) | Ident(..)
| Lifetime(..) | Interpolated(..) | DocComment(..) | Eof => return None,
};
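
Two of the token.rs hunks above are halves of the same round trip: one maps `PathSep` back to the pair `(Colon, Colon)` when a two-character token has to be split for error reporting, and the token-gluing hunk near the end combines a `Colon` followed by a joint `Colon` into a single `PathSep`. A simplified, self-contained sketch of that pairing; the helper names `glue` and `split` here are illustrative rather than rustc's exact API:

    #[derive(Clone, Copy, Debug, PartialEq)]
    enum TokenKind {
        Colon,   // `:`
        PathSep, // `::`, previously named `ModSep`
    }

    /// Combine a token with the joint token that immediately follows it.
    fn glue(first: TokenKind, joint: TokenKind) -> Option<TokenKind> {
        match (first, joint) {
            (TokenKind::Colon, TokenKind::Colon) => Some(TokenKind::PathSep),
            _ => None,
        }
    }

    /// Break a two-character token back into its halves.
    fn split(kind: TokenKind) -> Option<(TokenKind, TokenKind)> {
        match kind {
            TokenKind::PathSep => Some((TokenKind::Colon, TokenKind::Colon)),
            _ => None,
        }
    }

    fn main() {
        assert_eq!(glue(TokenKind::Colon, TokenKind::Colon), Some(TokenKind::PathSep));
        assert_eq!(split(TokenKind::PathSep), Some((TokenKind::Colon, TokenKind::Colon)));
    }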
2 changes: 1 addition & 1 deletion compiler/rustc_ast_pretty/src/pprust/state.rs
@@ -893,7 +893,7 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
token::Comma => ",".into(),
token::Semi => ";".into(),
token::Colon => ":".into(),
- token::ModSep => "::".into(),
+ token::PathSep => "::".into(),
token::RArrow => "->".into(),
token::LArrow => "<-".into(),
token::FatArrow => "=>".into(),
2 changes: 1 addition & 1 deletion compiler/rustc_expand/src/proc_macro_server.rs
@@ -208,7 +208,7 @@ impl FromInternal<(TokenStream, &mut Rustc<'_, '_>)> for Vec<TokenTree<TokenStre
Comma => op(","),
Semi => op(";"),
Colon => op(":"),
- ModSep => op("::"),
+ PathSep => op("::"),
RArrow => op("->"),
LArrow => op("<-"),
FatArrow => op("=>"),
12 changes: 6 additions & 6 deletions compiler/rustc_parse/src/parser/diagnostics.rs
@@ -279,7 +279,7 @@ impl<'a> Parser<'a> {
TokenKind::Colon,
TokenKind::Comma,
TokenKind::Semi,
- TokenKind::ModSep,
+ TokenKind::PathSep,
TokenKind::OpenDelim(Delimiter::Brace),
TokenKind::OpenDelim(Delimiter::Parenthesis),
TokenKind::CloseDelim(Delimiter::Brace),
@@ -1169,7 +1169,7 @@ impl<'a> Parser<'a> {
return;
}

- if token::ModSep == self.token.kind && segment.args.is_none() {
+ if token::PathSep == self.token.kind && segment.args.is_none() {
let snapshot = self.create_snapshot_for_diagnostic();
self.bump();
let lo = self.token.span;
@@ -1420,15 +1420,15 @@ impl<'a> Parser<'a> {
[(token::Lt, 1), (token::Gt, -1), (token::BinOp(token::Shr), -2)];
self.consume_tts(1, &modifiers);

- if !&[token::OpenDelim(Delimiter::Parenthesis), token::ModSep]
+ if !&[token::OpenDelim(Delimiter::Parenthesis), token::PathSep]
.contains(&self.token.kind)
{
// We don't have `foo< bar >(` or `foo< bar >::`, so we rewind the
// parser and bail out.
self.restore_snapshot(snapshot);
}
}
- return if token::ModSep == self.token.kind {
+ return if token::PathSep == self.token.kind {
// We have some certainty that this was a bad turbofish at this point.
// `foo< bar >::`
if let ExprKind::Binary(o, ..) = inner_op.kind
@@ -1784,7 +1784,7 @@ impl<'a> Parser<'a> {
}

// Do not add `::` to expected tokens.
- if self.token == token::ModSep {
+ if self.token == token::PathSep {
if let Some(ty) = base.to_ty() {
return self.maybe_recover_from_bad_qpath_stage_2(ty.span, ty);
}
@@ -1799,7 +1799,7 @@ impl<'a> Parser<'a> {
ty_span: Span,
ty: P<Ty>,
) -> PResult<'a, P<T>> {
- self.expect(&token::ModSep)?;
+ self.expect(&token::PathSep)?;

let mut path = ast::Path { segments: ThinVec::new(), span: DUMMY_SP, tokens: None };
self.parse_path_segments(&mut path.segments, T::PATH_STYLE, None)?;
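
The `@@ -1420` hunk above sits in the "bad turbofish" recovery: after tentatively consuming `foo< bar >`, the parser only commits to the recovery if the next token is `(` or `::`. An illustrative example, assuming the usual rustc suggestion of the `::<...>` form, of the source shape that recovery targets:

    fn main() {
        // Writing `(1..10).collect<Vec<i32>>()` has the `foo< bar >(` shape the
        // recovery above looks for; the accepted spelling is the turbofish:
        let v = (1..10).collect::<Vec<i32>>();
        println!("{}", v.len());
    }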
10 changes: 5 additions & 5 deletions compiler/rustc_parse/src/parser/item.rs
@@ -358,12 +358,12 @@ impl<'a> Parser<'a> {
fn is_reuse_path_item(&mut self) -> bool {
// no: `reuse ::path` for compatibility reasons with macro invocations
self.token.is_keyword(kw::Reuse)
- && self.look_ahead(1, |t| t.is_path_start() && t.kind != token::ModSep)
+ && self.look_ahead(1, |t| t.is_path_start() && t.kind != token::PathSep)
}

/// Are we sure this could not possibly be a macro invocation?
fn isnt_macro_invocation(&mut self) -> bool {
- self.check_ident() && self.look_ahead(1, |t| *t != token::Not && *t != token::ModSep)
+ self.check_ident() && self.look_ahead(1, |t| *t != token::Not && *t != token::PathSep)
}

/// Recover on encountering a struct or method definition where the user
@@ -1020,7 +1020,7 @@ impl<'a> Parser<'a> {
{
// `use *;` or `use ::*;` or `use {...};` or `use ::{...};`
let mod_sep_ctxt = self.token.span.ctxt();
- if self.eat(&token::ModSep) {
+ if self.eat(&token::PathSep) {
prefix
.segments
.push(PathSegment::path_root(lo.shrink_to_lo().with_ctxt(mod_sep_ctxt)));
@@ -1031,7 +1031,7 @@ impl<'a> Parser<'a> {
// `use path::*;` or `use path::{...};` or `use path;` or `use path as bar;`
prefix = self.parse_path(PathStyle::Mod)?;

- if self.eat(&token::ModSep) {
+ if self.eat(&token::PathSep) {
self.parse_use_tree_glob_or_nested()?
} else {
// Recover from using a colon as path separator.
@@ -2752,7 +2752,7 @@ impl<'a> Parser<'a> {
// Is `self` `n` tokens ahead?
let is_isolated_self = |this: &Self, n| {
this.is_keyword_ahead(n, &[kw::SelfLower])
- && this.look_ahead(n + 1, |t| t != &token::ModSep)
+ && this.look_ahead(n + 1, |t| t != &token::PathSep)
};
// Is `mut self` `n` tokens ahead?
let is_isolated_mut_self =
4 changes: 2 additions & 2 deletions compiler/rustc_parse/src/parser/mod.rs
@@ -109,7 +109,7 @@ macro_rules! maybe_recover_from_interpolated_ty_qpath {
($self: expr, $allow_qpath_recovery: expr) => {
if $allow_qpath_recovery
&& $self.may_recover()
- && $self.look_ahead(1, |t| t == &token::ModSep)
+ && $self.look_ahead(1, |t| t == &token::PathSep)
&& let token::Interpolated(nt) = &$self.token.kind
&& let token::NtTy(ty) = &nt.0
{
@@ -1532,7 +1532,7 @@ impl<'a> Parser<'a> {

/// `::{` or `::*`
fn is_import_coupler(&mut self) -> bool {
- self.check(&token::ModSep)
+ self.check(&token::PathSep)
&& self.look_ahead(1, |t| {
*t == token::OpenDelim(Delimiter::Brace) || *t == token::BinOp(token::Star)
})
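
`is_import_coupler`, shown in the second hunk, checks for a `::` that is immediately followed by `{` or `*`. In source terms these are the two `use`-tree forms it has to tell apart from a plain path; a small illustrative snippet:

    #![allow(unused_imports)]

    use std::collections::{HashMap, HashSet}; // `::{`: nested import list
    use std::io::prelude::*;                  // `::*`: glob import

    fn main() {}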
4 changes: 2 additions & 2 deletions compiler/rustc_parse/src/parser/nonterminal.rs
@@ -62,7 +62,7 @@ impl<'a> Parser<'a> {
_ => false,
},
NonterminalKind::Path | NonterminalKind::Meta => match &token.kind {
- token::ModSep | token::Ident(..) => true,
+ token::PathSep | token::Ident(..) => true,
token::Interpolated(nt) => may_be_ident(&nt.0),
_ => false,
},
@@ -76,7 +76,7 @@ impl<'a> Parser<'a> {
token::Literal(_) | // literal
token::DotDot | // range pattern (future compat)
token::DotDotDot | // range pattern (future compat)
- token::ModSep | // path
+ token::PathSep | // path
token::Lt | // path (UFCS constant)
token::BinOp(token::Shl) => true, // path (double UFCS)
// leading vert `|` or-pattern
2 changes: 1 addition & 1 deletion compiler/rustc_parse/src/parser/pat.rs
@@ -1016,7 +1016,7 @@ impl<'a> Parser<'a> {
&& self.look_ahead(1, |t| !matches!(t.kind, token::OpenDelim(Delimiter::Parenthesis) // A tuple struct pattern.
| token::OpenDelim(Delimiter::Brace) // A struct pattern.
| token::DotDotDot | token::DotDotEq | token::DotDot // A range pattern.
- | token::ModSep // A tuple / struct variant pattern.
+ | token::PathSep // A tuple / struct variant pattern.
| token::Not)) // A macro expanding to a pattern.
}

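
The pat.rs hunk is a look-ahead that decides whether an identifier in a pattern is a plain binding or the start of something larger; the `PathSep` arm covers tuple and struct variant patterns. A small example of that `Ident ::` case in ordinary Rust:

    enum Shape {
        Circle(f64),
        Rect { w: f64, h: f64 },
    }

    fn area(s: Shape) -> f64 {
        match s {
            // `Shape ::` makes these variant patterns rather than fresh bindings.
            Shape::Circle(r) => std::f64::consts::PI * r * r,
            Shape::Rect { w, h } => w * h,
        }
    }

    fn main() {
        println!("{}", area(Shape::Rect { w: 2.0, h: 3.0 }));
    }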
18 changes: 9 additions & 9 deletions compiler/rustc_parse/src/parser/path.rs
@@ -96,7 +96,7 @@ impl<'a> Parser<'a> {
}

if !self.recover_colon_before_qpath_proj() {
- self.expect(&token::ModSep)?;
+ self.expect(&token::PathSep)?;
}

let qself = P(QSelf { ty, path_span, position: path.segments.len() });
@@ -200,7 +200,7 @@ impl<'a> Parser<'a> {
let lo = self.token.span;
let mut segments = ThinVec::new();
let mod_sep_ctxt = self.token.span.ctxt();
- if self.eat(&token::ModSep) {
+ if self.eat(&token::PathSep) {
segments.push(PathSegment::path_root(lo.shrink_to_lo().with_ctxt(mod_sep_ctxt)));
}
self.parse_path_segments(&mut segments, style, ty_generics)?;
@@ -232,11 +232,11 @@ impl<'a> Parser<'a> {
// `PathStyle::Expr` is only provided at the root invocation and never in
// `parse_path_segment` to recurse and therefore can be checked to maintain
// this invariant.
- self.check_trailing_angle_brackets(&segment, &[&token::ModSep]);
+ self.check_trailing_angle_brackets(&segment, &[&token::PathSep]);
}
segments.push(segment);

- if self.is_import_coupler() || !self.eat(&token::ModSep) {
+ if self.is_import_coupler() || !self.eat(&token::PathSep) {
if style == PathStyle::Expr
&& self.may_recover()
&& self.token == token::Colon
@@ -291,7 +291,7 @@ impl<'a> Parser<'a> {
Ok(
if style == PathStyle::Type && check_args_start(self)
|| style != PathStyle::Mod
- && self.check(&token::ModSep)
+ && self.check(&token::PathSep)
&& self.look_ahead(1, |t| is_args_start(t))
{
// We use `style == PathStyle::Expr` to check if this is in a recursion or not. If
@@ -303,7 +303,7 @@ impl<'a> Parser<'a> {
}

// Generic arguments are found - `<`, `(`, `::<` or `::(`.
- self.eat(&token::ModSep);
+ self.eat(&token::PathSep);
let lo = self.token.span;
let args = if self.eat_lt() {
// `<'a, T, A = U>`
@@ -379,7 +379,7 @@ impl<'a> Parser<'a> {
let token_before_parsing = self.token.clone();
let mut snapshot = None;
if self.may_recover()
- && prev_token_before_parsing.kind == token::ModSep
+ && prev_token_before_parsing.kind == token::PathSep
&& (style == PathStyle::Expr && self.token.can_begin_expr()
|| style == PathStyle::Pat && self.token.can_begin_pattern())
{
@@ -388,7 +388,7 @@ impl<'a> Parser<'a> {

let (inputs, _) = match self.parse_paren_comma_seq(|p| p.parse_ty()) {
Ok(output) => output,
- Err(mut error) if prev_token_before_parsing.kind == token::ModSep => {
+ Err(mut error) if prev_token_before_parsing.kind == token::PathSep => {
error.span_label(
prev_token_before_parsing.span.to(token_before_parsing.span),
"while parsing this parenthesized list of type arguments starting here",
@@ -470,7 +470,7 @@ impl<'a> Parser<'a> {
}
}

- if let token::ModSep | token::RArrow = self.token.kind {
+ if let token::PathSep | token::RArrow = self.token.kind {
return;
}

2 changes: 1 addition & 1 deletion compiler/rustc_parse/src/parser/ty.rs
@@ -82,7 +82,7 @@ enum AllowCVariadic {
/// Types can also be of the form `IDENT(u8, u8) -> u8`, however this assumes
/// that `IDENT` is not the ident of a fn trait.
fn can_continue_type_after_non_fn_ident(t: &Token) -> bool {
- t == &token::ModSep || t == &token::Lt || t == &token::BinOp(token::Shl)
+ t == &token::PathSep || t == &token::Lt || t == &token::BinOp(token::Shl)
}

fn can_begin_dyn_bound_in_edition_2015(t: &Token) -> bool {
2 changes: 1 addition & 1 deletion src/tools/clippy/clippy_lints/src/crate_in_macro_def.rs
@@ -88,7 +88,7 @@ fn contains_unhygienic_crate_reference(tts: &TokenStream) -> Option<Span> {
if !prev_is_dollar
&& let Some(span) = is_crate_keyword(curr)
&& let Some(next) = cursor.look_ahead(0)
- && is_token(next, &TokenKind::ModSep)
+ && is_token(next, &TokenKind::PathSep)
{
return Some(span);
}
2 changes: 1 addition & 1 deletion src/tools/rustfmt/src/macros.rs
@@ -1091,7 +1091,7 @@ fn next_space(tok: &TokenKind) -> SpaceState {
| TokenKind::DotDotEq
| TokenKind::Question => SpaceState::Punctuation,

- TokenKind::ModSep
+ TokenKind::PathSep
| TokenKind::Pound
| TokenKind::Dollar
| TokenKind::OpenDelim(_)