crate-ify and delete unused code from syntax::parse #51265

Merged · merged 1 commit on Jun 10, 2018
1 change: 1 addition & 0 deletions src/libsyntax/lib.rs
@@ -25,6 +25,7 @@
#![feature(const_atomic_usize_new)]
#![feature(rustc_attrs)]
#![feature(str_escape)]
+#![feature(crate_visibility_modifier)]

#![recursion_limit="256"]

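For context: the feature gate added above is what enables the `crate fn` items throughout the rest of this diff; `crate` as a visibility is shorthand for `pub(crate)`. A minimal sketch of the semantics, assuming a nightly toolchain with this gate enabled (illustration only, not code from the PR):

```rust
#![feature(crate_visibility_modifier)]

mod parse {
    // `crate fn` is equivalent to `pub(crate) fn`: callable from anywhere
    // inside this crate, but not exported to downstream crates.
    crate fn helper() -> u32 {
        42
    }
}

fn main() {
    // Fine from elsewhere in the same crate; an external crate naming
    // `parse::helper` would fail to compile with a privacy error.
    assert_eq!(parse::helper(), 42);
}
```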
11 changes: 5 additions & 6 deletions src/libsyntax/parse/attr.rs
@@ -11,8 +11,7 @@
use attr;
use ast;
use codemap::respan;
-use parse::common::SeqSep;
-use parse::PResult;
+use parse::{SeqSep, PResult};
use parse::token::{self, Nonterminal};
use parse::parser::{Parser, TokenType, PathStyle};
use tokenstream::TokenStream;
@@ -28,7 +27,7 @@ const DEFAULT_UNEXPECTED_INNER_ATTR_ERR_MSG: &'static str = "an inner attribute

impl<'a> Parser<'a> {
/// Parse attributes that appear before an item
-pub fn parse_outer_attributes(&mut self) -> PResult<'a, Vec<ast::Attribute>> {
+crate fn parse_outer_attributes(&mut self) -> PResult<'a, Vec<ast::Attribute>> {
let mut attrs: Vec<ast::Attribute> = Vec::new();
let mut just_parsed_doc_comment = false;
loop {
@@ -139,7 +138,7 @@ impl<'a> Parser<'a> {
})
}

-pub fn parse_path_and_tokens(&mut self) -> PResult<'a, (ast::Path, TokenStream)> {
+crate fn parse_path_and_tokens(&mut self) -> PResult<'a, (ast::Path, TokenStream)> {
let meta = match self.token {
token::Interpolated(ref nt) => match nt.0 {
Nonterminal::NtMeta(ref meta) => Some(meta.clone()),
@@ -160,7 +159,7 @@
/// terminated by a semicolon.

/// matches inner_attrs*
-pub fn parse_inner_attributes(&mut self) -> PResult<'a, Vec<ast::Attribute>> {
+crate fn parse_inner_attributes(&mut self) -> PResult<'a, Vec<ast::Attribute>> {
let mut attrs: Vec<ast::Attribute> = vec![];
loop {
match self.token {
@@ -231,7 +230,7 @@ impl<'a> Parser<'a> {
Ok(ast::MetaItem { ident, node, span })
}

-pub fn parse_meta_item_kind(&mut self) -> PResult<'a, ast::MetaItemKind> {
+crate fn parse_meta_item_kind(&mut self) -> PResult<'a, ast::MetaItemKind> {
Ok(if self.eat(&token::Eq) {
ast::MetaItemKind::NameValue(self.parse_unsuffixed_lit()?)
} else if self.eat(&token::OpenDelim(token::Paren)) {
36 changes: 0 additions & 36 deletions src/libsyntax/parse/common.rs

This file was deleted.
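The deleted file mostly carried the `SeqSep` helper, which now lives directly under `parse` (hence the consolidated `use parse::{SeqSep, PResult};` import in attr.rs above). For reference, a rough sketch of the type as reconstructed from the rustc sources of this period; treat the exact names as best-effort rather than authoritative:

```rust
use parse::token;

/// A sequence separator: an optional separator token plus a flag
/// recording whether a trailing separator is permitted.
pub struct SeqSep {
    pub sep: Option<token::Token>,
    pub trailing_sep_allowed: bool,
}

impl SeqSep {
    /// Comma-style sequences, where `a, b, c,` is accepted.
    pub fn trailing_allowed(t: token::Token) -> SeqSep {
        SeqSep { sep: Some(t), trailing_sep_allowed: true }
    }

    /// No separator at all, e.g. a bare run of items.
    pub fn none() -> SeqSep {
        SeqSep { sep: None, trailing_sep_allowed: false }
    }
}
```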

2 changes: 1 addition & 1 deletion src/libsyntax/parse/lexer/comments.rs
@@ -40,7 +40,7 @@ pub struct Comment {
pub pos: BytePos,
}

-pub fn is_doc_comment(s: &str) -> bool {
+fn is_doc_comment(s: &str) -> bool {
(s.starts_with("///") && super::is_doc_comment(s)) || s.starts_with("//!") ||
(s.starts_with("/**") && is_block_doc_comment(s)) || s.starts_with("/*!")
}
40 changes: 19 additions & 21 deletions src/libsyntax/parse/lexer/mod.rs
@@ -51,16 +51,16 @@ pub struct StringReader<'a> {
pub ch: Option<char>,
pub filemap: Lrc<syntax_pos::FileMap>,
/// Stop reading src at this index.
-pub end_src_index: usize,
+end_src_index: usize,
/// Whether to record new-lines and multibyte chars in filemap.
/// This is only necessary the first time a filemap is lexed.
/// If part of a filemap is being re-lexed, this should be set to false.
-pub save_new_lines_and_multibyte: bool,
+save_new_lines_and_multibyte: bool,
// cached:
peek_tok: token::Token,
peek_span: Span,
peek_span_src_raw: Span,
-pub fatal_errs: Vec<DiagnosticBuilder<'a>>,
+fatal_errs: Vec<DiagnosticBuilder<'a>>,
// cache a direct reference to the source text, so that we don't have to
// retrieve it via `self.filemap.src.as_ref().unwrap()` all the time.
src: Lrc<String>,
@@ -70,7 +70,7 @@ pub struct StringReader<'a> {
/// The raw source span which *does not* take `override_span` into account
span_src_raw: Span,
open_braces: Vec<(token::DelimToken, Span)>,
-pub override_span: Option<Span>,
+crate override_span: Option<Span>,
}

impl<'a> StringReader<'a> {
@@ -163,11 +163,9 @@ impl<'a> StringReader<'a> {
sp: self.peek_span,
}
}
-}
-
-impl<'a> StringReader<'a> {
/// For comments.rs, which hackily pokes into next_pos and ch
-pub fn new_raw(sess: &'a ParseSess, filemap: Lrc<syntax_pos::FileMap>,
+fn new_raw(sess: &'a ParseSess, filemap: Lrc<syntax_pos::FileMap>,
override_span: Option<Span>) -> Self {
let mut sr = StringReader::new_raw_internal(sess, filemap, override_span);
sr.bump();
@@ -240,17 +238,17 @@ impl<'a> StringReader<'a> {
sr
}

-pub fn ch_is(&self, c: char) -> bool {
+fn ch_is(&self, c: char) -> bool {
self.ch == Some(c)
}

/// Report a fatal lexical error with a given span.
-pub fn fatal_span(&self, sp: Span, m: &str) -> FatalError {
+fn fatal_span(&self, sp: Span, m: &str) -> FatalError {
self.sess.span_diagnostic.span_fatal(sp, m)
}

/// Report a lexical error with a given span.
-pub fn err_span(&self, sp: Span, m: &str) {
+fn err_span(&self, sp: Span, m: &str) {
self.sess.span_diagnostic.span_err(sp, m)
}

@@ -375,7 +373,7 @@ impl<'a> StringReader<'a> {
/// Calls `f` with a string slice of the source text spanning from `start`
/// up to but excluding `self.pos`, meaning the slice does not include
/// the character `self.ch`.
-pub fn with_str_from<T, F>(&self, start: BytePos, f: F) -> T
+fn with_str_from<T, F>(&self, start: BytePos, f: F) -> T
where F: FnOnce(&str) -> T
{
self.with_str_from_to(start, self.pos, f)
@@ -384,13 +382,13 @@ impl<'a> StringReader<'a> {
/// Create a Name from a given offset to the current offset, each
/// adjusted 1 towards each other (assumes that on either side there is a
/// single-byte delimiter).
-pub fn name_from(&self, start: BytePos) -> ast::Name {
+fn name_from(&self, start: BytePos) -> ast::Name {
debug!("taking an ident from {:?} to {:?}", start, self.pos);
self.with_str_from(start, Symbol::intern)
}

/// As name_from, with an explicit endpoint.
-pub fn name_from_to(&self, start: BytePos, end: BytePos) -> ast::Name {
+fn name_from_to(&self, start: BytePos, end: BytePos) -> ast::Name {
debug!("taking an ident from {:?} to {:?}", start, end);
self.with_str_from_to(start, end, Symbol::intern)
}
@@ -454,7 +452,7 @@ impl<'a> StringReader<'a> {

/// Advance the StringReader by one character. If a newline is
/// discovered, add it to the FileMap's list of line start offsets.
-pub fn bump(&mut self) {
+crate fn bump(&mut self) {
let next_src_index = self.src_index(self.next_pos);
if next_src_index < self.end_src_index {
let next_ch = char_at(&self.src, next_src_index);
@@ -481,7 +479,7 @@ impl<'a> StringReader<'a> {
}
}

-pub fn nextch(&self) -> Option<char> {
+fn nextch(&self) -> Option<char> {
let next_src_index = self.src_index(self.next_pos);
if next_src_index < self.end_src_index {
Some(char_at(&self.src, next_src_index))
@@ -490,11 +488,11 @@ impl<'a> StringReader<'a> {
}
}

-pub fn nextch_is(&self, c: char) -> bool {
+fn nextch_is(&self, c: char) -> bool {
self.nextch() == Some(c)
}

-pub fn nextnextch(&self) -> Option<char> {
+fn nextnextch(&self) -> Option<char> {
let next_src_index = self.src_index(self.next_pos);
if next_src_index < self.end_src_index {
let next_next_src_index =
@@ -506,7 +504,7 @@ impl<'a> StringReader<'a> {
None
}

-pub fn nextnextch_is(&self, c: char) -> bool {
+fn nextnextch_is(&self, c: char) -> bool {
self.nextnextch() == Some(c)
}

@@ -1732,7 +1730,7 @@ impl<'a> StringReader<'a> {

// This tests the character for the unicode property 'PATTERN_WHITE_SPACE' which
// is guaranteed to be forward compatible. http://unicode.org/reports/tr31/#R3
-pub fn is_pattern_whitespace(c: Option<char>) -> bool {
+crate fn is_pattern_whitespace(c: Option<char>) -> bool {
c.map_or(false, Pattern_White_Space)
}

@@ -1747,14 +1745,14 @@ fn is_dec_digit(c: Option<char>) -> bool {
in_range(c, '0', '9')
}

-pub fn is_doc_comment(s: &str) -> bool {
+fn is_doc_comment(s: &str) -> bool {
let res = (s.starts_with("///") && *s.as_bytes().get(3).unwrap_or(&b' ') != b'/') ||
s.starts_with("//!");
debug!("is {:?} a doc comment? {}", s, res);
res
}

-pub fn is_block_doc_comment(s: &str) -> bool {
+fn is_block_doc_comment(s: &str) -> bool {
// Prevent `/**/` from being parsed as a doc comment
let res = ((s.starts_with("/**") && *s.as_bytes().get(3).unwrap_or(&b' ') != b'*') ||
s.starts_with("/*!")) && s.len() >= 5;
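Since `is_doc_comment` and `is_block_doc_comment` are now private to the lexer, a standalone illustration has to carry its own copies. The sketch below duplicates the bodies shown in the diff (minus the `debug!` logging) and spells out the corner cases they guard against:

```rust
// Local copies of the two predicates, as they appear in the diff above.
fn is_doc_comment(s: &str) -> bool {
    (s.starts_with("///") && *s.as_bytes().get(3).unwrap_or(&b' ') != b'/') ||
        s.starts_with("//!")
}

fn is_block_doc_comment(s: &str) -> bool {
    // Prevent `/**/` from being parsed as a doc comment
    ((s.starts_with("/**") && *s.as_bytes().get(3).unwrap_or(&b' ') != b'*') ||
        s.starts_with("/*!")) && s.len() >= 5
}

fn main() {
    assert!(is_doc_comment("/// outer doc"));
    assert!(!is_doc_comment("//// four slashes is a plain comment"));
    assert!(is_doc_comment("//! inner doc"));
    assert!(is_block_doc_comment("/** outer block doc */"));
    assert!(is_block_doc_comment("/*! inner block doc */"));
    assert!(!is_block_doc_comment("/**/"));  // too short to be a doc comment
    assert!(!is_block_doc_comment("/***/")); // third `*` disqualifies it
}
```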
2 changes: 1 addition & 1 deletion src/libsyntax/parse/lexer/tokentrees.rs
@@ -15,7 +15,7 @@ use tokenstream::{Delimited, TokenStream, TokenTree};

impl<'a> StringReader<'a> {
// Parse a stream of tokens into a list of `TokenTree`s, up to an `Eof`.
-pub fn parse_all_token_trees(&mut self) -> PResult<'a, TokenStream> {
+crate fn parse_all_token_trees(&mut self) -> PResult<'a, TokenStream> {
let mut tts = Vec::new();
while self.token != token::Eof {
tts.push(self.parse_token_tree()?);
2 changes: 1 addition & 1 deletion src/libsyntax/parse/lexer/unicode_chars.rs
@@ -333,7 +333,7 @@ const ASCII_ARRAY: &'static [(char, &'static str)] = &[
('=', "Equals Sign"),
('>', "Greater-Than Sign"), ];

-pub fn check_for_substitution<'a>(reader: &StringReader<'a>,
+crate fn check_for_substitution<'a>(reader: &StringReader<'a>,
ch: char,
err: &mut DiagnosticBuilder<'a>) -> bool {
UNICODE_ARRAY
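`check_for_substitution` scans a table of Unicode characters that merely resemble ASCII punctuation and, using entries like the `ASCII_ARRAY` ones above, suggests the intended ASCII character in the diagnostic. Below is a simplified, self-contained sketch of that lookup; the table contents are illustrative assumptions, not the real `UNICODE_ARRAY`:

```rust
// Simplified model of the confusable-character lookup; the real tables
// cover many more Unicode lookalikes.
fn check_for_substitution(ch: char) -> Option<(char, &'static str)> {
    const CONFUSABLES: &[(char, char, &'static str)] = &[
        ('\u{FF1B}', ';', "Semicolon"),          // fullwidth semicolon
        ('\u{FF1D}', '=', "Equals Sign"),        // fullwidth equals sign
        ('\u{FF1E}', '>', "Greater-Than Sign"),  // fullwidth greater-than sign
    ];
    CONFUSABLES
        .iter()
        .find(|&&(lookalike, _, _)| lookalike == ch)
        .map(|&(_, ascii, name)| (ascii, name))
}

fn main() {
    // A fullwidth semicolon should point the user at plain `;`.
    assert_eq!(check_for_substitution('\u{FF1B}'), Some((';', "Semicolon")));
    // Ordinary ASCII needs no suggestion.
    assert_eq!(check_for_substitution(';'), None);
}
```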