diff --git a/src/librustc/metadata/creader.rs b/src/librustc/metadata/creader.rs index 958c6373a8c9a..101784a7e73d6 100644 --- a/src/librustc/metadata/creader.rs +++ b/src/librustc/metadata/creader.rs @@ -23,6 +23,7 @@ use metadata::loader; use metadata::loader::Os; use std::cell::RefCell; +use std::rc::Rc; use collections::HashMap; use syntax::ast; use syntax::abi; @@ -41,7 +42,7 @@ use syntax::visit; pub fn read_crates(sess: &Session, krate: &ast::Crate, os: loader::Os, - intr: @IdentInterner) { + intr: Rc<IdentInterner>) { let mut e = Env { sess: sess, os: os, @@ -114,7 +115,7 @@ struct Env<'a> { os: loader::Os, crate_cache: @RefCell<Vec<cache_entry>>, next_crate_num: ast::CrateNum, - intr: @IdentInterner + intr: Rc<IdentInterner> } fn visit_crate(e: &Env, c: &ast::Crate) { @@ -295,7 +296,7 @@ fn resolve_crate(e: &mut Env, id_hash: id_hash, hash: hash.map(|a| &*a), os: e.os, - intr: e.intr, + intr: e.intr.clone(), rejected_via_hash: false, }; let loader::Library { diff --git a/src/librustc/metadata/csearch.rs b/src/librustc/metadata/csearch.rs index 47202feef9a16..c65659a8aa459 100644 --- a/src/librustc/metadata/csearch.rs +++ b/src/librustc/metadata/csearch.rs @@ -63,7 +63,7 @@ pub fn each_child_of_item(cstore: &cstore::CStore, let get_crate_data: decoder::GetCrateDataCb = |cnum| { cstore.get_crate_data(cnum) }; - decoder::each_child_of_item(cstore.intr, + decoder::each_child_of_item(cstore.intr.clone(), crate_data, def_id.node, get_crate_data, @@ -80,7 +80,7 @@ pub fn each_top_level_item_of_crate(cstore: &cstore::CStore, let get_crate_data: decoder::GetCrateDataCb = |cnum| { cstore.get_crate_data(cnum) }; - decoder::each_top_level_item_of_crate(cstore.intr, + decoder::each_top_level_item_of_crate(cstore.intr.clone(), crate_data, get_crate_data, callback) @@ -118,19 +118,19 @@ pub fn get_enum_variants(tcx: &ty::ctxt, def: ast::DefId) -> Vec<@ty::VariantInfo> { let cstore = &tcx.sess.cstore; let cdata = cstore.get_crate_data(def.krate); - return decoder::get_enum_variants(cstore.intr, cdata, def.node, tcx) + return decoder::get_enum_variants(cstore.intr.clone(), cdata, def.node, tcx) } /// Returns information about the given implementation.
pub fn get_impl(tcx: &ty::ctxt, impl_def_id: ast::DefId) -> ty::Impl { let cdata = tcx.sess.cstore.get_crate_data(impl_def_id.krate); - decoder::get_impl(tcx.sess.cstore.intr, cdata, impl_def_id.node, tcx) + decoder::get_impl(tcx.sess.cstore.intr.clone(), cdata, impl_def_id.node, tcx) } pub fn get_method(tcx: &ty::ctxt, def: ast::DefId) -> ty::Method { let cdata = tcx.sess.cstore.get_crate_data(def.krate); - decoder::get_method(tcx.sess.cstore.intr, cdata, def.node, tcx) + decoder::get_method(tcx.sess.cstore.intr.clone(), cdata, def.node, tcx) } pub fn get_method_name_and_explicit_self(cstore: &cstore::CStore, @@ -138,7 +138,7 @@ pub fn get_method_name_and_explicit_self(cstore: &cstore::CStore, -> (ast::Ident, ast::ExplicitSelf_) { let cdata = cstore.get_crate_data(def.krate); - decoder::get_method_name_and_explicit_self(cstore.intr, cdata, def.node) + decoder::get_method_name_and_explicit_self(cstore.intr.clone(), cdata, def.node) } pub fn get_trait_method_def_ids(cstore: &cstore::CStore, @@ -158,7 +158,7 @@ pub fn get_provided_trait_methods(tcx: &ty::ctxt, -> Vec<@ty::Method> { let cstore = &tcx.sess.cstore; let cdata = cstore.get_crate_data(def.krate); - decoder::get_provided_trait_methods(cstore.intr, cdata, def.node, tcx) + decoder::get_provided_trait_methods(cstore.intr.clone(), cdata, def.node, tcx) } pub fn get_supertraits(tcx: &ty::ctxt, def: ast::DefId) -> Vec<@ty::TraitRef> { @@ -177,7 +177,7 @@ pub fn get_static_methods_if_impl(cstore: &cstore::CStore, def: ast::DefId) -> Option<Vec<StaticMethodInfo> > { let cdata = cstore.get_crate_data(def.krate); - decoder::get_static_methods_if_impl(cstore.intr, cdata, def.node) + decoder::get_static_methods_if_impl(cstore.intr.clone(), cdata, def.node) } pub fn get_item_attrs(cstore: &cstore::CStore, @@ -191,7 +191,7 @@ pub fn get_struct_fields(cstore: &cstore::CStore, def: ast::DefId) -> Vec<ty::field_ty> { let cdata = cstore.get_crate_data(def.krate); - decoder::get_struct_fields(cstore.intr, cdata, def.node) + decoder::get_struct_fields(cstore.intr.clone(), cdata, def.node) } pub fn get_type(tcx: &ty::ctxt, @@ -251,7 +251,7 @@ pub fn get_impl_method(cstore: &cstore::CStore, mname: ast::Ident) -> Option<ast::DefId> { let cdata = cstore.get_crate_data(def.krate); - decoder::get_impl_method(cstore.intr, cdata, def.node, mname) + decoder::get_impl_method(cstore.intr.clone(), cdata, def.node, mname) } pub fn get_item_visibility(cstore: &cstore::CStore, diff --git a/src/librustc/metadata/cstore.rs b/src/librustc/metadata/cstore.rs index 02c092ca50832..6d58cd57dd462 100644 --- a/src/librustc/metadata/cstore.rs +++ b/src/librustc/metadata/cstore.rs @@ -19,6 +19,7 @@ use metadata::loader; use std::cell::RefCell; use std::c_vec::CVec; +use std::rc::Rc; use collections::HashMap; use syntax::ast; use syntax::parse::token::IdentInterner; @@ -70,14 +71,14 @@ pub struct CStore { priv used_crate_sources: RefCell<Vec<CrateSource> >, priv used_libraries: RefCell<Vec<(~str, ast::NativeLibaryKind)> >, priv used_link_args: RefCell<Vec<~str> >, - intr: @IdentInterner + intr: Rc<IdentInterner> } // Map from NodeId's of local extern crate statements to crate numbers type extern_mod_crate_map = HashMap<ast::NodeId, ast::CrateNum>; impl CStore { - pub fn new(intr: @IdentInterner) -> CStore { + pub fn new(intr: Rc<IdentInterner>) -> CStore { CStore { metas: RefCell::new(HashMap::new()), extern_mod_crate_map: RefCell::new(HashMap::new()), diff --git a/src/librustc/metadata/decoder.rs b/src/librustc/metadata/decoder.rs index f61e85f3f3f9d..7439ae020df17 100644 --- a/src/librustc/metadata/decoder.rs +++ b/src/librustc/metadata/decoder.rs @@ -278,7 +278,7 @@ fn item_region_param_defs(item_doc: ebml::Doc, cdata: Cmd)
reader::tagged_docs(item_doc, tag_region_param_def, |rp_doc| { let ident_str_doc = reader::get_doc(rp_doc, tag_region_param_def_ident); - let ident = item_name(token::get_ident_interner(), ident_str_doc); + let ident = item_name(&*token::get_ident_interner(), ident_str_doc); let def_id_doc = reader::get_doc(rp_doc, tag_region_param_def_def_id); let def_id = reader::with_doc_data(def_id_doc, parse_def_id); @@ -460,13 +460,13 @@ pub fn get_impl_vtables(cdata: Cmd, } -pub fn get_impl_method(intr: @IdentInterner, cdata: Cmd, id: ast::NodeId, +pub fn get_impl_method(intr: Rc<IdentInterner>, cdata: Cmd, id: ast::NodeId, name: ast::Ident) -> Option<ast::DefId> { let items = reader::get_doc(reader::Doc(cdata.data()), tag_items); let mut found = None; reader::tagged_docs(find_item(id, items), tag_item_impl_method, |mid| { let m_did = reader::with_doc_data(mid, parse_def_id); - if item_name(intr, find_item(m_did.node, items)) == name { + if item_name(&*intr, find_item(m_did.node, items)) == name { found = Some(translate_def_id(cdata, m_did)); } true @@ -509,7 +509,7 @@ pub fn each_lang_item(cdata: Cmd, f: |ast::NodeId, uint| -> bool) -> bool { }) } -fn each_child_of_item_or_crate(intr: @IdentInterner, +fn each_child_of_item_or_crate(intr: Rc<IdentInterner>, cdata: Cmd, item_doc: ebml::Doc, get_crate_data: GetCrateDataCb, @@ -536,7 +536,7 @@ fn each_child_of_item_or_crate(intr: @IdentInterner, None => {} Some(child_item_doc) => { // Hand off the item to the callback. - let child_name = item_name(intr, child_item_doc); + let child_name = item_name(&*intr, child_item_doc); let def_like = item_to_def_like(child_item_doc, child_def_id, cdata.cnum); @@ -577,7 +577,7 @@ fn each_child_of_item_or_crate(intr: @IdentInterner, // Hand off the static method // to the callback. let static_method_name = - item_name(intr, impl_method_doc); + item_name(&*intr, impl_method_doc); let static_method_def_like = item_to_def_like(impl_method_doc, impl_method_def_id, @@ -638,7 +638,7 @@ fn each_child_of_item_or_crate(intr: @IdentInterner, } /// Iterates over each child of the given item. -pub fn each_child_of_item(intr: @IdentInterner, +pub fn each_child_of_item(intr: Rc<IdentInterner>, cdata: Cmd, id: ast::NodeId, get_crate_data: GetCrateDataCb, @@ -659,7 +659,7 @@ pub fn each_child_of_item(intr: @IdentInterner, } /// Iterates over all the top-level crate items. -pub fn each_top_level_item_of_crate(intr: @IdentInterner, +pub fn each_top_level_item_of_crate(intr: Rc<IdentInterner>, cdata: Cmd, get_crate_data: GetCrateDataCb, callback: |DefLike, @@ -711,7 +711,7 @@ pub fn maybe_get_item_ast(cdata: Cmd, tcx: &ty::ctxt, id: ast::NodeId, } } -pub fn get_enum_variants(intr: @IdentInterner, cdata: Cmd, id: ast::NodeId, +pub fn get_enum_variants(intr: Rc<IdentInterner>, cdata: Cmd, id: ast::NodeId, tcx: &ty::ctxt) -> Vec<@ty::VariantInfo> { let data = cdata.data(); let items = reader::get_doc(reader::Doc(data), tag_items); @@ -723,7 +723,7 @@ pub fn get_enum_variants(intr: @IdentInterner, cdata: Cmd, id: ast::NodeId, let item = find_item(did.node, items); let ctor_ty = item_type(ast::DefId { krate: cdata.cnum, node: id}, item, tcx, cdata); - let name = item_name(intr, item); + let name = item_name(&*intr, item); let arg_tys = match ty::get(ctor_ty).sty { ty::ty_bare_fn(ref f) => f.sig.inputs.clone(), _ => Vec::new(), // Nullary enum variant.
@@ -770,12 +770,12 @@ fn get_explicit_self(item: ebml::Doc) -> ast::ExplicitSelf_ { } } -fn item_impl_methods(intr: @IdentInterner, cdata: Cmd, item: ebml::Doc, +fn item_impl_methods(intr: Rc<IdentInterner>, cdata: Cmd, item: ebml::Doc, tcx: &ty::ctxt) -> Vec<@ty::Method> { let mut rslt = Vec::new(); reader::tagged_docs(item, tag_item_impl_method, |doc| { let m_did = reader::with_doc_data(doc, parse_def_id); - rslt.push(@get_method(intr, cdata, m_did.node, tcx)); + rslt.push(@get_method(intr.clone(), cdata, m_did.node, tcx)); true }); @@ -783,7 +783,7 @@ fn item_impl_methods(intr: @IdentInterner, cdata: Cmd, item: ebml::Doc, } /// Returns information about the given implementation. -pub fn get_impl(intr: @IdentInterner, cdata: Cmd, impl_id: ast::NodeId, +pub fn get_impl(intr: Rc<IdentInterner>, cdata: Cmd, impl_id: ast::NodeId, tcx: &ty::ctxt) -> ty::Impl { let data = cdata.data(); @@ -793,23 +793,23 @@ pub fn get_impl(intr: @IdentInterner, cdata: Cmd, impl_id: ast::NodeId, krate: cdata.cnum, node: impl_id, }, - ident: item_name(intr, impl_item), + ident: item_name(&*intr, impl_item), methods: item_impl_methods(intr, cdata, impl_item, tcx), } } pub fn get_method_name_and_explicit_self( - intr: @IdentInterner, + intr: Rc<IdentInterner>, cdata: Cmd, id: ast::NodeId) -> (ast::Ident, ast::ExplicitSelf_) { let method_doc = lookup_item(id, cdata.data()); - let name = item_name(intr, method_doc); + let name = item_name(&*intr, method_doc); let explicit_self = get_explicit_self(method_doc); (name, explicit_self) } -pub fn get_method(intr: @IdentInterner, cdata: Cmd, id: ast::NodeId, +pub fn get_method(intr: Rc<IdentInterner>, cdata: Cmd, id: ast::NodeId, tcx: &ty::ctxt) -> ty::Method { let method_doc = lookup_item(id, cdata.data()); @@ -823,7 +823,7 @@ pub fn get_method(intr: @IdentInterner, cdata: Cmd, id: ast::NodeId, _ => ImplContainer(container_id), }; - let name = item_name(intr, method_doc); + let name = item_name(&*intr, method_doc); let type_param_defs = item_ty_param_defs(method_doc, tcx, cdata, tag_item_method_tps); let rp_defs = item_region_param_defs(method_doc, cdata); @@ -867,7 +867,7 @@ pub fn get_item_variances(cdata: Cmd, id: ast::NodeId) -> ty::ItemVariances { unwrap_(Decodable::decode(&mut decoder)) } -pub fn get_provided_trait_methods(intr: @IdentInterner, cdata: Cmd, +pub fn get_provided_trait_methods(intr: Rc<IdentInterner>, cdata: Cmd, id: ast::NodeId, tcx: &ty::ctxt) -> Vec<@ty::Method> { let data = cdata.data(); @@ -879,7 +879,7 @@ pub fn get_provided_trait_methods(intr: @IdentInterner, cdata: Cmd, let mth = lookup_item(did.node, data); if item_method_sort(mth) == 'p' { - result.push(@get_method(intr, cdata, did.node, tcx)); + result.push(@get_method(intr.clone(), cdata, did.node, tcx)); } true }); @@ -921,7 +921,7 @@ pub fn get_type_name_if_impl(cdata: Cmd, ret } -pub fn get_static_methods_if_impl(intr: @IdentInterner, +pub fn get_static_methods_if_impl(intr: Rc<IdentInterner>, cdata: Cmd, node_id: ast::NodeId) -> Option<Vec<StaticMethodInfo> > { @@ -957,7 +957,7 @@ pub fn get_static_methods_if_impl(intr: @IdentInterner, } static_impl_methods.push(StaticMethodInfo { - ident: item_name(intr, impl_method_doc), + ident: item_name(&*intr, impl_method_doc), def_id: item_def_id(impl_method_doc, cdata), purity: purity, vis: item_visibility(impl_method_doc), @@ -1009,7 +1009,7 @@ fn struct_field_family_to_visibility(family: Family) -> ast::Visibility { } } -pub fn get_struct_fields(intr: @IdentInterner, cdata: Cmd, id: ast::NodeId) +pub fn get_struct_fields(intr: Rc<IdentInterner>, cdata: Cmd, id: ast::NodeId) -> Vec<ty::field_ty> { let data = cdata.data(); let item = lookup_item(id, data); @@ -1018,7 +1018,7 @@ pub
fn get_struct_fields(intr: @IdentInterner, cdata: Cmd, id: ast::NodeId) let f = item_family(an_item); if f == PublicField || f == PrivateField || f == InheritedField { // FIXME #6993: name should be of type Name, not Ident - let name = item_name(intr, an_item); + let name = item_name(&*intr, an_item); let did = item_def_id(an_item, cdata); result.push(ty::field_ty { name: name.name, diff --git a/src/librustc/metadata/loader.rs b/src/librustc/metadata/loader.rs index c670638385507..6de1bf69f6da9 100644 --- a/src/librustc/metadata/loader.rs +++ b/src/librustc/metadata/loader.rs @@ -29,6 +29,7 @@ use std::cast; use std::cmp; use std::io; use std::os::consts::{macos, freebsd, linux, android, win32}; +use std::rc::Rc; use std::str; use std::slice; @@ -52,7 +53,7 @@ pub struct Context<'a> { id_hash: &'a str, hash: Option<&'a Svh>, os: Os, - intr: @IdentInterner, + intr: Rc<IdentInterner>, rejected_via_hash: bool, } diff --git a/src/librustdoc/html/highlight.rs b/src/librustdoc/html/highlight.rs index d67c7ea6505d0..58fc92bf345af 100644 --- a/src/librustdoc/html/highlight.rs +++ b/src/librustdoc/html/highlight.rs @@ -44,7 +44,7 @@ pub fn highlight(src: &str, class: Option<&str>) -> ~str { /// it's used. All source code emission is done as slices from the source map, /// not from the tokens themselves, in order to stay true to the original /// source. -fn doit(sess: &parse::ParseSess, lexer: lexer::StringReader, class: Option<&str>, +fn doit(sess: &parse::ParseSess, mut lexer: lexer::StringReader, class: Option<&str>, out: &mut Writer) -> io::IoResult<()> { use syntax::parse::lexer::Reader; @@ -55,7 +55,7 @@ fn doit(sess: &parse::ParseSess, lexer: lexer::StringReader, class: Option<&str> let mut is_macro_nonterminal = false; loop { let next = lexer.next_token(); - let test = if next.tok == t::EOF {lexer.pos.get()} else {next.sp.lo}; + let test = if next.tok == t::EOF {lexer.pos} else {next.sp.lo}; // The lexer consumes all whitespace and non-doc-comments when iterating // between tokens. If this token isn't directly adjacent to our last diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index a44fbce421b44..7c3eb7742d20e 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -581,14 +581,16 @@ pub enum TokenTree { TTTok(Span, ::parse::token::Token), // a delimited sequence (the delimiters appear as the first // and last elements of the vector) - TTDelim(@Vec<TokenTree> ), + // FIXME(eddyb) #6308 Use Rc<[TokenTree]> after DST. + TTDelim(Rc<Vec<TokenTree>>), // These only make sense for right-hand-sides of MBE macros: // a kleene-style repetition sequence with a span, a TTForest, // an optional separator, and a boolean where true indicates // zero or more (..), and false indicates one or more (+). - TTSeq(Span, @Vec<TokenTree> , Option<::parse::token::Token>, bool), + // FIXME(eddyb) #6308 Use Rc<[TokenTree]> after DST. + TTSeq(Span, Rc<Vec<TokenTree>>, Option<::parse::token::Token>, bool), // a syntactic variable that will be filled in by macro expansion.
TTNonterminal(Span, Ident) diff --git a/src/libsyntax/ext/log_syntax.rs b/src/libsyntax/ext/log_syntax.rs index 1ce08b8303ec9..c9e444a9b8caf 100644 --- a/src/libsyntax/ext/log_syntax.rs +++ b/src/libsyntax/ext/log_syntax.rs @@ -13,6 +13,8 @@ use codemap; use ext::base; use print; +use std::rc::Rc; + pub fn expand_syntax_ext(cx: &mut base::ExtCtxt, sp: codemap::Span, tt: &[ast::TokenTree]) @@ -20,7 +22,7 @@ pub fn expand_syntax_ext(cx: &mut base::ExtCtxt, cx.print_backtrace(); println!("{}", print::pprust::tt_to_str(&ast::TTDelim( - @tt.iter().map(|x| (*x).clone()).collect()))); + Rc::new(tt.iter().map(|x| (*x).clone()).collect())))); // any so that `log_syntax` can be invoked as an expression and item. base::MacResult::dummy_any(sp) diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index aa4f192f7792c..ac19342f47a1c 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -21,6 +21,7 @@ use parse::parser::{LifetimeAndTypesWithoutColons, Parser}; use parse::token::{Token, EOF, Nonterminal}; use parse::token; +use std::rc::Rc; use collections::HashMap; /* This is an Earley-like parser, without support for in-grammar nonterminals, @@ -102,7 +103,7 @@ pub struct MatcherPos { sep: Option<Token>, idx: uint, up: Option<~MatcherPos>, - matches: Vec<Vec<@NamedMatch>>, + matches: Vec<Vec<Rc<NamedMatch>>>, match_lo: uint, match_hi: uint, sp_lo: BytePos, } @@ -165,14 +166,14 @@ pub fn initial_matcher_pos(ms: Vec<Matcher> , sep: Option<Token>, lo: BytePos) // ast::Matcher it was derived from. pub enum NamedMatch { - MatchedSeq(Vec<@NamedMatch> , codemap::Span), + MatchedSeq(Vec<Rc<NamedMatch>>, codemap::Span), MatchedNonterminal(Nonterminal) } -pub fn nameize(p_s: &ParseSess, ms: &[Matcher], res: &[@NamedMatch]) - -> HashMap<Ident, @NamedMatch> { - fn n_rec(p_s: &ParseSess, m: &Matcher, res: &[@NamedMatch], - ret_val: &mut HashMap<Ident, @NamedMatch>) { +pub fn nameize(p_s: &ParseSess, ms: &[Matcher], res: &[Rc<NamedMatch>]) + -> HashMap<Ident, Rc<NamedMatch>> { + fn n_rec(p_s: &ParseSess, m: &Matcher, res: &[Rc<NamedMatch>], + ret_val: &mut HashMap<Ident, Rc<NamedMatch>>) { match *m { codemap::Spanned {node: MatchTok(_), .. } => (), codemap::Spanned {node: MatchSeq(ref more_ms, _, _, _, _), ..
} => { @@ -189,7 +190,7 @@ pub fn nameize(p_s: &ParseSess, ms: &[Matcher], res: &[@NamedMatch]) p_s.span_diagnostic .span_fatal(span, "duplicated bind name: " + string.get()) } - ret_val.insert(bind_name, res[idx]); + ret_val.insert(bind_name, res[idx].clone()); } } } @@ -199,16 +200,16 @@ pub fn nameize(p_s: &ParseSess, ms: &[Matcher], res: &[@NamedMatch]) } pub enum ParseResult { - Success(HashMap<Ident, @NamedMatch>), + Success(HashMap<Ident, Rc<NamedMatch>>), Failure(codemap::Span, ~str), Error(codemap::Span, ~str) } -pub fn parse_or_else<R: Reader>(sess: &ParseSess, - cfg: ast::CrateConfig, - rdr: R, - ms: Vec<Matcher> ) - -> HashMap<Ident, @NamedMatch> { +pub fn parse_or_else(sess: &ParseSess, + cfg: ast::CrateConfig, + rdr: TtReader, + ms: Vec<Matcher> ) + -> HashMap<Ident, Rc<NamedMatch>> { match parse(sess, cfg, rdr, ms.as_slice()) { Success(m) => m, Failure(sp, str) => sess.span_diagnostic.span_fatal(sp, str), @@ -226,11 +227,11 @@ pub fn token_name_eq(t1 : &Token, t2 : &Token) -> bool { } } -pub fn parse<R: Reader>(sess: &ParseSess, - cfg: ast::CrateConfig, - rdr: R, - ms: &[Matcher]) - -> ParseResult { +pub fn parse(sess: &ParseSess, + cfg: ast::CrateConfig, + mut rdr: TtReader, + ms: &[Matcher]) + -> ParseResult { let mut cur_eis = Vec::new(); cur_eis.push(initial_matcher_pos(ms.iter() .map(|x| (*x).clone()) .collect(), None, rdr.peek().sp.lo)); @@ -282,8 +283,8 @@ pub fn parse(sess: &ParseSess, let sub = (*ei.matches.get(idx)).clone(); new_pos.matches .get_mut(idx) - .push(@MatchedSeq(sub, mk_sp(ei.sp_lo, - sp.hi))); + .push(Rc::new(MatchedSeq(sub, mk_sp(ei.sp_lo, + sp.hi)))); } new_pos.idx += 1; @@ -325,7 +326,7 @@ pub fn parse(sess: &ParseSess, for idx in range(match_idx_lo, match_idx_hi) { new_ei.matches .get_mut(idx) - .push(@MatchedSeq(Vec::new(), sp)); + .push(Rc::new(MatchedSeq(Vec::new(), sp))); } cur_eis.push(new_ei); @@ -395,14 +396,14 @@ pub fn parse(sess: &ParseSess, } rdr.next_token(); } else /* bb_eis.len() == 1 */ { - let mut rust_parser = Parser(sess, cfg.clone(), rdr.dup()); + let mut rust_parser = Parser(sess, cfg.clone(), ~rdr.clone()); let mut ei = bb_eis.pop().unwrap(); match ei.elts.get(ei.idx).node { MatchNonterminal(_, name, idx) => { let name_string = token::get_ident(name); - ei.matches.get_mut(idx).push(@MatchedNonterminal( - parse_nt(&mut rust_parser, name_string.get()))); + ei.matches.get_mut(idx).push(Rc::new(MatchedNonterminal( + parse_nt(&mut rust_parser, name_string.get())))); ei.idx += 1u; } _ => fail!() diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index b3e3023388b94..d4a883a63ebbf 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -28,6 +28,7 @@ use print; use util::small_vector::SmallVector; use std::cell::RefCell; +use std::rc::Rc; struct ParserAnyMacro<'a> { parser: RefCell<Parser<'a>>, @@ -85,8 +86,8 @@ impl<'a> AnyMacro for ParserAnyMacro<'a> { struct MacroRulesMacroExpander { name: Ident, - lhses: @Vec<@NamedMatch> , - rhses: @Vec<@NamedMatch> , + lhses: Vec<Rc<NamedMatch>>, + rhses: Vec<Rc<NamedMatch>>, } impl MacroExpander for MacroRulesMacroExpander { @@ -109,15 +110,15 @@ fn generic_extension(cx: &ExtCtxt, sp: Span, name: Ident, arg: &[ast::TokenTree], - lhses: &[@NamedMatch], - rhses: &[@NamedMatch]) + lhses: &[Rc<NamedMatch>], + rhses: &[Rc<NamedMatch>]) -> MacResult { if cx.trace_macros() { println!("{}! \\{ {} \\}", token::get_ident(name), - print::pprust::tt_to_str(&TTDelim(@arg.iter() - .map(|x| (*x).clone()) - .collect()))); + print::pprust::tt_to_str(&TTDelim(Rc::new(arg.iter() + .map(|x| (*x).clone()) + .collect())))); } // Which arm's failure should we report?
(the one furthest along) @@ -220,12 +221,12 @@ pub fn add_new_extension(cx: &mut ExtCtxt, // Extract the arguments: let lhses = match **argument_map.get(&lhs_nm) { - MatchedSeq(ref s, _) => /* FIXME (#2543) */ @(*s).clone(), + MatchedSeq(ref s, _) => /* FIXME (#2543) */ (*s).clone(), _ => cx.span_bug(sp, "wrong-structured lhs") }; let rhses = match **argument_map.get(&rhs_nm) { - MatchedSeq(ref s, _) => /* FIXME (#2543) */ @(*s).clone(), + MatchedSeq(ref s, _) => /* FIXME (#2543) */ (*s).clone(), _ => cx.span_bug(sp, "wrong-structured rhs") }; diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs index 933fbe3d56624..bc8709befaee2 100644 --- a/src/libsyntax/ext/tt/transcribe.rs +++ b/src/libsyntax/ext/tt/transcribe.rs @@ -17,107 +17,79 @@ use parse::token::{EOF, INTERPOLATED, IDENT, Token, NtIdent}; use parse::token; use parse::lexer::TokenAndSpan; -use std::cell::{Cell, RefCell}; +use std::rc::Rc; use collections::HashMap; ///an unzipping of `TokenTree`s +#[deriving(Clone)] struct TtFrame { - forest: @Vec<TokenTree> , - idx: Cell<uint>, + forest: Rc<Vec<TokenTree>>, + idx: uint, dotdotdoted: bool, sep: Option<Token>, - up: Option<@TtFrame>, } +#[deriving(Clone)] pub struct TtReader<'a> { sp_diag: &'a SpanHandler, // the unzipped tree: - priv stack: RefCell<@TtFrame>, + priv stack: Vec<TtFrame>, /* for MBE-style macro transcription */ - priv interpolations: RefCell<HashMap<Ident, @NamedMatch>>, - priv repeat_idx: RefCell<Vec<uint> >, - priv repeat_len: RefCell<Vec<uint> >, + priv interpolations: HashMap<Ident, Rc<NamedMatch>>, + priv repeat_idx: Vec<uint>, + priv repeat_len: Vec<uint>, /* cached: */ - cur_tok: RefCell<Token>, - cur_span: RefCell<Span>, + cur_tok: Token, + cur_span: Span, } /** This can do Macro-By-Example transcription. On the other hand, if * `src` contains no `TTSeq`s and `TTNonterminal`s, `interp` can (and * should) be none. */ pub fn new_tt_reader<'a>(sp_diag: &'a SpanHandler, - interp: Option<HashMap<Ident, @NamedMatch>>, + interp: Option<HashMap<Ident, Rc<NamedMatch>>>, src: Vec<TokenTree> ) -> TtReader<'a> { - let r = TtReader { + let mut r = TtReader { sp_diag: sp_diag, - stack: RefCell::new(@TtFrame { - forest: @src, - idx: Cell::new(0u), + stack: vec!(TtFrame { + forest: Rc::new(src), + idx: 0, dotdotdoted: false, sep: None, - up: None }), interpolations: match interp { /* just a convenience */ - None => RefCell::new(HashMap::new()), - Some(x) => RefCell::new(x), + None => HashMap::new(), + Some(x) => x, }, - repeat_idx: RefCell::new(Vec::new()), - repeat_len: RefCell::new(Vec::new()), + repeat_idx: Vec::new(), + repeat_len: Vec::new(), /* dummy values, never read: */ - cur_tok: RefCell::new(EOF), - cur_span: RefCell::new(DUMMY_SP), + cur_tok: EOF, + cur_span: DUMMY_SP, }; - tt_next_token(&r); /* get cur_tok and cur_span set up */ + tt_next_token(&mut r); /* get cur_tok and cur_span set up */ r } -fn dup_tt_frame(f: @TtFrame) -> @TtFrame { - @TtFrame { - forest: @(*f.forest).clone(), - idx: f.idx.clone(), - dotdotdoted: f.dotdotdoted, - sep: f.sep.clone(), - up: match f.up { - Some(up_frame) => Some(dup_tt_frame(up_frame)), - None => None - } - } -} - -pub fn dup_tt_reader<'a>(r: &TtReader<'a>) -> TtReader<'a> { - TtReader { - sp_diag: r.sp_diag, - stack: RefCell::new(dup_tt_frame(r.stack.get())), - repeat_idx: r.repeat_idx.clone(), - repeat_len: r.repeat_len.clone(), - cur_tok: r.cur_tok.clone(), - cur_span: r.cur_span.clone(), - interpolations: r.interpolations.clone(), - } -} - - -fn lookup_cur_matched_by_matched(r: &TtReader, start: @NamedMatch) - -> @NamedMatch { - fn red(ad: @NamedMatch, idx: &uint) -> @NamedMatch { +fn lookup_cur_matched_by_matched(r: &TtReader, start: Rc<NamedMatch>) -> Rc<NamedMatch> { + r.repeat_idx.iter().fold(start, |ad, idx| { match
*ad { MatchedNonterminal(_) => { // end of the line; duplicate henceforth - ad + ad.clone() } - MatchedSeq(ref ads, _) => *ads.get(*idx) + MatchedSeq(ref ads, _) => ads.get(*idx).clone() } - } - r.repeat_idx.borrow().iter().fold(start, red) + }) } -fn lookup_cur_matched(r: &TtReader, name: Ident) -> @NamedMatch { - let matched_opt = r.interpolations.borrow().find_copy(&name); +fn lookup_cur_matched(r: &TtReader, name: Ident) -> Rc<NamedMatch> { + let matched_opt = r.interpolations.find_copy(&name); match matched_opt { Some(s) => lookup_cur_matched_by_matched(r, s), None => { - r.sp_diag.span_fatal(r.cur_span.get(), + r.sp_diag.span_fatal(r.cur_span, format!("unknown macro variable `{}`", token::get_ident(name))); } @@ -167,143 +139,140 @@ fn lockstep_iter_size(t: &TokenTree, r: &TtReader) -> LockstepIterSize { // return the next token from the TtReader. // EFFECT: advances the reader's token field -pub fn tt_next_token(r: &TtReader) -> TokenAndSpan { +pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan { // FIXME(pcwalton): Bad copy? let ret_val = TokenAndSpan { - tok: r.cur_tok.get(), - sp: r.cur_span.get(), + tok: r.cur_tok.clone(), + sp: r.cur_span.clone(), }; loop { - if r.stack.borrow().idx.get() < r.stack.borrow().forest.len() { - break; - } - - /* done with this set; pop or repeat? */ - if !r.stack.get().dotdotdoted || { - *r.repeat_idx.borrow().last().unwrap() == - *r.repeat_len.borrow().last().unwrap() - 1 - } { - - match r.stack.get().up { - None => { - r.cur_tok.set(EOF); + let should_pop = match r.stack.last() { + None => { + assert_eq!(ret_val.tok, EOF); return ret_val; - } - Some(tt_f) => { - if r.stack.get().dotdotdoted { - r.repeat_idx.borrow_mut().pop().unwrap(); - r.repeat_len.borrow_mut().pop().unwrap(); + } + Some(frame) => { + if frame.idx < frame.forest.len() { + break; } - - r.stack.set(tt_f); - r.stack.get().idx.set(r.stack.get().idx.get() + 1u); - } + !frame.dotdotdoted || + *r.repeat_idx.last().unwrap() == *r.repeat_len.last().unwrap() - 1 } + }; - } else { /* repeat */ - r.stack.get().idx.set(0u); - { - let mut repeat_idx = r.repeat_idx.borrow_mut(); - let last_repeat_idx = repeat_idx.len() - 1u; - *repeat_idx.get_mut(last_repeat_idx) += 1u; + /* done with this set; pop or repeat? */ + if should_pop { + let prev = r.stack.pop().unwrap(); + match r.stack.mut_last() { + None => { + r.cur_tok = EOF; + return ret_val; + } + Some(frame) => { + frame.idx += 1; + } } - match r.stack.get().sep.clone() { - Some(tk) => { - r.cur_tok.set(tk); /* repeat same span, I guess */ - return ret_val; - } - None => () + if prev.dotdotdoted { + r.repeat_idx.pop(); + r.repeat_len.pop(); + } + } else { /* repeat */ + *r.repeat_idx.mut_last().unwrap() += 1u; + r.stack.mut_last().unwrap().idx = 0; + match r.stack.last().unwrap().sep.clone() { + Some(tk) => { + r.cur_tok = tk; /* repeat same span, I guess */ + return ret_val; + } + None => {} } } } loop { /* because it's easiest, this handles `TTDelim` not starting - with a `TTTok`, even though it won't happen */ // FIXME(pcwalton): Bad copy.
- match (*r.stack.get().forest.get(r.stack.get().idx.get())).clone() { - TTDelim(tts) => { - r.stack.set(@TtFrame { - forest: tts, - idx: Cell::new(0u), - dotdotdoted: false, - sep: None, - up: Some(r.stack.get()) - }); - // if this could be 0-length, we'd need to potentially recur here - } - TTTok(sp, tok) => { - r.cur_span.set(sp); - r.cur_tok.set(tok); - r.stack.get().idx.set(r.stack.get().idx.get() + 1u); - return ret_val; - } - TTSeq(sp, tts, sep, zerok) => { + with a `TTTok`, even though it won't happen */ + let t = { + let frame = r.stack.last().unwrap(); // FIXME(pcwalton): Bad copy. - let t = TTSeq(sp, tts, sep.clone(), zerok); - match lockstep_iter_size(&t, r) { - LisUnconstrained => { - r.sp_diag.span_fatal( - sp, /* blame macro writer */ - "attempted to repeat an expression \ - containing no syntax \ - variables matched as repeating at this depth"); - } - LisContradiction(ref msg) => { - /* FIXME #2887 blame macro invoker instead*/ - r.sp_diag.span_fatal(sp, (*msg)); - } - LisConstraint(len, _) => { - if len == 0 { - if !zerok { - r.sp_diag.span_fatal(sp, /* FIXME #2887 blame invoker - */ - "this must repeat at least \ - once"); - } - - r.stack.get().idx.set(r.stack.get().idx.get() + 1u); - return tt_next_token(r); - } else { - r.repeat_len.borrow_mut().push(len); - r.repeat_idx.borrow_mut().push(0u); - r.stack.set(@TtFrame { - forest: tts, - idx: Cell::new(0u), - dotdotdoted: true, - sep: sep, - up: Some(r.stack.get()) - }); - } - } + (*frame.forest.get(frame.idx)).clone() + }; + match t { + TTDelim(tts) => { + r.stack.push(TtFrame { + forest: tts, + idx: 0, + dotdotdoted: false, + sep: None + }); + // if this could be 0-length, we'd need to potentially recur here } - } - // FIXME #2887: think about span stuff here - TTNonterminal(sp, ident) => { - match *lookup_cur_matched(r, ident) { - /* sidestep the interpolation tricks for ident because - (a) idents can be in lots of places, so it'd be a pain - (b) we actually can, since it's a token. */ - MatchedNonterminal(NtIdent(~sn,b)) => { - r.cur_span.set(sp); - r.cur_tok.set(IDENT(sn,b)); - r.stack.get().idx.set(r.stack.get().idx.get() + 1u); + TTTok(sp, tok) => { + r.cur_span = sp; + r.cur_tok = tok; + r.stack.mut_last().unwrap().idx += 1; return ret_val; - } - MatchedNonterminal(ref other_whole_nt) => { + } + TTSeq(sp, tts, sep, zerok) => { // FIXME(pcwalton): Bad copy. - r.cur_span.set(sp); - r.cur_tok.set(INTERPOLATED((*other_whole_nt).clone())); - r.stack.get().idx.set(r.stack.get().idx.get() + 1u); - return ret_val; - } - MatchedSeq(..) 
=> { - r.sp_diag.span_fatal( - r.cur_span.get(), /* blame the macro writer */ - format!("variable '{}' is still repeating at this depth", - token::get_ident(ident))); - } + match lockstep_iter_size(&TTSeq(sp, tts.clone(), sep.clone(), zerok), r) { + LisUnconstrained => { + r.sp_diag.span_fatal( + sp.clone(), /* blame macro writer */ + "attempted to repeat an expression \ + containing no syntax \ + variables matched as repeating at this depth"); + } + LisContradiction(ref msg) => { + // FIXME #2887 blame macro invoker instead + r.sp_diag.span_fatal(sp.clone(), *msg); + } + LisConstraint(len, _) => { + if len == 0 { + if !zerok { + // FIXME #2887 blame invoker + r.sp_diag.span_fatal(sp.clone(), + "this must repeat at least once"); + } + + r.stack.mut_last().unwrap().idx += 1; + return tt_next_token(r); + } + r.repeat_len.push(len); + r.repeat_idx.push(0); + r.stack.push(TtFrame { + forest: tts, + idx: 0, + dotdotdoted: true, + sep: sep.clone() + }); + } + } + } + // FIXME #2887: think about span stuff here + TTNonterminal(sp, ident) => { + r.stack.mut_last().unwrap().idx += 1; + match *lookup_cur_matched(r, ident) { + /* sidestep the interpolation tricks for ident because + (a) idents can be in lots of places, so it'd be a pain + (b) we actually can, since it's a token. */ + MatchedNonterminal(NtIdent(~sn,b)) => { + r.cur_span = sp; + r.cur_tok = IDENT(sn,b); + return ret_val; + } + MatchedNonterminal(ref other_whole_nt) => { + // FIXME(pcwalton): Bad copy. + r.cur_span = sp; + r.cur_tok = INTERPOLATED((*other_whole_nt).clone()); + return ret_val; + } + MatchedSeq(..) => { + r.sp_diag.span_fatal( + r.cur_span, /* blame the macro writer */ + format!("variable '{}' is still repeating at this depth", + token::get_ident(ident))); + } + } } - } } } - } diff --git a/src/libsyntax/fold.rs b/src/libsyntax/fold.rs index 291502ff229b6..0f8c74f9ee071 100644 --- a/src/libsyntax/fold.rs +++ b/src/libsyntax/fold.rs @@ -16,6 +16,8 @@ use parse::token; use owned_slice::OwnedSlice; use util::small_vector::SmallVector; +use std::rc::Rc; + // We may eventually want to be able to fold over type parameters, too. 
pub trait Folder { fn fold_crate(&mut self, c: Crate) -> Crate { @@ -375,10 +377,10 @@ pub fn fold_tts<T: Folder>(tts: &[TokenTree], fld: &mut T) -> Vec<TokenTree> { match *tt { TTTok(span, ref tok) => TTTok(span,maybe_fold_ident(tok,fld)), - TTDelim(tts) => TTDelim(@fold_tts(tts.as_slice(), fld)), + TTDelim(ref tts) => TTDelim(Rc::new(fold_tts(tts.as_slice(), fld))), - TTSeq(span, pattern, ref sep, is_optional) => + TTSeq(span, ref pattern, ref sep, is_optional) => TTSeq(span, - @fold_tts(pattern.as_slice(), fld), + Rc::new(fold_tts(pattern.as_slice(), fld)), sep.as_ref().map(|tok|maybe_fold_ident(tok,fld)), is_optional), TTNonterminal(sp,ref ident) => diff --git a/src/libsyntax/parse/comments.rs b/src/libsyntax/parse/comments.rs index 43ae9b97350df..53586a665133c 100644 --- a/src/libsyntax/parse/comments.rs +++ b/src/libsyntax/parse/comments.rs @@ -133,44 +133,42 @@ pub fn strip_doc_comment_decoration(comment: &str) -> ~str { fail!("not a doc-comment: {}", comment); } -fn read_to_eol(rdr: &StringReader) -> ~str { +fn read_to_eol(rdr: &mut StringReader) -> ~str { let mut val = ~""; while !rdr.curr_is('\n') && !is_eof(rdr) { - val.push_char(rdr.curr.get().unwrap()); + val.push_char(rdr.curr.unwrap()); bump(rdr); } if rdr.curr_is('\n') { bump(rdr); } return val; } -fn read_one_line_comment(rdr: &StringReader) -> ~str { +fn read_one_line_comment(rdr: &mut StringReader) -> ~str { let val = read_to_eol(rdr); assert!((val[0] == '/' as u8 && val[1] == '/' as u8) || (val[0] == '#' as u8 && val[1] == '!' as u8)); return val; } -fn consume_non_eol_whitespace(rdr: &StringReader) { - while is_whitespace(rdr.curr.get()) && !rdr.curr_is('\n') && - !is_eof(rdr) { +fn consume_non_eol_whitespace(rdr: &mut StringReader) { + while is_whitespace(rdr.curr) && !rdr.curr_is('\n') && !is_eof(rdr) { bump(rdr); } } -fn push_blank_line_comment(rdr: &StringReader, comments: &mut Vec<Comment> ) { +fn push_blank_line_comment(rdr: &StringReader, comments: &mut Vec<Comment>) { debug!(">>> blank-line comment"); - let v: Vec<~str> = Vec::new(); comments.push(Comment { style: BlankLine, - lines: v, - pos: rdr.last_pos.get(), + lines: Vec::new(), + pos: rdr.last_pos, }); } -fn consume_whitespace_counting_blank_lines(rdr: &StringReader, - comments: &mut Vec<Comment> ) { - while is_whitespace(rdr.curr.get()) && !is_eof(rdr) { - if rdr.col.get() == CharPos(0u) && rdr.curr_is('\n') { +fn consume_whitespace_counting_blank_lines(rdr: &mut StringReader, + comments: &mut Vec<Comment>) { + while is_whitespace(rdr.curr) && !is_eof(rdr) { + if rdr.col == CharPos(0u) && rdr.curr_is('\n') { push_blank_line_comment(rdr, &mut *comments); } bump(rdr); @@ -178,10 +176,10 @@ fn consume_whitespace_counting_blank_lines(rdr: &StringReader, } -fn read_shebang_comment(rdr: &StringReader, code_to_the_left: bool, - comments: &mut Vec<Comment> ) { +fn read_shebang_comment(rdr: &mut StringReader, code_to_the_left: bool, + comments: &mut Vec<Comment>) { debug!(">>> shebang comment"); - let p = rdr.last_pos.get(); + let p = rdr.last_pos; debug!("<<< shebang comment"); comments.push(Comment { style: if code_to_the_left { Trailing } else { Isolated }, @@ -190,10 +188,10 @@ fn read_shebang_comment(rdr: &StringReader, code_to_the_left: bool, }); } -fn read_line_comments(rdr: &StringReader, code_to_the_left: bool, - comments: &mut Vec<Comment> ) { +fn read_line_comments(rdr: &mut StringReader, code_to_the_left: bool, + comments: &mut Vec<Comment>) { debug!(">>> line comments"); - let p = rdr.last_pos.get(); + let p = rdr.last_pos; let mut lines: Vec<~str> = Vec::new(); while rdr.curr_is('/') && nextch_is(rdr, '/') { let line =
read_one_line_comment(rdr); @@ -247,13 +245,13 @@ fn trim_whitespace_prefix_and_push_line(lines: &mut Vec<~str> , lines.push(s1); } -fn read_block_comment(rdr: &StringReader, +fn read_block_comment(rdr: &mut StringReader, code_to_the_left: bool, comments: &mut Vec<Comment> ) { debug!(">>> block comment"); - let p = rdr.last_pos.get(); + let p = rdr.last_pos; let mut lines: Vec<~str> = Vec::new(); - let col: CharPos = rdr.col.get(); + let col = rdr.col; bump(rdr); bump(rdr); @@ -262,7 +260,7 @@ fn read_block_comment(rdr: &StringReader, // doc-comments are not really comments, they are attributes if (rdr.curr_is('*') && !nextch_is(rdr, '*')) || rdr.curr_is('!') { while !(rdr.curr_is('*') && nextch_is(rdr, '/')) && !is_eof(rdr) { - curr_line.push_char(rdr.curr.get().unwrap()); + curr_line.push_char(rdr.curr.unwrap()); bump(rdr); } if !is_eof(rdr) { @@ -286,7 +284,7 @@ fn read_block_comment(rdr: &StringReader, curr_line = ~""; bump(rdr); } else { - curr_line.push_char(rdr.curr.get().unwrap()); + curr_line.push_char(rdr.curr.unwrap()); if rdr.curr_is('/') && nextch_is(rdr, '*') { bump(rdr); bump(rdr); @@ -324,7 +322,7 @@ fn peeking_at_comment(rdr: &StringReader) -> bool { !lexer::nextnextch_is(rdr, '[')); } -fn consume_comment(rdr: &StringReader, +fn consume_comment(rdr: &mut StringReader, code_to_the_left: bool, comments: &mut Vec<Comment> ) { debug!(">>> consume comment"); @@ -355,7 +353,7 @@ pub fn gather_comments_and_literals(span_diagnostic: let src = str::from_utf8_owned(src).unwrap(); let cm = CodeMap::new(); let filemap = cm.new_filemap(path, src); - let rdr = lexer::new_low_level_string_reader(span_diagnostic, filemap); + let mut rdr = lexer::new_low_level_string_reader(span_diagnostic, filemap); let mut comments: Vec<Comment> = Vec::new(); let mut literals: Vec<Literal> = Vec::new(); @@ -363,20 +361,20 @@ pub fn gather_comments_and_literals(span_diagnostic: while !is_eof(&rdr) { loop { let mut code_to_the_left = !first_read; - consume_non_eol_whitespace(&rdr); + consume_non_eol_whitespace(&mut rdr); if rdr.curr_is('\n') { code_to_the_left = false; - consume_whitespace_counting_blank_lines(&rdr, &mut comments); + consume_whitespace_counting_blank_lines(&mut rdr, &mut comments); } while peeking_at_comment(&rdr) { - consume_comment(&rdr, code_to_the_left, &mut comments); - consume_whitespace_counting_blank_lines(&rdr, &mut comments); + consume_comment(&mut rdr, code_to_the_left, &mut comments); + consume_whitespace_counting_blank_lines(&mut rdr, &mut comments); } break; } - let bstart = rdr.last_pos.get(); + let bstart = rdr.last_pos; rdr.next_token(); //discard, and look ahead; we're working with internal state let TokenAndSpan {tok: tok, sp: sp} = rdr.peek(); diff --git a/src/libsyntax/parse/lexer.rs b/src/libsyntax/parse/lexer.rs index ccee21f62816a..c18571deaf5be 100644 --- a/src/libsyntax/parse/lexer.rs +++ b/src/libsyntax/parse/lexer.rs @@ -12,11 +12,10 @@ use ast; use codemap::{BytePos, CharPos, CodeMap, Pos, Span}; use codemap; use diagnostic::SpanHandler; -use ext::tt::transcribe::{dup_tt_reader, tt_next_token}; +use ext::tt::transcribe::tt_next_token; use parse::token; use parse::token::{str_to_ident}; -use std::cell::{Cell, RefCell}; use std::char; use std::mem::replace; use std::num::from_str_radix; @@ -27,11 +26,10 @@ pub use ext::tt::transcribe::{TtReader, new_tt_reader}; pub trait Reader { fn is_eof(&self) -> bool; - fn next_token(&self) -> TokenAndSpan; + fn next_token(&mut self) -> TokenAndSpan; fn fatal(&self, ~str) -> !; fn span_diag<'a>(&'a self) -> &'a SpanHandler; fn peek(&self) -> TokenAndSpan; - fn
dup(&self) -> ~Reader:; } #[deriving(Clone, Eq, Show)] pub struct TokenAndSpan { pub struct StringReader<'a> { span_diagnostic: &'a SpanHandler, // The absolute offset within the codemap of the next character to read - pos: Cell<BytePos>, + pos: BytePos, // The absolute offset within the codemap of the last character read(curr) - last_pos: Cell<BytePos>, + last_pos: BytePos, // The column of the next character to read - col: Cell<CharPos>, + col: CharPos, // The last character to be read - curr: Cell<Option<char>>, + curr: Option<char>, filemap: Rc<codemap::FileMap>, /* cached: */ - peek_tok: RefCell<token::Token>, - peek_span: RefCell<Span>, + peek_tok: token::Token, + peek_span: Span, } impl<'a> StringReader<'a> { pub fn curr_is(&self, c: char) -> bool { - self.curr.get() == Some(c) + self.curr == Some(c) } } pub fn new_string_reader<'a>(span_diagnostic: &'a SpanHandler, filemap: Rc<codemap::FileMap>) -> StringReader<'a> { - let r = new_low_level_string_reader(span_diagnostic, filemap); - string_advance_token(&r); /* fill in peek_* */ + let mut r = new_low_level_string_reader(span_diagnostic, filemap); + string_advance_token(&mut r); /* fill in peek_* */ r } @@ -76,97 +74,79 @@ pub fn new_low_level_string_reader<'a>(span_diagnostic: &'a SpanHandler, -> StringReader<'a> { // Force the initial reader bump to start on a fresh line let initial_char = '\n'; - let r = StringReader { + let mut r = StringReader { span_diagnostic: span_diagnostic, - pos: Cell::new(filemap.start_pos), - last_pos: Cell::new(filemap.start_pos), - col: Cell::new(CharPos(0)), - curr: Cell::new(Some(initial_char)), + pos: filemap.start_pos, + last_pos: filemap.start_pos, + col: CharPos(0), + curr: Some(initial_char), filemap: filemap, /* dummy values; not read */ - peek_tok: RefCell::new(token::EOF), - peek_span: RefCell::new(codemap::DUMMY_SP), + peek_tok: token::EOF, + peek_span: codemap::DUMMY_SP, }; - bump(&r); + bump(&mut r); r } -// duplicating the string reader is probably a bad idea, in -// that using them will cause interleaved pushes of line -// offsets to the underlying filemap... -fn dup_string_reader<'a>(r: &StringReader<'a>) -> StringReader<'a> { - StringReader { - span_diagnostic: r.span_diagnostic, - pos: Cell::new(r.pos.get()), - last_pos: Cell::new(r.last_pos.get()), - col: Cell::new(r.col.get()), - curr: Cell::new(r.curr.get()), - filemap: r.filemap.clone(), - peek_tok: r.peek_tok.clone(), - peek_span: r.peek_span.clone(), - } -} - impl<'a> Reader for StringReader<'a> { fn is_eof(&self) -> bool { is_eof(self) } // return the next token. EFFECT: advances the string_reader. - fn next_token(&self) -> TokenAndSpan { + fn next_token(&mut self) -> TokenAndSpan { let ret_val = TokenAndSpan { - tok: replace(&mut *self.peek_tok.borrow_mut(), token::UNDERSCORE), - sp: self.peek_span.get(), + tok: replace(&mut self.peek_tok, token::UNDERSCORE), + sp: self.peek_span, }; string_advance_token(self); ret_val } fn fatal(&self, m: ~str) -> ! { - self.span_diagnostic.span_fatal(self.peek_span.get(), m) + self.span_diagnostic.span_fatal(self.peek_span, m) } fn span_diag<'a>(&'a self) -> &'a SpanHandler { self.span_diagnostic } fn peek(&self) -> TokenAndSpan { // FIXME(pcwalton): Bad copy!
TokenAndSpan { - tok: self.peek_tok.get(), - sp: self.peek_span.get(), + tok: self.peek_tok.clone(), + sp: self.peek_span.clone(), } } - fn dup(&self) -> ~Reader: { ~dup_string_reader(self) as ~Reader: } } impl<'a> Reader for TtReader<'a> { fn is_eof(&self) -> bool { - *self.cur_tok.borrow() == token::EOF + self.cur_tok == token::EOF } - fn next_token(&self) -> TokenAndSpan { + fn next_token(&mut self) -> TokenAndSpan { let r = tt_next_token(self); debug!("TtReader: r={:?}", r); - return r; + r } fn fatal(&self, m: ~str) -> ! { - self.sp_diag.span_fatal(self.cur_span.get(), m); + self.sp_diag.span_fatal(self.cur_span, m); } fn span_diag<'a>(&'a self) -> &'a SpanHandler { self.sp_diag } fn peek(&self) -> TokenAndSpan { TokenAndSpan { - tok: self.cur_tok.get(), - sp: self.cur_span.get(), + tok: self.cur_tok.clone(), + sp: self.cur_span.clone(), } } - fn dup(&self) -> ~Reader: { ~dup_tt_reader(self) as ~Reader: } } // report a lexical error spanning [`from_pos`, `to_pos`) -fn fatal_span(rdr: &StringReader, +fn fatal_span(rdr: &mut StringReader, from_pos: BytePos, to_pos: BytePos, m: ~str) -> ! { - rdr.peek_span.set(codemap::mk_sp(from_pos, to_pos)); + rdr.peek_span = codemap::mk_sp(from_pos, to_pos); rdr.fatal(m); } // report a lexical error spanning [`from_pos`, `to_pos`), appending an // escaped character to the error message -fn fatal_span_char(rdr: &StringReader, +fn fatal_span_char(rdr: &mut StringReader, from_pos: BytePos, to_pos: BytePos, m: ~str, @@ -180,36 +160,35 @@ fn fatal_span_char(rdr: &StringReader, // report a lexical error spanning [`from_pos`, `to_pos`), appending the // offending string to the error message -fn fatal_span_verbose(rdr: &StringReader, +fn fatal_span_verbose(rdr: &mut StringReader, from_pos: BytePos, to_pos: BytePos, m: ~str) -> ! { let mut m = m; m.push_str(": "); - let s = rdr.filemap.src.slice( - byte_offset(rdr, from_pos).to_uint(), - byte_offset(rdr, to_pos).to_uint()); - m.push_str(s); + let from = byte_offset(rdr, from_pos).to_uint(); + let to = byte_offset(rdr, to_pos).to_uint(); + m.push_str(rdr.filemap.src.slice(from, to)); fatal_span(rdr, from_pos, to_pos, m); } // EFFECT: advance peek_tok and peek_span to refer to the next token. // EFFECT: update the interner, maybe. -fn string_advance_token(r: &StringReader) { +fn string_advance_token(r: &mut StringReader) { match consume_whitespace_and_comments(r) { Some(comment) => { - r.peek_span.set(comment.sp); - r.peek_tok.set(comment.tok); + r.peek_span = comment.sp; + r.peek_tok = comment.tok; }, None => { if is_eof(r) { - r.peek_tok.set(token::EOF); + r.peek_tok = token::EOF; } else { - let start_bytepos = r.last_pos.get(); - r.peek_tok.set(next_token_inner(r)); - r.peek_span.set(codemap::mk_sp(start_bytepos, - r.last_pos.get())); + let start_bytepos = r.last_pos; + r.peek_tok = next_token_inner(r); + r.peek_span = codemap::mk_sp(start_bytepos, + r.last_pos); }; } } @@ -227,7 +206,7 @@ pub fn with_str_from<T>( start: BytePos, f: |s: &str| -> T) -> T { - with_str_from_to(rdr, start, rdr.last_pos.get(), f) + with_str_from_to(rdr, start, rdr.last_pos, f) } /// Calls `f` with a string slice of the source text spanning from `start` @@ -245,36 +224,36 @@ fn with_str_from_to<T>( // EFFECT: advance the StringReader by one character. If a newline is // discovered, add it to the FileMap's list of line start offsets.
-pub fn bump(rdr: &StringReader) { - rdr.last_pos.set(rdr.pos.get()); - let current_byte_offset = byte_offset(rdr, rdr.pos.get()).to_uint(); +pub fn bump(rdr: &mut StringReader) { + rdr.last_pos = rdr.pos; + let current_byte_offset = byte_offset(rdr, rdr.pos).to_uint(); if current_byte_offset < rdr.filemap.src.len() { - assert!(rdr.curr.get().is_some()); - let last_char = rdr.curr.get().unwrap(); + assert!(rdr.curr.is_some()); + let last_char = rdr.curr.unwrap(); let next = rdr.filemap.src.char_range_at(current_byte_offset); let byte_offset_diff = next.next - current_byte_offset; - rdr.pos.set(rdr.pos.get() + Pos::from_uint(byte_offset_diff)); - rdr.curr.set(Some(next.ch)); - rdr.col.set(rdr.col.get() + CharPos(1u)); + rdr.pos = rdr.pos + Pos::from_uint(byte_offset_diff); + rdr.curr = Some(next.ch); + rdr.col = rdr.col + CharPos(1u); if last_char == '\n' { - rdr.filemap.next_line(rdr.last_pos.get()); - rdr.col.set(CharPos(0u)); + rdr.filemap.next_line(rdr.last_pos); + rdr.col = CharPos(0u); } if byte_offset_diff > 1 { - rdr.filemap.record_multibyte_char(rdr.last_pos.get(), byte_offset_diff); + rdr.filemap.record_multibyte_char(rdr.last_pos, byte_offset_diff); } } else { - rdr.curr.set(None); + rdr.curr = None; } } pub fn is_eof(rdr: &StringReader) -> bool { - rdr.curr.get().is_none() + rdr.curr.is_none() } pub fn nextch(rdr: &StringReader) -> Option<char> { - let offset = byte_offset(rdr, rdr.pos.get()).to_uint(); + let offset = byte_offset(rdr, rdr.pos).to_uint(); if offset < rdr.filemap.src.len() { Some(rdr.filemap.src.char_at(offset)) } else { @@ -286,7 +265,7 @@ pub fn nextch_is(rdr: &StringReader, c: char) -> bool { } pub fn nextnextch(rdr: &StringReader) -> Option<char> { - let offset = byte_offset(rdr, rdr.pos.get()).to_uint(); + let offset = byte_offset(rdr, rdr.pos).to_uint(); let s = rdr.filemap.deref().src.as_slice(); if offset >= s.len() { return None } let str::CharRange { next, .. } = s.char_range_at(offset); @@ -332,9 +311,9 @@ fn is_hex_digit(c: Option<char>) -> bool { // EFFECT: eats whitespace and comments. // returns a Some(sugared-doc-attr) if one exists, None otherwise. -fn consume_whitespace_and_comments(rdr: &StringReader) +fn consume_whitespace_and_comments(rdr: &mut StringReader) -> Option<TokenAndSpan> { - while is_whitespace(rdr.curr.get()) { bump(rdr); } + while is_whitespace(rdr.curr) { bump(rdr); } return consume_any_line_comment(rdr); } @@ -345,7 +324,7 @@ pub fn is_line_non_doc_comment(s: &str) -> bool { // PRECONDITION: rdr.curr is not whitespace // EFFECT: eats any kind of comment. // returns a Some(sugared-doc-attr) if one exists, None otherwise -fn consume_any_line_comment(rdr: &StringReader) +fn consume_any_line_comment(rdr: &mut StringReader) -> Option<TokenAndSpan> { if rdr.curr_is('/') { match nextch(rdr) { @@ -354,7 +333,7 @@ fn consume_any_line_comment(rdr: &StringReader) bump(rdr); // line comments starting with "///" or "//!" are doc-comments if rdr.curr_is('/') || rdr.curr_is('!') { - let start_bpos = rdr.pos.get() - BytePos(3); + let start_bpos = rdr.pos - BytePos(3); while !rdr.curr_is('\n') && !is_eof(rdr) { bump(rdr); } @@ -363,7 +342,7 @@ fn consume_any_line_comment(rdr: &StringReader) if !is_line_non_doc_comment(string) { Some(TokenAndSpan{ tok: token::DOC_COMMENT(str_to_ident(string)), - sp: codemap::mk_sp(start_bpos, rdr.pos.get()) + sp: codemap::mk_sp(start_bpos, rdr.pos) }) } else { None @@ -394,7 +373,7 @@ fn consume_any_line_comment(rdr: &StringReader) // we're at the beginning of the file...
let cmap = CodeMap::new(); cmap.files.borrow_mut().push(rdr.filemap.clone()); - let loc = cmap.lookup_char_pos_adj(rdr.last_pos.get()); + let loc = cmap.lookup_char_pos_adj(rdr.last_pos); if loc.line == 1u && loc.col == CharPos(0u) { while !rdr.curr_is('\n') && !is_eof(rdr) { bump(rdr); } return consume_whitespace_and_comments(rdr); @@ -411,10 +390,10 @@ pub fn is_block_non_doc_comment(s: &str) -> bool { } // might return a sugared-doc-attr -fn consume_block_comment(rdr: &StringReader) -> Option<TokenAndSpan> { +fn consume_block_comment(rdr: &mut StringReader) -> Option<TokenAndSpan> { // block comments starting with "/**" or "/*!" are doc-comments let is_doc_comment = rdr.curr_is('*') || rdr.curr_is('!'); - let start_bpos = rdr.pos.get() - BytePos(if is_doc_comment {3} else {2}); + let start_bpos = rdr.pos - BytePos(if is_doc_comment {3} else {2}); let mut level: int = 1; while level > 0 { @@ -424,7 +403,7 @@ fn consume_block_comment(rdr: &StringReader) -> Option<TokenAndSpan> { } else { ~"unterminated block comment" }; - fatal_span(rdr, start_bpos, rdr.last_pos.get(), msg); + fatal_span(rdr, start_bpos, rdr.last_pos, msg); } else if rdr.curr_is('/') && nextch_is(rdr, '*') { level += 1; bump(rdr); @@ -444,7 +423,7 @@ fn consume_block_comment(rdr: &StringReader) -> Option<TokenAndSpan> { if !is_block_non_doc_comment(string) { Some(TokenAndSpan{ tok: token::DOC_COMMENT(str_to_ident(string)), - sp: codemap::mk_sp(start_bpos, rdr.pos.get()) + sp: codemap::mk_sp(start_bpos, rdr.pos) }) } else { None @@ -458,14 +437,14 @@ fn consume_block_comment(rdr: &StringReader) -> Option<TokenAndSpan> { if res.is_some() { res } else { consume_whitespace_and_comments(rdr) } } -fn scan_exponent(rdr: &StringReader, start_bpos: BytePos) -> Option<~str> { +fn scan_exponent(rdr: &mut StringReader, start_bpos: BytePos) -> Option<~str> { // \x00 hits the `return None` case immediately, so this is fine.
- let mut c = rdr.curr.get().unwrap_or('\x00'); + let mut c = rdr.curr.unwrap_or('\x00'); let mut rslt = ~""; if c == 'e' || c == 'E' { rslt.push_char(c); bump(rdr); - c = rdr.curr.get().unwrap_or('\x00'); + c = rdr.curr.unwrap_or('\x00'); if c == '-' || c == '+' { rslt.push_char(c); bump(rdr); @@ -474,16 +453,16 @@ fn scan_exponent(rdr: &StringReader, start_bpos: BytePos) -> Option<~str> { if exponent.len() > 0u { return Some(rslt + exponent); } else { - fatal_span(rdr, start_bpos, rdr.last_pos.get(), + fatal_span(rdr, start_bpos, rdr.last_pos, ~"scan_exponent: bad fp literal"); } } else { return None::<~str>; } } -fn scan_digits(rdr: &StringReader, radix: uint) -> ~str { +fn scan_digits(rdr: &mut StringReader, radix: uint) -> ~str { let mut rslt = ~""; loop { - let c = rdr.curr.get(); + let c = rdr.curr; if c == Some('_') { bump(rdr); continue; } match c.and_then(|cc| char::to_digit(cc, radix)) { Some(_) => { @@ -495,7 +474,7 @@ fn scan_digits(rdr: &StringReader, radix: uint) -> ~str { }; } -fn check_float_base(rdr: &StringReader, start_bpos: BytePos, last_bpos: BytePos, +fn check_float_base(rdr: &mut StringReader, start_bpos: BytePos, last_bpos: BytePos, base: uint) { match base { 16u => fatal_span(rdr, start_bpos, last_bpos, @@ -508,12 +487,12 @@ fn check_float_base(rdr: &StringReader, start_bpos: BytePos, last_bpos: BytePos, } } -fn scan_number(c: char, rdr: &StringReader) -> token::Token { +fn scan_number(c: char, rdr: &mut StringReader) -> token::Token { let mut num_str; let mut base = 10u; let mut c = c; let mut n = nextch(rdr).unwrap_or('\x00'); - let start_bpos = rdr.last_pos.get(); + let start_bpos = rdr.last_pos; if c == '0' && n == 'x' { bump(rdr); bump(rdr); @@ -528,7 +507,7 @@ fn scan_number(c: char, rdr: &StringReader) -> token::Token { base = 2u; } num_str = scan_digits(rdr, base); - c = rdr.curr.get().unwrap_or('\x00'); + c = rdr.curr.unwrap_or('\x00'); nextch(rdr); if c == 'u' || c == 'i' { enum Result { Signed(ast::IntTy), Unsigned(ast::UintTy) } let signed = c == 'i'; let mut tp = { if signed { Signed(ast::TyI) } else { Unsigned(ast::TyU) } }; bump(rdr); - c = rdr.curr.get().unwrap_or('\x00'); + c = rdr.curr.unwrap_or('\x00'); if c == '8' { bump(rdr); tp = if signed { Signed(ast::TyI8) } @@ -562,12 +541,12 @@ fn scan_number(c: char, rdr: &StringReader) -> token::Token { else { Unsigned(ast::TyU64) }; } if num_str.len() == 0u { - fatal_span(rdr, start_bpos, rdr.last_pos.get(), + fatal_span(rdr, start_bpos, rdr.last_pos, ~"no valid digits found for number"); } let parsed = match from_str_radix::<u64>(num_str, base as uint) { Some(p) => p, - None => fatal_span(rdr, start_bpos, rdr.last_pos.get(), + None => fatal_span(rdr, start_bpos, rdr.last_pos, ~"int literal is too large") }; @@ -594,37 +573,37 @@ fn scan_number(c: char, rdr: &StringReader) -> token::Token { if rdr.curr_is('f') { bump(rdr); - c = rdr.curr.get().unwrap_or('\x00'); + c = rdr.curr.unwrap_or('\x00'); n = nextch(rdr).unwrap_or('\x00'); if c == '3' && n == '2' { bump(rdr); bump(rdr); - check_float_base(rdr, start_bpos, rdr.last_pos.get(), base); + check_float_base(rdr, start_bpos, rdr.last_pos, base); return token::LIT_FLOAT(str_to_ident(num_str), ast::TyF32); } else if c == '6' && n == '4' { bump(rdr); bump(rdr); - check_float_base(rdr, start_bpos, rdr.last_pos.get(), base); + check_float_base(rdr, start_bpos, rdr.last_pos, base); return token::LIT_FLOAT(str_to_ident(num_str), ast::TyF64); /* FIXME (#2252): if this is out of range for either a 32-bit or 64-bit float, it won't be noticed till the
back-end. */ } else { - fatal_span(rdr, start_bpos, rdr.last_pos.get(), + fatal_span(rdr, start_bpos, rdr.last_pos, ~"expected `f32` or `f64` suffix"); } } if is_float { - check_float_base(rdr, start_bpos, rdr.last_pos.get(), base); + check_float_base(rdr, start_bpos, rdr.last_pos, base); return token::LIT_FLOAT_UNSUFFIXED(str_to_ident(num_str)); } else { if num_str.len() == 0u { - fatal_span(rdr, start_bpos, rdr.last_pos.get(), + fatal_span(rdr, start_bpos, rdr.last_pos, ~"no valid digits found for number"); } let parsed = match from_str_radix::<u64>(num_str, base as uint) { Some(p) => p, - None => fatal_span(rdr, start_bpos, rdr.last_pos.get(), + None => fatal_span(rdr, start_bpos, rdr.last_pos, ~"int literal is too large") }; @@ -633,14 +612,14 @@ fn scan_number(c: char, rdr: &StringReader) -> token::Token { } } -fn scan_numeric_escape(rdr: &StringReader, n_hex_digits: uint) -> char { +fn scan_numeric_escape(rdr: &mut StringReader, n_hex_digits: uint) -> char { let mut accum_int = 0; let mut i = n_hex_digits; - let start_bpos = rdr.last_pos.get(); + let start_bpos = rdr.last_pos; while i != 0u && !is_eof(rdr) { - let n = rdr.curr.get(); + let n = rdr.curr; if !is_hex_digit(n) { - fatal_span_char(rdr, rdr.last_pos.get(), rdr.pos.get(), + fatal_span_char(rdr, rdr.last_pos, rdr.pos, ~"illegal character in numeric character escape", n.unwrap()); } @@ -650,13 +629,13 @@ fn scan_numeric_escape(rdr: &StringReader, n_hex_digits: uint) -> char { i -= 1u; } if i != 0 && is_eof(rdr) { - fatal_span(rdr, start_bpos, rdr.last_pos.get(), + fatal_span(rdr, start_bpos, rdr.last_pos, ~"unterminated numeric character escape"); } match char::from_u32(accum_int as u32) { Some(x) => x, - None => fatal_span(rdr, start_bpos, rdr.last_pos.get(), + None => fatal_span(rdr, start_bpos, rdr.last_pos, ~"illegal numeric character escape") } } @@ -683,14 +662,14 @@ fn ident_continue(c: Option<char>) -> bool { // return the next token from the string // EFFECT: advances the input past that token // EFFECT: updates the interner -fn next_token_inner(rdr: &StringReader) -> token::Token { - let c = rdr.curr.get(); +fn next_token_inner(rdr: &mut StringReader) -> token::Token { + let c = rdr.curr; if ident_start(c) && !nextch_is(rdr, '"') && !nextch_is(rdr, '#') { // Note: r as in r" or r#" is part of a raw string literal, // not an identifier, and is handled further down.
@@ -683,14 +662,14 @@ fn ident_continue(c: Option<char>) -> bool {
 // return the next token from the string
 // EFFECT: advances the input past that token
 // EFFECT: updates the interner
-fn next_token_inner(rdr: &StringReader) -> token::Token {
-    let c = rdr.curr.get();
+fn next_token_inner(rdr: &mut StringReader) -> token::Token {
+    let c = rdr.curr;
     if ident_start(c) && !nextch_is(rdr, '"') && !nextch_is(rdr, '#') {
         // Note: r as in r" or r#" is part of a raw string literal,
         // not an identifier, and is handled further down.
 
-        let start = rdr.last_pos.get();
-        while ident_continue(rdr.curr.get()) {
+        let start = rdr.last_pos;
+        while ident_continue(rdr.curr) {
             bump(rdr);
         }
 
@@ -708,7 +687,7 @@ fn next_token_inner(rdr: &StringReader) -> token::Token {
     if is_dec_digit(c) {
         return scan_number(c.unwrap(), rdr);
     }
-    fn binop(rdr: &StringReader, op: token::BinOp) -> token::Token {
+    fn binop(rdr: &mut StringReader, op: token::BinOp) -> token::Token {
         bump(rdr);
         if rdr.curr_is('=') {
             bump(rdr);
@@ -783,12 +762,12 @@ fn next_token_inner(rdr: &StringReader) -> token::Token {
       }
       '<' => {
         bump(rdr);
-        match rdr.curr.get().unwrap_or('\x00') {
+        match rdr.curr.unwrap_or('\x00') {
           '=' => { bump(rdr); return token::LE; }
           '<' => { return binop(rdr, token::SHL); }
           '-' => {
             bump(rdr);
-            match rdr.curr.get().unwrap_or('\x00') {
+            match rdr.curr.unwrap_or('\x00') {
               '>' => { bump(rdr); return token::DARROW; }
               _ => { return token::LARROW; }
             }
@@ -798,7 +777,7 @@ fn next_token_inner(rdr: &StringReader) -> token::Token {
       }
       '>' => {
         bump(rdr);
-        match rdr.curr.get().unwrap_or('\x00') {
+        match rdr.curr.unwrap_or('\x00') {
           '=' => { bump(rdr); return token::GE; }
           '>' => { return binop(rdr, token::SHR); }
           _ => { return token::GT; }
@@ -807,41 +786,41 @@ fn next_token_inner(rdr: &StringReader) -> token::Token {
       '\'' => {
         // Either a character constant 'a' OR a lifetime name 'abc
        bump(rdr);
-        let start = rdr.last_pos.get();
+        let start = rdr.last_pos;
 
        // the eof will be picked up by the final `'` check below
-        let mut c2 = rdr.curr.get().unwrap_or('\x00');
+        let mut c2 = rdr.curr.unwrap_or('\x00');
        bump(rdr);
 
        // If the character is an ident start not followed by another single
        // quote, then this is a lifetime name:
        if ident_start(Some(c2)) && !rdr.curr_is('\'') {
-            while ident_continue(rdr.curr.get()) {
+            while ident_continue(rdr.curr) {
                bump(rdr);
            }
-            return with_str_from(rdr, start, |lifetime_name| {
-                let ident = str_to_ident(lifetime_name);
-                let tok = &token::IDENT(ident, false);
-
-                if token::is_keyword(token::keywords::Self, tok) {
-                    fatal_span(rdr, start, rdr.last_pos.get(),
-                               ~"invalid lifetime name: 'self is no longer a special lifetime");
-                } else if token::is_any_keyword(tok) &&
-                    !token::is_keyword(token::keywords::Static, tok) {
-                    fatal_span(rdr, start, rdr.last_pos.get(),
-                               ~"invalid lifetime name");
-                } else {
-                    token::LIFETIME(ident)
-                }
-            })
+            let ident = with_str_from(rdr, start, |lifetime_name| {
+                str_to_ident(lifetime_name)
+            });
+            let tok = &token::IDENT(ident, false);
+
+            if token::is_keyword(token::keywords::Self, tok) {
+                fatal_span(rdr, start, rdr.last_pos,
+                           ~"invalid lifetime name: 'self is no longer a special lifetime");
+            } else if token::is_any_keyword(tok) &&
+                !token::is_keyword(token::keywords::Static, tok) {
+                fatal_span(rdr, start, rdr.last_pos,
+                           ~"invalid lifetime name");
+            } else {
+                return token::LIFETIME(ident);
+            }
        }
 
        // Otherwise it is a character constant:
        match c2 {
            '\\' => {
                // '\X' for some X must be a character constant:
-                let escaped = rdr.curr.get();
-                let escaped_pos = rdr.last_pos.get();
+                let escaped = rdr.curr;
+                let escaped_pos = rdr.last_pos;
                bump(rdr);
                match escaped {
                    None => {}
@@ -858,7 +837,7 @@ fn next_token_inner(rdr: &StringReader) -> token::Token {
                        'u' => scan_numeric_escape(rdr, 4u),
                        'U' => scan_numeric_escape(rdr, 8u),
                        c2 => {
-                            fatal_span_char(rdr, escaped_pos, rdr.last_pos.get(),
+                            fatal_span_char(rdr, escaped_pos, rdr.last_pos,
                                             ~"unknown character escape", c2)
                        }
                    }
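The lifetime-name hunk above is the one non-mechanical change in this file. With `rdr` now `&mut`, the old code could not call `fatal_span(rdr, ...)` inside a closure passed to `with_str_from(rdr, ...)`, because that call already borrows the reader. The rewrite keeps only the interning inside the closure and hoists the keyword checks out after it, so the borrows become sequential. A sketch of the conflict with stand-in types (names are hypothetical):

```rust
struct Reader { src: String, start: usize, pos: usize }

// Borrows the reader for the duration of the closure call.
fn with_str_from<T>(r: &Reader, f: impl FnOnce(&str) -> T) -> T {
    f(&r.src[r.start..r.pos])
}

// Error paths in the real lexer need the reader mutably.
fn report_error(_r: &mut Reader, msg: &str) { panic!("{msg}") }

fn scan_lifetime(r: &mut Reader) -> String {
    // Step 1: borrow immutably just long enough to copy the name out.
    let name = with_str_from(r, |s| s.to_string());
    // Step 2: `r` is free again, so the error path may take &mut Reader.
    if name == "self" {
        report_error(r, "invalid lifetime name: 'self is no longer a special lifetime");
    }
    name
}

fn main() {
    let mut r = Reader { src: "abc".into(), start: 0, pos: 3 };
    assert_eq!(scan_lifetime(&mut r), "abc");
}
```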
@@ -866,7 +845,7 @@ fn next_token_inner(rdr: &StringReader) -> token::Token {
                }
            }
            '\t' | '\n' | '\r' | '\'' => {
-                fatal_span_char(rdr, start, rdr.last_pos.get(),
+                fatal_span_char(rdr, start, rdr.last_pos,
                                ~"character constant must be escaped", c2);
            }
            _ => {}
@@ -877,7 +856,7 @@ fn next_token_inner(rdr: &StringReader) -> token::Token {
                        // character before position `start` is an
                        // ascii single quote.
                        start - BytePos(1),
-                        rdr.last_pos.get(),
+                        rdr.last_pos,
                        ~"unterminated character constant");
        }
        bump(rdr); // advance curr past token
@@ -885,25 +864,25 @@ fn next_token_inner(rdr: &StringReader) -> token::Token {
      }
      '"' => {
        let mut accum_str = ~"";
-        let start_bpos = rdr.last_pos.get();
+        let start_bpos = rdr.last_pos;
        bump(rdr);
        while !rdr.curr_is('"') {
            if is_eof(rdr) {
-                fatal_span(rdr, start_bpos, rdr.last_pos.get(),
+                fatal_span(rdr, start_bpos, rdr.last_pos,
                           ~"unterminated double quote string");
            }
 
-            let ch = rdr.curr.get().unwrap();
+            let ch = rdr.curr.unwrap();
            bump(rdr);
            match ch {
              '\\' => {
                if is_eof(rdr) {
-                    fatal_span(rdr, start_bpos, rdr.last_pos.get(),
+                    fatal_span(rdr, start_bpos, rdr.last_pos,
                               ~"unterminated double quote string");
                }
 
-                let escaped = rdr.curr.get().unwrap();
-                let escaped_pos = rdr.last_pos.get();
+                let escaped = rdr.curr.unwrap();
+                let escaped_pos = rdr.last_pos;
                bump(rdr);
                match escaped {
                  'n' => accum_str.push_char('\n'),
@@ -924,7 +903,7 @@ fn next_token_inner(rdr: &StringReader) -> token::Token {
                    accum_str.push_char(scan_numeric_escape(rdr, 8u));
                  }
                  c2 => {
-                    fatal_span_char(rdr, escaped_pos, rdr.last_pos.get(),
+                    fatal_span_char(rdr, escaped_pos, rdr.last_pos,
                                    ~"unknown string escape", c2);
                  }
                }
@@ -936,7 +915,7 @@ fn next_token_inner(rdr: &StringReader) -> token::Token {
        return token::LIT_STR(str_to_ident(accum_str));
      }
      'r' => {
-        let start_bpos = rdr.last_pos.get();
+        let start_bpos = rdr.last_pos;
        bump(rdr);
        let mut hash_count = 0u;
        while rdr.curr_is('#') {
@@ -945,24 +924,24 @@ fn next_token_inner(rdr: &StringReader) -> token::Token {
        }
 
        if is_eof(rdr) {
-            fatal_span(rdr, start_bpos, rdr.last_pos.get(),
+            fatal_span(rdr, start_bpos, rdr.last_pos,
                       ~"unterminated raw string");
        } else if !rdr.curr_is('"') {
-            fatal_span_char(rdr, start_bpos, rdr.last_pos.get(),
+            fatal_span_char(rdr, start_bpos, rdr.last_pos,
                            ~"only `#` is allowed in raw string delimitation; \
                              found illegal character",
-                            rdr.curr.get().unwrap());
+                            rdr.curr.unwrap());
        }
        bump(rdr);
-        let content_start_bpos = rdr.last_pos.get();
+        let content_start_bpos = rdr.last_pos;
        let mut content_end_bpos;
        'outer: loop {
            if is_eof(rdr) {
-                fatal_span(rdr, start_bpos, rdr.last_pos.get(),
+                fatal_span(rdr, start_bpos, rdr.last_pos,
                           ~"unterminated raw string");
            }
            if rdr.curr_is('"') {
-                content_end_bpos = rdr.last_pos.get();
+                content_end_bpos = rdr.last_pos;
                for _ in range(0, hash_count) {
                    bump(rdr);
                    if !rdr.curr_is('#') {
@@ -1006,14 +985,14 @@ fn next_token_inner(rdr: &StringReader) -> token::Token {
      '^' => { return binop(rdr, token::CARET); }
      '%' => { return binop(rdr, token::PERCENT); }
      c => {
-          fatal_span_char(rdr, rdr.last_pos.get(), rdr.pos.get(),
+          fatal_span_char(rdr, rdr.last_pos, rdr.pos,
                          ~"unknown start of token", c);
      }
    }
 }
 
-fn consume_whitespace(rdr: &StringReader) {
-    while is_whitespace(rdr.curr.get()) && !is_eof(rdr) { bump(rdr); }
+fn consume_whitespace(rdr: &mut StringReader) {
+    while is_whitespace(rdr.curr) && !is_eof(rdr) { bump(rdr); }
 }
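For reference, the `'r' => { ... }` arm above implements raw-string delimiting: count the `#`s after `r`, then treat a `"` as terminating only when it is followed by the same number of `#`s. A standalone sketch of that rule (not the compiler's implementation, which scans character by character over its reader):

```rust
// Extract the body of a raw string literal, or None if malformed/unterminated.
fn raw_string_content(input: &str) -> Option<&str> {
    let rest = input.strip_prefix('r')?;
    let hash_count = rest.chars().take_while(|&c| c == '#').count();
    let rest = &rest[hash_count..];
    let body = rest.strip_prefix('"')?;
    // A `"` terminates only when followed by `hash_count` hashes.
    let closer = format!("\"{}", "#".repeat(hash_count));
    let end = body.find(closer.as_str())?; // unterminated -> None
    Some(&body[..end])
}

fn main() {
    assert_eq!(raw_string_content(r####"r##"a "# b"##"####), Some(r#"a "# b"#));
    assert_eq!(raw_string_content("r#\"unterminated"), None);
}
```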
str_to_ident("fn"); @@ -1051,7 +1030,7 @@ mod test { sp:Span {lo:BytePos(21),hi:BytePos(23),expn_info: None}}; assert_eq!(tok1,tok2); // the 'main' id is already read: - assert_eq!(string_reader.last_pos.get().clone(), BytePos(28)); + assert_eq!(string_reader.last_pos.clone(), BytePos(28)); // read another token: let tok3 = string_reader.next_token(); let tok4 = TokenAndSpan{ @@ -1059,12 +1038,12 @@ mod test { sp:Span {lo:BytePos(24),hi:BytePos(28),expn_info: None}}; assert_eq!(tok3,tok4); // the lparen is already read: - assert_eq!(string_reader.last_pos.get().clone(), BytePos(29)) + assert_eq!(string_reader.last_pos.clone(), BytePos(29)) } // check that the given reader produces the desired stream // of tokens (stop checking after exhausting the expected vec) - fn check_tokenization (string_reader: StringReader, expected: Vec ) { + fn check_tokenization (mut string_reader: StringReader, expected: Vec ) { for expected_tok in expected.iter() { assert_eq!(&string_reader.next_token().tok, expected_tok); } diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index 2df93deea14b6..f2a7f543bd642 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -366,13 +366,13 @@ mod test { [ast::TTTok(_,_), ast::TTTok(_,token::NOT), ast::TTTok(_,_), - ast::TTDelim(delim_elts)] => { + ast::TTDelim(ref delim_elts)] => { let delim_elts: &[ast::TokenTree] = delim_elts.as_slice(); match delim_elts { [ast::TTTok(_,token::LPAREN), - ast::TTDelim(first_set), + ast::TTDelim(ref first_set), ast::TTTok(_,token::FAT_ARROW), - ast::TTDelim(second_set), + ast::TTDelim(ref second_set), ast::TTTok(_,token::RPAREN)] => { let first_set: &[ast::TokenTree] = first_set.as_slice(); diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 0ae43db831584..0d33c26343852 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -80,6 +80,7 @@ use owned_slice::OwnedSlice; use collections::HashSet; use std::kinds::marker; use std::mem::replace; +use std::rc::Rc; use std::vec; #[allow(non_camel_case_types)] @@ -274,7 +275,7 @@ struct ParsedItemsAndViewItems { /* ident is handled by common.rs */ -pub fn Parser<'a>(sess: &'a ParseSess, cfg: ast::CrateConfig, rdr: ~Reader:) +pub fn Parser<'a>(sess: &'a ParseSess, cfg: ast::CrateConfig, mut rdr: ~Reader:) -> Parser<'a> { let tok0 = rdr.next_token(); let span = tok0.sp; @@ -328,7 +329,7 @@ pub struct Parser<'a> { restriction: restriction, quote_depth: uint, // not (yet) related to the quasiquoter reader: ~Reader:, - interner: @token::IdentInterner, + interner: Rc, /// The set of seen errors about obsolete syntax. Used to suppress /// extra detail when the same error is seen twice obsolete_set: HashSet, @@ -2101,7 +2102,7 @@ impl<'a> Parser<'a> { let seq = match seq { Spanned { node, .. 
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index 0ae43db831584..0d33c26343852 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -80,6 +80,7 @@ use owned_slice::OwnedSlice;
 use collections::HashSet;
 use std::kinds::marker;
 use std::mem::replace;
+use std::rc::Rc;
 use std::vec;
 
 #[allow(non_camel_case_types)]
@@ -274,7 +275,7 @@ struct ParsedItemsAndViewItems {
 
 /* ident is handled by common.rs */
 
-pub fn Parser<'a>(sess: &'a ParseSess, cfg: ast::CrateConfig, rdr: ~Reader:)
+pub fn Parser<'a>(sess: &'a ParseSess, cfg: ast::CrateConfig, mut rdr: ~Reader:)
               -> Parser<'a> {
     let tok0 = rdr.next_token();
     let span = tok0.sp;
@@ -328,7 +329,7 @@ pub struct Parser<'a> {
     restriction: restriction,
     quote_depth: uint, // not (yet) related to the quasiquoter
     reader: ~Reader:,
-    interner: @token::IdentInterner,
+    interner: Rc<token::IdentInterner>,
     /// The set of seen errors about obsolete syntax. Used to suppress
     /// extra detail when the same error is seen twice
     obsolete_set: HashSet<ObsoleteSyntax>,
@@ -2101,7 +2102,7 @@ impl<'a> Parser<'a> {
                 let seq = match seq {
                     Spanned { node, .. } => node,
                 };
-                TTSeq(mk_sp(sp.lo, p.span.hi), @seq, s, z)
+                TTSeq(mk_sp(sp.lo, p.span.hi), Rc::new(seq), s, z)
             } else {
                 TTNonterminal(sp, p.parse_ident())
             }
@@ -2144,7 +2145,7 @@ impl<'a> Parser<'a> {
 
             result.push(parse_any_tt_tok(self));
             self.open_braces.pop().unwrap();
-            TTDelim(@result)
+            TTDelim(Rc::new(result))
         }
         _ => parse_non_delim_tt_tok(self)
     }
diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs
index 7bb920bdf56dc..01bdaa381cb2e 100644
--- a/src/libsyntax/parse/token.rs
+++ b/src/libsyntax/parse/token.rs
@@ -22,6 +22,7 @@ use std::char;
 use std::fmt;
 use std::local_data;
 use std::path::BytesContainer;
+use std::rc::Rc;
 
 #[allow(non_camel_case_types)]
 #[deriving(Clone, Encodable, Decodable, Eq, TotalEq, Hash, Show)]
@@ -531,13 +532,14 @@ pub type IdentInterner = StrInterner;
 
 // if an interner exists in TLS, return it. Otherwise, prepare a
 // fresh one.
-pub fn get_ident_interner() -> @IdentInterner {
-    local_data_key!(key: @::parse::token::IdentInterner)
-    match local_data::get(key, |k| k.map(|k| *k)) {
+// FIXME(eddyb) #8726 This should probably use a task-local reference.
+pub fn get_ident_interner() -> Rc<IdentInterner> {
+    local_data_key!(key: Rc<::parse::token::IdentInterner>)
+    match local_data::get(key, |k| k.map(|k| k.clone())) {
         Some(interner) => interner,
         None => {
-            let interner = @mk_fresh_ident_interner();
-            local_data::set(key, interner);
+            let interner = Rc::new(mk_fresh_ident_interner());
+            local_data::set(key, interner.clone());
             interner
         }
     }
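`get_ident_interner` keeps its shape after this change: one interner cached per task, with handles given out on demand. With `Rc` the handle is cloned (a refcount bump) rather than copying a `@`-pointer. A rough modern-Rust equivalent of the pattern, with a hypothetical stand-in `Interner` in place of `syntax::parse::token`'s real type:

```rust
use std::cell::OnceCell;
use std::rc::Rc;

#[derive(Default)]
struct Interner; // stand-in; the real IdentInterner interns ident strings

thread_local! {
    // One interner per thread, lazily created on first access.
    static KEY: OnceCell<Rc<Interner>> = OnceCell::new();
}

fn get_ident_interner() -> Rc<Interner> {
    KEY.with(|cell| cell.get_or_init(|| Rc::new(Interner::default())).clone())
}

fn main() {
    let a = get_ident_interner();
    let b = get_ident_interner();
    assert!(Rc::ptr_eq(&a, &b)); // same interner, two cloned handles
}
```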
diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs
index 6309f83abdd29..b410e0c7169ab 100644
--- a/src/libsyntax/print/pprust.rs
+++ b/src/libsyntax/print/pprust.rs
@@ -26,13 +26,12 @@ use print::pp::{Breaks, Consistent, Inconsistent, eof};
 use print::pp;
 
 use std::cast;
-use std::cell::RefCell;
 use std::char;
 use std::str;
 use std::io;
 use std::io::{IoResult, MemWriter};
+use std::rc::Rc;
 
-// The &mut State is stored here to prevent recursive type.
 pub enum AnnNode<'a> {
     NodeBlock(&'a ast::Block),
     NodeItem(&'a ast::Item),
@@ -57,11 +56,11 @@ pub struct CurrentCommentAndLiteral {
 pub struct State<'a> {
     s: pp::Printer,
     cm: Option<&'a CodeMap>,
-    intr: @token::IdentInterner,
+    intr: Rc<token::IdentInterner>,
     comments: Option<Vec<comments::Comment> >,
     literals: Option<Vec<comments::Literal> >,
     cur_cmnt_and_lit: CurrentCommentAndLiteral,
-    boxes: RefCell<Vec<pp::Breaks> >,
+    boxes: Vec<pp::Breaks>,
     ann: &'a PpAnn
 }
 
@@ -82,7 +81,7 @@ pub fn rust_printer_annotated<'a>(writer: ~io::Writer,
             cur_cmnt: 0,
             cur_lit: 0
         },
-        boxes: RefCell::new(Vec::new()),
+        boxes: Vec::new(),
         ann: ann
     }
 }
@@ -124,7 +123,7 @@ pub fn print_crate<'a>(cm: &'a CodeMap,
             cur_cmnt: 0,
             cur_lit: 0
         },
-        boxes: RefCell::new(Vec::new()),
+        boxes: Vec::new(),
         ann: ann
     };
     try!(s.print_mod(&krate.module, krate.attrs.as_slice()));
@@ -238,23 +237,23 @@ pub fn visibility_qualified(vis: ast::Visibility, s: &str) -> ~str {
 
 impl<'a> State<'a> {
     pub fn ibox(&mut self, u: uint) -> IoResult<()> {
-        self.boxes.borrow_mut().push(pp::Inconsistent);
+        self.boxes.push(pp::Inconsistent);
         pp::ibox(&mut self.s, u)
     }
 
     pub fn end(&mut self) -> IoResult<()> {
-        self.boxes.borrow_mut().pop().unwrap();
+        self.boxes.pop().unwrap();
         pp::end(&mut self.s)
     }
 
     pub fn cbox(&mut self, u: uint) -> IoResult<()> {
-        self.boxes.borrow_mut().push(pp::Consistent);
+        self.boxes.push(pp::Consistent);
         pp::cbox(&mut self.s, u)
     }
 
     // "raw box"
     pub fn rbox(&mut self, u: uint, b: pp::Breaks) -> IoResult<()> {
-        self.boxes.borrow_mut().push(b);
+        self.boxes.push(b);
         pp::rbox(&mut self.s, u, b)
     }
 
@@ -321,8 +320,8 @@ impl<'a> State<'a> {
         self.s.last_token().is_eof() || self.s.last_token().is_hardbreak_tok()
     }
 
-    pub fn in_cbox(&mut self) -> bool {
-        match self.boxes.borrow().last() {
+    pub fn in_cbox(&self) -> bool {
+        match self.boxes.last() {
             Some(&last_box) => last_box == pp::Consistent,
             None => false
         }
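The `pprust` hunks remove a second layer of interior mutability: every method that pushes or pops `boxes` already takes `&mut self`, so the `RefCell` only added runtime borrow-flag bookkeeping, and dropping it also lets the read-only `in_cbox` downgrade to `&self`. A sketch of the simplification with stand-in types (not the printer's real definitions):

```rust
#[derive(Clone, Copy, PartialEq)]
enum Breaks { Consistent, Inconsistent }

struct State {
    boxes: Vec<Breaks>, // was RefCell<Vec<Breaks>>
}

impl State {
    fn cbox(&mut self) {
        // was self.boxes.borrow_mut().push(..): &mut self already proves
        // exclusive access, so the dynamic borrow check was redundant
        self.boxes.push(Breaks::Consistent);
    }
    fn in_cbox(&self) -> bool {
        // &self now suffices; no borrow flag to check or accidentally poison
        self.boxes.last() == Some(&Breaks::Consistent)
    }
}

fn main() {
    let mut s = State { boxes: Vec::new() };
    s.cbox();
    assert!(s.in_cbox());
    let _ = Breaks::Inconsistent; // the other box kind ibox() would push
}
```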