diff --git a/src/librustc_incremental/assert_dep_graph.rs b/src/librustc_incremental/assert_dep_graph.rs index 28aab1fdd4167..3f6b3a4ab6c5e 100644 --- a/src/librustc_incremental/assert_dep_graph.rs +++ b/src/librustc_incremental/assert_dep_graph.rs @@ -77,9 +77,11 @@ pub fn assert_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { // Find annotations supplied by user (if any). let (if_this_changed, then_this_would_need) = { - let mut visitor = IfThisChanged { tcx: tcx, - if_this_changed: vec![], - then_this_would_need: vec![] }; + let mut visitor = IfThisChanged { + tcx: tcx, + if_this_changed: vec![], + then_this_would_need: vec![], + }; visitor.process_attrs(ast::CRATE_NODE_ID, &tcx.map.krate().attrs); tcx.map.krate().visit_all_items(&mut visitor); (visitor.if_this_changed, visitor.then_this_would_need) @@ -89,7 +91,8 @@ pub fn assert_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { assert!(tcx.sess.opts.debugging_opts.query_dep_graph, "cannot use the `#[{}]` or `#[{}]` annotations \ without supplying `-Z query-dep-graph`", - ATTR_IF_THIS_CHANGED, ATTR_THEN_THIS_WOULD_NEED); + ATTR_IF_THIS_CHANGED, + ATTR_THEN_THIS_WOULD_NEED); } // Check paths. @@ -99,7 +102,7 @@ pub fn assert_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { type Sources = Vec<(Span, DefId, DepNode)>; type Targets = Vec<(Span, InternedString, ast::NodeId, DepNode)>; -struct IfThisChanged<'a, 'tcx:'a> { +struct IfThisChanged<'a, 'tcx: 'a> { tcx: TyCtxt<'a, 'tcx, 'tcx>, if_this_changed: Sources, then_this_would_need: Targets, @@ -153,15 +156,11 @@ impl<'a, 'tcx> IfThisChanged<'a, 'tcx> { } } None => { - self.tcx.sess.span_fatal( - attr.span, - &format!("missing DepNode variant")); + self.tcx.sess.span_fatal(attr.span, &format!("missing DepNode variant")); } }; - self.then_this_would_need.push((attr.span, - dep_node_interned.clone().unwrap(), - node_id, - dep_node)); + self.then_this_would_need + .push((attr.span, dep_node_interned.clone().unwrap(), node_id, dep_node)); } } } @@ -175,14 +174,12 @@ impl<'a, 'tcx> Visitor<'tcx> for IfThisChanged<'a, 'tcx> { fn check_paths<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, if_this_changed: &Sources, - then_this_would_need: &Targets) -{ + then_this_would_need: &Targets) { // Return early here so as not to construct the query, which is not cheap. if if_this_changed.is_empty() { for &(target_span, _, _, _) in then_this_would_need { - tcx.sess.span_err( - target_span, - &format!("no #[rustc_if_this_changed] annotation detected")); + tcx.sess.span_err(target_span, + &format!("no #[rustc_if_this_changed] annotation detected")); } return; @@ -192,15 +189,12 @@ fn check_paths<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, let dependents = query.transitive_successors(source_dep_node); for &(target_span, ref target_pass, _, ref target_dep_node) in then_this_would_need { if !dependents.contains(&target_dep_node) { - tcx.sess.span_err( - target_span, - &format!("no path from `{}` to `{}`", - tcx.item_path_str(source_def_id), - target_pass)); + tcx.sess.span_err(target_span, + &format!("no path from `{}` to `{}`", + tcx.item_path_str(source_def_id), + target_pass)); } else { - tcx.sess.span_err( - target_span, - &format!("OK")); + tcx.sess.span_err(target_span, &format!("OK")); } } } @@ -213,22 +207,22 @@ fn dump_graph(tcx: TyCtxt) { let nodes = match env::var("RUST_DEP_GRAPH_FILTER") { Ok(string) => { // Expect one of: "-> target", "source -> target", or "source ->". 
-            let edge_filter = EdgeFilter::new(&string).unwrap_or_else(|e| {
-                bug!("invalid filter: {}", e)
-            });
+            let edge_filter = EdgeFilter::new(&string)
+                .unwrap_or_else(|e| bug!("invalid filter: {}", e));
             let sources = node_set(&query, &edge_filter.source);
             let targets = node_set(&query, &edge_filter.target);
             filter_nodes(&query, &sources, &targets)
         }
         Err(_) => {
             query.nodes()
-                 .into_iter()
-                 .collect()
+                .into_iter()
+                .collect()
         }
     };
     let edges = filter_edges(&query, &nodes);
 
-    { // dump a .txt file with just the edges:
+    {
+        // dump a .txt file with just the edges:
         let txt_path = format!("{}.txt", path);
         let mut file = File::create(&txt_path).unwrap();
         for &(ref source, ref target) in &edges {
@@ -236,7 +230,8 @@ fn dump_graph(tcx: TyCtxt) {
         }
     }
 
-    { // dump a .dot file in graphviz format:
+    {
+        // dump a .dot file in graphviz format:
         let dot_path = format!("{}.dot", path);
         let mut v = Vec::new();
         dot::render(&GraphvizDepGraph(nodes, edges), &mut v).unwrap();
@@ -272,10 +267,14 @@ impl<'a, 'tcx, 'q> dot::Labeller<'a> for GraphvizDepGraph<'q> {
         dot::Id::new("DependencyGraph").unwrap()
     }
     fn node_id(&self, n: &&'q DepNode<DefId>) -> dot::Id {
-        let s: String =
-            format!("{:?}", n).chars()
-                              .map(|c| if c == '_' || c.is_alphanumeric() { c } else { '_' })
-                              .collect();
+        let s: String = format!("{:?}", n)
+            .chars()
+            .map(|c| if c == '_' || c.is_alphanumeric() {
+                c
+            } else {
+                '_'
+            })
+            .collect();
         debug!("n={:?} s={:?}", n, s);
         dot::Id::new(s).unwrap()
     }
@@ -287,9 +286,9 @@ impl<'a, 'tcx, 'q> dot::Labeller<'a> for GraphvizDepGraph<'q> {
 // Given an optional filter like `"x,y,z"`, returns either `None` (no
 // filter) or the set of nodes whose labels contain all of those
 // substrings.
-fn node_set<'q>(query: &'q DepGraphQuery<DefId>, filter: &DepNodeFilter)
-                -> Option<FnvHashSet<&'q DepNode<DefId>>>
-{
+fn node_set<'q>(query: &'q DepGraphQuery<DefId>,
+                filter: &DepNodeFilter)
+                -> Option<FnvHashSet<&'q DepNode<DefId>>> {
     debug!("node_set(filter={:?})", filter);
 
     if filter.accepts_all() {
@@ -302,8 +301,7 @@ fn node_set<'q>(query: &'q DepGraphQuery<DefId>, filter: &DepNodeFilter)
 fn filter_nodes<'q>(query: &'q DepGraphQuery<DefId>,
                     sources: &Option<FnvHashSet<&'q DepNode<DefId>>>,
                     targets: &Option<FnvHashSet<&'q DepNode<DefId>>>)
-                    -> FnvHashSet<&'q DepNode<DefId>>
-{
+                    -> FnvHashSet<&'q DepNode<DefId>> {
     if let &Some(ref sources) = sources {
         if let &Some(ref targets) = targets {
             walk_between(query, sources, targets)
@@ -320,11 +318,12 @@ fn filter_nodes<'q>(query: &'q DepGraphQuery<DefId>,
 fn walk_nodes<'q>(query: &'q DepGraphQuery<DefId>,
                   starts: &FnvHashSet<&'q DepNode<DefId>>,
                   direction: Direction)
-                  -> FnvHashSet<&'q DepNode<DefId>>
-{
+                  -> FnvHashSet<&'q DepNode<DefId>> {
     let mut set = FnvHashSet();
     for &start in starts {
-        debug!("walk_nodes: start={:?} outgoing?={:?}", start, direction == OUTGOING);
+        debug!("walk_nodes: start={:?} outgoing?={:?}",
+               start,
+               direction == OUTGOING);
         if set.insert(start) {
             let mut stack = vec![query.indices[start]];
             while let Some(index) = stack.pop() {
@@ -344,15 +343,19 @@ fn walk_nodes<'q>(query: &'q DepGraphQuery<DefId>,
 fn walk_between<'q>(query: &'q DepGraphQuery<DefId>,
                     sources: &FnvHashSet<&'q DepNode<DefId>>,
                     targets: &FnvHashSet<&'q DepNode<DefId>>)
-                    -> FnvHashSet<&'q DepNode<DefId>>
-{
+                    -> FnvHashSet<&'q DepNode<DefId>> {
     // This is a bit tricky. We want to include a node only if it is:
     // (a) reachable from a source and (b) will reach a target. And we
     // have to be careful about cycles etc. Luckily efficiency is not
     // a big concern!
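(Editor's aside: the property the comment above describes can be illustrated in isolation. The sketch below uses assumed, simplified types, plain integer node ids and adjacency maps rather than the compiler's `DepGraphQuery`, and computes the same "lies on some source-to-target path" set by intersecting forward reachability with backward reachability, which handles cycles without the three-state bookkeeping used in the patch.)

```rust
use std::collections::{HashMap, HashSet};

// Hypothetical simplified graph: node ids mapped to successor lists.
type Graph = HashMap<u32, Vec<u32>>;

// Iterative DFS collecting everything reachable from `starts`.
fn reachable(graph: &Graph, starts: &HashSet<u32>) -> HashSet<u32> {
    let mut seen = starts.clone();
    let mut stack: Vec<u32> = starts.iter().cloned().collect();
    while let Some(n) = stack.pop() {
        for &m in graph.get(&n).into_iter().flatten() {
            if seen.insert(m) {
                stack.push(m);
            }
        }
    }
    seen
}

// A node lies on some source-to-target path iff it is forward-reachable
// from a source and backward-reachable (via the reversed graph) from a
// target; cycles need no special casing under this formulation.
fn walk_between(fwd: &Graph, rev: &Graph,
                sources: &HashSet<u32>,
                targets: &HashSet<u32>) -> HashSet<u32> {
    reachable(fwd, sources)
        .intersection(&reachable(rev, targets))
        .cloned()
        .collect()
}
```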
#[derive(Copy, Clone, PartialEq)] - enum State { Undecided, Deciding, Included, Excluded } + enum State { + Undecided, + Deciding, + Included, + Excluded, + } let mut node_states = vec![State::Undecided; query.graph.len_nodes()]; @@ -365,18 +368,14 @@ fn walk_between<'q>(query: &'q DepGraphQuery, } return query.nodes() - .into_iter() - .filter(|&n| { - let index = query.indices[n]; - node_states[index.0] == State::Included - }) - .collect(); - - fn recurse(query: &DepGraphQuery, - node_states: &mut [State], - node: NodeIndex) - -> bool - { + .into_iter() + .filter(|&n| { + let index = query.indices[n]; + node_states[index.0] == State::Included + }) + .collect(); + + fn recurse(query: &DepGraphQuery, node_states: &mut [State], node: NodeIndex) -> bool { match node_states[node.0] { // known to reach a target State::Included => return true, @@ -387,7 +386,7 @@ fn walk_between<'q>(query: &'q DepGraphQuery, // backedge, not yet known, say false State::Deciding => return false, - State::Undecided => { } + State::Undecided => {} } node_states[node.0] = State::Deciding; @@ -411,10 +410,9 @@ fn walk_between<'q>(query: &'q DepGraphQuery, fn filter_edges<'q>(query: &'q DepGraphQuery, nodes: &FnvHashSet<&'q DepNode>) - -> Vec<(&'q DepNode, &'q DepNode)> -{ + -> Vec<(&'q DepNode, &'q DepNode)> { query.edges() - .into_iter() - .filter(|&(source, target)| nodes.contains(source) && nodes.contains(target)) - .collect() + .into_iter() + .filter(|&(source, target)| nodes.contains(source) && nodes.contains(target)) + .collect() } diff --git a/src/librustc_incremental/calculate_svh/caching_codemap_view.rs b/src/librustc_incremental/calculate_svh/caching_codemap_view.rs index ad9c48420e217..1e245dc0fcfc0 100644 --- a/src/librustc_incremental/calculate_svh/caching_codemap_view.rs +++ b/src/librustc_incremental/calculate_svh/caching_codemap_view.rs @@ -68,7 +68,7 @@ impl<'tcx> CachingCodemapView<'tcx> { // No cache hit ... let mut oldest = 0; - for index in 1 .. 
self.line_cache.len() {
+        for index in 1..self.line_cache.len() {
             if self.line_cache[index].time_stamp < self.line_cache[oldest].time_stamp {
                 oldest = index;
             }
diff --git a/src/librustc_incremental/calculate_svh/def_path_hash.rs b/src/librustc_incremental/calculate_svh/def_path_hash.rs
index 8aa134ba3bfd0..a01c9d475b169 100644
--- a/src/librustc_incremental/calculate_svh/def_path_hash.rs
+++ b/src/librustc_incremental/calculate_svh/def_path_hash.rs
@@ -21,16 +21,17 @@ impl<'a, 'tcx> DefPathHashes<'a, 'tcx> {
     pub fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Self {
         DefPathHashes {
             tcx: tcx,
-            data: DefIdMap()
+            data: DefIdMap(),
         }
     }
 
     pub fn hash(&mut self, def_id: DefId) -> u64 {
         let tcx = self.tcx;
-        *self.data.entry(def_id)
-                  .or_insert_with(|| {
-                      let def_path = tcx.def_path(def_id);
-                      def_path.deterministic_hash(tcx)
-                  })
+        *self.data
+            .entry(def_id)
+            .or_insert_with(|| {
+                let def_path = tcx.def_path(def_id);
+                def_path.deterministic_hash(tcx)
+            })
     }
 }
diff --git a/src/librustc_incremental/calculate_svh/hasher.rs b/src/librustc_incremental/calculate_svh/hasher.rs
index 49683a81227b1..3b314c7de50d2 100644
--- a/src/librustc_incremental/calculate_svh/hasher.rs
+++ b/src/librustc_incremental/calculate_svh/hasher.rs
@@ -24,7 +24,7 @@ impl IchHasher {
         let hash_size = mem::size_of::<Fingerprint>();
         IchHasher {
             state: ArchIndependentHasher::new(Blake2bHasher::new(hash_size, &[])),
-            bytes_hashed: 0
+            bytes_hashed: 0,
         }
     }
diff --git a/src/librustc_incremental/calculate_svh/mod.rs b/src/librustc_incremental/calculate_svh/mod.rs
index 3b0b37bb01ce3..03f8de6b104bf 100644
--- a/src/librustc_incremental/calculate_svh/mod.rs
+++ b/src/librustc_incremental/calculate_svh/mod.rs
@@ -112,7 +112,8 @@ pub fn compute_incremental_hashes_map<'a, 'tcx: 'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>)
 
     tcx.sess.perf_stats.incr_comp_hashes_count.set(visitor.hashes.len() as u64);
 
-    record_time(&tcx.sess.perf_stats.svh_time, || visitor.compute_crate_hash());
+    record_time(&tcx.sess.perf_stats.svh_time,
+                || visitor.compute_crate_hash());
     visitor.hashes
 }
 
@@ -146,10 +147,11 @@ impl<'a, 'tcx> HashItemsVisitor<'a, 'tcx> {
         let bytes_hashed = state.bytes_hashed();
         let item_hash = state.finish();
         self.hashes.insert(DepNode::Hir(def_id), item_hash);
-        debug!("calculate_item_hash: def_id={:?} hash={:?}", def_id, item_hash);
+        debug!("calculate_item_hash: def_id={:?} hash={:?}",
+               def_id,
+               item_hash);
 
-        let bytes_hashed = self.tcx.sess.perf_stats.incr_comp_bytes_hashed.get() +
-            bytes_hashed;
+        let bytes_hashed = self.tcx.sess.perf_stats.incr_comp_bytes_hashed.get() + bytes_hashed;
         self.tcx.sess.perf_stats.incr_comp_bytes_hashed.set(bytes_hashed);
     }
 
@@ -167,18 +169,18 @@ impl<'a, 'tcx> HashItemsVisitor<'a, 'tcx> {
         // crate hash.
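(Editor's aside: the hunk that follows sorts the per-item hashes before folding them into the crate hash. A self-contained sketch of why that works, using std's `DefaultHasher` as a stand-in for the Blake2b-based `IchHasher` above, with hypothetical `(dep-node-hash, item-hash)` pairs:)

```rust
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

// Combine per-item hashes order-independently: sort the pairs first,
// then hash the sorted sequence.
fn combine(mut item_hashes: Vec<(u64, u64)>) -> u64 {
    item_hashes.sort(); // avoid artificial dependencies on item ordering
    let mut state = DefaultHasher::new();
    item_hashes.hash(&mut state);
    state.finish()
}

fn main() {
    let a = combine(vec![(1, 10), (2, 20)]);
    let b = combine(vec![(2, 20), (1, 10)]);
    assert_eq!(a, b); // same items, different order, same combined hash
}
```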
{ let def_path_hashes = &mut self.def_path_hashes; - let mut item_hashes: Vec<_> = - self.hashes.iter() - .map(|(item_dep_node, &item_hash)| { - // convert from a DepNode tp a - // DepNode where the u64 is the - // hash of the def-id's def-path: - let item_dep_node = - item_dep_node.map_def(|&did| Some(def_path_hashes.hash(did))) - .unwrap(); - (item_dep_node, item_hash) - }) - .collect(); + let mut item_hashes: Vec<_> = self.hashes + .iter() + .map(|(item_dep_node, &item_hash)| { + // convert from a DepNode tp a + // DepNode where the u64 is the + // hash of the def-id's def-path: + let item_dep_node = + item_dep_node.map_def(|&did| Some(def_path_hashes.hash(did))) + .unwrap(); + (item_dep_node, item_hash) + }) + .collect(); item_hashes.sort(); // avoid artificial dependencies on item ordering item_hashes.hash(&mut crate_state); } @@ -210,4 +212,3 @@ impl<'a, 'tcx> visit::Visitor<'tcx> for HashItemsVisitor<'a, 'tcx> { visit::walk_foreign_item(self, item); } } - diff --git a/src/librustc_incremental/calculate_svh/svh_visitor.rs b/src/librustc_incremental/calculate_svh/svh_visitor.rs index 51c894e1b78f0..12eee45439451 100644 --- a/src/librustc_incremental/calculate_svh/svh_visitor.rs +++ b/src/librustc_incremental/calculate_svh/svh_visitor.rs @@ -36,15 +36,13 @@ use super::def_path_hash::DefPathHashes; use super::caching_codemap_view::CachingCodemapView; use super::hasher::IchHasher; -const IGNORED_ATTRIBUTES: &'static [&'static str] = &[ - "cfg", - ::ATTR_IF_THIS_CHANGED, - ::ATTR_THEN_THIS_WOULD_NEED, - ::ATTR_DIRTY, - ::ATTR_CLEAN, - ::ATTR_DIRTY_METADATA, - ::ATTR_CLEAN_METADATA -]; +const IGNORED_ATTRIBUTES: &'static [&'static str] = &["cfg", + ::ATTR_IF_THIS_CHANGED, + ::ATTR_THEN_THIS_WOULD_NEED, + ::ATTR_DIRTY, + ::ATTR_CLEAN, + ::ATTR_DIRTY_METADATA, + ::ATTR_CLEAN_METADATA]; pub struct StrictVersionHashVisitor<'a, 'hash: 'a, 'tcx: 'hash> { pub tcx: TyCtxt<'hash, 'tcx, 'tcx>, @@ -149,7 +147,6 @@ impl<'a, 'hash, 'tcx> StrictVersionHashVisitor<'a, 'hash, 'tcx> { // and assigns each a distinct tag to feed into the hash computation. #[derive(Hash)] enum SawAbiComponent<'a> { - // FIXME (#14132): should we include (some function of) // ident.ctxt as well? SawIdent(token::InternedString), @@ -184,9 +181,7 @@ enum SawAbiComponent<'a> { SawAssocTypeBinding, SawAttribute(ast::AttrStyle), SawMacroDef, - SawSpan(Option<(&'a str, usize, BytePos)>, - Option<(&'a str, usize, BytePos)>, - SawSpanExpnKind), + SawSpan(Option<(&'a str, usize, BytePos)>, Option<(&'a str, usize, BytePos)>, SawSpanExpnKind), } /// SawExprComponent carries all of the information that we want @@ -208,7 +203,6 @@ enum SawAbiComponent<'a> { /// Ty, TraitItem and ImplItem follow the same methodology. #[derive(Hash)] enum SawExprComponent<'a> { - SawExprLoop(Option), SawExprField(token::InternedString), SawExprTupField(usize), @@ -243,35 +237,35 @@ enum SawExprComponent<'a> { fn saw_expr<'a>(node: &'a Expr_) -> SawExprComponent<'a> { match *node { - ExprBox(..) => SawExprBox, - ExprArray(..) => SawExprArray, - ExprCall(..) => SawExprCall, - ExprMethodCall(..) => SawExprMethodCall, - ExprTup(..) => SawExprTup, - ExprBinary(op, ..) => SawExprBinary(op.node), - ExprUnary(op, _) => SawExprUnary(op), - ExprLit(ref lit) => SawExprLit(lit.node.clone()), - ExprCast(..) => SawExprCast, - ExprType(..) => SawExprType, - ExprIf(..) => SawExprIf, - ExprWhile(..) => SawExprWhile, - ExprLoop(_, id) => SawExprLoop(id.map(|id| id.node.as_str())), - ExprMatch(..) => SawExprMatch, + ExprBox(..) => SawExprBox, + ExprArray(..) 
=> SawExprArray, + ExprCall(..) => SawExprCall, + ExprMethodCall(..) => SawExprMethodCall, + ExprTup(..) => SawExprTup, + ExprBinary(op, ..) => SawExprBinary(op.node), + ExprUnary(op, _) => SawExprUnary(op), + ExprLit(ref lit) => SawExprLit(lit.node.clone()), + ExprCast(..) => SawExprCast, + ExprType(..) => SawExprType, + ExprIf(..) => SawExprIf, + ExprWhile(..) => SawExprWhile, + ExprLoop(_, id) => SawExprLoop(id.map(|id| id.node.as_str())), + ExprMatch(..) => SawExprMatch, ExprClosure(cc, _, _, _) => SawExprClosure(cc), - ExprBlock(..) => SawExprBlock, - ExprAssign(..) => SawExprAssign, - ExprAssignOp(op, ..) => SawExprAssignOp(op.node), - ExprField(_, name) => SawExprField(name.node.as_str()), - ExprTupField(_, id) => SawExprTupField(id.node), - ExprIndex(..) => SawExprIndex, - ExprPath(ref qself, _) => SawExprPath(qself.as_ref().map(|q| q.position)), - ExprAddrOf(m, _) => SawExprAddrOf(m), - ExprBreak(id) => SawExprBreak(id.map(|id| id.node.as_str())), - ExprAgain(id) => SawExprAgain(id.map(|id| id.node.as_str())), - ExprRet(..) => SawExprRet, - ExprInlineAsm(ref a,..) => SawExprInlineAsm(a), - ExprStruct(..) => SawExprStruct, - ExprRepeat(..) => SawExprRepeat, + ExprBlock(..) => SawExprBlock, + ExprAssign(..) => SawExprAssign, + ExprAssignOp(op, ..) => SawExprAssignOp(op.node), + ExprField(_, name) => SawExprField(name.node.as_str()), + ExprTupField(_, id) => SawExprTupField(id.node), + ExprIndex(..) => SawExprIndex, + ExprPath(ref qself, _) => SawExprPath(qself.as_ref().map(|q| q.position)), + ExprAddrOf(m, _) => SawExprAddrOf(m), + ExprBreak(id) => SawExprBreak(id.map(|id| id.node.as_str())), + ExprAgain(id) => SawExprAgain(id.map(|id| id.node.as_str())), + ExprRet(..) => SawExprRet, + ExprInlineAsm(ref a, ..) => SawExprInlineAsm(a), + ExprStruct(..) => SawExprStruct, + ExprRepeat(..) => SawExprRepeat, } } @@ -290,7 +284,7 @@ enum SawItemComponent { SawItemUnion, SawItemTrait(Unsafety), SawItemDefaultImpl(Unsafety), - SawItemImpl(Unsafety, ImplPolarity) + SawItemImpl(Unsafety, ImplPolarity), } fn saw_item(node: &Item_) -> SawItemComponent { @@ -298,7 +292,7 @@ fn saw_item(node: &Item_) -> SawItemComponent { ItemExternCrate(..) => SawItemExternCrate, ItemUse(..) => SawItemUse, ItemStatic(_, mutability, _) => SawItemStatic(mutability), - ItemConst(..) =>SawItemConst, + ItemConst(..) => SawItemConst, ItemFn(_, unsafety, constness, abi, _, _) => SawItemFn(unsafety, constness, abi), ItemMod(..) => SawItemMod, ItemForeignMod(..) => SawItemForeignMod, @@ -308,7 +302,7 @@ fn saw_item(node: &Item_) -> SawItemComponent { ItemUnion(..) => SawItemUnion, ItemTrait(unsafety, ..) => SawItemTrait(unsafety), ItemDefaultImpl(unsafety, _) => SawItemDefaultImpl(unsafety), - ItemImpl(unsafety, implpolarity, ..) => SawItemImpl(unsafety, implpolarity) + ItemImpl(unsafety, implpolarity, ..) => SawItemImpl(unsafety, implpolarity), } } @@ -324,7 +318,7 @@ enum SawPatComponent { SawPatRef(Mutability), SawPatLit, SawPatRange, - SawPatSlice + SawPatSlice, } fn saw_pat(node: &PatKind) -> SawPatComponent { @@ -339,7 +333,7 @@ fn saw_pat(node: &PatKind) -> SawPatComponent { PatKind::Ref(_, mutability) => SawPatRef(mutability), PatKind::Lit(..) => SawPatLit, PatKind::Range(..) => SawPatRange, - PatKind::Slice(..) => SawPatSlice + PatKind::Slice(..) => SawPatSlice, } } @@ -357,24 +351,24 @@ enum SawTyComponent { SawTyPolyTraitRef, SawTyImplTrait, SawTyTypeof, - SawTyInfer + SawTyInfer, } fn saw_ty(node: &Ty_) -> SawTyComponent { match *node { - TySlice(..) => SawTySlice, - TyArray(..) 
=> SawTyArray, - TyPtr(ref mty) => SawTyPtr(mty.mutbl), - TyRptr(_, ref mty) => SawTyRptr(mty.mutbl), - TyBareFn(ref barefnty) => SawTyBareFn(barefnty.unsafety, barefnty.abi), - TyNever => SawTyNever, - TyTup(..) => SawTyTup, - TyPath(..) => SawTyPath, - TyObjectSum(..) => SawTyObjectSum, - TyPolyTraitRef(..) => SawTyPolyTraitRef, - TyImplTrait(..) => SawTyImplTrait, - TyTypeof(..) => SawTyTypeof, - TyInfer => SawTyInfer + TySlice(..) => SawTySlice, + TyArray(..) => SawTyArray, + TyPtr(ref mty) => SawTyPtr(mty.mutbl), + TyRptr(_, ref mty) => SawTyRptr(mty.mutbl), + TyBareFn(ref barefnty) => SawTyBareFn(barefnty.unsafety, barefnty.abi), + TyNever => SawTyNever, + TyTup(..) => SawTyTup, + TyPath(..) => SawTyPath, + TyObjectSum(..) => SawTyObjectSum, + TyPolyTraitRef(..) => SawTyPolyTraitRef, + TyImplTrait(..) => SawTyImplTrait, + TyTypeof(..) => SawTyTypeof, + TyInfer => SawTyInfer, } } @@ -382,24 +376,26 @@ fn saw_ty(node: &Ty_) -> SawTyComponent { enum SawTraitOrImplItemComponent { SawTraitOrImplItemConst, SawTraitOrImplItemMethod(Unsafety, Constness, Abi), - SawTraitOrImplItemType + SawTraitOrImplItemType, } fn saw_trait_item(ti: &TraitItem_) -> SawTraitOrImplItemComponent { match *ti { ConstTraitItem(..) => SawTraitOrImplItemConst, - MethodTraitItem(ref sig, _) => - SawTraitOrImplItemMethod(sig.unsafety, sig.constness, sig.abi), - TypeTraitItem(..) => SawTraitOrImplItemType + MethodTraitItem(ref sig, _) => { + SawTraitOrImplItemMethod(sig.unsafety, sig.constness, sig.abi) + } + TypeTraitItem(..) => SawTraitOrImplItemType, } } fn saw_impl_item(ii: &ImplItemKind) -> SawTraitOrImplItemComponent { match *ii { ImplItemKind::Const(..) => SawTraitOrImplItemConst, - ImplItemKind::Method(ref sig, _) => - SawTraitOrImplItemMethod(sig.unsafety, sig.constness, sig.abi), - ImplItemKind::Type(..) => SawTraitOrImplItemType + ImplItemKind::Method(ref sig, _) => { + SawTraitOrImplItemMethod(sig.unsafety, sig.constness, sig.abi) + } + ImplItemKind::Type(..) => SawTraitOrImplItemType, } } @@ -444,10 +440,7 @@ impl<'a, 'hash, 'tcx> visit::Visitor<'tcx> for StrictVersionHashVisitor<'a, 'has visit::walk_struct_def(self, s); } - fn visit_variant(&mut self, - v: &'tcx Variant, - g: &'tcx Generics, - item_id: NodeId) { + fn visit_variant(&mut self, v: &'tcx Variant, g: &'tcx Generics, item_id: NodeId) { debug!("visit_variant: st={:?}", self.st); SawVariant.hash(self.st); hash_attrs!(self, &v.node.attrs); @@ -527,7 +520,8 @@ impl<'a, 'hash, 'tcx> visit::Visitor<'tcx> for StrictVersionHashVisitor<'a, 'has fn visit_mod(&mut self, m: &'tcx Mod, _s: Span, n: NodeId) { debug!("visit_mod: st={:?}", self.st); - SawMod.hash(self.st); visit::walk_mod(self, m, n) + SawMod.hash(self.st); + visit::walk_mod(self, m, n) } fn visit_ty(&mut self, t: &'tcx Ty) { @@ -705,17 +699,17 @@ impl<'a, 'hash, 'tcx> StrictVersionHashVisitor<'a, 'hash, 'tcx> { } if let Some(traits) = self.tcx.trait_map.get(&id) { - debug!("hash_resolve: id={:?} traits={:?} st={:?}", id, traits, self.st); + debug!("hash_resolve: id={:?} traits={:?} st={:?}", + id, + traits, + self.st); traits.len().hash(self.st); // The ordering of the candidates is not fixed. So we hash // the def-ids and then sort them and hash the collection. 
- let mut candidates: Vec<_> = - traits.iter() - .map(|&TraitCandidate { def_id, import_id: _ }| { - self.compute_def_id_hash(def_id) - }) - .collect(); + let mut candidates: Vec<_> = traits.iter() + .map(|&TraitCandidate { def_id, import_id: _ }| self.compute_def_id_hash(def_id)) + .collect(); candidates.sort(); candidates.hash(self.st); } @@ -825,8 +819,7 @@ impl<'a, 'hash, 'tcx> StrictVersionHashVisitor<'a, 'hash, 'tcx> { for i in indices { let attr = &attributes[i].node; - if !attr.is_sugared_doc && - !IGNORED_ATTRIBUTES.contains(&&*attr.value.name()) { + if !attr.is_sugared_doc && !IGNORED_ATTRIBUTES.contains(&&*attr.value.name()) { SawAttribute(attr.style).hash(self.st); self.hash_meta_item(&*attr.value); } @@ -838,7 +831,7 @@ impl<'a, 'hash, 'tcx> StrictVersionHashVisitor<'a, 'hash, 'tcx> { F: Fn(&T) -> K { let mut indices = Vec::with_capacity(items.len()); - indices.extend(0 .. items.len()); + indices.extend(0..items.len()); indices.sort_by_key(|index| get_key(&items[*index])); indices } diff --git a/src/librustc_incremental/ich/fingerprint.rs b/src/librustc_incremental/ich/fingerprint.rs index 005ac3896ce4c..1270319e1c805 100644 --- a/src/librustc_incremental/ich/fingerprint.rs +++ b/src/librustc_incremental/ich/fingerprint.rs @@ -23,8 +23,8 @@ impl Fingerprint { pub fn from_smaller_hash(hash: u64) -> Fingerprint { let mut result = Fingerprint::zero(); - result.0[0] = (hash >> 0) as u8; - result.0[1] = (hash >> 8) as u8; + result.0[0] = (hash >> 0) as u8; + result.0[1] = (hash >> 8) as u8; result.0[2] = (hash >> 16) as u8; result.0[3] = (hash >> 24) as u8; result.0[4] = (hash >> 32) as u8; @@ -35,14 +35,9 @@ impl Fingerprint { } pub fn to_smaller_hash(&self) -> u64 { - ((self.0[0] as u64) << 0) | - ((self.0[1] as u64) << 8) | - ((self.0[2] as u64) << 16) | - ((self.0[3] as u64) << 24) | - ((self.0[4] as u64) << 32) | - ((self.0[5] as u64) << 40) | - ((self.0[6] as u64) << 48) | - ((self.0[7] as u64) << 56) + ((self.0[0] as u64) << 0) | ((self.0[1] as u64) << 8) | ((self.0[2] as u64) << 16) | + ((self.0[3] as u64) << 24) | ((self.0[4] as u64) << 32) | + ((self.0[5] as u64) << 40) | ((self.0[6] as u64) << 48) | ((self.0[7] as u64) << 56) } } @@ -69,7 +64,7 @@ impl Decodable for Fingerprint { impl ::std::fmt::Display for Fingerprint { fn fmt(&self, formatter: &mut ::std::fmt::Formatter) -> Result<(), ::std::fmt::Error> { - for i in 0 .. self.0.len() { + for i in 0..self.0.len() { if i > 0 { write!(formatter, "::")?; } diff --git a/src/librustc_incremental/lib.rs b/src/librustc_incremental/lib.rs index 577e50699bffd..fe60fa5b531f5 100644 --- a/src/librustc_incremental/lib.rs +++ b/src/librustc_incremental/lib.rs @@ -27,12 +27,15 @@ #![feature(core_intrinsics)] extern crate graphviz; -#[macro_use] extern crate rustc; +#[macro_use] +extern crate rustc; extern crate rustc_data_structures; extern crate serialize as rustc_serialize; -#[macro_use] extern crate log; -#[macro_use] extern crate syntax; +#[macro_use] +extern crate log; +#[macro_use] +extern crate syntax; extern crate syntax_pos; const ATTR_DIRTY: &'static str = "rustc_dirty"; diff --git a/src/librustc_incremental/persist/data.rs b/src/librustc_incremental/persist/data.rs index 734ffe6a94412..db9ea093be53f 100644 --- a/src/librustc_incremental/persist/data.rs +++ b/src/librustc_incremental/persist/data.rs @@ -106,7 +106,7 @@ pub struct SerializedMetadataHashes { /// is only populated if -Z query-dep-graph is specified. It will be /// empty otherwise. Importing crates are perfectly happy with just having /// the DefIndex. 
-    pub index_map: FnvHashMap<u32, DefPathIndex>
+    pub index_map: FnvHashMap<u32, DefPathIndex>,
 }
 
 /// The hash for some metadata that (when saving) will be exported
diff --git a/src/librustc_incremental/persist/directory.rs b/src/librustc_incremental/persist/directory.rs
index d238121872be6..2b24f92b26122 100644
--- a/src/librustc_incremental/persist/directory.rs
+++ b/src/librustc_incremental/persist/directory.rs
@@ -26,7 +26,7 @@ use std::collections::HashMap;
 #[derive(Copy, Clone, Debug, PartialOrd, Ord, Hash, PartialEq, Eq, RustcEncodable, RustcDecodable)]
 pub struct DefPathIndex {
-    index: u32
+    index: u32,
 }
 
 #[derive(RustcEncodable, RustcDecodable)]
@@ -49,14 +49,19 @@ pub struct CrateInfo {
 
 impl DefIdDirectory {
     pub fn new(krates: Vec<CrateInfo>) -> DefIdDirectory {
-        DefIdDirectory { paths: vec![], krates: krates }
+        DefIdDirectory {
+            paths: vec![],
+            krates: krates,
+        }
     }
 
     fn max_current_crate(&self, tcx: TyCtxt) -> CrateNum {
-        tcx.sess.cstore.crates()
-                       .into_iter()
-                       .max()
-                       .unwrap_or(LOCAL_CRATE)
+        tcx.sess
+            .cstore
+            .crates()
+            .into_iter()
+            .max()
+            .unwrap_or(LOCAL_CRATE)
     }
 
     /// Returns a string form for `index`; useful for debugging
@@ -72,7 +77,8 @@ impl DefIdDirectory {
     pub fn krate_still_valid(&self,
                              tcx: TyCtxt,
                              max_current_crate: CrateNum,
-                             krate: CrateNum) -> bool {
+                             krate: CrateNum)
+                             -> bool {
         // Check that the crate-number still matches. For now, if it
         // doesn't, just return None. We could do better, such as
         // finding the new number.
@@ -96,35 +102,33 @@ impl DefIdDirectory {
             format!("{}/{}", name, disambiguator)
         }
 
-        let new_krates: HashMap<_, _> =
-            once(LOCAL_CRATE)
+        let new_krates: HashMap<_, _> = once(LOCAL_CRATE)
             .chain(tcx.sess.cstore.crates())
-            .map(|krate| (make_key(&tcx.crate_name(krate),
-                                   &tcx.crate_disambiguator(krate)), krate))
+            .map(|krate| (make_key(&tcx.crate_name(krate), &tcx.crate_disambiguator(krate)), krate))
             .collect();
 
-        let ids = self.paths.iter()
-                            .map(|path| {
-                                let old_krate_id = path.krate.as_usize();
-                                assert!(old_krate_id < self.krates.len());
-                                let old_crate_info = &self.krates[old_krate_id];
-                                let old_crate_key = make_key(&old_crate_info.name,
-                                                             &old_crate_info.disambiguator);
-                                if let Some(&new_crate_key) = new_krates.get(&old_crate_key) {
-                                    tcx.retrace_path(new_crate_key, &path.data)
-                                } else {
-                                    debug!("crate {:?} no longer exists", old_crate_key);
-                                    None
-                                }
-                            })
-                            .collect();
+        let ids = self.paths
+            .iter()
+            .map(|path| {
+                let old_krate_id = path.krate.as_usize();
+                assert!(old_krate_id < self.krates.len());
+                let old_crate_info = &self.krates[old_krate_id];
+                let old_crate_key = make_key(&old_crate_info.name, &old_crate_info.disambiguator);
+                if let Some(&new_crate_key) = new_krates.get(&old_crate_key) {
+                    tcx.retrace_path(new_crate_key, &path.data)
+                } else {
+                    debug!("crate {:?} no longer exists", old_crate_key);
+                    None
+                }
+            })
+            .collect();
         RetracedDefIdDirectory { ids: ids }
     }
 }
 
 #[derive(Debug, RustcEncodable, RustcDecodable)]
 pub struct RetracedDefIdDirectory {
-    ids: Vec<Option<DefId>>
+    ids: Vec<Option<DefId>>,
 }
 
 impl RetracedDefIdDirectory {
@@ -137,22 +141,21 @@ impl RetracedDefIdDirectory {
     }
 }
 
-pub struct DefIdDirectoryBuilder<'a,'tcx:'a> {
+pub struct DefIdDirectoryBuilder<'a, 'tcx: 'a> {
     tcx: TyCtxt<'a, 'tcx, 'tcx>,
     hash: DefIdMap<DefPathIndex>,
     directory: DefIdDirectory,
 }
 
-impl<'a,'tcx> DefIdDirectoryBuilder<'a,'tcx> {
+impl<'a, 'tcx> DefIdDirectoryBuilder<'a, 'tcx> {
     pub fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> DefIdDirectoryBuilder<'a, 'tcx> {
-        let mut krates: Vec<_> =
-            once(LOCAL_CRATE)
+        let mut krates: Vec<_> = once(LOCAL_CRATE)
             .chain(tcx.sess.cstore.crates())
             .map(|krate| {
                CrateInfo {
                    krate: krate,
                    name: tcx.crate_name(krate).to_string(),
-                   disambiguator: tcx.crate_disambiguator(krate).to_string()
+                    disambiguator: tcx.crate_disambiguator(krate).to_string(),
                }
            })
            .collect();
@@ -176,14 +179,15 @@ impl<'a,'tcx> DefIdDirectoryBuilder<'a,'tcx> {
         debug!("DefIdDirectoryBuilder: def_id={:?}", def_id);
         let tcx = self.tcx;
         let paths = &mut self.directory.paths;
-        self.hash.entry(def_id)
-                 .or_insert_with(|| {
-                     let def_path = tcx.def_path(def_id);
-                     let index = paths.len() as u32;
-                     paths.push(def_path);
-                     DefPathIndex { index: index }
-                 })
-                 .clone()
+        self.hash
+            .entry(def_id)
+            .or_insert_with(|| {
+                let def_path = tcx.def_path(def_id);
+                let index = paths.len() as u32;
+                paths.push(def_path);
+                DefPathIndex { index: index }
+            })
+            .clone()
     }
 
     pub fn lookup_def_path(&self, id: DefPathIndex) -> &DefPath {
@@ -202,7 +206,7 @@ impl Debug for DefIdDirectory {
     fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> {
         fmt.debug_list()
-           .entries(self.paths.iter().enumerate())
-           .finish()
+            .entries(self.paths.iter().enumerate())
+            .finish()
     }
 }
diff --git a/src/librustc_incremental/persist/dirty_clean.rs b/src/librustc_incremental/persist/dirty_clean.rs
index 94478f6603a6e..04c181bb08733 100644
--- a/src/librustc_incremental/persist/dirty_clean.rs
+++ b/src/librustc_incremental/persist/dirty_clean.rs
@@ -67,10 +67,9 @@ pub fn check_dirty_clean_annotations<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
     }
 
     let _ignore = tcx.dep_graph.in_ignore();
-    let dirty_inputs: FnvHashSet<DepNode<DefId>> =
-        dirty_inputs.iter()
-                    .filter_map(|d| retraced.map(d))
-                    .collect();
+    let dirty_inputs: FnvHashSet<DepNode<DefId>> = dirty_inputs.iter()
+        .filter_map(|d| retraced.map(d))
+        .collect();
     let query = tcx.dep_graph.query();
     debug!("query-nodes: {:?}", query.nodes());
     let krate = tcx.map.krate();
@@ -81,14 +80,13 @@ pub fn check_dirty_clean_annotations<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
     });
 }
 
-pub struct DirtyCleanVisitor<'a, 'tcx:'a> {
+pub struct DirtyCleanVisitor<'a, 'tcx: 'a> {
     tcx: TyCtxt<'a, 'tcx, 'tcx>,
     query: &'a DepGraphQuery<DefId>,
     dirty_inputs: FnvHashSet<DepNode<DefId>>,
 }
 
 impl<'a, 'tcx> DirtyCleanVisitor<'a, 'tcx> {
-
     fn dep_node(&self, attr: &Attribute, def_id: DefId) -> DepNode<DefId> {
         for item in attr.meta_item_list().unwrap_or(&[]) {
             if item.check_name(LABEL) {
@@ -115,16 +113,15 @@ impl<'a, 'tcx> DirtyCleanVisitor<'a, 'tcx> {
         debug!("assert_dirty({:?})", dep_node);
 
         match dep_node {
-            DepNode::Krate |
-            DepNode::Hir(_) => {
+            DepNode::Krate | DepNode::Hir(_) => {
                 // HIR nodes are inputs, so if we are asserting that the HIR node is
                 // dirty, we check the dirty input set.
                 if !self.dirty_inputs.contains(&dep_node) {
                     let dep_node_str = self.dep_node_str(&dep_node);
-                    self.tcx.sess.span_err(
-                        item.span,
-                        &format!("`{:?}` not found in dirty set, but should be dirty",
-                                 dep_node_str));
+                    self.tcx.sess.span_err(item.span,
+                                           &format!("`{:?}` not found in dirty set, but should \
+                                                     be dirty",
+                                                    dep_node_str));
                 }
             }
             _ => {
@@ -132,9 +129,10 @@ impl<'a, 'tcx> DirtyCleanVisitor<'a, 'tcx> {
                 // the dep-graph contains the node.
                 if self.query.contains_node(&dep_node) {
                     let dep_node_str = self.dep_node_str(&dep_node);
-                    self.tcx.sess.span_err(
-                        item.span,
-                        &format!("`{:?}` found in dep graph, but should be dirty", dep_node_str));
+                    self.tcx.sess.span_err(item.span,
+                                           &format!("`{:?}` found in dep graph, but should be \
+                                                     dirty",
+                                                    dep_node_str));
                 }
             }
         }
@@ -144,25 +142,24 @@ impl<'a, 'tcx> DirtyCleanVisitor<'a, 'tcx> {
         debug!("assert_clean({:?})", dep_node);
 
         match dep_node {
-            DepNode::Krate |
-            DepNode::Hir(_) => {
+            DepNode::Krate | DepNode::Hir(_) => {
                 // For HIR nodes, check the inputs.
                 if self.dirty_inputs.contains(&dep_node) {
                     let dep_node_str = self.dep_node_str(&dep_node);
-                    self.tcx.sess.span_err(
-                        item.span,
-                        &format!("`{:?}` found in dirty-node set, but should be clean",
-                                 dep_node_str));
+                    self.tcx.sess.span_err(item.span,
+                                           &format!("`{:?}` found in dirty-node set, but should \
+                                                     be clean",
+                                                    dep_node_str));
                 }
             }
             _ => {
                 // Otherwise, check if the dep-node exists.
                 if !self.query.contains_node(&dep_node) {
                     let dep_node_str = self.dep_node_str(&dep_node);
-                    self.tcx.sess.span_err(
-                        item.span,
-                        &format!("`{:?}` not found in dep graph, but should be clean",
-                                 dep_node_str));
+                    self.tcx.sess.span_err(item.span,
+                                           &format!("`{:?}` not found in dep graph, but should \
+                                                     be clean",
+                                                    dep_node_str));
                 }
             }
         }
@@ -187,13 +184,14 @@ impl<'a, 'tcx> Visitor<'tcx> for DirtyCleanVisitor<'a, 'tcx> {
 }
 
 pub fn check_dirty_clean_metadata<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                                  prev_metadata_hashes: &FnvHashMap<DefId, Fingerprint>,
-                                  current_metadata_hashes: &FnvHashMap<DefId, Fingerprint>) {
+                                            prev_metadata_hashes: &FnvHashMap<DefId, Fingerprint>,
+                                            current_metadata_hashes: &FnvHashMap<DefId, Fingerprint>) {
     if !tcx.sess.opts.debugging_opts.query_dep_graph {
         return;
     }
 
-    tcx.dep_graph.with_ignore(||{
+    tcx.dep_graph.with_ignore(|| {
         let krate = tcx.map.krate();
         krate.visit_all_items(&mut DirtyCleanMetadataVisitor {
             tcx: tcx,
@@ -203,7 +201,7 @@ pub fn check_dirty_clean_metadata<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
     });
 }
 
-pub struct DirtyCleanMetadataVisitor<'a, 'tcx:'a, 'm> {
+pub struct DirtyCleanMetadataVisitor<'a, 'tcx: 'a, 'm> {
     tcx: TyCtxt<'a, 'tcx, 'tcx>,
     prev_metadata_hashes: &'m FnvHashMap<DefId, Fingerprint>,
     current_metadata_hashes: &'m FnvHashMap<DefId, Fingerprint>,
@@ -228,7 +226,6 @@ impl<'a, 'tcx, 'm> Visitor<'tcx> for DirtyCleanMetadataVisitor<'a, 'tcx, 'm> {
 }
 
 impl<'a, 'tcx, 'm> DirtyCleanMetadataVisitor<'a, 'tcx, 'm> {
-
     fn assert_state(&self, should_be_clean: bool, def_id: DefId, span: Span) {
         let item_path = self.tcx.item_path_str(def_id);
         debug!("assert_state({})", item_path);
@@ -237,24 +234,23 @@ impl<'a, 'tcx, 'm> DirtyCleanMetadataVisitor<'a, 'tcx, 'm> {
             let hashes_are_equal = prev_hash == self.current_metadata_hashes[&def_id];
 
             if should_be_clean && !hashes_are_equal {
-                self.tcx.sess.span_err(
-                    span,
-                    &format!("Metadata hash of `{}` is dirty, but should be clean",
-                             item_path));
+                self.tcx.sess.span_err(span,
+                                       &format!("Metadata hash of `{}` is dirty, but should be \
+                                                 clean",
+                                                item_path));
             }
 
             let should_be_dirty = !should_be_clean;
             if should_be_dirty && hashes_are_equal {
-                self.tcx.sess.span_err(
-                    span,
-                    &format!("Metadata hash of `{}` is clean, but should be dirty",
-                             item_path));
+                self.tcx.sess.span_err(span,
+                                       &format!("Metadata hash of `{}` is clean, but should be \
+                                                 dirty",
+                                                item_path));
             }
         } else {
-            self.tcx.sess.span_err(
-                span,
-                &format!("Could not find previous metadata hash of `{}`",
-                         item_path));
+            self.tcx.sess.span_err(span,
+                                   &format!("Could not find previous metadata hash of `{}`",
+                                            item_path));
         }
     }
 }
@@ -280,9 +276,7 @@ fn check_config(tcx: TyCtxt, attr: &ast::Attribute) -> bool {
         }
     }
 
-    tcx.sess.span_fatal(
-        attr.span,
-        &format!("no cfg attribute"));
+    tcx.sess.span_fatal(attr.span, &format!("no cfg attribute"));
 }
 
 fn expect_associated_value(tcx: TyCtxt, item: &NestedMetaItem) -> InternedString {
diff --git a/src/librustc_incremental/persist/file_format.rs b/src/librustc_incremental/persist/file_format.rs
index 7c2b69e762b93..12598ead7762b 100644
--- a/src/librustc_incremental/persist/file_format.rs
+++ b/src/librustc_incremental/persist/file_format.rs
@@ -39,8 +39,7 @@ const RUSTC_VERSION: Option<&'static str> = option_env!("CFG_VERSION");
 pub fn write_file_header<W: io::Write>(stream: &mut W) -> io::Result<()> {
     stream.write_all(FILE_MAGIC)?;
-    stream.write_all(&[(HEADER_FORMAT_VERSION >> 0) as u8,
-                       (HEADER_FORMAT_VERSION >> 8) as u8])?;
+    stream.write_all(&[(HEADER_FORMAT_VERSION >> 0) as u8, (HEADER_FORMAT_VERSION >> 8) as u8])?;
 
     let rustc_version = rustc_version();
     assert_eq!(rustc_version.len(), (rustc_version.len() as u8) as usize);
@@ -72,7 +71,7 @@ pub fn read_file(path: &Path) -> io::Result<Option<Vec<u8>>> {
         let mut file_magic = [0u8; 4];
         file.read_exact(&mut file_magic)?;
         if file_magic != FILE_MAGIC {
-            return Ok(None)
+            return Ok(None);
         }
     }
 
@@ -85,7 +84,7 @@ pub fn read_file(path: &Path) -> io::Result<Option<Vec<u8>>> {
                                     ((header_format_version[1] as u16) << 8);
 
         if header_format_version != HEADER_FORMAT_VERSION {
-            return Ok(None)
+            return Ok(None);
         }
     }
 
@@ -112,11 +111,10 @@ pub fn read_file(path: &Path) -> io::Result<Option<Vec<u8>>> {
 fn rustc_version() -> String {
     if nightly_options::is_nightly_build() {
         if let Some(val) = env::var_os("RUSTC_FORCE_INCR_COMP_ARTIFACT_HEADER") {
-            return val.to_string_lossy().into_owned()
+            return val.to_string_lossy().into_owned();
         }
     }
 
-    RUSTC_VERSION.expect("Cannot use rustc without explicit version for \
-                          incremental compilation")
-                 .to_string()
+    RUSTC_VERSION.expect("Cannot use rustc without explicit version for incremental compilation")
+        .to_string()
 }
diff --git a/src/librustc_incremental/persist/fs.rs b/src/librustc_incremental/persist/fs.rs
index ff7c3d0512e4f..7f6c1cd34cfdd 100644
--- a/src/librustc_incremental/persist/fs.rs
+++ b/src/librustc_incremental/persist/fs.rs
@@ -158,16 +158,16 @@ pub fn lock_file_path(session_dir: &Path) -> PathBuf {
     assert_no_characters_lost(&directory_name);
 
     let dash_indices: Vec<_> = directory_name.match_indices("-")
-                                             .map(|(idx, _)| idx)
-                                             .collect();
+        .map(|(idx, _)| idx)
+        .collect();
     if dash_indices.len() != 3 {
         bug!("Encountered incremental compilation session directory with \
               malformed name: {}",
              session_dir.display())
     }
 
-    crate_dir.join(&directory_name[0 .. dash_indices[2]])
-             .with_extension(&LOCK_FILE_EXT[1..])
+    crate_dir.join(&directory_name[0..dash_indices[2]])
+        .with_extension(&LOCK_FILE_EXT[1..])
 }
 
 pub fn in_incr_comp_dir_sess(sess: &Session, file_name: &str) -> PathBuf {
@@ -214,8 +214,7 @@ pub fn prepare_session_directory(tcx: TyCtxt) -> Result<bool, ()> {
 
     // Find a suitable source directory to copy from. Ignore those that we
     // have already tried before.
- let source_directory = find_source_directory(&crate_dir, - &source_directories_already_tried); + let source_directory = find_source_directory(&crate_dir, &source_directories_already_tried); let source_directory = if let Some(dir) = source_directory { dir @@ -225,7 +224,7 @@ pub fn prepare_session_directory(tcx: TyCtxt) -> Result { directory."); tcx.sess.init_incr_comp_session(session_dir, directory_lock); - return Ok(false) + return Ok(false); }; debug!("attempting to copy data from source: {}", @@ -234,7 +233,8 @@ pub fn prepare_session_directory(tcx: TyCtxt) -> Result { let print_file_copy_stats = tcx.sess.opts.debugging_opts.incremental_info; // Try copying over all files from the source directory - if let Ok(allows_links) = copy_files(&session_dir, &source_directory, + if let Ok(allows_links) = copy_files(&session_dir, + &source_directory, print_file_copy_stats) { debug!("successfully copied data from: {}", source_directory.display()); @@ -245,14 +245,13 @@ pub fn prepare_session_directory(tcx: TyCtxt) -> Result { instead. Consider moving the cache \ directory to a file system which supports \ hard linking in session dir `{}`", - session_dir.display()) - ); + session_dir.display())); } tcx.sess.init_incr_comp_session(session_dir, directory_lock); - return Ok(true) + return Ok(true); } else { - debug!("copying failed - trying next directory"); + debug!("copying failed - trying next directory"); // Something went wrong while trying to copy/link files from the // source directory. Try again with a different one. @@ -290,7 +289,7 @@ pub fn finalize_session_directory(sess: &Session, svh: Svh) { // publish this session directory. Rather, we'll just delete it. debug!("finalize_session_directory() - invalidating session directory: {}", - incr_comp_session_dir.display()); + incr_comp_session_dir.display()); if let Err(err) = safe_remove_dir_all(&*incr_comp_session_dir) { sess.warn(&format!("Error deleting incremental compilation \ @@ -305,18 +304,18 @@ pub fn finalize_session_directory(sess: &Session, svh: Svh) { } debug!("finalize_session_directory() - session directory: {}", - incr_comp_session_dir.display()); + incr_comp_session_dir.display()); let old_sub_dir_name = incr_comp_session_dir.file_name() - .unwrap() - .to_string_lossy(); + .unwrap() + .to_string_lossy(); assert_no_characters_lost(&old_sub_dir_name); // Keep the 's-{timestamp}-{random-number}' prefix, but replace the // '-working' part with the SVH of the crate let dash_indices: Vec<_> = old_sub_dir_name.match_indices("-") - .map(|(idx, _)| idx) - .collect(); + .map(|(idx, _)| idx) + .collect(); if dash_indices.len() != 3 { bug!("Encountered incremental compilation session directory with \ malformed name: {}", @@ -324,14 +323,15 @@ pub fn finalize_session_directory(sess: &Session, svh: Svh) { } // State: "s-{timestamp}-{random-number}-" - let mut new_sub_dir_name = String::from(&old_sub_dir_name[.. dash_indices[2] + 1]); + let mut new_sub_dir_name = String::from(&old_sub_dir_name[..dash_indices[2] + 1]); // Append the svh new_sub_dir_name.push_str(&encode_base_36(svh.as_u64())); // Create the full path let new_path = incr_comp_session_dir.parent().unwrap().join(new_sub_dir_name); - debug!("finalize_session_directory() - new path: {}", new_path.display()); + debug!("finalize_session_directory() - new path: {}", + new_path.display()); match std_fs::rename(&*incr_comp_session_dir, &new_path) { Ok(_) => { @@ -373,18 +373,19 @@ fn copy_files(target_dir: &Path, // nobody deletes it out from under us while we are reading from it. 
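(Editor's aside: `copy_files` below populates the new session directory via rustc's `fs_util::link_or_copy`. A minimal std-only sketch of the same idea, with assumed names rather than the actual helper's API:)

```rust
use std::fs;
use std::io;
use std::path::Path;

// Prefer a cheap hard link; fall back to a byte copy when linking fails
// (e.g. the cache lives on a different file system). Returns whether the
// file ended up hard-linked.
fn link_or_copy(src: &Path, dst: &Path) -> io::Result<bool> {
    match fs::hard_link(src, dst) {
        Ok(()) => Ok(true), // hard-linked
        Err(_) => {
            fs::copy(src, dst)?; // fell back to copying
            Ok(false)
        }
    }
}
```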
    let lock_file_path = lock_file_path(source_dir);
     let _lock = if let Ok(lock) = flock::Lock::new(&lock_file_path,
-                                                   false, // don't wait,
-                                                   false, // don't create
-                                                   false) { // not exclusive
+                                                   false, // don't wait,
+                                                   false, // don't create
+                                                   false) {
+        // not exclusive
         lock
     } else {
         // Could not acquire the lock, don't try to copy from here
-        return Err(())
+        return Err(());
     };
 
     let source_dir_iterator = match source_dir.read_dir() {
         Ok(it) => it,
-        Err(_) => return Err(())
+        Err(_) => return Err(()),
     };
 
     let mut files_linked = 0;
@@ -400,24 +401,20 @@ fn copy_files(target_dir: &Path,
                 debug!("copying into session dir: {}", source_path.display());
                 match fs_util::link_or_copy(source_path, target_file_path) {
-                    Ok(fs_util::LinkOrCopy::Link) => {
-                        files_linked += 1
-                    }
-                    Ok(fs_util::LinkOrCopy::Copy) => {
-                        files_copied += 1
-                    }
-                    Err(_) => return Err(())
+                    Ok(fs_util::LinkOrCopy::Link) => files_linked += 1,
+                    Ok(fs_util::LinkOrCopy::Copy) => files_copied += 1,
+                    Err(_) => return Err(()),
                 }
             }
-            Err(_) => {
-                return Err(())
-            }
+            Err(_) => return Err(()),
         }
     }
 
     if print_stats_on_success {
-        println!("incr. comp. session directory: {} files hard-linked", files_linked);
-        println!("incr. comp. session directory: {} files copied", files_copied);
+        println!("incr. comp. session directory: {} files hard-linked",
+                 files_linked);
+        println!("incr. comp. session directory: {} files copied",
+                 files_copied);
     }
 
     Ok(files_linked > 0 || files_copied == 0)
@@ -429,18 +426,21 @@ fn generate_session_dir_path(crate_dir: &Path) -> PathBuf {
     let timestamp = timestamp_to_string(SystemTime::now());
     debug!("generate_session_dir_path: timestamp = {}", timestamp);
     let random_number = thread_rng().next_u32();
-    debug!("generate_session_dir_path: random_number = {}", random_number);
+    debug!("generate_session_dir_path: random_number = {}",
+           random_number);
 
     let directory_name = format!("s-{}-{}-working",
-                                  timestamp,
-                                  encode_base_36(random_number as u64));
-    debug!("generate_session_dir_path: directory_name = {}", directory_name);
+                                 timestamp,
+                                 encode_base_36(random_number as u64));
+    debug!("generate_session_dir_path: directory_name = {}",
+           directory_name);
     let directory_path = crate_dir.join(directory_name);
-    debug!("generate_session_dir_path: directory_path = {}", directory_path.display());
+    debug!("generate_session_dir_path: directory_path = {}",
+           directory_path.display());
     directory_path
 }
 
-fn create_dir(sess: &Session, path: &Path, dir_tag: &str) -> Result<(),()> {
+fn create_dir(sess: &Session, path: &Path, dir_tag: &str) -> Result<(), ()> {
     match fs_util::create_dir_racy(path) {
         Ok(()) => {
             debug!("{} directory created successfully", dir_tag);
@@ -458,27 +458,25 @@ fn create_dir(sess: &Session, path: &Path, dir_tag: &str) -> Result<(), ()> {
 }
 
 /// Allocate the lock-file and lock it.
-fn lock_directory(sess: &Session,
-                  session_dir: &Path)
-                  -> Result<(flock::Lock, PathBuf), ()> {
+fn lock_directory(sess: &Session, session_dir: &Path) -> Result<(flock::Lock, PathBuf), ()> {
     let lock_file_path = lock_file_path(session_dir);
     debug!("lock_directory() - lock_file: {}", lock_file_path.display());
 
     match flock::Lock::new(&lock_file_path,
                            false, // don't wait
-                           true, // create the lock file
+                           true,  // create the lock file
                            true) { // the lock should be exclusive
         Ok(lock) => Ok((lock, lock_file_path)),
         Err(err) => {
             sess.err(&format!("incremental compilation: could not create \
-                               session directory lock file: {}", err));
+                               session directory lock file: {}",
+                              err));
             Err(())
         }
     }
 }
 
-fn delete_session_dir_lock_file(sess: &Session,
-                                lock_file_path: &Path) {
+fn delete_session_dir_lock_file(sess: &Session, lock_file_path: &Path) {
     if let Err(err) = safe_remove_file(&lock_file_path) {
         sess.warn(&format!("Error deleting lock file for incremental \
                             compilation session directory `{}`: {}",
@@ -502,7 +500,7 @@ fn find_source_directory(crate_dir: &Path,
 fn find_source_directory_in_iter<I>(iter: I,
                                     source_directories_already_tried: &FnvHashSet<PathBuf>)
                                     -> Option<PathBuf>
-    where I: Iterator<Item=PathBuf>
+    where I: Iterator<Item = PathBuf>
 {
     let mut best_candidate = (UNIX_EPOCH, None);
 
@@ -514,15 +512,15 @@ fn find_source_directory_in_iter<I>(iter: I,
         assert_no_characters_lost(&directory_name);
 
         if source_directories_already_tried.contains(&session_dir) ||
-           !is_session_directory(&directory_name) ||
-           !is_finalized(&directory_name) {
+           !is_session_directory(&directory_name) || !is_finalized(&directory_name) {
             debug!("find_source_directory_in_iter - ignoring.");
-            continue
+            continue;
         }
 
         let timestamp = extract_timestamp_from_session_dir(&directory_name)
             .unwrap_or_else(|_| {
-                bug!("unexpected incr-comp session dir: {}", session_dir.display())
+                bug!("unexpected incr-comp session dir: {}",
+                     session_dir.display())
             });
 
         if timestamp > best_candidate.0 {
@@ -538,28 +536,26 @@ fn is_finalized(directory_name: &str) -> bool {
 }
 
 fn is_session_directory(directory_name: &str) -> bool {
-    directory_name.starts_with("s-") &&
-    !directory_name.ends_with(LOCK_FILE_EXT)
+    directory_name.starts_with("s-") && !directory_name.ends_with(LOCK_FILE_EXT)
 }
 
 fn is_session_directory_lock_file(file_name: &str) -> bool {
     file_name.starts_with("s-") && file_name.ends_with(LOCK_FILE_EXT)
 }
 
-fn extract_timestamp_from_session_dir(directory_name: &str)
-                                      -> Result<SystemTime, ()> {
+fn extract_timestamp_from_session_dir(directory_name: &str) -> Result<SystemTime, ()> {
     if !is_session_directory(directory_name) {
-        return Err(())
+        return Err(());
     }
 
     let dash_indices: Vec<_> = directory_name.match_indices("-")
-                                             .map(|(idx, _)| idx)
-                                             .collect();
+        .map(|(idx, _)| idx)
+        .collect();
     if dash_indices.len() != 3 {
-        return Err(())
+        return Err(());
     }
 
-    string_to_timestamp(&directory_name[dash_indices[0]+1 .. dash_indices[1]])
+    string_to_timestamp(&directory_name[dash_indices[0] + 1..dash_indices[1]])
 }
 
 const BASE_36: &'static [u8] = b"0123456789abcdefghijklmnopqrstuvwxyz";
@@ -580,8 +576,7 @@ fn encode_base_36(mut n: u64) -> String {
 fn timestamp_to_string(timestamp: SystemTime) -> String {
     let duration = timestamp.duration_since(UNIX_EPOCH).unwrap();
-    let micros = duration.as_secs() * 1_000_000 +
-                 (duration.subsec_nanos() as u64) / 1000;
+    let micros = duration.as_secs() * 1_000_000 + (duration.subsec_nanos() as u64) / 1000;
     encode_base_36(micros)
 }
 
@@ -589,7 +584,7 @@ fn string_to_timestamp(s: &str) -> Result<SystemTime, ()> {
     let micros_since_unix_epoch = u64::from_str_radix(s, 36);
 
     if micros_since_unix_epoch.is_err() {
-        return Err(())
+        return Err(());
     }
 
     let micros_since_unix_epoch = micros_since_unix_epoch.unwrap();
@@ -600,7 +595,9 @@ fn string_to_timestamp(s: &str) -> Result<SystemTime, ()> {
 }
 
 fn crate_path_tcx(tcx: TyCtxt, cnum: CrateNum) -> PathBuf {
-    crate_path(tcx.sess, &tcx.crate_name(cnum), &tcx.crate_disambiguator(cnum))
+    crate_path(tcx.sess,
+               &tcx.crate_name(cnum),
+               &tcx.crate_disambiguator(cnum))
 }
 
 /// Finds the session directory containing the correct metadata hashes file for
@@ -615,58 +612,58 @@ pub fn find_metadata_hashes_for(tcx: TyCtxt, cnum: CrateNum) -> Option<PathBuf> {
     let crate_directory = crate_path_tcx(tcx, cnum);
 
     if !crate_directory.exists() {
-        return None
+        return None;
     }
 
     let dir_entries = match crate_directory.read_dir() {
         Ok(dir_entries) => dir_entries,
         Err(e) => {
             tcx.sess
-               .err(&format!("incremental compilation: Could not read crate directory `{}`: {}",
-                             crate_directory.display(), e));
-            return None
+                .err(&format!("incremental compilation: Could not read crate directory `{}`: {}",
+                              crate_directory.display(),
+                              e));
+            return None;
         }
     };
 
     let target_svh = tcx.sess.cstore.crate_hash(cnum);
     let target_svh = encode_base_36(target_svh.as_u64());
 
-    let sub_dir = find_metadata_hashes_iter(&target_svh, dir_entries.filter_map(|e| {
-        e.ok().map(|e| e.file_name().to_string_lossy().into_owned())
-    }));
+    let sub_dir =
+        find_metadata_hashes_iter(&target_svh,
+                                  dir_entries.filter_map(|e| {
+                                      e.ok().map(|e| e.file_name().to_string_lossy().into_owned())
+                                  }));
 
     sub_dir.map(|sub_dir_name| crate_directory.join(&sub_dir_name))
 }
 
 fn find_metadata_hashes_iter<'a, I>(target_svh: &str, iter: I) -> Option<OsString>
     where I: Iterator<Item = String>
 {
     for sub_dir_name in iter {
         if !is_session_directory(&sub_dir_name) || !is_finalized(&sub_dir_name) {
             // This is not a usable session directory
-            continue
+            continue;
         }
 
         let is_match = if let Some(last_dash_pos) = sub_dir_name.rfind("-") {
-            let candidate_svh = &sub_dir_name[last_dash_pos + 1 ..];
+            let candidate_svh = &sub_dir_name[last_dash_pos + 1..];
             target_svh == candidate_svh
         } else {
             // some kind of invalid directory name
-            continue
+            continue;
         };
 
         if is_match {
-            return Some(OsString::from(sub_dir_name))
+            return Some(OsString::from(sub_dir_name));
         }
     }
 
     None
 }
 
-fn crate_path(sess: &Session,
-              crate_name: &str,
-              crate_disambiguator: &str)
-              -> PathBuf {
+fn crate_path(sess: &Session, crate_name: &str, crate_disambiguator: &str) -> PathBuf {
     use std::hash::{Hasher, Hash};
     use std::collections::hash_map::DefaultHasher;
 
@@ -696,11 +693,11 @@ pub fn garbage_collect_session_directories(sess: &Session) -> io::Result<()> {
     let session_directory = sess.incr_comp_session_dir();
     debug!("garbage_collect_session_directories() - session directory: {}",
-            session_directory.display());
+           session_directory.display());
 
     let crate_directory = session_directory.parent().unwrap();
     debug!("garbage_collect_session_directories() - crate directory: {}",
-            crate_directory.display());
+           crate_directory.display());
 
     // First do a pass over the crate directory, collecting lock files and
     // session directories
@@ -712,7 +709,7 @@ pub fn garbage_collect_session_directories(sess: &Session) -> io::Result<()> {
             Ok(dir_entry) => dir_entry,
             _ => {
                 // Ignore any errors
-                continue
+                continue;
             }
         };
 
@@ -731,19 +728,18 @@ pub fn garbage_collect_session_directories(sess: &Session) -> io::Result<()> {
     }
 
     // Now map from lock files to session directories
-    let lock_file_to_session_dir: FnvHashMap<String, Option<String>> =
-        lock_files.into_iter()
-                  .map(|lock_file_name| {
-                      assert!(lock_file_name.ends_with(LOCK_FILE_EXT));
-                      let dir_prefix_end = lock_file_name.len() - LOCK_FILE_EXT.len();
-                      let session_dir = {
-                          let dir_prefix = &lock_file_name[0 .. dir_prefix_end];
-                          session_directories.iter()
-                                             .find(|dir_name| dir_name.starts_with(dir_prefix))
-                      };
-                      (lock_file_name, session_dir.map(String::clone))
-                  })
-                  .collect();
+    let lock_file_to_session_dir: FnvHashMap<String, Option<String>> = lock_files.into_iter()
+        .map(|lock_file_name| {
+            assert!(lock_file_name.ends_with(LOCK_FILE_EXT));
+            let dir_prefix_end = lock_file_name.len() - LOCK_FILE_EXT.len();
+            let session_dir = {
+                let dir_prefix = &lock_file_name[0..dir_prefix_end];
+                session_directories.iter()
+                    .find(|dir_name| dir_name.starts_with(dir_prefix))
+            };
+            (lock_file_name, session_dir.map(String::clone))
+        })
+        .collect();
 
     // Delete all lock files, that don't have an associated directory. They must
     // be some kind of leftover
@@ -763,7 +759,8 @@ pub fn garbage_collect_session_directories(sess: &Session) -> io::Result<()> {
             if is_old_enough_to_be_collected(timestamp) {
                 debug!("garbage_collect_session_directories() - deleting \
-                        garbage lock file: {}", lock_file_path.display());
+                        garbage lock file: {}",
+                       lock_file_path.display());
                 delete_session_dir_lock_file(sess, &lock_file_path);
             } else {
                 debug!("garbage_collect_session_directories() - lock file with \
@@ -774,19 +771,16 @@ pub fn garbage_collect_session_directories(sess: &Session) -> io::Result<()> {
     }
 
     // Filter out `None` directories
-    let lock_file_to_session_dir: FnvHashMap<String, String> =
-        lock_file_to_session_dir.into_iter()
-                                .filter_map(|(lock_file_name, directory_name)| {
-                                    directory_name.map(|n| (lock_file_name, n))
-                                })
-                                .collect();
+    let lock_file_to_session_dir: FnvHashMap<String, String> = lock_file_to_session_dir.into_iter()
+        .filter_map(|(lock_file_name, directory_name)| directory_name.map(|n| (lock_file_name, n)))
+        .collect();
 
     let mut deletion_candidates = vec![];
     let mut definitely_delete = vec![];
 
     for (lock_file_name, directory_name) in &lock_file_to_session_dir {
         debug!("garbage_collect_session_directories() - inspecting: {}",
-                directory_name);
+               directory_name);
 
         let timestamp = match extract_timestamp_from_session_dir(directory_name) {
             Ok(timestamp) => timestamp,
@@ -801,14 +795,15 @@ pub fn garbage_collect_session_directories(sess: &Session) -> io::Result<()> {
         if is_finalized(directory_name) {
             let lock_file_path = crate_directory.join(lock_file_name);
             match flock::Lock::new(&lock_file_path,
-                                   false, // don't wait
-                                   false, // don't create the lock-file
+                                   false, // don't wait
+                                   false, // don't create the lock-file
                                    true) { // get an exclusive lock
                 Ok(lock) => {
                     debug!("garbage_collect_session_directories() - \
                             successfully acquired lock");
                     debug!("garbage_collect_session_directories() - adding \
-                            deletion candidate: {}", directory_name);
+                            deletion candidate: {}",
+                           directory_name);
 
                     // Note that we are holding on to the lock
                     deletion_candidates.push((timestamp,
@@ -834,16 +829,15 @@ pub fn garbage_collect_session_directories(sess: &Session) -> io::Result<()> {
             // leave this directory alone.
            let lock_file_path = crate_directory.join(lock_file_name);
             match flock::Lock::new(&lock_file_path,
-                                   false, // don't wait
-                                   false, // don't create the lock-file
+                                   false, // don't wait
+                                   false, // don't create the lock-file
                                    true) { // get an exclusive lock
                 Ok(lock) => {
                     debug!("garbage_collect_session_directories() - \
                             successfully acquired lock");
 
                     // Note that we are holding on to the lock
-                    definitely_delete.push((crate_directory.join(directory_name),
-                                            Some(lock)));
+                    definitely_delete.push((crate_directory.join(directory_name), Some(lock)));
                 }
                 Err(_) => {
                     debug!("garbage_collect_session_directories() - \
@@ -859,7 +853,7 @@ pub fn garbage_collect_session_directories(sess: &Session) -> io::Result<()> {
     // Delete all but the most recent of the candidates
     for (path, lock) in all_except_most_recent(deletion_candidates) {
         debug!("garbage_collect_session_directories() - deleting `{}`",
-                path.display());
+               path.display());
 
         if let Err(err) = safe_remove_dir_all(&path) {
             sess.warn(&format!("Failed to garbage collect finalized incremental \
@@ -878,7 +872,7 @@ pub fn garbage_collect_session_directories(sess: &Session) -> io::Result<()> {
 
     for (path, lock) in definitely_delete {
         debug!("garbage_collect_session_directories() - deleting `{}`",
-                path.display());
+               path.display());
 
         if let Err(err) = safe_remove_dir_all(&path) {
             sess.warn(&format!("Failed to garbage collect incremental \
@@ -900,14 +894,14 @@ pub fn garbage_collect_session_directories(sess: &Session) -> io::Result<()> {
 fn all_except_most_recent(deletion_candidates: Vec<(SystemTime, PathBuf, Option<flock::Lock>)>)
                           -> FnvHashMap<PathBuf, Option<flock::Lock>> {
     let most_recent = deletion_candidates.iter()
-                                         .map(|&(timestamp, ..)| timestamp)
-                                         .max();
+        .map(|&(timestamp, ..)| timestamp)
+        .max();
 
     if let Some(most_recent) = most_recent {
         deletion_candidates.into_iter()
-                           .filter(|&(timestamp, ..)| timestamp != most_recent)
-                           .map(|(_, path, lock)| (path, lock))
-                           .collect()
+            .filter(|&(timestamp, ..)| timestamp != most_recent)
+            .map(|(_, path, lock)| (path, lock))
+            .collect()
     } else {
         FnvHashMap()
     }
@@ -939,32 +933,35 @@ fn safe_remove_file(p: &Path) -> io::Result<()> {
 
 #[test]
 fn test_all_except_most_recent() {
-    assert_eq!(all_except_most_recent(
-        vec![
+    assert_eq!(all_except_most_recent(vec![
         (UNIX_EPOCH + Duration::new(4, 0), PathBuf::from("4"), None),
         (UNIX_EPOCH + Duration::new(1, 0), PathBuf::from("1"), None),
         (UNIX_EPOCH + Duration::new(5, 0), PathBuf::from("5"), None),
         (UNIX_EPOCH + Duration::new(3, 0), PathBuf::from("3"), None),
         (UNIX_EPOCH + Duration::new(2, 0), PathBuf::from("2"), None),
-    ]).keys().cloned().collect::<FnvHashSet<PathBuf>>(),
-    vec![
+    ])
+                   .keys()
+                   .cloned()
+                   .collect::<FnvHashSet<PathBuf>>(),
+               vec![
         PathBuf::from("1"),
         PathBuf::from("2"),
         PathBuf::from("3"),
         PathBuf::from("4"),
-    ].into_iter().collect::<FnvHashSet<PathBuf>>()
-    );
-
-    assert_eq!(all_except_most_recent(
-        vec![
-    ]).keys().cloned().collect::<FnvHashSet<PathBuf>>(),
-    FnvHashSet()
-    );
+    ]
+                   .into_iter()
+                   .collect::<FnvHashSet<PathBuf>>());
+
+    assert_eq!(all_except_most_recent(vec![])
+                   .keys()
+                   .cloned()
+                   .collect::<FnvHashSet<PathBuf>>(),
+               FnvHashSet());
 }
 
 #[test]
 fn test_timestamp_serialization() {
-    for i in 0 ..
@@ -939,32 +933,35 @@ fn safe_remove_file(p: &Path) -> io::Result<()> {

 #[test]
 fn test_all_except_most_recent() {
-    assert_eq!(all_except_most_recent(
-        vec![
+    assert_eq!(all_except_most_recent(vec![
         (UNIX_EPOCH + Duration::new(4, 0), PathBuf::from("4"), None),
         (UNIX_EPOCH + Duration::new(1, 0), PathBuf::from("1"), None),
         (UNIX_EPOCH + Duration::new(5, 0), PathBuf::from("5"), None),
         (UNIX_EPOCH + Duration::new(3, 0), PathBuf::from("3"), None),
         (UNIX_EPOCH + Duration::new(2, 0), PathBuf::from("2"), None),
-    ]).keys().cloned().collect::<FnvHashSet<PathBuf>>(),
-    vec![
+    ])
+    .keys()
+    .cloned()
+    .collect::<FnvHashSet<PathBuf>>(),
+               vec![
         PathBuf::from("1"),
         PathBuf::from("2"),
         PathBuf::from("3"),
         PathBuf::from("4"),
-    ].into_iter().collect::<FnvHashSet<PathBuf>>()
-    );
-
-    assert_eq!(all_except_most_recent(
-        vec![
-        ]).keys().cloned().collect::<FnvHashSet<PathBuf>>(),
-        FnvHashSet()
-    );
+    ]
+    .into_iter()
+    .collect::<FnvHashSet<PathBuf>>());
+
+    assert_eq!(all_except_most_recent(vec![])
+                   .keys()
+                   .cloned()
+                   .collect::<FnvHashSet<PathBuf>>(),
+               FnvHashSet());
 }

 #[test]
 fn test_timestamp_serialization() {
-    for i in 0 .. 1_000u64 {
+    for i in 0..1_000u64 {
         let time = UNIX_EPOCH + Duration::new(i * 1_434_578, (i as u32) * 239_000);
         let s = timestamp_to_string(time);
         assert_eq!(Ok(time), string_to_timestamp(&s));
@@ -976,78 +973,80 @@ fn test_find_source_directory_in_iter() {
     let already_visited = FnvHashSet();

     // Find newest
-    assert_eq!(find_source_directory_in_iter(
-        vec![PathBuf::from("crate-dir/s-3234-0000-svh"),
-             PathBuf::from("crate-dir/s-2234-0000-svh"),
-             PathBuf::from("crate-dir/s-1234-0000-svh")].into_iter(), &already_visited),
-        Some(PathBuf::from("crate-dir/s-3234-0000-svh")));
+    assert_eq!(find_source_directory_in_iter(vec![PathBuf::from("crate-dir/s-3234-0000-svh"),
+                                                  PathBuf::from("crate-dir/s-2234-0000-svh"),
+                                                  PathBuf::from("crate-dir/s-1234-0000-svh")]
+                                                 .into_iter(),
+                                             &already_visited),
+               Some(PathBuf::from("crate-dir/s-3234-0000-svh")));

     // Filter out "-working"
-    assert_eq!(find_source_directory_in_iter(
-        vec![PathBuf::from("crate-dir/s-3234-0000-working"),
-             PathBuf::from("crate-dir/s-2234-0000-svh"),
-             PathBuf::from("crate-dir/s-1234-0000-svh")].into_iter(), &already_visited),
-        Some(PathBuf::from("crate-dir/s-2234-0000-svh")));
+    assert_eq!(find_source_directory_in_iter(vec![PathBuf::from("crate-dir/s-3234-0000-working"),
+                                                  PathBuf::from("crate-dir/s-2234-0000-svh"),
+                                                  PathBuf::from("crate-dir/s-1234-0000-svh")]
+                                                 .into_iter(),
+                                             &already_visited),
+               Some(PathBuf::from("crate-dir/s-2234-0000-svh")));

     // Handle empty
     assert_eq!(find_source_directory_in_iter(vec![].into_iter(), &already_visited),
                None);

     // Handle only working
-    assert_eq!(find_source_directory_in_iter(
-        vec![PathBuf::from("crate-dir/s-3234-0000-working"),
-             PathBuf::from("crate-dir/s-2234-0000-working"),
-             PathBuf::from("crate-dir/s-1234-0000-working")].into_iter(), &already_visited),
-        None);
+    assert_eq!(find_source_directory_in_iter(vec![PathBuf::from("crate-dir/s-3234-0000-working"),
+                                                  PathBuf::from("crate-dir/s-2234-0000-working"),
+                                                  PathBuf::from("crate-dir/s-1234-0000-working")]
+                                                 .into_iter(),
+                                             &already_visited),
+               None);
 }

 #[test]
-fn test_find_metadata_hashes_iter()
-{
+fn test_find_metadata_hashes_iter() {
     assert_eq!(find_metadata_hashes_iter("testsvh2",
-        vec![
+                                         vec![
             String::from("s-timestamp1-testsvh1"),
             String::from("s-timestamp2-testsvh2"),
             String::from("s-timestamp3-testsvh3"),
-        ].into_iter()),
-        Some(OsString::from("s-timestamp2-testsvh2"))
-    );
+        ]
+                                             .into_iter()),
+               Some(OsString::from("s-timestamp2-testsvh2")));

     assert_eq!(find_metadata_hashes_iter("testsvh2",
-        vec![
+                                         vec![
             String::from("s-timestamp1-testsvh1"),
             String::from("s-timestamp2-testsvh2"),
             String::from("invalid-name"),
-        ].into_iter()),
-        Some(OsString::from("s-timestamp2-testsvh2"))
-    );
+        ]
+                                             .into_iter()),
+               Some(OsString::from("s-timestamp2-testsvh2")));

     assert_eq!(find_metadata_hashes_iter("testsvh2",
-        vec![
+                                         vec![
             String::from("s-timestamp1-testsvh1"),
             String::from("s-timestamp2-testsvh2-working"),
             String::from("s-timestamp3-testsvh3"),
-        ].into_iter()),
-        None
-    );
+        ]
+                                             .into_iter()),
+               None);

     assert_eq!(find_metadata_hashes_iter("testsvh1",
-        vec![
+                                         vec![
             String::from("s-timestamp1-random1-working"),
             String::from("s-timestamp2-random2-working"),
             String::from("s-timestamp3-random3-working"),
-        ].into_iter()),
-        None
-    );
+        ]
+                                             .into_iter()),
+               None);

     assert_eq!(find_metadata_hashes_iter("testsvh2",
-        vec![
+                                         vec![
             String::from("timestamp1-testsvh2"),
             String::from("timestamp2-testsvh2"),
             String::from("timestamp3-testsvh2"),
-        ].into_iter()),
-        None
-    );
+        ]
+                                             .into_iter()),
+               None);
 }

 #[test]
@@ -1063,7 +1062,7 @@ fn test_encode_base_36() {
     test(37);
     test(u64::max_value());

-    for i in 0 .. 1_000 {
+    for i in 0..1_000 {
         test(i * 983);
     }
 }
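The two timestamp tests above round-trip `timestamp_to_string`/`string_to_timestamp` and the `encode_base_36` helper. A sketch of such a round trip, assuming (as the helper names and test values suggest) microseconds since the epoch encoded in base 36; the unit and the digit alphabet are assumptions here, not taken from this diff:

    use std::time::{Duration, SystemTime, UNIX_EPOCH};

    const DIGITS: &[u8] = b"0123456789abcdefghijklmnopqrstuvwxyz";

    fn encode_base_36(mut n: u64) -> String {
        let mut digits = Vec::new();
        loop {
            digits.push(DIGITS[(n % 36) as usize]);
            n /= 36;
            if n == 0 { break; }
        }
        digits.reverse();
        String::from_utf8(digits).unwrap()
    }

    fn timestamp_to_string(t: SystemTime) -> String {
        let d = t.duration_since(UNIX_EPOCH).unwrap();
        let micros = d.as_secs() * 1_000_000 + (d.subsec_nanos() / 1_000) as u64;
        encode_base_36(micros)
    }

    fn string_to_timestamp(s: &str) -> Result<SystemTime, ()> {
        let micros = u64::from_str_radix(s, 36).map_err(|_| ())?;
        let d = Duration::new(micros / 1_000_000, ((micros % 1_000_000) * 1_000) as u32);
        Ok(UNIX_EPOCH + d)
    }

    #[test]
    fn round_trip() {
        let t = UNIX_EPOCH + Duration::new(1_434_578, 239_000);
        assert_eq!(Ok(t), string_to_timestamp(&timestamp_to_string(t)));
    }

Note that the round trip only holds for timestamps with whole-microsecond precision, which is why the test values above use nanosecond counts that are multiples of 1000.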
diff --git a/src/librustc_incremental/persist/hash.rs b/src/librustc_incremental/persist/hash.rs
index e365cbbd3a9a1..71b6d56dad9b2 100644
--- a/src/librustc_incremental/persist/hash.rs
+++ b/src/librustc_incremental/persist/hash.rs
@@ -44,8 +44,7 @@ impl<'a, 'tcx> HashContext<'a, 'tcx> {

     pub fn is_hashable(dep_node: &DepNode<DefId>) -> bool {
         match *dep_node {
-            DepNode::Krate |
-            DepNode::Hir(_) => true,
+            DepNode::Krate | DepNode::Hir(_) => true,
             DepNode::MetaData(def_id) => !def_id.is_local(),
             _ => false,
         }
@@ -53,9 +52,7 @@ impl<'a, 'tcx> HashContext<'a, 'tcx> {

     pub fn hash(&mut self, dep_node: &DepNode<DefId>) -> Option<Fingerprint> {
         match *dep_node {
-            DepNode::Krate => {
-                Some(self.incremental_hashes_map[dep_node])
-            }
+            DepNode::Krate => Some(self.incremental_hashes_map[dep_node]),

             // HIR nodes (which always come from our crate) are an input:
             DepNode::Hir(def_id) => {
@@ -76,9 +73,7 @@ impl<'a, 'tcx> HashContext<'a, 'tcx> {
             // MetaData nodes from *our* crates are an *output*; we
             // don't hash them, but we do compute a hash for them and
             // save it for others to use.
-            DepNode::MetaData(def_id) if !def_id.is_local() => {
-                Some(self.metadata_hash(def_id))
-            }
+            DepNode::MetaData(def_id) if !def_id.is_local() => Some(self.metadata_hash(def_id)),

             _ => {
                 // Other kinds of nodes represent computed by-products
@@ -134,34 +129,34 @@ impl<'a, 'tcx> HashContext<'a, 'tcx> {
         // Lock the directory we'll be reading the hashes from.
         let lock_file_path = lock_file_path(&session_dir);
         let _lock = match flock::Lock::new(&lock_file_path,
-                                      false, // don't wait
-                                      false, // don't create the lock-file
+                                           false, // don't wait
+                                           false, // don't create the lock-file
                                            false) { // shared lock
             Ok(lock) => lock,
             Err(err) => {
                 debug!("Could not acquire lock on `{}` while trying to \
                         load metadata hashes: {}",
-                        lock_file_path.display(),
-                        err);
+                       lock_file_path.display(),
+                       err);

                 // Could not acquire the lock. The directory is probably
                 // in the process of being deleted. It's OK to just exit
                 // here. It's the same scenario as if the file had not
                 // existed in the first place.
-                return
+                return;
             }
         };

         let hashes_file_path = metadata_hash_import_path(&session_dir);

-        match file_format::read_file(&hashes_file_path)
-        {
+        match file_format::read_file(&hashes_file_path) {
             Ok(Some(data)) => {
                 match self.load_from_data(cnum, &data, svh) {
                     Ok(()) => { }
                     Err(err) => {
                         bug!("decoding error in dep-graph from `{}`: {}",
-                             &hashes_file_path.display(), err);
+                             &hashes_file_path.display(),
+                             err);
                     }
                 }
             }
@@ -169,9 +164,9 @@ impl<'a, 'tcx> HashContext<'a, 'tcx> {
                 // If the file is not found, that's ok.
             }
             Err(err) => {
-                self.tcx.sess.err(
-                    &format!("could not load dep information from `{}`: {}",
-                             hashes_file_path.display(), err));
+                self.tcx.sess.err(&format!("could not load dep information from `{}`: {}",
+                                           hashes_file_path.display(),
+                                           err));
             }
         }
     }
@@ -180,7 +175,8 @@ impl<'a, 'tcx> HashContext<'a, 'tcx> {
     fn load_from_data(&mut self,
                       cnum: CrateNum,
                       data: &[u8],
-                      expected_svh: Svh) -> Result<(), String> {
+                      expected_svh: Svh)
+                      -> Result<(), String> {
         debug!("load_from_data(cnum={})", cnum);

         // Load up the hashes for the def-ids from this crate.
@@ -198,11 +194,16 @@ impl<'a, 'tcx> HashContext<'a, 'tcx> {
             // the hashes are stored with just a def-index, which is
             // always relative to the old crate; convert that to use
             // our internal crate number
-            let def_id = DefId { krate: cnum, index: serialized_hash.def_index };
+            let def_id = DefId {
+                krate: cnum,
+                index: serialized_hash.def_index,
+            };

             // record the hash for this dep-node
             let old = self.item_metadata_hashes.insert(def_id, serialized_hash.hash);
-            debug!("load_from_data: def_id={:?} hash={}", def_id, serialized_hash.hash);
+            debug!("load_from_data: def_id={:?} hash={}",
+                   def_id,
+                   serialized_hash.hash);
             assert!(old.is_none(), "already have hash for {:?}", def_id);
         }
         Ok(())
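The `load_from_data` hunk above does one essential thing besides decoding: it re-keys each serialized hash from the foreign crate's raw def-index to a `DefId` in our own crate numbering, asserting that no index is seen twice. In miniature, with hypothetical type aliases standing in for rustc's `CrateNum`, `DefIndex`, and hash types:

    use std::collections::HashMap;

    type CrateNum = u32; // hypothetical stand-ins for rustc's types
    type DefIndex = u32;
    type Hash = u64;

    // Hashes arrive keyed by the *other* crate's def-index; re-key them
    // under our crate number and reject duplicates, as the hunk above does.
    fn load_hashes(cnum: CrateNum,
                   serialized: &[(DefIndex, Hash)],
                   out: &mut HashMap<(CrateNum, DefIndex), Hash>) {
        for &(def_index, hash) in serialized {
            let def_id = (cnum, def_index); // like DefId { krate, index }
            let old = out.insert(def_id, hash);
            assert!(old.is_none(), "already have hash for {:?}", def_id);
        }
    }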
diff --git a/src/librustc_incremental/persist/load.rs b/src/librustc_incremental/persist/load.rs
index 1f43e79ace3ae..de282f4068ed5 100644
--- a/src/librustc_incremental/persist/load.rs
+++ b/src/librustc_incremental/persist/load.rs
@@ -19,7 +19,7 @@ use rustc_data_structures::fnv::{FnvHashSet, FnvHashMap};
 use rustc_serialize::Decodable as RustcDecodable;
 use rustc_serialize::opaque::Decoder;
 use std::fs;
-use std::path::{Path};
+use std::path::Path;

 use IncrementalHashesMap;
 use ich::Fingerprint;
@@ -55,7 +55,7 @@ pub fn load_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
             // We successfully allocated a session directory, but there is no
             // dep-graph data in it to load (because this is the first
             // compilation session with this incr. comp. dir.)
-            return
+            return;
         }
         Err(()) => {
             // Something went wrong while trying to allocate the session
@@ -73,23 +73,25 @@ fn load_dep_graph_if_exists<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
     let dep_graph_path = dep_graph_path(tcx.sess);
     let dep_graph_data = match load_data(tcx.sess, &dep_graph_path) {
         Some(p) => p,
-        None => return // no file
+        None => return, // no file
     };

     let work_products_path = work_products_path(tcx.sess);
     let work_products_data = match load_data(tcx.sess, &work_products_path) {
         Some(p) => p,
-        None => return // no file
+        None => return, // no file
     };

-    match decode_dep_graph(tcx, incremental_hashes_map, &dep_graph_data, &work_products_data) {
+    match decode_dep_graph(tcx,
+                           incremental_hashes_map,
+                           &dep_graph_data,
+                           &work_products_data) {
         Ok(dirty_nodes) => dirty_nodes,
         Err(err) => {
-            tcx.sess.warn(
-                &format!("decoding error in dep-graph from `{}` and `{}`: {}",
-                         dep_graph_path.display(),
-                         work_products_path.display(),
-                         err));
+            tcx.sess.warn(&format!("decoding error in dep-graph from `{}` and `{}`: {}",
+                                   dep_graph_path.display(),
+                                   work_products_path.display(),
+                                   err));
         }
     }
 }
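Note the shape of `load_dep_graph_if_exists` above: the dep-graph file and the work-products file are only meaningful as a pair, so a missing file on either side aborts the whole load and the session starts from scratch. The same early-return pattern in isolation (the on-disk file names follow the ones this module appears to use, but the helper itself is illustrative):

    use std::fs;
    use std::path::Path;

    // Both files must be present for the data to make sense; if either is
    // missing we behave as if there were no previous session at all.
    fn load_pair(session_dir: &Path) -> Option<(Vec<u8>, Vec<u8>)> {
        let dep_graph = fs::read(session_dir.join("dep-graph.bin")).ok()?;
        let work_products = fs::read(session_dir.join("work-products.bin")).ok()?;
        Some((dep_graph, work_products))
    }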
@@ -102,16 +104,17 @@ fn load_data(sess: &Session, path: &Path) -> Option<Vec<u8>> {
             // compiler version. Neither is an error.
         }
         Err(err) => {
-            sess.err(
-                &format!("could not load dep-graph from `{}`: {}",
-                         path.display(), err));
+            sess.err(&format!("could not load dep-graph from `{}`: {}",
+                              path.display(),
+                              err));
         }
     }

     if let Err(err) = delete_all_session_dir_contents(sess) {
         sess.err(&format!("could not clear incompatible incremental \
                            compilation session directory `{}`: {}",
-                          path.display(), err));
+                          path.display(),
+                          err));
     }

     None
@@ -123,8 +126,7 @@ pub fn decode_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                                   incremental_hashes_map: &IncrementalHashesMap,
                                   dep_graph_data: &[u8],
                                   work_products_data: &[u8])
-                                  -> Result<(), String>
-{
+                                  -> Result<(), String> {
     // Decode the list of work_products
     let mut work_product_decoder = Decoder::new(work_products_data, 0);
     let work_products = <Vec<SerializedWorkProduct>>::decode(&mut work_product_decoder)?;
@@ -175,13 +177,13 @@ pub fn decode_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
     // the set of symbols that go into a work-product: if any symbols
     // have been removed (or added) the hash will be different and
     // we'll ignore the work-product then.
-    let retraced_edges: Vec<_> =
-        serialized_dep_graph.edges.iter()
-                            .filter_map(|&(ref raw_source_node, ref raw_target_node)| {
-                                retraced.map(raw_target_node)
-                                        .map(|target_node| (raw_source_node, target_node))
-                            })
-                            .collect();
+    let retraced_edges: Vec<_> = serialized_dep_graph.edges
+        .iter()
+        .filter_map(|&(ref raw_source_node, ref raw_target_node)| {
+            retraced.map(raw_target_node)
+                .map(|target_node| (raw_source_node, target_node))
+        })
+        .collect();

     // Compute which work-products have an input that has changed or
     // been removed. Put the dirty ones into a set.
@@ -215,7 +217,9 @@ pub fn decode_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,

         let source_node = retraced.map(raw_source_node).unwrap();

-        debug!("decode_dep_graph: clean edge: {:?} -> {:?}", source_node, target_node);
+        debug!("decode_dep_graph: clean edge: {:?} -> {:?}",
+               source_node,
+               target_node);

         let _task = dep_graph.in_task(target_node);
         dep_graph.read(source_node);
@@ -248,8 +252,8 @@ fn dirty_nodes<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
         let current_hash = hcx.hash(&dep_node).unwrap();
         if current_hash == hash.hash {
             debug!("initial_dirty_nodes: {:?} is clean (hash={:?})",
-                dep_node.map_def(|&def_id| Some(tcx.def_path(def_id))).unwrap(),
-                current_hash);
+                   dep_node.map_def(|&def_id| Some(tcx.def_path(def_id))).unwrap(),
+                   current_hash);
             continue;
         }
         debug!("initial_dirty_nodes: {:?} is dirty as hash is {:?}, was {:?}",
@@ -279,36 +283,35 @@ fn reconcile_work_products<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
             debug!("reconcile_work_products: dep-node for {:?} is dirty", swp);
             delete_dirty_work_product(tcx, swp);
         } else {
-            let all_files_exist =
-                swp.work_product
-                   .saved_files
-                   .iter()
-                   .all(|&(_, ref file_name)| {
-                       let path = in_incr_comp_dir_sess(tcx.sess, &file_name);
-                       path.exists()
-                   });
+            let all_files_exist = swp.work_product
+                .saved_files
+                .iter()
+                .all(|&(_, ref file_name)| {
+                    let path = in_incr_comp_dir_sess(tcx.sess, &file_name);
+                    path.exists()
+                });

             if all_files_exist {
                 debug!("reconcile_work_products: all files for {:?} exist", swp);
                 tcx.dep_graph.insert_previous_work_product(&swp.id, swp.work_product);
             } else {
-                debug!("reconcile_work_products: some file for {:?} does not exist", swp);
+                debug!("reconcile_work_products: some file for {:?} does not exist",
+                       swp);
                 delete_dirty_work_product(tcx, swp);
             }
         }
     }
 }

-fn delete_dirty_work_product(tcx: TyCtxt,
-                             swp: SerializedWorkProduct) {
+fn delete_dirty_work_product(tcx: TyCtxt, swp: SerializedWorkProduct) {
     debug!("delete_dirty_work_product({:?})", swp);
     for &(_, ref file_name) in &swp.work_product.saved_files {
         let path = in_incr_comp_dir_sess(tcx.sess, file_name);
         match fs::remove_file(&path) {
             Ok(()) => { }
             Err(err) => {
-                tcx.sess.warn(
-                    &format!("file-system error deleting outdated file `{}`: {}",
-                             path.display(), err));
+                tcx.sess.warn(&format!("file-system error deleting outdated file `{}`: {}",
+                                       path.display(),
+                                       err));
             }
         }
     }
@@ -318,7 +321,7 @@ fn load_prev_metadata_hashes(tcx: TyCtxt,
                              retraced: &RetracedDefIdDirectory,
                              output: &mut FnvHashMap<DefId, Fingerprint>) {
     if !tcx.sess.opts.debugging_opts.query_dep_graph {
-        return
+        return;
     }

     debug!("load_prev_metadata_hashes() - Loading previous metadata hashes");
@@ -327,23 +330,27 @@ fn load_prev_metadata_hashes(tcx: TyCtxt,

     if !file_path.exists() {
         debug!("load_prev_metadata_hashes() - Couldn't find file containing \
-                hashes at `{}`", file_path.display());
-        return
+                hashes at `{}`",
+               file_path.display());
+        return;
     }

-    debug!("load_prev_metadata_hashes() - File: {}", file_path.display());
+    debug!("load_prev_metadata_hashes() - File: {}",
+           file_path.display());

     let data = match file_format::read_file(&file_path) {
         Ok(Some(data)) => data,
         Ok(None) => {
             debug!("load_prev_metadata_hashes() - File produced by incompatible \
-                    compiler version: {}", file_path.display());
-            return
+                    compiler version: {}",
+                   file_path.display());
+            return;
         }
         Err(err) => {
             debug!("load_prev_metadata_hashes() - Error reading file `{}`: {}",
-                   file_path.display(), err);
-            return
+                   file_path.display(),
+                   err);
+            return;
         }
     };

@@ -354,7 +361,8 @@ fn load_prev_metadata_hashes(tcx: TyCtxt,

     debug!("load_prev_metadata_hashes() - Mapping DefIds");

-    assert_eq!(serialized_hashes.index_map.len(), serialized_hashes.hashes.len());
+    assert_eq!(serialized_hashes.index_map.len(),
+               serialized_hashes.hashes.len());
     for serialized_hash in serialized_hashes.hashes {
         let def_path_index = serialized_hashes.index_map[&serialized_hash.def_index];
         if let Some(def_id) = retraced.def_id(def_path_index) {
@@ -366,4 +374,3 @@ fn load_prev_metadata_hashes(tcx: TyCtxt,
     debug!("load_prev_metadata_hashes() - successfully loaded {} hashes",
            serialized_hashes.index_map.len());
 }
-
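The `dirty_nodes` hunk above is the heart of invalidation: for every input node whose hash was saved by the previous session, recompute the current hash and compare; any mismatch marks the node dirty, and dirtiness then propagates along the decoded edges. Stripped of rustc's types (string keys and `u64` hashes standing in for dep-nodes and fingerprints):

    use std::collections::HashMap;

    // Recompute the hash of every node that had a hash in the previous
    // session; any mismatch (or vanished node) marks it dirty.
    fn dirty_inputs<F>(saved: &HashMap<String, u64>, current_hash: F) -> Vec<String>
        where F: Fn(&str) -> Option<u64>
    {
        saved.iter()
             .filter(|&(node, &old_hash)| current_hash(node) != Some(old_hash))
             .map(|(node, _)| node.clone())
             .collect()
    }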
diff --git a/src/librustc_incremental/persist/preds.rs b/src/librustc_incremental/persist/preds.rs
index fe1d627253f28..bc0769e0afa1e 100644
--- a/src/librustc_incremental/persist/preds.rs
+++ b/src/librustc_incremental/persist/preds.rs
@@ -39,16 +39,18 @@ impl<'q> Predecessors<'q> {
         let inputs: FnvHashMap<_, _> = all_nodes.iter()
             .enumerate()
-            .filter(|&(_, node)| match node.data {
-                DepNode::WorkProduct(_) => true,
-                DepNode::MetaData(ref def_id) => def_id.is_local(),
+            .filter(|&(_, node)| {
+                match node.data {
+                    DepNode::WorkProduct(_) => true,
+                    DepNode::MetaData(ref def_id) => def_id.is_local(),

-                // if -Z query-dep-graph is passed, save more extended data
-                // to enable better unit testing
-                DepNode::TypeckItemBody(_) |
-                DepNode::TransCrateItem(_) => tcx.sess.opts.debugging_opts.query_dep_graph,
+                    // if -Z query-dep-graph is passed, save more extended data
+                    // to enable better unit testing
+                    DepNode::TypeckItemBody(_) |
+                    DepNode::TransCrateItem(_) => tcx.sess.opts.debugging_opts.query_dep_graph,

-                _ => false,
+                    _ => false,
+                }
             })
             .map(|(node_index, node)| {
                 dfs.reset(NodeIndex(node_index));
@@ -63,7 +65,7 @@ impl<'q> Predecessors<'q> {
         let mut hashes = FnvHashMap();
         for input in inputs.values().flat_map(|v| v.iter().cloned()) {
             hashes.entry(input)
-                  .or_insert_with(|| hcx.hash(input).unwrap());
+                .or_insert_with(|| hcx.hash(input).unwrap());
         }

         Predecessors {
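`Predecessors::new` (reformatted above) first selects the nodes worth saving, namely work-products, local metadata, and, under `-Z query-dep-graph`, typeck/trans nodes for testing, and then walks the graph backwards from each one to find the hashable inputs that can reach it. A toy version of that reverse reachability walk (the adjacency-list representation and all names here are mine, not rustc's):

    use std::collections::{HashMap, HashSet};

    // For every node we intend to save (an "output"), walk the dependency
    // graph backwards and record which hashable inputs can reach it.
    fn inputs_for<'a, F>(preds: &HashMap<&'a str, Vec<&'a str>>,
                         is_input: F,
                         output: &'a str)
                         -> HashSet<&'a str>
        where F: Fn(&str) -> bool
    {
        let mut seen = HashSet::new();
        let mut inputs = HashSet::new();
        let mut stack = vec![output];
        while let Some(node) = stack.pop() {
            if !seen.insert(node) {
                continue; // already visited (the graph may have cycles)
            }
            if is_input(node) {
                inputs.insert(node);
            }
            if let Some(ps) = preds.get(node) {
                stack.extend(ps.iter().cloned());
            }
        }
        inputs
    }

Only the hashes of these reachable inputs need to be stored; everything in between can be recomputed, which is what keeps the saved dep-graph small.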
diff --git a/src/librustc_incremental/persist/save.rs b/src/librustc_incremental/persist/save.rs
index bc156b0e8913b..13a02b63df7c6 100644
--- a/src/librustc_incremental/persist/save.rs
+++ b/src/librustc_incremental/persist/save.rs
@@ -51,22 +51,20 @@ pub fn save_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
     // IMPORTANT: We are saving the metadata hashes *before* the dep-graph,
     //            since metadata-encoding might add new entries to the
     //            DefIdDirectory (which is saved in the dep-graph file).
-    save_in(sess,
-            metadata_hash_export_path(sess),
-            |e| encode_metadata_hashes(tcx,
-                                       svh,
-                                       &preds,
-                                       &mut builder,
-                                       &mut current_metadata_hashes,
-                                       e));
+    save_in(sess, metadata_hash_export_path(sess), |e| {
+        encode_metadata_hashes(tcx,
+                               svh,
+                               &preds,
+                               &mut builder,
+                               &mut current_metadata_hashes,
+                               e)
+    });
     save_in(sess,
             dep_graph_path(sess),
             |e| encode_dep_graph(&preds, &mut builder, e));

     let prev_metadata_hashes = incremental_hashes_map.prev_metadata_hashes.borrow();
-    dirty_clean::check_dirty_clean_metadata(tcx,
-                                            &*prev_metadata_hashes,
-                                            &current_metadata_hashes);
+    dirty_clean::check_dirty_clean_metadata(tcx, &*prev_metadata_hashes, &current_metadata_hashes);
 }

 pub fn save_work_products(sess: &Session) {
@@ -198,7 +196,7 @@ pub fn encode_metadata_hashes(tcx: TyCtxt,
     // (I initially wrote this with an iterator, but it seemed harder to read.)
     let mut serialized_hashes = SerializedMetadataHashes {
         hashes: vec![],
-        index_map: FnvHashMap()
+        index_map: FnvHashMap(),
     };
     let mut def_id_hashes = FnvHashMap();

diff --git a/src/librustc_incremental/persist/work_product.rs b/src/librustc_incremental/persist/work_product.rs
index a9ebd27ce9928..fee66ac264bc6 100644
--- a/src/librustc_incremental/persist/work_product.rs
+++ b/src/librustc_incremental/persist/work_product.rs
@@ -31,24 +31,23 @@ pub fn save_trans_partition(sess: &Session,
     }
     let work_product_id = Arc::new(WorkProductId(cgu_name.to_string()));

-    let saved_files: Option<Vec<_>> =
-        files.iter()
-             .map(|&(kind, ref path)| {
-                 let file_name = format!("cgu-{}.{}", cgu_name, kind.extension());
-                 let path_in_incr_dir = in_incr_comp_dir_sess(sess, &file_name);
-                 match link_or_copy(path, &path_in_incr_dir) {
-                     Ok(_) => Some((kind, file_name)),
-                     Err(err) => {
-                         sess.warn(&format!("error copying object file `{}` \
-                                             to incremental directory as `{}`: {}",
-                                            path.display(),
-                                            path_in_incr_dir.display(),
-                                            err));
-                         None
-                     }
-                 }
-             })
-             .collect();
+    let saved_files: Option<Vec<_>> = files.iter()
+        .map(|&(kind, ref path)| {
+            let file_name = format!("cgu-{}.{}", cgu_name, kind.extension());
+            let path_in_incr_dir = in_incr_comp_dir_sess(sess, &file_name);
+            match link_or_copy(path, &path_in_incr_dir) {
+                Ok(_) => Some((kind, file_name)),
+                Err(err) => {
+                    sess.warn(&format!("error copying object file `{}` to incremental directory \
+                                        as `{}`: {}",
+                                       path.display(),
+                                       path_in_incr_dir.display(),
+                                       err));
+                    None
+                }
+            }
+        })
+        .collect();

     let saved_files = match saved_files {
         Some(v) => v,
         None => return,