diff --git a/src/bootstrap/config.rs b/src/bootstrap/config.rs index 43d9264eaca9..77762bd80a24 100644 --- a/src/bootstrap/config.rs +++ b/src/bootstrap/config.rs @@ -358,6 +358,7 @@ impl Config { pub fn default_opts() -> Config { let mut config = Config::default(); + config.llvm_optimize = true; config.llvm_version_check = true; config.backtrace = true; @@ -376,7 +377,7 @@ impl Config { config.deny_warnings = true; config.missing_tools = false; - // set by bootstrap.py + // set by `bootstrap.py` config.build = INTERNER.intern_str(&env::var("BUILD").expect("'BUILD' to be set")); config.src = Config::path_from_python("SRC"); config.out = Config::path_from_python("BUILD_DIR"); @@ -410,7 +411,7 @@ impl Config { config.out = dir; } - // If --target was specified but --host wasn't specified, don't run any host-only tests. + // If `--target` was specified but `--host` wasn't specified, don't run any host-only tests. let has_hosts = !flags.host.is_empty(); let has_targets = !flags.target.is_empty(); config.skip_only_host_steps = !has_hosts && has_targets; @@ -428,7 +429,7 @@ impl Config { }).unwrap_or_else(|| TomlConfig::default()); let build = toml.build.clone().unwrap_or_default(); - // set by bootstrap.py + // set by `bootstrap.py` config.hosts.push(config.build.clone()); for host in build.host.iter() { let host = INTERNER.intern_str(host); @@ -487,8 +488,8 @@ impl Config { config.mandir = install.mandir.clone().map(PathBuf::from); } - // Store off these values as options because if they're not provided - // we'll infer default values for them later + // Store away these values as options because if they're not provided + // we'll infer default values for them later. 
let mut llvm_assertions = None; let mut debug = None; let mut debug_assertions = None; @@ -552,7 +553,7 @@ impl Config { set(&mut config.channel, rust.channel.clone()); set(&mut config.rust_dist_src, rust.dist_src); set(&mut config.verbose_tests, rust.verbose_tests); - // in the case "false" is set explicitly, do not overwrite the command line args + // In the case "false" is set explicitly, do not overwrite the command-line args. if let Some(true) = rust.incremental { config.incremental = true; } diff --git a/src/bootstrap/lib.rs b/src/bootstrap/lib.rs index 5d7581c8211b..d87ab593d4da 100644 --- a/src/bootstrap/lib.rs +++ b/src/bootstrap/lib.rs @@ -997,27 +997,27 @@ impl Build { self.package_vers(channel::CFG_RELEASE_NUM) } - /// Returns the value of `package_vers` above for Cargo + /// Returns the value of `package_vers` above for Cargo. fn cargo_package_vers(&self) -> String { self.package_vers(&self.release_num("cargo")) } - /// Returns the value of `package_vers` above for rls + /// Returns the value of `package_vers` above for rls. fn rls_package_vers(&self) -> String { self.package_vers(&self.release_num("rls")) } - /// Returns the value of `package_vers` above for clippy + /// Returns the value of `package_vers` above for clippy. fn clippy_package_vers(&self) -> String { self.package_vers(&self.release_num("clippy")) } - /// Returns the value of `package_vers` above for miri + /// Returns the value of `package_vers` above for miri. fn miri_package_vers(&self) -> String { self.package_vers(&self.release_num("miri")) } - /// Returns the value of `package_vers` above for rustfmt + /// Returns the value of `package_vers` above for rustfmt. 
fn rustfmt_package_vers(&self) -> String { self.package_vers(&self.release_num("rustfmt")) } diff --git a/src/libfmt_macros/lib.rs b/src/libfmt_macros/lib.rs index f9c1be20b8bc..d22420e76dcd 100644 --- a/src/libfmt_macros/lib.rs +++ b/src/libfmt_macros/lib.rs @@ -410,7 +410,7 @@ impl<'a> Parser<'a> { &self.input[start..self.input.len()] } - /// Parses an Argument structure, or what's contained within braces inside the format string + /// Parses an `Argument` structure, or what's contained within braces inside the format string. fn argument(&mut self) -> Argument<'a> { let pos = self.position(); let format = self.format(); @@ -464,7 +464,7 @@ impl<'a> Parser<'a> { } /// Parses a format specifier at the current position, returning all of the - /// relevant information in the FormatSpec struct. + /// relevant information in the `FormatSpec` struct. fn format(&mut self) -> FormatSpec<'a> { let mut spec = FormatSpec { fill: None, @@ -571,7 +571,7 @@ impl<'a> Parser<'a> { spec } - /// Parses a Count parameter at the current position. This does not check + /// Parses a `Count` parameter at the current position. This does not check /// for 'CountIsNextParam' because that is only used in precision, not /// width. fn count(&mut self, start: usize) -> (Count, Option) { diff --git a/src/librustc_ast_borrowck/borrowck/move_data.rs b/src/librustc_ast_borrowck/borrowck/move_data.rs index 67d818161b1b..a5107d230080 100644 --- a/src/librustc_ast_borrowck/borrowck/move_data.rs +++ b/src/librustc_ast_borrowck/borrowck/move_data.rs @@ -17,10 +17,10 @@ use log::debug; #[derive(Default)] pub struct MoveData<'tcx> { - /// Move paths. See section "Move paths" in `README.md`. + /// Move paths. See the section "Move paths" in `README.md`. pub paths: RefCell>>, - /// Cache of loan path to move path index, for easy lookup. + /// A cache of move path indexes by loan paths, for easy lookup. pub path_map: RefCell>, MovePathIndex>>, /// Each move or uninitialized variable gets an entry here. 
@@ -48,7 +48,7 @@ pub struct FlowedMoveData<'tcx> { pub dfcx_assign: AssignDataFlow<'tcx>, } -/// Index into `MoveData.paths`, used like a pointer +/// Index into `MoveData.paths`, used like a pointer. #[derive(Copy, PartialEq, Eq, PartialOrd, Ord, Debug)] pub struct MovePathIndex(usize); @@ -67,7 +67,7 @@ impl Clone for MovePathIndex { #[allow(non_upper_case_globals)] const InvalidMovePathIndex: MovePathIndex = MovePathIndex(usize::MAX); -/// Index into `MoveData.moves`, used like a pointer +/// Index into `MoveData.moves`, used like a pointer. #[derive(Copy, Clone, PartialEq)] pub struct MoveIndex(usize); @@ -81,17 +81,17 @@ impl MoveIndex { const InvalidMoveIndex: MoveIndex = MoveIndex(usize::MAX); pub struct MovePath<'tcx> { - /// Loan path corresponding to this move path + /// Loan path corresponding to this move path. pub loan_path: Rc>, - /// Parent pointer, `InvalidMovePathIndex` if root + /// Parent pointer, `InvalidMovePathIndex` if root. pub parent: MovePathIndex, /// Head of linked list of moves to this path, - /// `InvalidMoveIndex` if not moved + /// `InvalidMoveIndex` if not moved. pub first_move: MoveIndex, - /// First node in linked list of children, `InvalidMovePathIndex` if leaf + /// First node in linked list of children, `InvalidMovePathIndex` if leaf. pub first_child: MovePathIndex, /// Next node in linked list of parent's children (siblings), @@ -102,25 +102,25 @@ pub struct MovePath<'tcx> { #[derive(Copy, Clone)] pub struct Move { - /// Path being moved. + /// The path being moved. pub path: MovePathIndex, - /// ID of node that is doing the move. + /// The ID of the node that is doing the move. pub id: hir::ItemLocalId, - /// Next node in linked list of moves from `path`, or `InvalidMoveIndex` + /// The next node in the linked list of moves from `path`, or `InvalidMoveIndex`. pub next_move: MoveIndex } #[derive(Copy, Clone)] pub struct Assignment { - /// Path being assigned. + /// The path being assigned. 
pub path: MovePathIndex, - /// ID where assignment occurs + /// The ID where assignment occurs. pub id: hir::ItemLocalId, - /// span of node where assignment occurs + /// The span of the node where assignment occurs. pub span: Span, } @@ -200,12 +200,12 @@ impl MoveData<'tcx> { } fn move_next_move(&self, index: MoveIndex) -> MoveIndex { - //! Type safe indexing operator + //! Type-safe indexing operator. (*self.moves.borrow())[index.get()].next_move } fn is_var_path(&self, index: MovePathIndex) -> bool { - //! True if `index` refers to a variable + //! `true` if `index` refers to a variable. self.path_parent(index) == InvalidMovePathIndex } @@ -274,7 +274,7 @@ impl MoveData<'tcx> { } /// Adds any existing move path indices for `lp` and any base paths of `lp` to `result`, but - /// does not add new move paths + /// does not add new move paths. fn add_existing_base_paths(&self, lp: &Rc>, result: &mut Vec) { match self.path_map.borrow().get(lp).cloned() { @@ -486,7 +486,7 @@ impl MoveData<'tcx> { return true; } - // FIXME(#19596) This is a workaround, but there should be better way to do this + // FIXME(#19596): this is a workaround, but there should be a better way to do this. fn each_extending_path_(&self, index: MovePathIndex, f: &mut F) -> bool where F: FnMut(MovePathIndex) -> bool, { @@ -630,7 +630,7 @@ impl<'tcx> FlowedMoveData<'tcx> { // // OK scenario: // - // 4. move of `a.b.c`, use of `a.b.d` + // 4. Move of `a.b.c`, use of `a.b.d` let base_indices = self.move_data.existing_base_paths(loan_path); if base_indices.is_empty() { @@ -656,7 +656,7 @@ impl<'tcx> FlowedMoveData<'tcx> { let cont = self.move_data.each_base_path(moved_path, |p| { if p == loan_path_index { // Scenario 3: some extension of `loan_path` - // was moved + // was moved. 
f(the_move, &self.move_data.path_loan_path(moved_path)) } else { @@ -683,7 +683,7 @@ impl<'tcx> FlowedMoveData<'tcx> { match self.move_data.existing_move_path(loan_path) { Some(i) => i, None => { - // if there were any assignments, it'd have an index + // If there were any assignments, it'd have an index. return true; } } @@ -704,27 +704,27 @@ impl<'tcx> FlowedMoveData<'tcx> { impl BitwiseOperator for MoveDataFlowOperator { #[inline] fn join(&self, succ: usize, pred: usize) -> usize { - succ | pred // moves from both preds are in scope + succ | pred // Moves from both predecessors are in scope. } } impl DataFlowOperator for MoveDataFlowOperator { #[inline] fn initial_value(&self) -> bool { - false // no loans in scope by default + false // No loans in scope by default. } } impl BitwiseOperator for AssignDataFlowOperator { #[inline] fn join(&self, succ: usize, pred: usize) -> usize { - succ | pred // moves from both preds are in scope + succ | pred // Moves from both predecessors are in scope. } } impl DataFlowOperator for AssignDataFlowOperator { #[inline] fn initial_value(&self) -> bool { - false // no assignments in scope by default + false // No assignments in scope by default. } } diff --git a/src/librustc_codegen_llvm/attributes.rs b/src/librustc_codegen_llvm/attributes.rs index 33b50401b22f..90760e10478d 100644 --- a/src/librustc_codegen_llvm/attributes.rs +++ b/src/librustc_codegen_llvm/attributes.rs @@ -24,7 +24,7 @@ pub use syntax::attr::{self, InlineAttr, OptimizeAttr}; use crate::context::CodegenCx; use crate::value::Value; -/// Mark LLVM function to use provided inline heuristic. +/// Marks an LLVM function to use the provided inline heuristic. #[inline] pub fn inline(cx: &CodegenCx<'ll, '_>, val: &'ll Value, inline: InlineAttr) { use self::InlineAttr::*; @@ -44,19 +44,19 @@ pub fn inline(cx: &CodegenCx<'ll, '_>, val: &'ll Value, inline: InlineAttr) { }; } -/// Tell LLVM to emit or not emit the information necessary to unwind the stack for the function. 
+/// Tells LLVM to emit or not emit the information necessary to unwind the stack for the function. #[inline] pub fn emit_uwtable(val: &'ll Value, emit: bool) { Attribute::UWTable.toggle_llfn(Function, val, emit); } -/// Tell LLVM whether the function can or cannot unwind. +/// Tells LLVM whether the function can or cannot unwind. #[inline] fn unwind(val: &'ll Value, can_unwind: bool) { Attribute::NoUnwind.toggle_llfn(Function, val, !can_unwind); } -/// Tell LLVM if this function should be 'naked', i.e., skip the epilogue and prologue. +/// Tells LLVM if this function should be 'naked', i.e., skip the epilogue and prologue. #[inline] pub fn naked(val: &'ll Value, is_naked: bool) { Attribute::Naked.toggle_llfn(Function, val, is_naked); @@ -70,7 +70,7 @@ pub fn set_frame_pointer_elimination(cx: &CodegenCx<'ll, '_>, llfn: &'ll Value) } } -/// Tell LLVM what instrument function to insert. +/// Tells LLVM what instrument function to insert. #[inline] pub fn set_instrument_function(cx: &CodegenCx<'ll, '_>, llfn: &'ll Value) { if cx.sess().instrument_mcount() { @@ -78,7 +78,7 @@ pub fn set_instrument_function(cx: &CodegenCx<'ll, '_>, llfn: &'ll Value) { // `post-inline-ee-instrument` LLVM pass. // The function name varies on platforms. - // See test/CodeGen/mcount.c in clang. + // See `test/CodeGen/mcount.c` in Clang. let mcount_name = CString::new( cx.sess().target.target.options.target_mcount.as_str().as_bytes()).unwrap(); @@ -90,7 +90,7 @@ pub fn set_instrument_function(cx: &CodegenCx<'ll, '_>, llfn: &'ll Value) { pub fn set_probestack(cx: &CodegenCx<'ll, '_>, llfn: &'ll Value) { // Only use stack probes if the target specification indicates that we - // should be using stack probes + // should be using stack probes. 
if !cx.sess().target.target.options.stack_probes { return } @@ -167,7 +167,7 @@ pub fn apply_target_cpu_attr(cx: &CodegenCx<'ll, '_>, llfn: &'ll Value) { /// Sets the `NonLazyBind` LLVM attribute on a given function, /// assuming the codegen options allow skipping the PLT. pub fn non_lazy_bind(sess: &Session, llfn: &'ll Value) { - // Don't generate calls through PLT if it's not necessary + // Don't generate calls through PLT if it's not necessary. if !sess.needs_plt() { Attribute::NonLazyBind.apply_llfn(Function, llfn); } @@ -194,8 +194,7 @@ pub(crate) fn default_optimisation_attrs(sess: &Session, llfn: &'ll Value) { } } - -/// Composite function which sets LLVM attributes for function depending on its AST (`#[attribute]`) +/// Composite function that sets LLVM attributes for function depending on its AST (`#[attribute]`) /// attributes. pub fn from_fn_attrs( cx: &CodegenCx<'ll, 'tcx>, @@ -268,10 +267,10 @@ pub fn from_fn_attrs( // optimize based on this! false } else if codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::UNWIND) { - // If a specific #[unwind] attribute is present, use that + // If a specific `#[unwind]` attribute is present, use that. true } else if codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::RUSTC_ALLOCATOR_NOUNWIND) { - // Special attribute for allocator functions, which can't unwind + // Special attribute for allocator functions, which can't unwind. false } else if let Some(id) = id { let sig = cx.tcx.normalize_erasing_late_bound_regions(ty::ParamEnv::reveal_all(), &sig); @@ -286,11 +285,11 @@ pub fn from_fn_attrs( false } else { // Anything else defined in Rust is assumed that it can possibly - // unwind + // unwind. true } } else { - // assume this can possibly unwind, avoiding the application of a + // Assume this can possibly unwind, avoiding the application of a // `nounwind` attribute below. 
true }); diff --git a/src/librustc_codegen_ssa/base.rs b/src/librustc_codegen_ssa/base.rs index 4acbe0356b47..a296a76728ce 100644 --- a/src/librustc_codegen_ssa/base.rs +++ b/src/librustc_codegen_ssa/base.rs @@ -486,7 +486,7 @@ pub fn codegen_crate( ) -> OngoingCodegen { check_for_rustc_errors_attr(tcx); - // Skip crate items and just output metadata in -Z no-codegen mode. + // Skip crate items and just output metadata in `-Z no-codegen` mode. if tcx.sess.opts.debugging_opts.no_codegen || !tcx.sess.opts.output_types.should_codegen() { let ongoing_codegen = start_async_codegen( diff --git a/src/librustc_codegen_ssa/mir/mod.rs b/src/librustc_codegen_ssa/mir/mod.rs index 00e9ca01f4dd..d78e0e1bbe90 100644 --- a/src/librustc_codegen_ssa/mir/mod.rs +++ b/src/librustc_codegen_ssa/mir/mod.rs @@ -241,7 +241,7 @@ pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( let memory_locals = analyze::non_ssa_locals(&fx); - // Allocate variable and temp allocas + // Allocate variable and temp allocas. fx.locals = { // FIXME(dlrobertson): This is ugly. Find a better way of getting the `PlaceRef` or // `LocalRef` from `arg_local_refs` diff --git a/src/librustc_codegen_ssa/mir/statement.rs b/src/librustc_codegen_ssa/mir/statement.rs index 594f45c83375..2c9abd4a1fec 100644 --- a/src/librustc_codegen_ssa/mir/statement.rs +++ b/src/librustc_codegen_ssa/mir/statement.rs @@ -1,11 +1,9 @@ -use rustc::mir; +use super::{FunctionCx, LocalRef, OperandValue}; -use crate::traits::BuilderMethods; -use super::FunctionCx; -use super::LocalRef; -use super::OperandValue; use crate::traits::*; +use rustc::mir; + impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { pub fn codegen_statement( &mut self, @@ -55,7 +53,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { } // If the type is zero-sized, it's already been set here, - // but we still need to make sure we codegen the operand + // but we still need to make sure we codegen the operand. 
self.codegen_rvalue_operand(bx, rvalue).0 } } diff --git a/src/librustc_codegen_ssa/traits/builder.rs b/src/librustc_codegen_ssa/traits/builder.rs index 3a144f0b0e0a..8cc29ea1ae70 100644 --- a/src/librustc_codegen_ssa/traits/builder.rs +++ b/src/librustc_codegen_ssa/traits/builder.rs @@ -256,10 +256,10 @@ pub trait BuilderMethods<'a, 'tcx>: fn atomic_fence(&mut self, order: AtomicOrdering, scope: SynchronizationScope); fn set_invariant_load(&mut self, load: Self::Value); - /// Called for `StorageLive` + /// Called for `StorageLive` statements. fn lifetime_start(&mut self, ptr: Self::Value, size: Size); - /// Called for `StorageDead` + /// Called for `StorageDead` statements. fn lifetime_end(&mut self, ptr: Self::Value, size: Size); fn call( diff --git a/src/librustc_data_structures/box_region.rs b/src/librustc_data_structures/box_region.rs index 278dcdf2bee4..1fa10c6c97e0 100644 --- a/src/librustc_data_structures/box_region.rs +++ b/src/librustc_data_structures/box_region.rs @@ -107,8 +107,8 @@ macro_rules! declare_box_region_type { } $v fn access FnOnce($($args,)*) -> R, R>(&mut self, f: F) -> R { - // Turn the FnOnce closure into *mut dyn FnMut() - // so we can pass it in to the generator using the BOX_REGION_ARG thread local + // Turn the `FnOnce` closure into `*mut dyn FnMut()` + // so we can pass it in to the generator using the `BOX_REGION_ARG` thread local. let mut r = None; let mut f = Some(f); let mut_f: &mut dyn for<$($lifetimes)*> FnMut(($($args,)*)) = @@ -118,12 +118,12 @@ macro_rules! declare_box_region_type { }; let mut_f = mut_f as *mut dyn for<$($lifetimes)*> FnMut(($($args,)*)); - // Get the generator to call our closure + // Get the generator to call our closure. unsafe { self.0.access(::std::mem::transmute(mut_f)); } - // Unwrap the result + // Unwrap the result. 
r.unwrap() } diff --git a/src/librustc_data_structures/fingerprint.rs b/src/librustc_data_structures/fingerprint.rs index c8012bb94246..d8e01f7794a4 100644 --- a/src/librustc_data_structures/fingerprint.rs +++ b/src/librustc_data_structures/fingerprint.rs @@ -85,16 +85,17 @@ impl stable_hasher::StableHasherResult for Fingerprint { impl_stable_hash_via_hash!(Fingerprint); impl rustc_serialize::UseSpecializedEncodable for Fingerprint { } - impl rustc_serialize::UseSpecializedDecodable for Fingerprint { } impl rustc_serialize::SpecializedEncoder for Encoder { + #[inline] fn specialized_encode(&mut self, f: &Fingerprint) -> Result<(), Self::Error> { f.encode_opaque(self) } } impl<'a> rustc_serialize::SpecializedDecoder for Decoder<'a> { + #[inline] fn specialized_decode(&mut self) -> Result { Fingerprint::decode_opaque(self) } diff --git a/src/librustc_data_structures/indexed_vec.rs b/src/librustc_data_structures/indexed_vec.rs index 6f40d059be27..54844bf1e6cc 100644 --- a/src/librustc_data_structures/indexed_vec.rs +++ b/src/librustc_data_structures/indexed_vec.rs @@ -61,25 +61,25 @@ impl Idx for u32 { macro_rules! newtype_index { // ---- public rules ---- - // Use default constants + // Use default constants. ($(#[$attrs:meta])* $v:vis struct $name:ident { .. }) => ( $crate::newtype_index!( - // Leave out derives marker so we can use its absence to ensure it comes first + // Leave out derives marker so we can use its absence to ensure it comes first. @attrs [$(#[$attrs])*] @type [$name] - // shave off 256 indices at the end to allow space for packing these indices into enums + // Shave off 256 indices at the end to allow space for packing these indices into enums. @max [0xFFFF_FF00] @vis [$v] @debug_format ["{}"]); ); - // Define any constants + // Define any constants. 
($(#[$attrs:meta])* $v:vis struct $name:ident { $($tokens:tt)+ }) => ( $crate::newtype_index!( - // Leave out derives marker so we can use its absence to ensure it comes first + // Leave out derives marker so we can use its absence to ensure it comes first. @attrs [$(#[$attrs])*] @type [$name] - // shave off 256 indices at the end to allow space for packing these indices into enums + // Shave off 256 indices at the end to allow space for packing these indices into enums. @max [0xFFFF_FF00] @vis [$v] @debug_format ["{}"] @@ -88,7 +88,7 @@ macro_rules! newtype_index { // ---- private rules ---- - // Base case, user-defined constants (if any) have already been defined + // Base case where user-defined constants (if any) have already been defined. (@derives [$($derives:ident,)*] @attrs [$(#[$attrs:meta])*] @type [$type:ident] @@ -266,13 +266,13 @@ macro_rules! newtype_index { @debug_format [$debug_format]); ); - // base case for handle_debug where format is custom. No Debug implementation is emitted. + // Base case for `handle_debug` where format is custom. No `Debug` implementation is emitted. (@handle_debug @derives [$($_derives:ident,)*] @type [$type:ident] @debug_format [custom]) => (); - // base case for handle_debug, no debug overrides found, so use default + // Base case for `handle_debug` where no debug overrides found. Use default. (@handle_debug @derives [] @type [$type:ident] @@ -284,7 +284,7 @@ macro_rules! newtype_index { } ); - // Debug is requested for derive, don't generate any Debug implementation. + // `Debug` is requested for derive, don't generate any `Debug` implementation. (@handle_debug @derives [Debug, $($derives:ident,)*] @type [$type:ident] @@ -302,7 +302,7 @@ macro_rules! newtype_index { @debug_format [$debug_format]); ); - // Append comma to end of derives list if it's missing + // Append comma to end of derives list if it's missing. (@attrs [$(#[$attrs:meta])*] @type [$type:ident] @max [$max:expr] @@ -361,7 +361,7 @@ macro_rules! 
newtype_index { ); // The case where no derives are added, but encodable is overridden. Don't - // derive serialization traits + // derive serialization traits. (@attrs [$(#[$attrs:meta])*] @type [$type:ident] @max [$max:expr] @@ -379,7 +379,7 @@ macro_rules! newtype_index { $($tokens)*); ); - // The case where no derives are added, add serialization derives by default + // The case where no derives are added. Add serialization derives by default. (@attrs [$(#[$attrs:meta])*] @type [$type:ident] @max [$max:expr] @@ -405,7 +405,7 @@ macro_rules! newtype_index { } ); - // Rewrite final without comma to one that includes comma + // Rewrite final without comma to one that includes comma. (@derives [$($derives:ident,)*] @attrs [$(#[$attrs:meta])*] @type [$type:ident] @@ -423,7 +423,7 @@ macro_rules! newtype_index { $name = $constant,); ); - // Rewrite final const without comma to one that includes comma + // Rewrite final const without comma to one that includes comma. (@derives [$($derives:ident,)*] @attrs [$(#[$attrs:meta])*] @type [$type:ident] @@ -442,7 +442,7 @@ macro_rules! newtype_index { $(#[doc = $doc])* const $name = $constant,); ); - // Replace existing default for max + // Replace existing default for `max`. (@derives [$($derives:ident,)*] @attrs [$(#[$attrs:meta])*] @type [$type:ident] @@ -461,7 +461,7 @@ macro_rules! newtype_index { $($tokens)*); ); - // Replace existing default for debug_format + // Replace existing default for `debug_format`. (@derives [$($derives:ident,)*] @attrs [$(#[$attrs:meta])*] @type [$type:ident] @@ -480,7 +480,7 @@ macro_rules! newtype_index { $($tokens)*); ); - // Assign a user-defined constant + // Assign a user-defined constant. (@derives [$($derives:ident,)*] @attrs [$(#[$attrs:meta])*] @type [$type:ident] @@ -674,7 +674,7 @@ impl IndexVec { self.raw.get_mut(index.index()) } - /// Returns mutable references to two distinct elements, a and b. Panics if a == b. 
+ /// Returns mutable references to two distinct elements, `a` and `b`. Panics if `a == b`. #[inline] pub fn pick2_mut(&mut self, a: I, b: I) -> (&mut T, &mut T) { let (ai, bi) = (a.index(), b.index()); diff --git a/src/librustc_data_structures/sync.rs b/src/librustc_data_structures/sync.rs index 3277b85c2814..047195a4f289 100644 --- a/src/librustc_data_structures/sync.rs +++ b/src/librustc_data_structures/sync.rs @@ -1,21 +1,22 @@ -//! This module defines types which are thread safe if cfg!(parallel_compiler) is true. +//! This module defines types which are thread-safe if `cfg!(parallel_compiler)` is `true`. //! -//! `Lrc` is an alias of either Rc or Arc. +//! `Lrc` is an alias of either `Rc` or `Arc`. //! //! `Lock` is a mutex. -//! It internally uses `parking_lot::Mutex` if cfg!(parallel_compiler) is true, +//! It internally uses `parking_lot::Mutex` if `cfg!(parallel_compiler)` is `true`, //! `RefCell` otherwise. //! //! `RwLock` is a read-write lock. -//! It internally uses `parking_lot::RwLock` if cfg!(parallel_compiler) is true, +//! It internally uses `parking_lot::RwLock` if `cfg!(parallel_compiler)` is `true`, //! `RefCell` otherwise. //! -//! `MTLock` is a mutex which disappears if cfg!(parallel_compiler) is false. +//! `MTLock` is a mutex which disappears if `cfg!(parallel_compiler)` is `false`. //! -//! `MTRef` is a immutable reference if cfg!(parallel_compiler), and an mutable reference otherwise. +//! `MTRef` is an immutable reference if `cfg!(parallel_compiler)`, and a mutable reference +//! otherwise. //! -//! `rustc_erase_owner!` erases a OwningRef owner into Erased or Erased + Send + Sync -//! depending on the value of cfg!(parallel_compiler). +//! `rustc_erase_owner!` erases an `OwningRef` owner into `Erased` or `Erased + Send + Sync` +//! depending on the value of `cfg!(parallel_compiler)`. use std::collections::HashMap; use std::hash::{Hash, BuildHasher}; @@ -253,7 +254,7 @@ cfg_if! 
{ WorkerLocal(OneThread::new(f(0))) } - /// Returns the worker-local value for each thread + /// Returns the worker-local value for each thread. #[inline] pub fn into_inner(self) -> Vec { vec![OneThread::into_inner(self.0)] @@ -384,7 +385,7 @@ cfg_if! { ($fblock:tt, $($blocks:tt),*) => { // Reverse the order of the later blocks since Rayon executes them in reverse order // when using a single thread. This ensures the execution order matches that - // of a single threaded rustc + // of a single threaded rustc. parallel!(impl $fblock [] [$($blocks),*]); }; } @@ -410,7 +411,7 @@ cfg_if! { pub type MetadataRef = OwningRef, [u8]>; /// This makes locks panic if they are already held. - /// It is only useful when you are running in a single thread + /// It is only useful when you are running in a single thread. const ERROR_CHECKING: bool = false; #[macro_export] @@ -430,8 +431,8 @@ pub fn assert_send_val(_t: &T) {} pub fn assert_send_sync_val(_t: &T) {} pub trait HashMapExt { - /// Same as HashMap::insert, but it may panic if there's already an - /// entry for `key` with a value not equal to `value` + /// Equivalent to `HashMap::insert`, but may panic if there's already an + /// entry for `key` with a value not equal to `value`. fn insert_same(&mut self, key: K, value: V); } @@ -441,21 +442,22 @@ impl HashMapExt for HashMap } } -/// A type whose inner value can be written once and then will stay read-only -// This contains a PhantomData since this type conceptually owns a T outside the Mutex once -// initialized. This ensures that Once is Sync only if T is. If we did not have PhantomData -// we could send a &Once> to multiple threads and call `get` on it to get access -// to &Cell on those threads. +/// A type whose inner value can be written once and will then stay read-only. +// +// This contains a `PhantomData` since this type conceptually owns a `T` outside the `Mutex` once +// initialized. This ensures that `Once` is `Sync` only if `T` is. 
If we did not have +// `PhantomData` we could send a `&Once>` to multiple threads and call `get` on it to +// get access to `&Cell` on those threads. pub struct Once(Lock>, PhantomData); impl Once { - /// Creates an Once value which is uninitialized + /// Creates an uninitialized `Once` value. #[inline(always)] pub fn new() -> Self { Once(Lock::new(None), PhantomData) } - /// Consumes the value and returns Some(T) if it was initialized + /// Consumes the value and returns `Some(T)` if it was initialized. #[inline(always)] pub fn into_inner(self) -> Option { self.0.into_inner() @@ -463,7 +465,7 @@ impl Once { /// Tries to initialize the inner value to `value`. /// Returns `None` if the inner value was uninitialized and `value` was consumed setting it - /// otherwise if the inner value was already set it returns `value` back to the caller + /// otherwise if the inner value was already set it returns `value` back to the caller. #[inline] pub fn try_set(&self, value: T) -> Option { let mut lock = self.0.lock(); @@ -477,7 +479,7 @@ impl Once { /// Tries to initialize the inner value to `value`. /// Returns `None` if the inner value was uninitialized and `value` was consumed setting it /// otherwise if the inner value was already set it asserts that `value` is equal to the inner - /// value and then returns `value` back to the caller + /// value and then returns `value` back to the caller. #[inline] pub fn try_set_same(&self, value: T) -> Option where T: Eq { let mut lock = self.0.lock(); @@ -489,7 +491,7 @@ impl Once { None } - /// Tries to initialize the inner value to `value` and panics if it was already initialized + /// Tries to initialize the inner value to `value` and panics if it was already initialized. 
#[inline] pub fn set(&self, value: T) { assert!(self.try_set(value).is_none()); @@ -498,7 +500,7 @@ impl Once { /// Tries to initialize the inner value by calling the closure while ensuring that no-one else /// can access the value in the mean time by holding a lock for the duration of the closure. /// If the value was already initialized the closure is not called and `false` is returned, - /// otherwise if the value from the closure initializes the inner value, `true` is returned + /// otherwise if the value from the closure initializes the inner value, `true` is returned. #[inline] pub fn init_locking T>(&self, f: F) -> bool { let mut lock = self.0.lock(); @@ -544,25 +546,25 @@ impl Once { } } - /// Tries to get a reference to the inner value, returns `None` if it is not yet initialized + /// Tries to get a reference to the inner value, returns `None` if it is not yet initialized. #[inline(always)] pub fn try_get(&self) -> Option<&T> { let lock = &*self.0.lock(); if let Some(ref inner) = *lock { - // This is safe since we won't mutate the inner value + // This is safe since we won't mutate the inner value. unsafe { Some(&*(inner as *const T)) } } else { None } } - /// Gets reference to the inner value, panics if it is not yet initialized + /// Gets reference to the inner value, panics if it is not yet initialized. #[inline(always)] pub fn get(&self) -> &T { self.try_get().expect("value was not set") } - /// Gets reference to the inner value, panics if it is not yet initialized + /// Gets reference to the inner value, panics if it is not yet initialized. 
#[inline(always)] pub fn borrow(&self) -> &T { self.get() diff --git a/src/librustc_driver/lib.rs b/src/librustc_driver/lib.rs index a912ea3c3582..1a680758b05c 100644 --- a/src/librustc_driver/lib.rs +++ b/src/librustc_driver/lib.rs @@ -25,7 +25,6 @@ pub extern crate rustc_plugin_impl as plugin; use pretty::{PpMode, UserIdentifiedItem}; -//use rustc_resolve as resolve; use rustc_save_analysis as save; use rustc_save_analysis::DumpHandler; use rustc::session::{config, Session, DiagnosticOutput}; @@ -95,27 +94,30 @@ pub fn abort_on_err(result: Result, sess: &Session) -> T { match result { Err(..) => { sess.abort_if_errors(); - panic!("error reported but abort_if_errors didn't abort???"); + panic!("error reported but `abort_if_errors` didn't abort?"); } Ok(x) => x, } } pub trait Callbacks { - /// Called before creating the compiler instance + /// Called before creating the compiler instance. fn config(&mut self, _config: &mut interface::Config) {} - /// Called after parsing. Return value instructs the compiler whether to - /// continue the compilation afterwards (defaults to `Compilation::Continue`) + + /// Called after parsing. The return value instructs the compiler whether to + /// continue the compilation afterwards (defaults to `Compilation::Continue`). fn after_parsing(&mut self, _compiler: &interface::Compiler) -> Compilation { Compilation::Continue } - /// Called after expansion. Return value instructs the compiler whether to - /// continue the compilation afterwards (defaults to `Compilation::Continue`) + + /// Called after expansion. The return value instructs the compiler whether to + /// continue the compilation afterwards (defaults to `Compilation::Continue`). fn after_expansion(&mut self, _compiler: &interface::Compiler) -> Compilation { Compilation::Continue } - /// Called after analysis. Return value instructs the compiler whether to - /// continue the compilation afterwards (defaults to `Compilation::Continue`) + + /// Called after analysis. 
The return value instructs the compiler whether to + /// continue the compilation afterwards (defaults to `Compilation::Continue`). fn after_analysis(&mut self, _compiler: &interface::Compiler) -> Compilation { Compilation::Continue } @@ -138,8 +140,8 @@ impl Callbacks for TimePassesCallbacks { } // Parse args and run the compiler. This is the primary entry point for rustc. -// See comments on CompilerCalls below for details about the callbacks argument. -// The FileLoader provides a way to load files from sources other than the file system. +// See comments on `CompilerCalls` below for details about the callbacks argument. +// The `FileLoader` provides a way to load files from sources other than the file system. pub fn run_compiler( at_args: &[String], callbacks: &mut (dyn Callbacks + Send), @@ -220,7 +222,7 @@ pub fn run_compiler( }); return Ok(()); } - 1 => panic!("make_input should have provided valid inputs"), + 1 => panic!("`make_input` should have provided valid inputs"), _ => early_error(sopts.error_format, &format!( "multiple input filenames provided (first two filenames are `{}` and `{}`)", matches.free[0], @@ -231,8 +233,8 @@ pub fn run_compiler( }; if let Some(err) = input_err { - // Immediately stop compilation if there was an issue reading - // the input (for example if the input stream is not UTF-8). + // Immediately stop compilation if there was an issue reading the input (for example, + // if the input stream is not UTF-8). interface::run_compiler(dummy_config(sopts, cfg, diagnostic_output), |compiler| { compiler.session().err(&err.to_string()); }); @@ -320,7 +322,7 @@ pub fn run_compiler( compiler.register_plugins()?; - // Lint plugins are registered; now we can process command line flags. + // Lint plugins are registered; now we can process command-line flags. 
if sess.opts.describe_lints { describe_lints(&sess, &sess.lint_store.borrow(), true); return sess.compile_status(); @@ -365,10 +367,10 @@ pub fn run_compiler( result // AST will be dropped *after* the `after_analysis` callback - // (needed by the RLS) + // (needed by the RLS). })?; } else { - // Drop AST after creating GlobalCtxt to free memory + // Drop AST after creating `GlobalCtxt` to free memory. mem::drop(compiler.expansion()?.take()); } @@ -384,7 +386,7 @@ pub fn run_compiler( compiler.ongoing_codegen()?; - // Drop GlobalCtxt after starting codegen to free memory + // Drop `GlobalCtxt` after starting codegen to free memory. mem::drop(compiler.global_ctxt()?.take()); if sess.opts.debugging_opts.print_type_sizes { @@ -496,17 +498,17 @@ impl Compilation { } } -/// CompilerCalls instance for a regular rustc build. +/// `CompilerCalls` instance for a regular rustc build. #[derive(Copy, Clone)] pub struct RustcDefaultCalls; -// FIXME remove these and use winapi 0.3 instead -// Duplicates: bootstrap/compile.rs, librustc_errors/emitter.rs #[cfg(unix)] fn stdout_isatty() -> bool { unsafe { libc::isatty(libc::STDOUT_FILENO) != 0 } } +// FIXME: remove these and use v0.3 of winapi crate instead. +// Duplicates: `bootstrap/compile.rs`, `librustc_errors/emitter.rs` #[cfg(windows)] fn stdout_isatty() -> bool { type DWORD = u32; @@ -593,7 +595,7 @@ fn show_content_with_pager(content: &String) { } // If pager fails for whatever reason, we should still print the content - // to standard output + // to standard output. 
if fallback_to_println { print!("{}", content); } @@ -628,7 +630,6 @@ impl RustcDefaultCalls { Compilation::Continue } - fn print_crate_info(codegen_backend: &dyn CodegenBackend, sess: &Session, input: Option<&Input>, @@ -636,8 +637,8 @@ impl RustcDefaultCalls { ofile: &Option) -> Compilation { use rustc::session::config::PrintRequest::*; - // PrintRequest::NativeStaticLibs is special - printed during linking - // (empty iterator returns true) + // `PrintRequest::NativeStaticLibs` is special -- printed during linking + // (empty iterator returns `true`). if sess.opts.prints.iter().all(|&p| p == PrintRequest::NativeStaticLibs) { return Compilation::Continue; } @@ -714,7 +715,7 @@ impl RustcDefaultCalls { let value = value.as_ref().map(|s| s.as_ref()); if name != sym::target_feature || value != Some("crt-static") { if !allow_unstable_cfg && gated_cfg.is_some() { - return None + return None; } } @@ -733,7 +734,7 @@ impl RustcDefaultCalls { RelocationModels | CodeModels | TlsModels | TargetCPUs | TargetFeatures => { codegen_backend.print(*req, sess); } - // Any output here interferes with Cargo's parsing of other printed output + // Any output here interferes with Cargo's parsing of other printed output. PrintRequest::NativeStaticLibs => {} } } @@ -756,7 +757,7 @@ fn commit_date_str() -> Option<&'static str> { option_env!("CFG_VER_DATE") } -/// Prints version information +/// Prints version information. pub fn version(binary: &str, matches: &getopts::Matches) { let verbose = matches.opt_present("verbose"); @@ -977,8 +978,8 @@ fn print_flag_list(cmdline_opt: &str, } } -/// Process command line options. Emits messages as appropriate. If compilation -/// should continue, returns a getopts::Matches object parsed from args, +/// Processes command-line options. Emits messages as appropriate. If compilation +/// should continue, returns a `getopts::Matches` object parsed from arguments, /// otherwise returns `None`. 
/// /// The compiler's handling of options is a little complicated as it ties into @@ -1001,11 +1002,11 @@ fn print_flag_list(cmdline_opt: &str, /// So with all that in mind, the comments below have some more detail about the /// contortions done here to get things to work out correctly. pub fn handle_options(args: &[String]) -> Option { - // Throw away the first argument, the name of the binary + // Throw away the first argument (the name of the binary). let args = &args[1..]; if args.is_empty() { - // user did not write `-v` nor `-Z unstable-options`, so do not + // User did not write `-v` nor `-Z unstable-options`, so do not // include that extra information. usage(false, false); return None; @@ -1033,26 +1034,26 @@ pub fn handle_options(args: &[String]) -> Option { // (unstable option being used on stable) nightly_options::check_nightly_options(&matches, &config::rustc_optgroups()); - // Late check to see if @file was used without unstable options enabled + // Late check to see if `@path` was used without unstable options enabled. if crate::args::used_unstable_argsfile() && !nightly_options::is_unstable_enabled(&matches) { early_error(ErrorOutputType::default(), - "@path is unstable - use -Z unstable-options to enable its use"); + "`@path` is unstable; use `-Z unstable-options` to enable its use"); } if matches.opt_present("h") || matches.opt_present("help") { - // Only show unstable options in --help if we accept unstable options. + // Only show unstable options in `--help` if we accept unstable options. usage(matches.opt_present("verbose"), nightly_options::is_unstable_enabled(&matches)); return None; } - // Handle the special case of -Wall. + // Handle the special case of `-Wall`. let wall = matches.opt_strs("W"); if wall.iter().any(|x| *x == "all") { print_wall_help(); return None; } - // Don't handle -W help here, because we might first load plugins. + // Don't handle `-W` help here, because we might first load plugins. 
let r = matches.opt_strs("Z"); if r.iter().any(|x| *x == "help") { describe_debug_flags(); @@ -1161,7 +1162,7 @@ pub fn report_ices_to_stderr_if_any R, R>(f: F) -> Result() { ErrorReported } else { - // Thread panicked without emitting a fatal diagnostic + // Thread panicked without emitting a fatal diagnostic. eprintln!(""); let emitter = Box::new(errors::emitter::EmitterWriter::stderr( @@ -1173,8 +1174,7 @@ pub fn report_ices_to_stderr_if_any R, R>(f: F) -> Result() { handler.emit(&MultiSpan::new(), "unexpected panic", @@ -1208,8 +1208,8 @@ pub fn report_ices_to_stderr_if_any R, R>(f: F) -> Result) -> ast::NodeId { let fail_because = |is_wrong_because| -> ast::NodeId { - let message = format!("{} needs NodeId (int) or unique path suffix (b::c::d); got \ - {}, which {}", + let message = format!("`{}` needs `NodeId` (int) or unique path suffix (`b::c::d`); \ + got `{}`, which {}", user_option, self.reconstructed_input(), is_wrong_because); @@ -721,15 +721,17 @@ pub fn print_after_parsing(sess: &Session, // Silently ignores an identified node. 
let out = &mut out; s.call_with_pp_support(sess, None, move |annotation| { - debug!("pretty printing source code {:?}", s); + debug!("pretty-printing source code {:?}", s); let sess = annotation.sess(); - *out = pprust::print_crate(sess.source_map(), - &sess.parse_sess, - krate, - src_name, - src, - annotation.pp_ann(), - false) + *out = pprust::print_crate( + sess.source_map(), + &sess.parse_sess, + krate, + src_name, + src, + annotation.pp_ann(), + false + ) }) } else { unreachable!(); @@ -766,15 +768,17 @@ pub fn print_after_hir_lowering<'tcx>( let out = &mut out; let src = src.clone(); s.call_with_pp_support(tcx.sess, Some(tcx), move |annotation| { - debug!("pretty printing source code {:?}", s); + debug!("pretty-printing source code {:?}", s); let sess = annotation.sess(); - *out = pprust::print_crate(sess.source_map(), - &sess.parse_sess, - krate, - src_name, - src, - annotation.pp_ann(), - true) + *out = pprust::print_crate( + sess.source_map(), + &sess.parse_sess, + krate, + src_name, + src, + annotation.pp_ann(), + true + ) }) } @@ -782,21 +786,23 @@ pub fn print_after_hir_lowering<'tcx>( let out = &mut out; let src = src.clone(); s.call_with_pp_support_hir(tcx, move |annotation, krate| { - debug!("pretty printing source code {:?}", s); + debug!("pretty-printing source code {:?}", s); let sess = annotation.sess(); - *out = pprust_hir::print_crate(sess.source_map(), - &sess.parse_sess, - krate, - src_name, - src, - annotation.pp_ann()) + *out = pprust_hir::print_crate( + sess.source_map(), + &sess.parse_sess, + krate, + src_name, + src, + annotation.pp_ann() + ) }) } (PpmHirTree(s), None) => { let out = &mut out; s.call_with_pp_support_hir(tcx, move |_annotation, krate| { - debug!("pretty printing source code {:?}", s); + debug!("pretty-printing source code {:?}", s); *out = format!("{:#?}", krate); }); } @@ -805,21 +811,23 @@ pub fn print_after_hir_lowering<'tcx>( let out = &mut out; let src = src.clone(); s.call_with_pp_support_hir(tcx, move |annotation, 
_| { - debug!("pretty printing source code {:?}", s); + debug!("pretty-printing source code {:?}", s); let sess = annotation.sess(); - let hir_map = annotation.hir_map().expect("-Z unpretty missing HIR map"); - let mut pp_state = pprust_hir::State::new_from_input(sess.source_map(), - &sess.parse_sess, - src_name, - src, - annotation.pp_ann()); + let hir_map = annotation.hir_map().expect("`-Z unpretty` missing HIR map"); + let mut pp_state = pprust_hir::State::new_from_input( + sess.source_map(), + &sess.parse_sess, + src_name, + src, + annotation.pp_ann() + ); for node_id in uii.all_matching_node_ids(hir_map) { let hir_id = tcx.hir().node_to_hir_id(node_id); let node = hir_map.get(hir_id); pp_state.print_node(node); pp_state.s.space(); let path = annotation.node_path(hir_id) - .expect("-Z unpretty missing node paths"); + .expect("`-Z unpretty` missing node paths"); pp_state.synth_comment(path); pp_state.s.hardbreak(); } @@ -830,7 +838,7 @@ pub fn print_after_hir_lowering<'tcx>( (PpmHirTree(s), Some(uii)) => { let out = &mut out; s.call_with_pp_support_hir(tcx, move |_annotation, _krate| { - debug!("pretty printing source code {:?}", s); + debug!("pretty-printing source code {:?}", s); for node_id in uii.all_matching_node_ids(tcx.hir()) { let hir_id = tcx.hir().node_to_hir_id(node_id); let node = tcx.hir().get(hir_id); @@ -856,10 +864,10 @@ fn print_with_analysis( ofile: Option<&Path>, ) -> Result<(), ErrorReported> { let nodeid = if let Some(uii) = uii { - debug!("pretty printing for {:?}", uii); + debug!("pretty-printing for {:?}", uii); Some(uii.to_one_node_id("-Z unpretty", tcx.sess, tcx.hir())) } else { - debug!("pretty printing for whole crate"); + debug!("pretty-printing for whole crate"); None }; diff --git a/src/librustc_incremental/persist/data.rs b/src/librustc_incremental/persist/data.rs index 49b4bb061141..cb0444e8d905 100644 --- a/src/librustc_incremental/persist/data.rs +++ b/src/librustc_incremental/persist/data.rs @@ -1,12 +1,12 @@ -//! 
The data that we will serialize and deserialize. +//! The data that gets serialized and deserialized. use rustc::dep_graph::{WorkProduct, WorkProductId}; #[derive(Debug, RustcEncodable, RustcDecodable)] pub struct SerializedWorkProduct { - /// node that produced the work-product + /// The node that produced the work-product. pub id: WorkProductId, - /// work-product data itself + /// The work-product data itself. pub work_product: WorkProduct, } diff --git a/src/librustc_incremental/persist/dirty_clean.rs b/src/librustc_incremental/persist/dirty_clean.rs index 837aa9360c89..0bfeb2287dc2 100644 --- a/src/librustc_incremental/persist/dirty_clean.rs +++ b/src/librustc_incremental/persist/dirty_clean.rs @@ -242,7 +242,7 @@ pub struct DirtyCleanVisitor<'tcx> { } impl DirtyCleanVisitor<'tcx> { - /// Possibly "deserialize" the attribute into a clean/dirty assertion + /// Possibly "deserialize" the attribute into a clean/dirty assertion. fn assertion_maybe(&mut self, item_id: hir::HirId, attr: &Attribute) -> Option { @@ -251,11 +251,11 @@ impl DirtyCleanVisitor<'tcx> { } else if attr.check_name(ATTR_CLEAN) { true } else { - // skip: not rustc_clean/dirty - return None + // Skip: not `rustc_clean`/`rustc_dirty`. + return None; }; if !check_config(self.tcx, attr) { - // skip: not the correct `cfg=` + // Skip: not the correct `cfg=`. return None; } let assertion = if let Some(labels) = self.labels(attr) { @@ -279,7 +279,7 @@ impl DirtyCleanVisitor<'tcx> { for e in except.iter() { if !auto.remove(e) { let msg = format!( - "`except` specified DepNodes that can not be affected for \"{}\": \"{}\"", + "`except` specified `DepNode`s that can not be affected for \"{}\": \"{}\"", name, e ); @@ -309,7 +309,7 @@ impl DirtyCleanVisitor<'tcx> { None } - /// `except=` attribute value + /// `except=` attribute value. 
fn except(&self, attr: &Attribute) -> Labels { for item in attr.meta_item_list().unwrap_or_else(Vec::new) { if item.check_name(EXCEPT) { @@ -317,18 +317,18 @@ impl DirtyCleanVisitor<'tcx> { return self.resolve_labels(&item, value.as_str().as_ref()); } } - // if no `label` or `except` is given, only the node's group are asserted + // If no `label` or `except` is given, only the node's group is asserted. Labels::default() } - /// Return all DepNode labels that should be asserted for this item. - /// index=0 is the "name" used for error messages + /// Returns all `DepNode` labels that should be asserted for this item. + /// `index=0` is the "name" used for error messages. fn auto_labels(&mut self, item_id: hir::HirId, attr: &Attribute) -> (&'static str, Labels) { let node = self.tcx.hir().get(item_id); let (name, labels) = match node { HirNode::Item(item) => { match item.node { - // note: these are in the same order as hir::Item_; + // NOTE: these are in the same order as `hir::Item_`. // FIXME(michaelwoerister): do commented out ones // // An `extern crate` item, with optional original crate name, @@ -381,7 +381,7 @@ impl DirtyCleanVisitor<'tcx> { // However, this did not seem to work effectively and more bugs were hit. // Nebie @vitiral gave up :) // - //HirItem::Trait(..) => ("ItemTrait", LABELS_TRAIT), + // HirItem::Trait(..) => ("ItemTrait", LABELS_TRAIT), // An implementation, eg `impl Trait for Foo { .. }` HirItem::Impl(..) => ("ItemKind::Impl", LABELS_IMPL), @@ -601,7 +601,7 @@ fn expect_associated_value(tcx: TyCtxt<'_>, item: &NestedMetaItem) -> ast::Name } } -// A visitor that collects all #[rustc_dirty]/#[rustc_clean] attributes from +// A visitor that collects all `#[rustc_dirty]`/`#[rustc_clean]` attributes from // the HIR. It is used to verfiy that we really ran checks for all annotated // nodes. 
pub struct FindAllAttrs<'tcx> { diff --git a/src/librustc_incremental/persist/file_format.rs b/src/librustc_incremental/persist/file_format.rs index f363f718496f..88b667619552 100644 --- a/src/librustc_incremental/persist/file_format.rs +++ b/src/librustc_incremental/persist/file_format.rs @@ -31,7 +31,7 @@ const RUSTC_VERSION: Option<&str> = option_env!("CFG_VERSION"); pub fn write_file_header(stream: &mut Encoder) { stream.emit_raw_bytes(FILE_MAGIC); stream.emit_raw_bytes(&[(HEADER_FORMAT_VERSION >> 0) as u8, - (HEADER_FORMAT_VERSION >> 8) as u8]); + (HEADER_FORMAT_VERSION >> 8) as u8]); let rustc_version = rustc_version(); assert_eq!(rustc_version.len(), (rustc_version.len() as u8) as usize); @@ -59,18 +59,18 @@ pub fn read_file(report_incremental_info: bool, path: &Path) let mut file = io::Cursor::new(data); - // Check FILE_MAGIC + // Check `FILE_MAGIC`. { debug_assert!(FILE_MAGIC.len() == 4); let mut file_magic = [0u8; 4]; file.read_exact(&mut file_magic)?; if file_magic != FILE_MAGIC { - report_format_mismatch(report_incremental_info, path, "Wrong FILE_MAGIC"); + report_format_mismatch(report_incremental_info, path, "wrong `FILE_MAGIC`"); return Ok(None) } } - // Check HEADER_FORMAT_VERSION + // Check `HEADER_FORMAT_VERSION`. { debug_assert!(::std::mem::size_of_val(&HEADER_FORMAT_VERSION) == 2); let mut header_format_version = [0u8; 2]; @@ -79,12 +79,12 @@ pub fn read_file(report_incremental_info: bool, path: &Path) ((header_format_version[1] as u16) << 8); if header_format_version != HEADER_FORMAT_VERSION { - report_format_mismatch(report_incremental_info, path, "Wrong HEADER_FORMAT_VERSION"); + report_format_mismatch(report_incremental_info, path, "wrong `HEADER_FORMAT_VERSION`"); return Ok(None) } } - // Check RUSTC_VERSION + // Check `RUSTC_VERSION`. 
{ let mut rustc_version_str_len = [0u8; 1]; file.read_exact(&mut rustc_version_str_len)?; @@ -94,7 +94,7 @@ pub fn read_file(report_incremental_info: bool, path: &Path) file.read_exact(&mut buffer)?; if buffer != rustc_version().as_bytes() { - report_format_mismatch(report_incremental_info, path, "Different compiler version"); + report_format_mismatch(report_incremental_info, path, "different compiler version"); return Ok(None); } } @@ -120,7 +120,7 @@ fn rustc_version() -> String { } } - RUSTC_VERSION.expect("Cannot use rustc without explicit version for \ + RUSTC_VERSION.expect("cannot use rustc without explicit version for \ incremental compilation") .to_string() } diff --git a/src/librustc_incremental/persist/fs.rs b/src/librustc_incremental/persist/fs.rs index 511175de5d8d..c2590ad5c17e 100644 --- a/src/librustc_incremental/persist/fs.rs +++ b/src/librustc_incremental/persist/fs.rs @@ -2,7 +2,7 @@ //! the file system. //! //! Incremental compilation caches are managed according to a copy-on-write -//! strategy: Once a complete, consistent cache version is finalized, it is +//! strategy: once a complete, consistent cache version is finalized, it is //! never modified. Instead, when a subsequent compilation session is started, //! the compiler will allocate a new version of the cache that starts out as //! a copy of the previous version. Then only this new copy is modified and it @@ -128,7 +128,7 @@ const QUERY_CACHE_FILENAME: &str = "query-cache.bin"; // We encode integers using the following base, so they are shorter than decimal // or hexadecimal numbers (we want short file and directory names). Since these // numbers will be used in file names, we choose an encoding that is not -// case-sensitive (as opposed to base64, for example). +// case-sensitive (as opposed to Base64, for example). 
const INT_ENCODE_BASE: usize = base_n::CASE_INSENSITIVE; pub fn dep_graph_path(sess: &Session) -> PathBuf { @@ -156,7 +156,7 @@ pub fn lock_file_path(session_dir: &Path) -> PathBuf { .map(|(idx, _)| idx) .collect(); if dash_indices.len() != 3 { - bug!("Encountered incremental compilation session directory with \ + bug!("encountered incremental compilation session directory with \ malformed name: {}", session_dir.display()) } @@ -173,7 +173,7 @@ pub fn in_incr_comp_dir(incr_comp_session_dir: &Path, file_name: &str) -> PathBu incr_comp_session_dir.join(file_name) } -/// Allocates the private session directory. The boolean in the Ok() result +/// Allocates the private session directory. The boolean in the `Ok` result /// indicates whether we should try loading a dep graph from the successfully /// initialized directory, or not. /// The post-condition of this fn is that we have a valid incremental @@ -186,20 +186,20 @@ pub fn prepare_session_directory(sess: &Session, crate_name: &str, crate_disambiguator: CrateDisambiguator) { if sess.opts.incremental.is_none() { - return + return; } debug!("prepare_session_directory"); - // {incr-comp-dir}/{crate-name-and-disambiguator} + // '{incr-comp-dir}/{crate-name-and-disambiguator}' let crate_dir = crate_path(sess, crate_name, crate_disambiguator); debug!("crate-dir: {}", crate_dir.display()); if create_dir(sess, &crate_dir, "crate").is_err() { - return + return; } // Hack: canonicalize the path *after creating the directory* - // because, on windows, long paths can cause problems; + // because, on Windows, long paths can cause problems; // canonicalization inserts this weird prefix that makes windows // tolerate long paths. 
let crate_dir = match crate_dir.canonicalize() { @@ -207,7 +207,7 @@ pub fn prepare_session_directory(sess: &Session, Err(err) => { sess.err(&format!("incremental compilation: error canonicalizing path `{}`: {}", crate_dir.display(), err)); - return + return; } }; @@ -215,20 +215,19 @@ pub fn prepare_session_directory(sess: &Session, loop { // Generate a session directory of the form: - // - // {incr-comp-dir}/{crate-name-and-disambiguator}/s-{timestamp}-{random}-working + // {incr-comp-dir}/{crate-name-and-disambiguator}/s-{timestamp}-{random}-working let session_dir = generate_session_dir_path(&crate_dir); debug!("session-dir: {}", session_dir.display()); // Lock the new session directory. If this fails, return an - // error without retrying + // error without retrying. let (directory_lock, lock_file_path) = match lock_directory(sess, &session_dir) { Ok(e) => e, Err(_) => return, }; // Now that we have the lock, we can actually create the session - // directory + // directory. if create_dir(sess, &session_dir, "session").is_err() { return } @@ -241,20 +240,18 @@ pub fn prepare_session_directory(sess: &Session, let source_directory = if let Some(dir) = source_directory { dir } else { - // There's nowhere to copy from, we're done + // There's nowhere to copy from, we're done. debug!("no source directory found. Continuing with empty session \ directory."); sess.init_incr_comp_session(session_dir, directory_lock, false); - return + return; }; debug!("attempting to copy data from source: {}", source_directory.display()); - - - // Try copying over all files from the source directory + // Try copying over all files from the source directory. if let Ok(allows_links) = copy_files(sess, &session_dir, &source_directory) { @@ -262,17 +259,16 @@ pub fn prepare_session_directory(sess: &Session, source_directory.display()); if !allows_links { - sess.warn(&format!("Hard linking files in the incremental \ - compilation cache failed. Copying files \ - instead. 
Consider moving the cache \ - directory to a file system which supports \ - hard linking in session dir `{}`", - session_dir.display()) - ); + sess.warn(&format!("Hard-linking files in the incremental \ + compilation cache failed. Copying files \ + instead. Consider moving the cache \ + directory to a file system which supports \ + hard linking in session dir `{}`", + session_dir.display())); } sess.init_incr_comp_session(session_dir, directory_lock, true); - return + return; } else { debug!("copying failed - trying next directory"); @@ -335,7 +331,7 @@ pub fn finalize_session_directory(sess: &Session, svh: Svh) { assert_no_characters_lost(&old_sub_dir_name); // Keep the 's-{timestamp}-{random-number}' prefix, but replace the - // '-working' part with the SVH of the crate + // '-working' part with the SVH of the crate. let dash_indices: Vec<_> = old_sub_dir_name.match_indices("-") .map(|(idx, _)| idx) .collect(); @@ -345,13 +341,13 @@ pub fn finalize_session_directory(sess: &Session, svh: Svh) { incr_comp_session_dir.display()) } - // State: "s-{timestamp}-{random-number}-" + // State: 's-{timestamp}-{random-number}-'. let mut new_sub_dir_name = String::from(&old_sub_dir_name[..= dash_indices[2]]); - // Append the svh + // Append the svh. base_n::push_str(svh.as_u64() as u128, INT_ENCODE_BASE, &mut new_sub_dir_name); - // Create the full path + // Create the full path. let new_path = incr_comp_session_dir.parent().unwrap().join(new_sub_dir_name); debug!("finalize_session_directory() - new path: {}", new_path.display()); @@ -359,7 +355,7 @@ pub fn finalize_session_directory(sess: &Session, svh: Svh) { Ok(_) => { debug!("finalize_session_directory() - directory renamed successfully"); - // This unlocks the directory + // This unlocks the directory. 
sess.finalize_incr_comp_session(new_path); } Err(e) => { @@ -370,7 +366,7 @@ fn finalize_session_directory(sess: &Session, svh: Svh) { e)); debug!("finalize_session_directory() - error, marking as invalid"); - // Drop the file lock, so we can garage collect + // Drop the file lock, so we can garbage-collect. sess.mark_incr_comp_session_as_invalid(); } } @@ -400,8 +396,8 @@ fn copy_files(sess: &Session, false) { // not exclusive lock } else { - // Could not acquire the lock, don't try to copy from here - return Err(()) + // Could not acquire the lock; don't try to copy from here. + return Err(()); }; let source_dir_iterator = match source_dir.read_dir() { @@ -432,7 +428,7 @@ fn copy_files(sess: &Session, } } Err(_) => { - return Err(()) + return Err(()); } } } @@ -447,8 +443,8 @@ fn copy_files(sess: &Session, Ok(files_linked > 0 || files_copied == 0) } -/// Generates unique directory path of the form: -/// {crate_dir}/s-{timestamp}-{random-number}-working +/// Generates unique directory path of the form +/// `{crate_dir}/s-{timestamp}-{random-number}-working`. fn generate_session_dir_path(crate_dir: &Path) -> PathBuf { let timestamp = timestamp_to_string(SystemTime::now()); debug!("generate_session_dir_path: timestamp = {}", timestamp); @@ -490,9 +486,9 @@ fn lock_directory(sess: &Session, debug!("lock_directory() - lock_file: {}", lock_file_path.display()); match flock::Lock::new(&lock_file_path, - false, // don't wait - true, // create the lock file - true) { // the lock should be exclusive + false, // Don't wait. + true, // Create the lock file. + true) { // The lock should be exclusive. 
Ok(lock) => Ok((lock, lock_file_path)), Err(err) => { sess.err(&format!("incremental compilation: could not create \ @@ -542,7 +538,7 @@ fn find_source_directory_in_iter(iter: I, !is_session_directory(&directory_name) || !is_finalized(&directory_name) { debug!("find_source_directory_in_iter - ignoring"); - continue + continue; } let timestamp = extract_timestamp_from_session_dir(&directory_name) @@ -574,14 +570,14 @@ fn is_session_directory_lock_file(file_name: &str) -> bool { fn extract_timestamp_from_session_dir(directory_name: &str) -> Result { if !is_session_directory(directory_name) { - return Err(()) + return Err(()); } let dash_indices: Vec<_> = directory_name.match_indices("-") .map(|(idx, _)| idx) .collect(); if dash_indices.len() != 3 { - return Err(()) + return Err(()); } string_to_timestamp(&directory_name[dash_indices[0]+1 .. dash_indices[1]]) @@ -598,7 +594,7 @@ fn string_to_timestamp(s: &str) -> Result { let micros_since_unix_epoch = u64::from_str_radix(s, INT_ENCODE_BASE as u32); if micros_since_unix_epoch.is_err() { - return Err(()) + return Err(()); } let micros_since_unix_epoch = micros_since_unix_epoch.unwrap(); @@ -615,8 +611,7 @@ fn crate_path(sess: &Session, let incr_dir = sess.opts.incremental.as_ref().unwrap().clone(); - // The full crate disambiguator is really long. 64 bits of it should be - // sufficient. + // The full crate disambiguator is really long. 64 bits of it should be sufficient. 
let crate_disambiguator = crate_disambiguator.to_fingerprint().to_smaller_hash(); let crate_disambiguator = base_n::encode(crate_disambiguator as u128, INT_ENCODE_BASE); @@ -627,7 +622,7 @@ fn crate_path(sess: &Session, fn assert_no_characters_lost(s: &str) { if s.contains('\u{FFFD}') { - bug!("Could not losslessly convert '{}'.", s) + bug!("could not losslessly convert '{}'.", s); } } @@ -647,7 +642,7 @@ pub fn garbage_collect_session_directories(sess: &Session) -> io::Result<()> { crate_directory.display()); // First do a pass over the crate directory, collecting lock files and - // session directories + // session directories. let mut session_directories = FxHashSet::default(); let mut lock_files = FxHashSet::default(); @@ -655,8 +650,8 @@ pub fn garbage_collect_session_directories(sess: &Session) -> io::Result<()> { let dir_entry = match dir_entry { Ok(dir_entry) => dir_entry, _ => { - // Ignore any errors - continue + // Ignore any errors. + continue; } }; @@ -670,11 +665,11 @@ pub fn garbage_collect_session_directories(sess: &Session) -> io::Result<()> { assert_no_characters_lost(&entry_name); session_directories.insert(entry_name.into_owned()); } else { - // This is something we don't know, leave it alone + // This is something we don't know; leave it alone. } } - // Now map from lock files to session directories + // Now map from lock files to session directories. let lock_file_to_session_dir: FxHashMap> = lock_files.into_iter() .map(|lock_file_name| { @@ -690,7 +685,7 @@ pub fn garbage_collect_session_directories(sess: &Session) -> io::Result<()> { .collect(); // Delete all lock files, that don't have an associated directory. They must - // be some kind of leftover + // be some kind of leftover. 
for (lock_file_name, directory_name) in &lock_file_to_session_dir { if directory_name.is_none() { let timestamp = match extract_timestamp_from_session_dir(lock_file_name) { @@ -698,8 +693,8 @@ pub fn garbage_collect_session_directories(sess: &Session) -> io::Result<()> { Err(()) => { debug!("found lock-file with malformed timestamp: {}", crate_directory.join(&lock_file_name).display()); - // Ignore it - continue + // Ignore it. + continue; } }; @@ -717,7 +712,7 @@ pub fn garbage_collect_session_directories(sess: &Session) -> io::Result<()> { } } - // Filter out `None` directories + // Filter out `None` directories. let lock_file_to_session_dir: FxHashMap = lock_file_to_session_dir.into_iter() .filter_map(|(lock_file_name, directory_name)| { @@ -751,24 +746,24 @@ pub fn garbage_collect_session_directories(sess: &Session) -> io::Result<()> { Err(()) => { debug!("found session-dir with malformed timestamp: {}", crate_directory.join(directory_name).display()); - // Ignore it - continue + // Ignore it. + continue; } }; if is_finalized(directory_name) { let lock_file_path = crate_directory.join(lock_file_name); match flock::Lock::new(&lock_file_path, - false, // don't wait - false, // don't create the lock-file - true) { // get an exclusive lock + false, // Don't wait. + false, // Don't create the lock-file. + true) { // Get an exclusive lock. Ok(lock) => { debug!("garbage_collect_session_directories() - \ successfully acquired lock"); debug!("garbage_collect_session_directories() - adding \ deletion candidate: {}", directory_name); - // Note that we are holding on to the lock + // Note that we are holding on to the lock. deletion_candidates.push((timestamp, crate_directory.join(directory_name), Some(lock))); @@ -792,14 +787,14 @@ pub fn garbage_collect_session_directories(sess: &Session) -> io::Result<()> { // leave this directory alone. 
let lock_file_path = crate_directory.join(lock_file_name); match flock::Lock::new(&lock_file_path, - false, // don't wait - false, // don't create the lock-file - true) { // get an exclusive lock + false, // Don't wait. + false, // Don't create the lock-file. + true) { // Get an exclusive lock. Ok(lock) => { debug!("garbage_collect_session_directories() - \ successfully acquired lock"); - // Note that we are holding on to the lock + // Note that we are holding on to the lock. definitely_delete.push((crate_directory.join(directory_name), Some(lock))); } @@ -814,7 +809,7 @@ pub fn garbage_collect_session_directories(sess: &Session) -> io::Result<()> { } } - // Delete all but the most recent of the candidates + // Delete all but the most recent of the candidates. for (path, lock) in all_except_most_recent(deletion_candidates) { debug!("garbage_collect_session_directories() - deleting `{}`", path.display()); @@ -830,7 +825,7 @@ pub fn garbage_collect_session_directories(sess: &Session) -> io::Result<()> { // Let's make it explicit that the file lock is released at this point, - // or rather, that we held on to it until here + // or rather, that we held on to it until here. mem::drop(lock); } @@ -848,7 +843,7 @@ pub fn garbage_collect_session_directories(sess: &Session) -> io::Result<()> { } // Let's make it explicit that the file lock is released at this point, - // or rather, that we held on to it until here + // or rather, that we held on to it until here. mem::drop(lock); } @@ -875,7 +870,7 @@ fn all_except_most_recent(deletion_candidates: Vec<(SystemTime, PathBuf, Option< /// need to support deleting files with very long paths. The regular /// WinApi functions only support paths up to 260 characters, however. In order /// to circumvent this limitation, we canonicalize the path of the directory -/// before passing it to std::fs::remove_dir_all(). This will convert the path +/// before passing it to `std::fs::remove_dir_all()`. 
This will convert the path /// into the '\\?\' format, which supports much longer paths. fn safe_remove_dir_all(p: &Path) -> io::Result<()> { if p.exists() { diff --git a/src/librustc_incremental/persist/save.rs b/src/librustc_incremental/persist/save.rs index 13e2c5d1c574..7b38f36018e2 100644 --- a/src/librustc_incremental/persist/save.rs +++ b/src/librustc_incremental/persist/save.rs @@ -1,3 +1,9 @@ +use super::data::*; +use super::fs::*; +use super::dirty_clean; +use super::file_format; +use super::work_product; + use rustc::dep_graph::{DepGraph, DepKind, WorkProduct, WorkProductId}; use rustc::session::Session; use rustc::ty::TyCtxt; @@ -6,15 +12,10 @@ use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::sync::join; use rustc_serialize::Encodable as RustcEncodable; use rustc_serialize::opaque::Encoder; + use std::fs; use std::path::PathBuf; -use super::data::*; -use super::fs::*; -use super::dirty_clean; -use super::file_format; -use super::work_product; - pub fn save_dep_graph(tcx: TyCtxt<'_>) { debug!("save_dep_graph()"); tcx.dep_graph.with_ignore(|| { @@ -75,7 +76,7 @@ pub fn save_work_product_index(sess: &Session, } } - // Check that we did not delete one of the current work-products: + // Check that we did not delete one of the current work-products. debug_assert!({ new_work_products.iter() .flat_map(|(_, wp)| wp.saved_files @@ -91,10 +92,10 @@ fn save_in(sess: &Session, path_buf: PathBuf, encode: F) { debug!("save: storing data in {}", path_buf.display()); - // delete the old dep-graph, if any + // Delete the old dep-graph, if any. // Note: It's important that we actually delete the old file and not just // truncate and overwrite it, since it might be a shared hard-link, the - // underlying data of which we don't want to modify + // underlying data of which we don't want to modify. 
if path_buf.exists() { match fs::remove_file(&path_buf) { Ok(()) => { @@ -109,12 +110,12 @@ fn save_in(sess: &Session, path_buf: PathBuf, encode: F) } } - // generate the data in a memory buffer + // Generate the data in a memory buffer. let mut encoder = Encoder::new(Vec::new()); file_format::write_file_header(&mut encoder); encode(&mut encoder); - // write the data out + // Write the data out. let data = encoder.into_inner(); match fs::write(&path_buf, data) { Ok(_) => { diff --git a/src/librustc_interface/interface.rs b/src/librustc_interface/interface.rs index fef60a47dc4e..c54d0db156fc 100644 --- a/src/librustc_interface/interface.rs +++ b/src/librustc_interface/interface.rs @@ -21,13 +21,13 @@ use syntax_pos::edition; pub type Result = result::Result; -/// Represents a compiler session. +/// Represents a compiler instance. /// Can be used run `rustc_interface` queries. /// Created by passing `Config` to `run_compiler`. pub struct Compiler { pub(crate) sess: Lrc, - codegen_backend: Lrc>, - source_map: Lrc, + pub codegen_backend: Lrc>, + pub source_map: Lrc, pub(crate) input: Input, pub(crate) input_path: Option, pub(crate) output_dir: Option, @@ -61,12 +61,12 @@ impl Compiler { } } -/// The compiler configuration +/// The compiler configuration. pub struct Config { - /// Command line options + /// The command-line options. pub opts: config::Options, - /// cfg! configuration in addition to the default ones + /// `cfg!` configuration in addition to the default ones. pub crate_cfg: FxHashSet<(String, Option)>, pub input: Input, @@ -76,17 +76,14 @@ pub struct Config { pub file_loader: Option>, pub diagnostic_output: DiagnosticOutput, - /// Set to capture stderr output during compiler execution + /// `Some` to capture stderr output during compiler execution.
pub stderr: Option>>>, pub crate_name: Option, pub lint_caps: FxHashMap, } -pub fn run_compiler_in_existing_thread_pool(config: Config, f: F) -> R -where - F: FnOnce(&Compiler) -> R, -{ +fn create_compiler(config: Config) -> Compiler { let (sess, codegen_backend, source_map) = util::create_session( config.opts, config.crate_cfg, @@ -98,7 +95,7 @@ where let cstore = Lrc::new(CStore::new(codegen_backend.metadata_loader())); - let compiler = Compiler { + Compiler { sess, codegen_backend, source_map, @@ -109,7 +106,14 @@ where output_file: config.output_file, queries: Default::default(), crate_name: config.crate_name, - }; + } +} + +pub fn run_compiler_in_existing_thread_pool(config: Config, f: F) -> R +where + F: FnOnce(&Compiler) -> R, +{ + let compiler = create_compiler(config); let _sess_abort_error = OnDrop(|| { compiler.sess.diagnostic().print_error_count(&util::diagnostics_registry()); diff --git a/src/librustc_interface/passes.rs b/src/librustc_interface/passes.rs index 3cfae1686dfd..b7cd609e9d35 100644 --- a/src/librustc_interface/passes.rs +++ b/src/librustc_interface/passes.rs @@ -115,7 +115,7 @@ declare_box_region_type!( /// harness if one is to be provided, injection of a dependency on the /// standard library and prelude, and name resolution. /// -/// Returns `None` if we're aborting after handling -W help. +/// Returns `None` if we're aborting after handling `-W help`. pub fn configure_and_expand( sess: Lrc, cstore: Lrc, @@ -241,7 +241,7 @@ pub fn register_plugins<'a>( sess.edition(), &sess.opts.debugging_opts.allow_features, ); - // these need to be set "early" so that expansion sees `quote` if enabled. + // These need to be set "early" so that expansion sees `quote` if enabled. 
sess.init_features(features); let crate_types = util::collect_crate_types(sess, &krate.attrs); @@ -252,7 +252,7 @@ pub fn register_plugins<'a>( rustc_incremental::prepare_session_directory(sess, &crate_name, disambiguator); if sess.opts.incremental.is_some() { - time(sess, "garbage collect incremental cache directory", || { + time(sess, "garbage-collect incremental cache directory", || { if let Err(e) = rustc_incremental::garbage_collect_session_directories(sess) { warn!( "Error while trying to garbage collect incremental \ @@ -330,7 +330,7 @@ fn configure_and_expand_inner<'a>( plugin_info: PluginInfo, ) -> Result<(ast::Crate, Resolver<'a>)> { let attributes = plugin_info.attributes; - time(sess, "pre ast expansion lint checks", || { + time(sess, "pre-AST-expansion lint checks", || { lint::check_ast_crate( sess, &krate, @@ -366,13 +366,13 @@ fn configure_and_expand_inner<'a>( &mut krate, &mut resolver, plugin_info.syntax_exts, sess.edition() ); - // Expand all macros + // Expand all macros. sess.profiler(|p| p.start_activity("macro expansion")); krate = time(sess, "expansion", || { - // Windows dlls do not have rpaths, so they don't know how to find their + // Windows DLLs do not have rpaths, so they don't know how to find their // dependencies. It's up to us to tell the system where to find all the - // dependent dlls. Note that this uses cfg!(windows) as opposed to - // targ_cfg because syntax extensions are always loaded for the host + // dependent DLLs. Note that this uses `cfg!(windows)` as opposed to + // `targ_cfg` because syntax extensions are always loaded for the host // compiler, not for the target. // // This is somewhat of an inherently racy operation, however, as @@ -401,7 +401,7 @@ fn configure_and_expand_inner<'a>( ); } - // Create the config for macro expansion + // Create the config for macro expansion. 
let features = sess.features_untracked(); let cfg = syntax::ext::expand::ExpansionConfig { features: Some(&features), @@ -418,7 +418,7 @@ fn configure_and_expand_inner<'a>( ecx.monotonic_expander().expand_crate(krate) }); - // The rest is error reporting + // The rest is error reporting. time(sess, "check unused macros", || { ecx.check_unused_macros(); @@ -456,7 +456,7 @@ fn configure_and_expand_inner<'a>( }); // If we're actually rustdoc then there's no need to actually compile - // anything, so switch everything to just looping + // anything, so switch everything to just looping. if sess.opts.actually_rustdoc { util::ReplaceBodyWithLoop::new(sess).visit_crate(&mut krate); } @@ -546,8 +546,8 @@ pub fn lower_to_hir( dep_graph: &DepGraph, krate: &ast::Crate, ) -> Result { - // Lower ast -> hir - let hir_forest = time(sess, "lowering ast -> hir", || { + // Lower AST to HIR. + let hir_forest = time(sess, "lowering AST -> HIR", || { let hir_crate = lower_crate(sess, cstore, &dep_graph, &krate, resolver); if sess.opts.debugging_opts.hir_stats { @@ -592,7 +592,7 @@ fn generated_output_paths( out_filenames.push(p); }, OutputType::DepInfo if sess.opts.debugging_opts.dep_info_omit_d_target => { - // Don't add the dep-info output when omitting it from dep-info targets + // Don't add the dep-info output when omitting it from dep-info targets. } _ => { out_filenames.push(file); @@ -650,7 +650,7 @@ fn escape_dep_filename(filename: &FileName) -> String { fn write_out_deps(compiler: &Compiler, outputs: &OutputFilenames, out_filenames: &[PathBuf]) { let sess = &compiler.sess; - // Write out dependency rules to the dep-info file if requested + // Write out dependency rules to the dep-info file if requested. 
if !sess.opts.output_types.contains_key(&OutputType::DepInfo) { return; } @@ -658,7 +658,7 @@ fn write_out_deps(compiler: &Compiler, outputs: &OutputFilenames, out_filenames: let result = (|| -> io::Result<()> { // Build a list of files used to compile the output and - // write Makefile-compatible dependency rules + // write Makefile-compatible dependency rules. let mut files: Vec = sess.source_map() .files() .iter() @@ -690,7 +690,7 @@ fn write_out_deps(compiler: &Compiler, outputs: &OutputFilenames, out_filenames: // Emit a fake target for each input file to the compilation. This // prevents `make` from spitting out an error if a file is later - // deleted. For more info see #28735 + // deleted. For more info see #28735. for path in files { writeln!(file, "{}:", path)?; } @@ -720,7 +720,7 @@ pub fn prepare_outputs( krate: &ast::Crate, crate_name: &str ) -> Result { - // FIXME: rustdoc passes &[] instead of &krate.attrs here + // FIXME: rustdoc passes `&[]` instead of `&krate.attrs` here. 
let outputs = util::build_output_filenames( &compiler.input, &compiler.output_dir, @@ -741,8 +741,7 @@ pub fn prepare_outputs( if sess.opts.will_create_output_file() { if output_contains_path(&output_paths, input_path) { sess.err(&format!( - "the input file \"{}\" would be overwritten by the generated \ - executable", + "the input file \"{}\" would be overwritten by the generated executable", input_path.display() )); return Err(ErrorReported); @@ -750,7 +749,7 @@ pub fn prepare_outputs( if let Some(dir_path) = output_conflicts_with_dir(&output_paths) { sess.err(&format!( "the generated executable for the input file \"{}\" conflicts with the \ - existing directory \"{}\"", + existing directory \"{}\"", input_path.display(), dir_path.display() )); @@ -767,7 +766,7 @@ pub fn prepare_outputs( if !only_dep_info { if let Some(ref dir) = compiler.output_dir { if fs::create_dir_all(dir).is_err() { - sess.err("failed to find or create the directory specified by --out-dir"); + sess.err("failed to find or create the directory specified by `--out-dir`"); return Err(ErrorReported); } } @@ -842,8 +841,8 @@ pub fn create_global_ctxt( let global_ctxt: Option>; let arenas = AllArenas::new(); - // Construct the HIR map - let hir_map = time(sess, "indexing hir", || { + // Construct the HIR map. + let hir_map = time(sess, "indexing HIR", || { hir::map::map_crate(sess, cstore, &mut hir_forest, &defs) }); @@ -870,14 +869,14 @@ pub fn create_global_ctxt( query_result_on_disk_cache, &crate_name, tx, - &outputs + &outputs, ); global_ctxt = Some(gcx); let gcx = global_ctxt.as_ref().unwrap(); ty::tls::enter_global(gcx, |tcx| { - // Do some initialization of the DepGraph that can only be done with the + // Do some initialization of the `DepGraph` that can only be done with the // tcx available. 
time(tcx.sess, "dep graph tcx init", || rustc_incremental::dep_graph_tcx_init(tcx)); }); @@ -924,7 +923,7 @@ fn analysis(tcx: TyCtxt<'_>, cnum: CrateNum) -> Result<()> { }); }); - // passes are timed inside typeck + // Passes are timed inside typeck. typeck::check_crate(tcx)?; time(sess, "misc checking 2", || { @@ -938,13 +937,13 @@ fn analysis(tcx: TyCtxt<'_>, cnum: CrateNum) -> Result<()> { }, { time(sess, "liveness checking + intrinsic checking", || { par_iter(&tcx.hir().krate().modules).for_each(|(&module, _)| { - // this must run before MIR dump, because + // This must run before MIR dump, because // "not all control paths return a value" is reported here. // - // maybe move the check to a MIR pass? - tcx.ensure().check_mod_liveness(tcx.hir().local_def_id_from_node_id(module)); - - tcx.ensure().check_mod_intrinsics(tcx.hir().local_def_id_from_node_id(module)); + // FIXME: maybe move the check to a MIR pass? + let def_id = tcx.hir().local_def_id_from_node_id(module); + tcx.ensure().check_mod_liveness(def_id); + tcx.ensure().check_mod_intrinsics(def_id); }); }); }); @@ -960,7 +959,7 @@ fn analysis(tcx: TyCtxt<'_>, cnum: CrateNum) -> Result<()> { tcx.par_body_owners(|def_id| tcx.ensure().mir_borrowck(def_id)); }); - time(sess, "dumping chalk-like clauses", || { + time(sess, "dumping Chalk-like clauses", || { rustc_traits::lowering::dump_program_clauses(tcx); }); @@ -1073,8 +1072,7 @@ fn encode_and_write_metadata( (metadata, need_metadata_module) } -/// Runs the codegen backend, after which the AST and analysis can -/// be discarded. +/// Runs the codegen backend, after which the AST and analysis can be discarded. 
pub fn start_codegen<'tcx>( codegen_backend: &dyn CodegenBackend, tcx: TyCtxt<'tcx>, diff --git a/src/librustc_interface/queries.rs b/src/librustc_interface/queries.rs index ed50dadb6009..9d2d00ef2cea 100644 --- a/src/librustc_interface/queries.rs +++ b/src/librustc_interface/queries.rs @@ -15,7 +15,7 @@ use std::any::Any; use std::mem; use syntax::{self, ast}; -/// Represent the result of a query. +/// Represents the result of a query. /// This result can be stolen with the `take` method and returned with the `give` method. pub struct Query { result: RefCell>>, @@ -47,14 +47,14 @@ impl Query { *result = Some(Ok(value)); } - /// Borrows the query result using the RefCell. Panics if the result is stolen. + /// Borrows the query result using the `RefCell`. Panics if the result is stolen. pub fn peek(&self) -> Ref<'_, T> { Ref::map(self.result.borrow(), |r| { r.as_ref().unwrap().as_ref().expect("missing query result") }) } - /// Mutably borrows the query result using the RefCell. Panics if the result is stolen. + /// Mutably borrows the query result using the `RefCell`. Panics if the result is stolen. pub fn peek_mut(&self) -> RefMut<'_, T> { RefMut::map(self.result.borrow_mut(), |r| { r.as_mut().unwrap().as_mut().expect("missing query result") @@ -71,7 +71,7 @@ impl Default for Query { } #[derive(Default)] -pub(crate) struct Queries { +pub struct Queries { dep_graph_future: Query>, parse: Query, crate_name: Query, @@ -226,7 +226,8 @@ impl Compiler { expansion.resolutions.steal(), outputs, tx, - &crate_name)) + &crate_name, + )) }) } @@ -237,7 +238,7 @@ impl Compiler { self.global_ctxt()?.peek_mut().enter(|tcx| { tcx.analysis(LOCAL_CRATE).ok(); - // Don't do code generation if there were any errors + // Don't do code generation if there were any errors. 
self.session().compile_status()?; Ok(passes::start_codegen( @@ -278,12 +279,12 @@ impl Compiler { self.global_ctxt()?; - // Drop AST after creating GlobalCtxt to free memory + // Drop AST after creating `GlobalCtxt` to free memory. mem::drop(self.expansion()?.take()); self.ongoing_codegen()?; - // Drop GlobalCtxt after starting codegen to free memory + // Drop `GlobalCtxt` after starting codegen to free memory. mem::drop(self.global_ctxt()?.take()); self.link().map(|_| ()) diff --git a/src/librustc_interface/util.rs b/src/librustc_interface/util.rs index 9eaf7b77716f..d9fcf6618b8d 100644 --- a/src/librustc_interface/util.rs +++ b/src/librustc_interface/util.rs @@ -565,7 +565,7 @@ pub fn collect_crate_types(session: &Session, attrs: &[ast::Attribute]) -> Vec { // "-" as input file will cause the parser to read from stdin so we - // have to make up a name + // have to make up a name. // We want to toss everything after the final '.' let dirpath = (*odir).as_ref().cloned().unwrap_or_default(); - // If a crate name is present, we use it as the link name + // If a crate name is present, we use it as the link name. let stem = sess.opts .crate_name .clone() @@ -641,12 +641,12 @@ pub fn build_output_filenames( None } else { if !sess.opts.cg.extra_filename.is_empty() { - sess.warn("ignoring -C extra-filename flag due to -o flag"); + sess.warn("ignoring `-C extra-filename` flag due to `-o` flag"); } Some(out_file.clone()) }; if *odir != None { - sess.warn("ignoring --out-dir flag due to -o flag"); + sess.warn("ignoring `--out-dir` flag due to `-o` flag"); } OutputFilenames { @@ -665,7 +665,7 @@ pub fn build_output_filenames( } } -// Note: Also used by librustdoc, see PR #43348. Consider moving this struct elsewhere. +// Note: Also used by librustdoc; see PR #43348. Consider moving this struct elsewhere. // // FIXME: Currently the `everybody_loops` transformation is not applied to: // * `const fn`, due to issue #43636 that `loop` is not supported for const evaluation. 
We are diff --git a/src/librustc_lint/builtin.rs b/src/librustc_lint/builtin.rs index aecf5c5b52db..75b8b38f6dc8 100644 --- a/src/librustc_lint/builtin.rs +++ b/src/librustc_lint/builtin.rs @@ -763,7 +763,7 @@ impl EarlyLintPass for UnusedDocComment { ast::StmtKind::Local(..) => ("statements", false), ast::StmtKind::Item(..) => ("inner items", false), ast::StmtKind::Mac(..) => ("macro expansions", true), - // expressions will be reported by `check_expr`. + // Expressions will be reported by `check_expr`. ast::StmtKind::Semi(..) | ast::StmtKind::Expr(..) => return, }; diff --git a/src/librustc_metadata/decoder.rs b/src/librustc_metadata/decoder.rs index 75d726170472..fb833a0621fe 100644 --- a/src/librustc_metadata/decoder.rs +++ b/src/librustc_metadata/decoder.rs @@ -1,4 +1,4 @@ -// Decoding metadata from a single crate's metadata +//! This module handles decoding metadata for a single crate. use crate::cstore::{self, CrateMetadata, MetadataBlob, NativeLibrary, ForeignModule}; use crate::schema::*; @@ -178,7 +178,7 @@ impl<'a, 'tcx> DecodeContext<'a, 'tcx> { impl<'a, 'tcx> TyDecoder<'tcx> for DecodeContext<'a, 'tcx> { #[inline] fn tcx(&self) -> TyCtxt<'tcx> { - self.tcx.expect("missing TyCtxt in DecodeContext") + self.tcx.expect("missing `TyCtxt` in `DecodeContext`") } #[inline] @@ -284,7 +284,7 @@ impl<'a, 'tcx> SpecializedDecoder for DecodeContext<'a, 'tcx if let Some(alloc_decoding_session) = self.alloc_decoding_session { alloc_decoding_session.decode_alloc_id(self) } else { - bug!("Attempting to decode interpret::AllocId without CrateMetadata") + bug!("attempting to decode `interpret::AllocId` without `CrateMetadata`"); } } } @@ -306,13 +306,13 @@ impl<'a, 'tcx> SpecializedDecoder for DecodeContext<'a, 'tcx> { let sess = if let Some(sess) = self.sess { sess } else { - bug!("Cannot decode Span without Session.") + bug!("cannot decode `Span` without `Session`"); }; let imported_source_files = self.cdata().imported_source_files(&sess.source_map()); let source_file 
= { // Optimize for the case that most spans within a translated item - // originate from the same source_file. + // originate from the same `SourceFile`. let last_source_file = &imported_source_files[self.last_source_file_index]; if lo >= last_source_file.original_start_pos && @@ -340,7 +340,7 @@ impl<'a, 'tcx> SpecializedDecoder for DecodeContext<'a, 'tcx> { debug_assert!(lo >= source_file.original_start_pos && lo <= source_file.original_end_pos); - // Make sure we correctly filtered out invalid spans during encoding + // Make sure we correctly filtered out invalid spans during encoding. debug_assert!(hi >= source_file.original_start_pos && hi <= source_file.original_end_pos); @@ -362,6 +362,7 @@ impl SpecializedDecoder for DecodeContext<'_, '_> { } impl<'a, 'tcx> SpecializedDecoder for DecodeContext<'a, 'tcx> { + #[inline] fn specialized_decode(&mut self) -> Result { Fingerprint::decode_opaque(&mut self.opaque) } @@ -575,7 +576,7 @@ impl<'a, 'tcx> CrateMetadata { false, self.def_path_table.def_path_hash(item_id)) }, - _ => bug!("def-index does not refer to trait or trait alias"), + _ => bug!("`DefIndex` does not refer to trait or trait alias"), } } @@ -630,7 +631,7 @@ impl<'a, 'tcx> CrateMetadata { EntryKind::Enum(repr) => (ty::AdtKind::Enum, repr), EntryKind::Struct(_, repr) => (ty::AdtKind::Struct, repr), EntryKind::Union(_, repr) => (ty::AdtKind::Union, repr), - _ => bug!("get_adt_def called on a non-ADT {:?}", did), + _ => bug!("`get_adt_def` called on a non-ADT {:?}", did), }; let variants = if let ty::AdtKind::Enum = kind { @@ -671,7 +672,7 @@ impl<'a, 'tcx> CrateMetadata { let super_predicates = match self.entry(item_id).kind { EntryKind::Trait(data) => data.decode(self).super_predicates, EntryKind::TraitAlias(data) => data.decode(self).super_predicates, - _ => bug!("def-index does not refer to trait or trait alias"), + _ => bug!("`DefIndex` does not refer to trait or trait alias"), }; super_predicates.decode((self, tcx)) @@ -975,7 +976,7 @@ impl<'a, 
'tcx> CrateMetadata { EntryKind::AssocOpaqueTy(container) => { (ty::AssocKind::OpaqueTy, container, false) } - _ => bug!("cannot get associated-item of `{:?}`", def_key) + _ => bug!("cannot get associated item of `{:?}`", def_key) }; ty::AssocItem { @@ -1261,25 +1262,25 @@ impl<'a, 'tcx> CrateMetadata { self.def_path_table.def_path_hash(index) } - /// Imports the source_map from an external crate into the source_map of the crate + /// Imports the `SourceMap` from an external crate into the `SourceMap` of the crate /// currently being compiled (the "local crate"). /// /// The import algorithm works analogous to how AST items are inlined from an /// external crate's metadata: - /// For every SourceFile in the external source_map an 'inline' copy is created in the - /// local source_map. The correspondence relation between external and local + /// For every `SourceFile` in the external `SourceMap` an 'inline' copy is created in the + /// local `SourceMap`. The correspondence relation between external and local /// SourceFiles is recorded in the `ImportedSourceFile` objects returned from this /// function. When an item from an external crate is later inlined into this /// crate, this correspondence information is used to translate the span /// information of the inlined item so that it refers the correct positions in - /// the local source_map (see `>`). + /// the local `SourceMap` (see `>`). /// /// The import algorithm in the function below will reuse SourceFiles already - /// existing in the local source_map. For example, even if the SourceFile of some + /// existing in the local `SourceMap`. For example, even if the SourceFile of some /// source file of libstd gets imported many times, there will only ever be - /// one SourceFile object for the corresponding file in the local source_map. + /// one SourceFile object for the corresponding file in the local `SourceMap`. 
/// - /// Note that imported SourceFiles do not actually contain the source code of the + /// Note that imported `SourceFile`s do not actually contain the source code of the /// file they represent, just information about length, line breaks, and /// multibyte characters. This information is enough to generate valid debuginfo /// for items inlined from other crates. @@ -1296,7 +1297,7 @@ impl<'a, 'tcx> CrateMetadata { } } - // Lock the source_map_import_info to ensure this only happens once + // Lock the `source_map_import_info` to ensure this only happens once. let mut source_map_import_info = self.source_map_import_info.borrow_mut(); if !source_map_import_info.is_empty() { @@ -1307,18 +1308,20 @@ impl<'a, 'tcx> CrateMetadata { let external_source_map = self.root.source_map.decode(self); let imported_source_files = external_source_map.map(|source_file_to_import| { - // We can't reuse an existing SourceFile, so allocate a new one + // We can't reuse an existing `SourceFile`, so allocate a new one // containing the information we need. - let syntax_pos::SourceFile { name, - name_was_remapped, - src_hash, - start_pos, - end_pos, - mut lines, - mut multibyte_chars, - mut non_narrow_chars, - name_hash, - .. } = source_file_to_import; + let syntax_pos::SourceFile { + name, + name_was_remapped, + src_hash, + start_pos, + end_pos, + mut lines, + mut multibyte_chars, + mut non_narrow_chars, + name_hash, + .. + } = source_file_to_import; let source_length = (end_pos - start_pos).to_usize(); @@ -1326,7 +1329,7 @@ impl<'a, 'tcx> CrateMetadata { // position into frame of reference local to file. // `SourceMap::new_imported_source_file()` will then translate those // coordinates to their new global frame of reference when the - // offset of the SourceFile is known. + // offset of the `SourceFile` is known. 
for pos in &mut lines { *pos = *pos - start_pos; } @@ -1337,15 +1340,17 @@ impl<'a, 'tcx> CrateMetadata { *swc = *swc - start_pos; } - let local_version = local_source_map.new_imported_source_file(name, - name_was_remapped, - self.cnum.as_u32(), - src_hash, - name_hash, - source_length, - lines, - multibyte_chars, - non_narrow_chars); + let local_version = local_source_map.new_imported_source_file( + name, + name_was_remapped, + self.cnum.as_u32(), + src_hash, + name_hash, + source_length, + lines, + multibyte_chars, + non_narrow_chars, + ); debug!("CrateMetaData::imported_source_files alloc \ source_file {:?} original (start_pos {:?} end_pos {:?}) \ translated (start_pos {:?} end_pos {:?})", @@ -1362,7 +1367,7 @@ impl<'a, 'tcx> CrateMetadata { *source_map_import_info = imported_source_files; drop(source_map_import_info); - // This shouldn't borrow twice, but there is no way to downgrade RefMut to Ref. + // This shouldn't borrow twice, but there is no way to downgrade `RefMut` to `Ref`. self.source_map_import_info.borrow() } } diff --git a/src/librustc_metadata/encoder.rs b/src/librustc_metadata/encoder.rs index f430f01542ef..4029c736a556 100644 --- a/src/librustc_metadata/encoder.rs +++ b/src/librustc_metadata/encoder.rs @@ -1,3 +1,5 @@ +//! This module handles encoding metadata for a single crate. + use crate::index::Index; use crate::schema::*; @@ -55,7 +57,7 @@ pub struct EncodeContext<'tcx> { interpret_allocs: FxHashMap, interpret_allocs_inverse: Vec, - // This is used to speed up Span encoding. + // This is used to speed up `Span` encoding. source_file_cache: Lrc, } @@ -148,7 +150,7 @@ impl<'tcx> SpecializedEncoder for EncodeContext<'tcx> { let span = span.data(); - // The Span infrastructure should make sure that this invariant holds: + // The `Span` infrastructure should make sure that this invariant holds. 
debug_assert!(span.lo <= span.hi); if !self.source_file_cache.contains(span.lo) { @@ -213,6 +215,7 @@ impl<'tcx> SpecializedEncoder for EncodeContext<'tcx> { } impl<'tcx> SpecializedEncoder> for EncodeContext<'tcx> { + #[inline] fn specialized_encode(&mut self, predicates: &ty::GenericPredicates<'tcx>) -> Result<(), Self::Error> { @@ -221,12 +224,14 @@ impl<'tcx> SpecializedEncoder> for EncodeContext<'tc } impl<'tcx> SpecializedEncoder for EncodeContext<'tcx> { + #[inline] fn specialized_encode(&mut self, f: &Fingerprint) -> Result<(), Self::Error> { f.encode_opaque(&mut self.opaque) } } impl<'tcx, T: Encodable> SpecializedEncoder> for EncodeContext<'tcx> { + #[inline] fn specialized_encode(&mut self, _: &mir::ClearCrossCrate) -> Result<(), Self::Error> { @@ -312,6 +317,7 @@ impl<'tcx> EncodeContext<'tcx> { /// arguments. This `record` function will call `op` to generate /// the `Entry` (which may point to other encoded information) /// and will then record the `Lazy` for use in the index. + // // FIXME(eddyb) remove this. pub fn record(&mut self, id: DefId, @@ -361,7 +367,7 @@ impl<'tcx> EncodeContext<'tcx> { // cloning the whole map in the process). _ if source_file.name_was_remapped => source_file.clone(), - // Otherwise expand all paths to absolute paths because + // Otherwise, expand all paths to absolute paths because // any relative paths are potentially relative to a // wrong directory. FileName::Real(ref name) => { @@ -409,19 +415,19 @@ impl<'tcx> EncodeContext<'tcx> { let diagnostic_items = self.encode_diagnostic_items(); let diagnostic_item_bytes = self.position() - i; - // Encode the native libraries used + // Encode the native libraries used. i = self.position(); let native_libraries = self.encode_native_libraries(); let native_lib_bytes = self.position() - i; let foreign_modules = self.encode_foreign_modules(); - // Encode source_map + // Encode `source_map`. 
i = self.position(); let source_map = self.encode_source_map(); let source_map_bytes = self.position() - i; - // Encode DefPathTable + // Encode `def_path_table`. i = self.position(); let def_path_table = self.encode_def_path_table(); let def_path_table_bytes = self.position() - i; @@ -444,19 +450,19 @@ impl<'tcx> EncodeContext<'tcx> { self.encode_info_for_items(); let item_bytes = self.position() - i; - // Encode the allocation index + // Encode the allocation index. let interpret_alloc_index = { let mut interpret_alloc_index = Vec::new(); let mut n = 0; - trace!("beginning to encode alloc ids"); + trace!("beginning to encode alloc IDs"); loop { let new_n = self.interpret_allocs_inverse.len(); - // if we have found new ids, serialize those, too + // If we have found new Ids, serialize those too. if n == new_n { - // otherwise, abort + // Otherwise, abort. break; } - trace!("encoding {} further alloc ids", new_n - n); + trace!("encoding {} further alloc IDs", new_n - n); for idx in n..new_n { let id = self.interpret_allocs_inverse[idx]; let pos = self.position() as u32; diff --git a/src/librustc_metadata/schema.rs b/src/librustc_metadata/schema.rs index 1a5f0e17ba7c..6bd91c921c2b 100644 --- a/src/librustc_metadata/schema.rs +++ b/src/librustc_metadata/schema.rs @@ -42,8 +42,8 @@ pub const METADATA_VERSION: u8 = 4; pub const METADATA_HEADER: &[u8; 12] = &[0, 0, 0, 0, b'r', b'u', b's', b't', 0, 0, 0, METADATA_VERSION]; -/// Additional metadata for a `Lazy` where `T` may not be `Sized`, -/// e.g. for `Lazy<[T]>`, this is the length (count of `T` values). +/// Additional metadata for a `Lazy` where `T` may not be `Sized`. +/// E.g., for `Lazy<[T]>`, this is the length (count of `T` values). pub trait LazyMeta { type Meta: Copy + 'static; @@ -69,7 +69,7 @@ impl LazyMeta for [T] { } } -/// A value of type T referred to by its absolute position +/// A value of type `T` referred to by its absolute position /// in the metadata, and which can be decoded lazily. 
/// /// Metadata is effective a tree, encoded in post-order, @@ -99,8 +99,9 @@ impl LazyMeta for [T] { // FIXME(#59875) the `Meta` parameter only exists to dodge // invariance wrt `T` (coming from the `meta: T::Meta` field). pub struct Lazy::Meta> - where T: ?Sized + LazyMeta, - Meta: 'static + Copy, +where + T: ?Sized + LazyMeta, + Meta: 'static + Copy, { pub position: usize, pub meta: Meta, @@ -270,7 +271,7 @@ pub enum EntryKind<'tcx> { TraitAlias(Lazy>), } -/// Additional data for EntryKind::Const and EntryKind::AssocConst +/// Additional data for `EntryKind::Const` and `EntryKind::AssocConst`. #[derive(Clone, Copy, RustcEncodable, RustcDecodable)] pub struct ConstQualif { pub mir: u8, @@ -304,10 +305,9 @@ pub struct FnData<'tcx> { pub struct VariantData<'tcx> { pub ctor_kind: CtorKind, pub discr: ty::VariantDiscr, - /// If this is unit or tuple-variant/struct, then this is the index of the ctor id. + /// If this is unit or tuple-variant/struct, then this is the index of the ctor ID. pub ctor: Option, - /// If this is a tuple struct or variant - /// ctor, this is its "function" signature. + /// If this is a tuple struct or variant ctor, this is its "function" signature. pub ctor_sig: Option>>, } diff --git a/src/librustc_passes/rvalue_promotion.rs b/src/librustc_passes/rvalue_promotion.rs index f2461f701613..9087203546a7 100644 --- a/src/librustc_passes/rvalue_promotion.rs +++ b/src/librustc_passes/rvalue_promotion.rs @@ -225,7 +225,7 @@ impl<'a, 'tcx> CheckCrateVisitor<'a, 'tcx> { } NotPromotable } - // Item statements are allowed + // Item statements are allowed. hir::StmtKind::Item(..) 
=> Promotable, hir::StmtKind::Expr(ref box_expr) | hir::StmtKind::Semi(ref box_expr) => { diff --git a/src/librustc_privacy/lib.rs b/src/librustc_privacy/lib.rs index 1e61f78c357d..511ca3ee285f 100644 --- a/src/librustc_privacy/lib.rs +++ b/src/librustc_privacy/lib.rs @@ -181,7 +181,10 @@ where ty::ExistentialPredicate::Trait(trait_ref) => trait_ref, ty::ExistentialPredicate::Projection(proj) => proj.trait_ref(tcx), ty::ExistentialPredicate::AutoTrait(def_id) => - ty::ExistentialTraitRef { def_id, substs: InternalSubsts::empty() }, + ty::ExistentialTraitRef { + def_id, + substs: InternalSubsts::empty(), + }, }; let ty::ExistentialTraitRef { def_id, substs: _ } = trait_ref; if self.def_id_visitor.visit_def_id(def_id, "trait", &trait_ref) { diff --git a/src/librustc_resolve/late.rs b/src/librustc_resolve/late.rs index aae283b74523..d5f5e2955ad5 100644 --- a/src/librustc_resolve/late.rs +++ b/src/librustc_resolve/late.rs @@ -434,7 +434,7 @@ impl<'a, 'tcx> Visitor<'tcx> for LateResolutionVisitor<'a, '_> { visit::walk_fn_ret_ty(this, &declaration.output); - // Resolve the function body, potentially inside the body of an async closure + // Resolve the function body, potentially inside the body of an async closure. match fn_kind { FnKind::ItemFn(.., body) | FnKind::Method(.., body) => this.visit_block(body), @@ -1138,10 +1138,10 @@ impl<'a, 'b> LateResolutionVisitor<'a, '_> { self.resolve_pattern_top(&local.pat, PatternSource::Let); } - /// build a map from pattern identifiers to binding-info's. + /// Builds a map from pattern identifiers to `BindingInfo`s. /// this is done hygienically. This could arise for a macro - /// that expands into an or-pattern where one 'x' was from the - /// user and one 'x' came from the macro. + /// that expands into an or-pattern where one `x` was from the + /// user and one `x` came from the macro. 
fn binding_mode_map(&mut self, pat: &Pat) -> BindingMap { let mut binding_map = FxHashMap::default(); @@ -1244,7 +1244,7 @@ impl<'a, 'b> LateResolutionVisitor<'a, '_> { maps } - /// Check the consistency of the outermost or-patterns. + /// Checks the consistency of the outermost or-patterns. fn check_consistent_bindings_top(&mut self, pat: &Pat) { pat.walk(&mut |pat| match pat.node { PatKind::Or(ref ps) => { @@ -1263,7 +1263,7 @@ impl<'a, 'b> LateResolutionVisitor<'a, '_> { }); } - /// Arising from `source`, resolve a top level pattern. + /// Arising from `source`, resolves a top level pattern. fn resolve_pattern_top(&mut self, pat: &Pat, pat_src: PatternSource) { let mut bindings = smallvec![(PatBoundCtx::Product, Default::default())]; self.resolve_pattern(pat, pat_src, &mut bindings); @@ -1281,7 +1281,7 @@ impl<'a, 'b> LateResolutionVisitor<'a, '_> { visit::walk_pat(self, pat); } - /// Resolve bindings in a pattern. This is a helper to `resolve_pattern`. + /// Resolves bindings in a pattern. This is a helper to `resolve_pattern`. /// /// ### `bindings` /// @@ -1603,7 +1603,7 @@ impl<'a, 'b> LateResolutionVisitor<'a, '_> { if let Some(LexicalScopeBinding::Res(res)) = binding { res != Res::Err } else { false } } - // Resolve in alternative namespaces if resolution in the primary namespace fails. + // Resolves in alternative namespaces if resolution in the primary namespace fails. fn resolve_qpath_anywhere( &mut self, id: NodeId, @@ -1946,17 +1946,17 @@ impl<'a, 'b> LateResolutionVisitor<'a, '_> { // closure are detected as upvars rather than normal closure arg usages. ExprKind::Closure(_, IsAsync::Async { .. }, _, ref fn_decl, ref body, _span) => { self.with_rib(ValueNS, NormalRibKind, |this| { - // Resolve arguments: + // Resolve arguments. this.resolve_params(&fn_decl.inputs); // No need to resolve return type -- // the outer closure return type is `FunctionRetTy::Default`. 
// Now resolve the inner closure { - // No need to resolve arguments: the inner closure has none. - // Resolve the return type: + // No need to resolve arguments; the inner closure has none. + // Resolve the return type. visit::walk_fn_ret_ty(this, &fn_decl.output); - // Resolve the body + // Resolve the body. this.visit_expr(body); } }); diff --git a/src/librustc_resolve/lib.rs b/src/librustc_resolve/lib.rs index 6e131c04722a..dc822e71a746 100644 --- a/src/librustc_resolve/lib.rs +++ b/src/librustc_resolve/lib.rs @@ -118,7 +118,7 @@ enum Scope<'a> { /// but not for late resolution yet. enum ScopeSet { /// All scopes with the given namespace. - All(Namespace, /*is_import*/ bool), + All(Namespace, /* is_import */ bool), /// Crate root, then extern prelude (used for mixed 2015-2018 mode in macros). AbsolutePath(Namespace), /// All scopes with macro namespace and the given macro kind restriction. diff --git a/src/librustc_resolve/macros.rs b/src/librustc_resolve/macros.rs index 87439440463b..093d5e75063b 100644 --- a/src/librustc_resolve/macros.rs +++ b/src/librustc_resolve/macros.rs @@ -35,7 +35,7 @@ type Res = def::Res; #[derive(Debug)] pub struct LegacyBinding<'a> { crate binding: &'a NameBinding<'a>, - /// Legacy scope into which the `macro_rules` item was planted. + /// The legacy scope into which the `macro_rules` item was planted. crate parent_legacy_scope: LegacyScope<'a>, crate ident: Ident, } @@ -246,7 +246,7 @@ impl<'a> base::Resolver for Resolver<'a> { } impl<'a> Resolver<'a> { - /// Resolve macro path with error reporting and recovery. + /// Resolves macro path with error reporting and recovery. fn smart_resolve_macro_path( &mut self, path: &ast::Path, @@ -376,7 +376,7 @@ impl<'a> Resolver<'a> { res.map(|res| (self.get_macro(res), res)) } - // Resolve an identifier in lexical scope. + // Resolves an identifier in lexical scope. 
// This is a variation of `fn resolve_ident_in_lexical_scope` that can be run during // expansion and import resolution (perhaps they can be merged in the future). // The function is used for resolving initial segments of macro paths (e.g., `foo` in @@ -820,7 +820,7 @@ impl<'a> Resolver<'a> { } } - /// Compile the macro into a `SyntaxExtension` and possibly replace it with a pre-defined + /// Compiles the macro into a `SyntaxExtension` and possibly replace it with a pre-defined /// extension partially or entirely for built-in macros and legacy plugin macros. crate fn compile_macro(&mut self, item: &ast::Item, edition: Edition) -> SyntaxExtension { let mut result = macro_rules::compile( diff --git a/src/librustc_target/spec/abi.rs b/src/librustc_target/spec/abi.rs index 909f0fc53fce..c992b21737fc 100644 --- a/src/librustc_target/spec/abi.rs +++ b/src/librustc_target/spec/abi.rs @@ -5,8 +5,8 @@ mod tests; #[derive(PartialEq, Eq, PartialOrd, Ord, Hash, RustcEncodable, RustcDecodable, Clone, Copy, Debug)] pub enum Abi { - // N.B., this ordering MUST match the AbiDatas array below. - // (This is ensured by the test indices_are_correct().) + // N.B., this ordering MUST match the `AbiData`s array below. + // (This is ensured by the test `indices_are_correct()`.) // Single platform ABIs Cdecl, @@ -22,7 +22,7 @@ pub enum Abi { X86Interrupt, AmdGpuKernel, - // Multiplatform / generic ABIs + // Multi-platform / generic ABIs Rust, C, System, diff --git a/src/librustc_typeck/astconv.rs b/src/librustc_typeck/astconv.rs index 9e52eae88ef4..142fd3a8fde4 100644 --- a/src/librustc_typeck/astconv.rs +++ b/src/librustc_typeck/astconv.rs @@ -1,4 +1,4 @@ -//! Conversion from AST representation of types to the `ty.rs` representation. +//! Conversion from AST representation of types to the typesystem representation. //! The main routine here is `ast_ty_to_ty()`; each use is parameterized by an //! instance of `AstConv`. 
@@ -61,8 +61,7 @@ pub trait AstConv<'tcx> { &self, param: Option<&ty::GenericParamDef>, span: Span, - ) - -> Option>; + ) -> Option>; /// Returns the type to use when a type is omitted. fn ty_infer(&self, param: Option<&ty::GenericParamDef>, span: Span) -> Ty<'tcx>; @@ -275,7 +274,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { } /// Checks that the correct number of generic arguments have been provided. - /// This is used both for datatypes and function calls. + /// This is used both for data types and function calls. fn check_generic_arg_count( tcx: TyCtxt<'_>, span: Span, @@ -603,9 +602,8 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { def_id: DefId, generic_args: &'a hir::GenericArgs, infer_args: bool, - self_ty: Option>) - -> (SubstsRef<'tcx>, Vec>, Option>) - { + self_ty: Option> + ) -> (SubstsRef<'tcx>, Vec>, Option>) { // If the type is parameterized by this region, then replace this // region with the current anon region binding (in other words, // whatever & would get replaced with). @@ -772,8 +770,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { pub fn instantiate_mono_trait_ref(&self, trait_ref: &hir::TraitRef, self_ty: Ty<'tcx> - ) -> ty::TraitRef<'tcx> - { + ) -> ty::TraitRef<'tcx> { self.prohibit_generics(trait_ref.path.segments.split_last().unwrap().1); self.ast_path_to_mono_trait_ref(trait_ref.path.span, @@ -782,7 +779,8 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { trait_ref.path.segments.last().unwrap()) } - /// The given trait-ref must actually be a trait. + /// Instantiates the path for the given trait reference. + /// `trait_ref` must actually refer to a trait. 
pub(super) fn instantiate_poly_trait_ref_inner(&self, trait_ref: &hir::TraitRef, span: Span, @@ -863,9 +861,8 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { span: Span, trait_def_id: DefId, self_ty: Ty<'tcx>, - trait_segment: &hir::PathSegment - ) -> ty::TraitRef<'tcx> - { + trait_segment: &hir::PathSegment, + ) -> ty::TraitRef<'tcx> { let (substs, assoc_bindings, _) = self.create_substs_for_ast_trait_ref(span, trait_def_id, @@ -1197,9 +1194,8 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { fn ast_path_to_ty(&self, span: Span, did: DefId, - item_segment: &hir::PathSegment) - -> Ty<'tcx> - { + item_segment: &hir::PathSegment + ) -> Ty<'tcx> { let substs = self.ast_path_substs_for_ty(span, did, item_segment); self.normalize_ty( span, @@ -1212,7 +1208,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { fn trait_ref_to_existential(&self, trait_ref: ty::TraitRef<'tcx>) -> ty::ExistentialTraitRef<'tcx> { if trait_ref.self_ty() != self.tcx().types.trait_object_dummy_self { - bug!("trait_ref_to_existential called on {:?} with non-dummy Self", trait_ref); + bug!("trait_ref_to_existential called on {:?} with non-dummy self type", trait_ref); } ty::ExistentialTraitRef::erase_self_ty(self.tcx(), trait_ref) } @@ -1220,9 +1216,8 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { fn conv_object_ty_poly_trait_ref(&self, span: Span, trait_bounds: &[hir::PolyTraitRef], - lifetime: &hir::Lifetime) - -> Ty<'tcx> - { + lifetime: &hir::Lifetime, + ) -> Ty<'tcx> { let tcx = self.tcx(); let mut bounds = Bounds::default(); @@ -1583,7 +1578,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { return Ok(bound); } - // Create a type from a path to an associated type. + // Creates a type from a path to an associated type. // For a path `A::B::C::D`, `qself_ty` and `qself_def` are the type and def for `A::B::C` // and item_segment is the path segment for `D`. We return a type and a def for // the whole path. 
@@ -1963,7 +1958,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { path_segs } - // Check a type `Path` and convert it to a `Ty`. + // Checks a type `Path` and convert it to a `Ty`. pub fn res_to_ty(&self, opt_self_ty: Option>, path: &hir::Path, @@ -2144,7 +2139,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { } hir::TyKind::Infer => { // Infer also appears as the type of arguments or return - // values in a ExprKind::Closure, or as + // values in a `ExprKind::Closure`, or as // the type of local variables. Both of these cases are // handled specially and will not descend into this routine. self.ty_infer(None, ast_ty.span) diff --git a/src/librustc_typeck/check/coercion.rs b/src/librustc_typeck/check/coercion.rs index f2e1a6e29d6f..f608cbfb190f 100644 --- a/src/librustc_typeck/check/coercion.rs +++ b/src/librustc_typeck/check/coercion.rs @@ -895,7 +895,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // Configure a Coerce instance to compute the LUB. // We don't allow two-phase borrows on any autorefs this creates since we // probably aren't processing function arguments here and even if we were, - // they're going to get autorefed again anyway and we can apply 2-phase borrows + // they're going to get autorefed again anyway and we can apply two-phase borrows // at that time. let mut coerce = Coerce::new(self, cause.clone(), AllowTwoPhase::No); coerce.use_lub = true; diff --git a/src/librustc_typeck/check/intrinsic.rs b/src/librustc_typeck/check/intrinsic.rs index dfbf8bcd0f60..b70a2c400bfe 100644 --- a/src/librustc_typeck/check/intrinsic.rs +++ b/src/librustc_typeck/check/intrinsic.rs @@ -1,16 +1,15 @@ //! Type-checking for the rust-intrinsic and platform-intrinsic //! intrinsics that the compiler exposes. 
+use crate::require_same_types; + +use rustc::hir; use rustc::traits::{ObligationCause, ObligationCauseCode}; use rustc::ty::{self, TyCtxt, Ty}; use rustc::ty::subst::Subst; -use crate::require_same_types; - use rustc_target::spec::abi::Abi; use syntax::symbol::InternedString; -use rustc::hir; - use std::iter; fn equate_intrinsic_type<'tcx>( @@ -77,8 +76,8 @@ pub fn intrisic_operation_unsafety(intrinsic: &str) -> hir::Unsafety { } } -/// Remember to add all intrinsics here, in librustc_codegen_llvm/intrinsic.rs, -/// and in libcore/intrinsics.rs +// Remember to add all intrinsics here, in `librustc_codegen_llvm/intrinsic.rs`, +// and in `libcore/intrinsics.rs`. pub fn check_intrinsic_type(tcx: TyCtxt<'_>, it: &hir::ForeignItem) { let param = |n| tcx.mk_ty_param(n, InternedString::intern(&format!("P{}", n))); let name = it.ident.as_str(); @@ -97,9 +96,9 @@ pub fn check_intrinsic_type(tcx: TyCtxt<'_>, it: &hir::ForeignItem) { let (n_tps, inputs, output, unsafety) = if name.starts_with("atomic_") { let split : Vec<&str> = name.split('_').collect(); - assert!(split.len() >= 2, "Atomic intrinsic in an incorrect format"); + assert!(split.len() >= 2, "atomic intrinsic in an incorrect format"); - //We only care about the operation here + // We only care about the operation here. 
let (n_tps, inputs, output) = match split[1] { "cxchg" | "cxchgweak" => (1, vec![tcx.mk_mut_ptr(param(0)), param(0), @@ -355,14 +354,14 @@ pub fn check_intrinsic_type(tcx: TyCtxt<'_>, it: &hir::ForeignItem) { let va_list_ptr_ty = tcx.mk_mut_ptr(va_list_ty); (0, vec![va_list_ptr_ty, va_list_ref_ty], tcx.mk_unit()) } - None => bug!("`va_list` language item needed for C-variadic intrinsics") + None => bug!("`va_list` language item needed for C-variadic intrinsics"), } } "va_arg" => { match mk_va_list_ty(hir::MutMutable) { Some((va_list_ref_ty, _)) => (1, vec![va_list_ref_ty], param(0)), - None => bug!("`va_list` language item needed for C-variadic intrinsics") + None => bug!("`va_list` language item needed for C-variadic intrinsics"), } } @@ -384,7 +383,7 @@ pub fn check_intrinsic_type(tcx: TyCtxt<'_>, it: &hir::ForeignItem) { equate_intrinsic_type(tcx, it, n_tps, Abi::RustIntrinsic, unsafety, inputs, output) } -/// Type-check `extern "platform-intrinsic" { ... }` functions. +/// Type-checks `extern "platform-intrinsic" { ... }` functions. pub fn check_platform_intrinsic_type(tcx: TyCtxt<'_>, it: &hir::ForeignItem) { let param = |n| { let name = InternedString::intern(&format!("P{}", n)); diff --git a/src/librustc_typeck/check/mod.rs b/src/librustc_typeck/check/mod.rs index d8d01624f1d5..cc5b3ea18058 100644 --- a/src/librustc_typeck/check/mod.rs +++ b/src/librustc_typeck/check/mod.rs @@ -3645,7 +3645,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // Don't do all the complex logic below for `DeclItem`. match stmt.node { hir::StmtKind::Item(..) => return, - hir::StmtKind::Local(..) | hir::StmtKind::Expr(..) | hir::StmtKind::Semi(..) => {} + hir::StmtKind::Local(..) | + hir::StmtKind::Expr(..) | + hir::StmtKind::Semi(..) 
=> {} } self.warn_if_unreachable(stmt.hir_id, stmt.span, "statement"); diff --git a/src/librustc_typeck/check/pat.rs b/src/librustc_typeck/check/pat.rs index 8502b89de146..6ca63c063626 100644 --- a/src/librustc_typeck/check/pat.rs +++ b/src/librustc_typeck/check/pat.rs @@ -411,7 +411,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { hir::BindingAnnotation::Unannotated => def_bm, _ => BindingMode::convert(ba), }; - // ...and store it in a side table: + // ... and store it in a side table: self.inh .tables .borrow_mut() diff --git a/src/librustc_typeck/collect.rs b/src/librustc_typeck/collect.rs index 312a598af02b..88a10df29cc6 100644 --- a/src/librustc_typeck/collect.rs +++ b/src/librustc_typeck/collect.rs @@ -2367,7 +2367,7 @@ fn is_foreign_item(tcx: TyCtxt<'_>, def_id: DefId) -> bool { match tcx.hir().get_if_local(def_id) { Some(Node::ForeignItem(..)) => true, Some(_) => false, - _ => bug!("is_foreign_item applied to non-local def-id {:?}", def_id), + _ => bug!("`is_foreign_item` applied to non-local `DefId` {:?}", def_id), } } @@ -2380,7 +2380,7 @@ fn static_mutability(tcx: TyCtxt<'_>, def_id: DefId) -> Option node: hir::ForeignItemKind::Static(_, mutbl), .. })) => Some(mutbl), Some(_) => None, - _ => bug!("static_mutability applied to non-local def-id {:?}", def_id), + _ => bug!("`static_mutability` applied to non-local `DefId` {:?}", def_id), } } @@ -2482,14 +2482,14 @@ fn from_target_feature( fn linkage_by_name(tcx: TyCtxt<'_>, def_id: DefId, name: &str) -> Linkage { use rustc::mir::mono::Linkage::*; - // Use the names from src/llvm/docs/LangRef.rst here. Most types are only + // Use the names from `src/llvm/docs/LangRef.rst` here. Most types are only // applicable to variable declarations and may not really make sense for // Rust code in the first place but whitelist them anyway and trust that - // the user knows what s/he's doing. Who knows, unanticipated use cases + // the user knows what they're doing. Who knows, unanticipated use cases // may pop up in the future. 
// - // ghost, dllimport, dllexport and linkonce_odr_autohide are not supported - // and don't have to be, LLVM treats them as no-ops. + // `ghost`, `dllimport`, `dllexport` and `linkonce_odr_autohide` are not + // supported and don't have to be; LLVM treats them as no-ops. match name { "appending" => Appending, "available_externally" => AvailableExternally, @@ -2676,9 +2676,9 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, id: DefId) -> CodegenFnAttrs { } }); - // If a function uses #[target_feature] it can't be inlined into general + // If a function uses `#[target_feature]` it can't be inlined into general // purpose functions as they wouldn't have the right target features - // enabled. For that reason we also forbid #[inline(always)] as it can't be + // enabled. For that reason we also forbid `#[inline(always)]` as it can't be // respected. if codegen_fn_attrs.target_features.len() > 0 { if codegen_fn_attrs.inline == InlineAttr::Always { @@ -2696,7 +2696,7 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, id: DefId) -> CodegenFnAttrs { // sense that they're preserved through all our LTO passes and only // strippable by the linker. // - // Additionally weak lang items have predetermined symbol names. + // Additionally, weak lang items have predetermined symbol names. if tcx.is_weak_lang_item(id) { codegen_fn_attrs.flags |= CodegenFnAttrFlags::RUSTC_STD_INTERNAL_SYMBOL; } @@ -2705,7 +2705,7 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, id: DefId) -> CodegenFnAttrs { codegen_fn_attrs.link_name = Some(name); } - // Internal symbols to the standard library all have no_mangle semantics in + // Internal symbols to the standard library all have `no_mangle` semantics in // that they have defined symbol names present in the function name. This // also applies to weak symbols where they all have known symbol names. 
if codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::RUSTC_STD_INTERNAL_SYMBOL) { diff --git a/src/librustc_typeck/lib.rs b/src/librustc_typeck/lib.rs index 959483e4439f..2b880f199992 100644 --- a/src/librustc_typeck/lib.rs +++ b/src/librustc_typeck/lib.rs @@ -188,11 +188,11 @@ fn check_main_fn_ty(tcx: TyCtxt<'_>, main_def_id: DefId) { let actual = tcx.fn_sig(main_def_id); let expected_return_type = if tcx.lang_items().termination().is_some() { - // we take the return type of the given main function, the real check is done - // in `check_fn` + // We take the return type of the given main function, the real check is done + // in `check_fn`. actual.output().skip_binder() } else { - // standard () main return type + // Standard `()` main return type. tcx.mk_unit() }; diff --git a/src/librustdoc/clean/mod.rs b/src/librustdoc/clean/mod.rs index 38eff43bad2b..b9eaf5a9f6ef 100644 --- a/src/librustdoc/clean/mod.rs +++ b/src/librustdoc/clean/mod.rs @@ -66,7 +66,7 @@ thread_local!(pub static MAX_DEF_ID: RefCell> = Defau const FN_OUTPUT_NAME: &'static str = "Output"; -// extract the stability index for a node from tcx, if possible +// Extracts the stability index for a node from tcx, if possible. fn get_stability(cx: &DocContext<'_>, def_id: DefId) -> Option { cx.tcx.lookup_stability(def_id).clean(cx) } @@ -150,7 +150,7 @@ pub fn krate(mut cx: &mut DocContext<'_>) -> Crate { let mut externs = Vec::new(); for &cnum in cx.tcx.crates().iter() { externs.push((cnum, cnum.clean(cx))); - // Analyze doc-reachability for extern items + // Analyze doc-reachability for extern items. LibEmbargoVisitor::new(&mut cx).visit_lib(cnum); } externs.sort_by(|&(a, _), &(b, _)| a.cmp(&b)); @@ -355,9 +355,9 @@ impl Clean for CrateNum { /// directly to the AST's concept of an item; it's a strict superset. #[derive(Clone)] pub struct Item { - /// Stringified span + /// The stringified span. pub source: Span, - /// Not everything has a name. E.g., impls + /// The name of the item. 
(Not everything has a name, e.g., impls.) pub name: Option, pub attrs: Attributes, pub inner: ItemEnum, @@ -589,9 +589,9 @@ impl Clean for doctree::Module<'_> { String::new() }; - // maintain a stack of mod ids, for doc comment path resolution - // but we also need to resolve the module's own docs based on whether its docs were written - // inside or outside the module, so check for that + // Maintain a stack of mod IDs, for doc comment path resolution. + // However, we also need to resolve the module's own docs based on whether its docs were + // written inside or outside the module, so check for that. let attrs = self.attrs.clean(cx); let mut items: Vec = vec![]; @@ -613,17 +613,17 @@ impl Clean for doctree::Module<'_> { items.extend(self.proc_macros.iter().map(|x| x.clean(cx))); items.extend(self.trait_aliases.iter().map(|x| x.clean(cx))); - // determine if we should display the inner contents or + // Determine if we should display the inner contents or // the outer `mod` item for the source code. let whence = { let cm = cx.sess().source_map(); let outer = cm.lookup_char_pos(self.where_outer.lo()); let inner = cm.lookup_char_pos(self.where_inner.lo()); if outer.file.start_pos == inner.file.start_pos { - // mod foo { ... } + // `mod foo { ... }` self.where_outer } else { - // mod foo; (and a separate SourceFile for the contents) + // `mod foo` (separate `SourceFile` for contents) self.where_inner } }; @@ -694,7 +694,7 @@ impl AttributesExt for [ast::Attribute] { } pub trait NestedAttributesExt { - /// Returns `true` if the attribute list contains a specific `Word` + /// Returns `true` if the attribute list contains a specific `Word`. fn has_word(self, word: Symbol) -> bool; } @@ -768,7 +768,7 @@ pub struct Attributes { pub other_attrs: Vec, pub cfg: Option>, pub span: Option, - /// map from Rust paths to resolved defs and potential URL fragments + /// Map from Rust paths to resolved defs and potential URL fragments. 
pub links: Vec<(String, Option, Option)>, pub inner_docs: bool, } @@ -806,9 +806,9 @@ impl Attributes { mi.meta_item_list().and_then(|list| { for meta in list { if meta.check_name(sym::include) { - // the actual compiled `#[doc(include="filename")]` gets expanded to + // The actual compiled `#[doc(include="filename")]` gets expanded to // `#[doc(include(file="filename", contents="file contents")]` so we need to - // look for that instead + // look for that instead. return meta.meta_item_list().and_then(|list| { let mut filename: Option = None; let mut contents: Option = None; @@ -864,7 +864,7 @@ impl Attributes { if attr.check_name(sym::doc) { if let Some(mi) = attr.meta() { if let Some(value) = mi.value_str() { - // Extracted #[doc = "..."] + // Extracted `#[doc = "..."]`. let value = value.to_string(); let line = doc_line; doc_line += value.lines().count(); @@ -880,7 +880,7 @@ impl Attributes { } return None; } else if let Some(cfg_mi) = Attributes::extract_cfg(&mi) { - // Extracted #[doc(cfg(...))] + // Extracted `#[doc(cfg(...))]`. match Cfg::parse(cfg_mi) { Ok(new_cfg) => cfg &= new_cfg, Err(e) => diagnostic.span_err(e.span, e.msg), @@ -901,8 +901,8 @@ impl Attributes { }) }).collect(); - // treat #[target_feature(enable = "feat")] attributes as if they were - // #[doc(cfg(target_feature = "feat"))] attributes as well + // Treat `#[target_feature(enable = "feat")]` attributes as if they were + // `#[doc(cfg(target_feature = "feat"))]` attributes as well. for attr in attrs.lists(sym::target_feature) { if attr.check_name(sym::enable) { if let Some(feat) = attr.value_str() { @@ -947,9 +947,9 @@ impl Attributes { } } - /// Gets links as a vector + /// Gets the links as a vector. /// - /// Cache must be populated before call + /// The cache must be populated before the call. 
pub fn links(&self, krate: &CrateNum) -> Vec<(String, String)> { use crate::html::format::href; @@ -985,7 +985,7 @@ impl Attributes { &fragment[..tail], &fragment[tail..]))) } else { - panic!("This isn't a primitive?!"); + panic!("this isn't a primitive?!"); } } } @@ -1115,7 +1115,7 @@ fn external_generic_args( }).collect(); match trait_did { - // Attempt to sugar an external path like Fn<(A, B,), C> to Fn(A, B) -> C + // Attempt to sugar an external path like `Fn<(A, B,), C>` to `Fn(A, B) -> C`. Some(did) if cx.tcx.lang_items().fn_trait_kind(did).is_some() => { assert!(ty_sty.is_some()); let inputs = match ty_sty { @@ -1136,8 +1136,8 @@ fn external_generic_args( } } -// trait_did should be set to a trait's DefId if called on a TraitRef, in order to sugar -// from Fn<(A, B,), C> to Fn(A, B) -> C +// `trait_did` should be set to a trait's `DefId` if called on a `TraitRef`, in order to desugar +// from `Fn<(A, B,), C>` to `Fn(A, B) -> C`. fn external_path(cx: &DocContext<'_>, name: &str, trait_did: Option, has_self: bool, bindings: Vec, substs: SubstsRef<'_>) -> Path { Path { @@ -1159,7 +1159,7 @@ impl<'a, 'tcx> Clean for (&'a ty::TraitRef<'tcx>, Vec debug!("ty::TraitRef\n subst: {:?}\n", trait_ref.substs); - // collect any late bound regions + // Collect any late-bound regions. let mut late_bounds = vec![]; for ty_s in trait_ref.input_types().skip(1) { if let ty::Tuple(ts) = ty_s.sty { @@ -1588,7 +1588,7 @@ impl Clean for hir::GenericParam { } } -// maybe use a Generic enum and use Vec? +// FIXME: maybe use a `Generic` enum and use `Vec` instead? #[derive(Clone, PartialEq, Eq, Debug, Default, Hash)] pub struct Generics { pub params: Vec, @@ -1636,7 +1636,7 @@ impl Clean for hir::Generics { where_predicates: self.where_clause.predicates.clean(cx), }; - // Some duplicates are generated for ?Sized bounds between type params and where + // Some duplicates are generated for `?Sized` bounds between type params and where- // predicates. 
The point in here is to move the bounds definitions from type params // to where predicates when such cases occur. for where_pred in &mut generics.where_predicates { @@ -1677,7 +1677,7 @@ impl<'a, 'tcx> Clean for (&'a ty::Generics, let mut impl_trait = BTreeMap::>::default(); // Bounds in the type_params and lifetimes fields are repeated in the - // predicates field (see rustc_typeck::collect::ty_generics), so remove + // predicates field (see [`rustc_typeck::collect::ty_generics`]), so remove // them. let stripped_typarams = gens.params.iter() .filter_map(|param| match param.kind { @@ -1696,7 +1696,7 @@ impl<'a, 'tcx> Clean for (&'a ty::Generics, ty::GenericParamDefKind::Const { .. } => None, }).collect::>(); - // param index -> [(DefId of trait, associated type name, type)] + // param index -> [(`DefId` of trait, associated type name, type)] let mut impl_trait_proj = FxHashMap::)>>::default(); @@ -1787,13 +1787,13 @@ impl<'a, 'tcx> Clean for (&'a ty::Generics, .flat_map(|p| p.clean(cx)) .collect::>(); - // Type parameters and have a Sized bound by default unless removed with - // ?Sized. Scan through the predicates and mark any type parameter with + // Type parameters and have a `Sized` bound by default unless removed with + // `?Sized`. Scan through the predicates and mark any type parameter with // a Sized bound, removing the bounds as we find them. // // Note that associated types also have a sized bound by default, but we // don't actually know the set of associated types right here so that's - // handled in cleaning associated types + // handled in cleaning associated types. let mut sized_params = FxHashSet::default(); where_predicates.retain(|pred| { match *pred { @@ -1809,8 +1809,8 @@ impl<'a, 'tcx> Clean for (&'a ty::Generics, } }); - // Run through the type parameters again and insert a ?Sized - // unbound for any we didn't find to be Sized. 
+ // Run through the type parameters again and insert a `?Sized` + // unbound for any we didn't find to be `Sized`. for tp in &stripped_typarams { if !sized_params.contains(&tp.name) { where_predicates.push(WP::BoundPredicate { @@ -1821,7 +1821,7 @@ impl<'a, 'tcx> Clean for (&'a ty::Generics, } // It would be nice to collect all of the bounds on a type and recombine - // them if possible, to avoid e.g., `where T: Foo, T: Bar, T: Sized, T: 'a` + // them if possible, to avoid, e.g., `where T: Foo, T: Bar, T: Sized, T: 'a` // and instead see `where T: Foo + Bar + Sized + 'a` Generics { @@ -1838,9 +1838,9 @@ impl<'a, 'tcx> Clean for (&'a ty::Generics, } } -/// The point of this function is to replace bounds with types. +/// Replaces bounds with types. /// -/// i.e. `[T, U]` when you have the following bounds: `T: Display, U: Option` will return +/// E.g., given `[T, U]` and the bounds `T: Display, U: Option`, this will return /// `[Display, Option]` (we just returns the list of the types, we don't care about the /// wrapped types in here). fn get_real_types( @@ -1867,7 +1867,7 @@ fn get_real_types( GenericBound::TraitBound(ref poly_trait, _) => { for x in poly_trait.generic_params.iter() { if !x.is_type() { - continue + continue; } if let Some(ty) = x.get_type(cx) { let adds = get_real_types(generics, &ty, cx, recurse + 1); @@ -1915,9 +1915,9 @@ fn get_real_types( res } -/// Return the full list of types when bounds have been resolved. +/// Returns the full list of types when bounds have been resolved. /// -/// i.e. `fn foo>(x: u32, y: B)` will return +/// E.g., `fn foo>(x: u32, y: B)` will return /// `[u32, Display, Option]`.
pub fn get_all_types( generics: &Generics, @@ -4536,7 +4536,7 @@ impl From for SimpleBound { t.generic_params, mod_) } - _ => panic!("Unexpected bound {:?}", bound), + _ => panic!("unexpected bound {:?}", bound), } } } diff --git a/src/librustdoc/core.rs b/src/librustdoc/core.rs index 9cfcad427196..f019733efde3 100644 --- a/src/librustdoc/core.rs +++ b/src/librustdoc/core.rs @@ -182,7 +182,7 @@ pub fn new_handler(error_format: ErrorOutputType, ui_testing: bool, ) -> errors::Handler { // rustdoc doesn't override (or allow to override) anything from this that is relevant here, so - // stick to the defaults + // stick to the defaults. let sessopts = Options::default(); let emitter: Box = match error_format { ErrorOutputType::HumanReadable(kind) => { diff --git a/src/librustdoc/passes/collect_intra_doc_links.rs b/src/librustdoc/passes/collect_intra_doc_links.rs index d6073cdc1e11..e646dd4c9de3 100644 --- a/src/librustdoc/passes/collect_intra_doc_links.rs +++ b/src/librustdoc/passes/collect_intra_doc_links.rs @@ -74,7 +74,7 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> { }; if let Ok((_, res)) = result { - let res = res.map_id(|_| panic!("unexpected node_id")); + let res = res.map_id(|_| panic!("unexpected `NodeId`")); // In case this is a trait item, skip the // early return and try looking for the trait. 
let value = match res { @@ -137,7 +137,7 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> { if let Res::Err = ty_res { return Err(()); } - let ty_res = ty_res.map_id(|_| panic!("unexpected node_id")); + let ty_res = ty_res.map_id(|_| panic!("unexpected `NodeId`")); match ty_res { Res::Def(DefKind::Struct, did) | Res::Def(DefKind::Union, did) diff --git a/src/librustdoc/test.rs b/src/librustdoc/test.rs index 000d2843adce..77a4ff7ed82b 100644 --- a/src/librustdoc/test.rs +++ b/src/librustdoc/test.rs @@ -415,8 +415,8 @@ pub fn make_test(s: &str, if !found_extern_crate { if let ast::ItemKind::ExternCrate(original) = item.node { - // This code will never be reached if `cratename` is none because - // `found_extern_crate` is initialized to `true` if it is none. + // This code will never be reached if `cratename` is `None` because + // `found_extern_crate` is initialized to `true` if it is `None`. let cratename = cratename.unwrap(); match original { diff --git a/src/libserialize/serialize.rs b/src/libserialize/serialize.rs index a5f7b4898ae0..5c77d348ee05 100644 --- a/src/libserialize/serialize.rs +++ b/src/libserialize/serialize.rs @@ -311,7 +311,7 @@ impl Encodable for usize { } impl Decodable for usize { - fn decode(d: &mut D) -> Result { + fn decode(d: &mut D) -> Result { d.read_usize() } } @@ -323,7 +323,7 @@ impl Encodable for u8 { } impl Decodable for u8 { - fn decode(d: &mut D) -> Result { + fn decode(d: &mut D) -> Result { d.read_u8() } } @@ -335,7 +335,7 @@ impl Encodable for u16 { } impl Decodable for u16 { - fn decode(d: &mut D) -> Result { + fn decode(d: &mut D) -> Result { d.read_u16() } } @@ -347,7 +347,7 @@ impl Encodable for u32 { } impl Decodable for u32 { - fn decode(d: &mut D) -> Result { + fn decode(d: &mut D) -> Result { d.read_u32() } } @@ -371,7 +371,7 @@ impl Encodable for u64 { } impl Decodable for u64 { - fn decode(d: &mut D) -> Result { + fn decode(d: &mut D) -> Result { d.read_u64() } } @@ -383,7 +383,7 @@ impl Encodable for u128 { } impl Decodable 
for u128 { - fn decode(d: &mut D) -> Result { + fn decode(d: &mut D) -> Result { d.read_u128() } } @@ -395,7 +395,7 @@ impl Encodable for isize { } impl Decodable for isize { - fn decode(d: &mut D) -> Result { + fn decode(d: &mut D) -> Result { d.read_isize() } } @@ -407,7 +407,7 @@ impl Encodable for i8 { } impl Decodable for i8 { - fn decode(d: &mut D) -> Result { + fn decode(d: &mut D) -> Result { d.read_i8() } } @@ -419,7 +419,7 @@ impl Encodable for i16 { } impl Decodable for i16 { - fn decode(d: &mut D) -> Result { + fn decode(d: &mut D) -> Result { d.read_i16() } } @@ -431,7 +431,7 @@ impl Encodable for i32 { } impl Decodable for i32 { - fn decode(d: &mut D) -> Result { + fn decode(d: &mut D) -> Result { d.read_i32() } } @@ -443,7 +443,7 @@ impl Encodable for i64 { } impl Decodable for i64 { - fn decode(d: &mut D) -> Result { + fn decode(d: &mut D) -> Result { d.read_i64() } } @@ -455,7 +455,7 @@ impl Encodable for i128 { } impl Decodable for i128 { - fn decode(d: &mut D) -> Result { + fn decode(d: &mut D) -> Result { d.read_i128() } } @@ -473,7 +473,7 @@ impl Encodable for String { } impl Decodable for String { - fn decode(d: &mut D) -> Result { + fn decode(d: &mut D) -> Result { Ok(d.read_str()?.into_owned()) } } @@ -485,7 +485,7 @@ impl Encodable for f32 { } impl Decodable for f32 { - fn decode(d: &mut D) -> Result { + fn decode(d: &mut D) -> Result { d.read_f32() } } @@ -497,7 +497,7 @@ impl Encodable for f64 { } impl Decodable for f64 { - fn decode(d: &mut D) -> Result { + fn decode(d: &mut D) -> Result { d.read_f64() } } @@ -509,7 +509,7 @@ impl Encodable for bool { } impl Decodable for bool { - fn decode(d: &mut D) -> Result { + fn decode(d: &mut D) -> Result { d.read_bool() } } @@ -521,7 +521,7 @@ impl Encodable for char { } impl Decodable for char { - fn decode(d: &mut D) -> Result { + fn decode(d: &mut D) -> Result { d.read_char() } } @@ -533,7 +533,7 @@ impl Encodable for () { } impl Decodable for () { - fn decode(d: &mut D) -> Result<(), 
D::Error> { + fn decode(d: &mut D) -> Result { d.read_nil() } } @@ -545,7 +545,7 @@ impl Encodable for PhantomData { } impl Decodable for PhantomData { - fn decode(d: &mut D) -> Result, D::Error> { + fn decode(d: &mut D) -> Result { d.read_nil()?; Ok(PhantomData) } @@ -563,32 +563,32 @@ impl Encodable for Box { } } -impl< T: Decodable> Decodable for Box { - fn decode(d: &mut D) -> Result, D::Error> { +impl Decodable for Box { + fn decode(d: &mut D) -> Result { Ok(box Decodable::decode(d)?) } } -impl< T: Decodable> Decodable for Box<[T]> { - fn decode(d: &mut D) -> Result, D::Error> { +impl Decodable for Box<[T]> { + fn decode(d: &mut D) -> Result { let v: Vec = Decodable::decode(d)?; Ok(v.into_boxed_slice()) } } -impl Encodable for Rc { +impl Encodable for Rc { fn encode(&self, s: &mut S) -> Result<(), S::Error> { (**self).encode(s) } } -impl Decodable for Rc { - fn decode(d: &mut D) -> Result, D::Error> { +impl Decodable for Rc { + fn decode(d: &mut D) -> Result { Ok(Rc::new(Decodable::decode(d)?)) } } -impl Encodable for [T] { +impl Encodable for [T] { fn encode(&self, s: &mut S) -> Result<(), S::Error> { s.emit_seq(self.len(), |s| { for (i, e) in self.iter().enumerate() { @@ -599,7 +599,7 @@ impl Encodable for [T] { } } -impl Encodable for Vec { +impl Encodable for Vec { fn encode(&self, s: &mut S) -> Result<(), S::Error> { s.emit_seq(self.len(), |s| { for (i, e) in self.iter().enumerate() { @@ -610,8 +610,8 @@ impl Encodable for Vec { } } -impl Decodable for Vec { - fn decode(d: &mut D) -> Result, D::Error> { +impl Decodable for Vec { + fn decode(d: &mut D) -> Result { d.read_seq(|d, len| { let mut v = Vec::with_capacity(len); for i in 0..len { @@ -622,7 +622,7 @@ impl Decodable for Vec { } } -impl<'a, T:Encodable> Encodable for Cow<'a, [T]> where [T]: ToOwned> { +impl<'a, T: Encodable> Encodable for Cow<'a, [T]> where [T]: ToOwned> { fn encode(&self, s: &mut S) -> Result<(), S::Error> { s.emit_seq(self.len(), |s| { for (i, e) in self.iter().enumerate() { @@ 
-633,10 +633,10 @@ impl<'a, T:Encodable> Encodable for Cow<'a, [T]> where [T]: ToOwned Decodable for Cow<'static, [T]> +impl Decodable for Cow<'static, [T]> where [T]: ToOwned> { - fn decode(d: &mut D) -> Result, D::Error> { + fn decode(d: &mut D) -> Result { d.read_seq(|d, len| { let mut v = Vec::with_capacity(len); for i in 0..len { @@ -647,8 +647,7 @@ impl Decodable for Cow<'static, [T]> } } - -impl Encodable for Option { +impl Encodable for Option { fn encode(&self, s: &mut S) -> Result<(), S::Error> { s.emit_option(|s| { match *self { @@ -659,8 +658,8 @@ impl Encodable for Option { } } -impl Decodable for Option { - fn decode(d: &mut D) -> Result, D::Error> { +impl Decodable for Option { + fn decode(d: &mut D) -> Result { d.read_option(|d, b| { if b { Ok(Some(Decodable::decode(d)?)) @@ -694,8 +693,8 @@ impl Encodable for Result { } } -impl Decodable for Result { - fn decode(d: &mut D) -> Result, D::Error> { +impl Decodable for Result { + fn decode(d: &mut D) -> Result { d.read_enum("Result", |d| { d.read_enum_variant(&["Ok", "Err"], |d, disr| { match disr { @@ -739,9 +738,9 @@ macro_rules! count { macro_rules! tuple { () => (); ( $($name:ident,)+ ) => ( - impl<$($name:Decodable),+> Decodable for ($($name,)+) { + impl<$($name: Decodable),+> Decodable for ($($name,)+) { #[allow(non_snake_case)] - fn decode(d: &mut D) -> Result<($($name,)+), D::Error> { + fn decode(d: &mut D) -> Result { let len: usize = count!($($name)+); d.read_tuple(len, |d| { let mut i = 0; @@ -752,7 +751,7 @@ macro_rules! 
tuple { }) } } - impl<$($name:Encodable),+> Encodable for ($($name,)+) { + impl<$($name: Encodable),+> Encodable for ($($name,)+) { #[allow(non_snake_case)] fn encode(&self, s: &mut S) -> Result<(), S::Error> { let ($(ref $name,)+) = *self; @@ -784,7 +783,7 @@ impl Encodable for path::PathBuf { } impl Decodable for path::PathBuf { - fn decode(d: &mut D) -> Result { + fn decode(d: &mut D) -> Result { let bytes: String = Decodable::decode(d)?; Ok(path::PathBuf::from(bytes)) } @@ -797,7 +796,7 @@ impl Encodable for Cell { } impl Decodable for Cell { - fn decode(d: &mut D) -> Result, D::Error> { + fn decode(d: &mut D) -> Result { Ok(Cell::new(Decodable::decode(d)?)) } } @@ -814,25 +813,25 @@ impl Encodable for RefCell { } impl Decodable for RefCell { - fn decode(d: &mut D) -> Result, D::Error> { + fn decode(d: &mut D) -> Result { Ok(RefCell::new(Decodable::decode(d)?)) } } -impl Encodable for Arc { +impl Encodable for Arc { fn encode(&self, s: &mut S) -> Result<(), S::Error> { (**self).encode(s) } } -impl Decodable for Arc { - fn decode(d: &mut D) -> Result, D::Error> { +impl Decodable for Arc { + fn decode(d: &mut D) -> Result { Ok(Arc::new(Decodable::decode(d)?)) } } // ___________________________________________________________________________ -// Specialization-based interface for multi-dispatch Encodable/Decodable. +// Specialization-based interface for multi-dispatch `Encodable` / `Decodable`. /// Implement this trait on your `{Encodable,Decodable}::Error` types /// to override the default panic behavior for missing specializations.
@@ -909,7 +908,7 @@ pub trait UseSpecializedDecodable: Sized { } impl Decodable for T { - default fn decode(d: &mut D) -> Result { + default fn decode(d: &mut D) -> Result { D::specialized_decode(d) } } diff --git a/src/libsyntax_ext/deriving/decodable.rs b/src/libsyntax_ext/deriving/decodable.rs index d3d604b72521..01a5c42a8777 100644 --- a/src/libsyntax_ext/deriving/decodable.rs +++ b/src/libsyntax_ext/deriving/decodable.rs @@ -1,4 +1,5 @@ -//! The compiler code necessary for `#[derive(RustcDecodable)]`. See encodable.rs for more. +//! Expands the `#[derive(RustcDecodable)]` attribute, which implements the `Decodable` trait for +//! types. See `encodable.rs` for the converse `RustcEncodable` attribute. use crate::deriving::pathvec_std; use crate::deriving::generic::*; @@ -18,42 +19,54 @@ pub fn expand_deriving_rustc_decodable(cx: &mut ExtCtxt<'_>, push: &mut dyn FnMut(Annotatable)) { let krate = "rustc_serialize"; let typaram = "__D"; + let decodable_path = Path::new_(vec![krate, "Decodable"], None, vec![], PathKind::Global); let trait_def = TraitDef { span, attributes: Vec::new(), - path: Path::new_(vec![krate, "Decodable"], None, vec![], PathKind::Global), - additional_bounds: Vec::new(), + path: decodable_path.clone(), + additional_bounds: vec![Ty::Literal(decodable_path)], generics: LifetimeBounds::empty(), is_unsafe: false, supports_unions: false, methods: vec![MethodDef { - name: "decode", - generics: LifetimeBounds { - lifetimes: Vec::new(), - bounds: vec![(typaram, - vec![Path::new_(vec![krate, "Decoder"], - None, - vec![], - PathKind::Global)])], - }, - explicit_self: None, - args: vec![(Ptr(Box::new(Literal(Path::new_local(typaram))), - Borrowed(None, Mutability::Mutable)), "d")], - ret_ty: - Literal(Path::new_(pathvec_std!(cx, result::Result), - None, - vec![Box::new(Self_), Box::new(Literal(Path::new_( - vec![typaram, "Error"], None, vec![], PathKind::Local - )))], - PathKind::Std)), - attributes: Vec::new(), - is_unsafe: false, - 
unify_fieldless_variants: false, - combine_substructure: combine_substructure(Box::new(|a, b, c| { - decodable_substructure(a, b, c, krate) - })), - }], + name: "decode", + generics: LifetimeBounds { + lifetimes: Vec::new(), + bounds: vec![ + ( + typaram, + vec![Path::new_(vec![krate, "Decoder"], None, vec![], PathKind::Global)], + ) + ], + }, + explicit_self: None, + args: vec![ + ( + Ptr(Box::new(Literal(Path::new_local(typaram))), + Borrowed(None, Mutability::Mutable)), "d", + ) + ], + ret_ty: Literal( + Path::new_( + pathvec_std!(cx, result::Result), + None, + vec![ + Box::new(Self_), + Box::new(Literal( + Path::new_(vec![typaram, "Error"], None, vec![], PathKind::Local) + )), + ], + PathKind::Std + ) + ), + attributes: Vec::new(), + is_unsafe: false, + unify_fieldless_variants: false, + combine_substructure: combine_substructure(Box::new(|a, b, c| { + decodable_substructure(a, b, c, krate) + })), + }], associated_types: Vec::new(), }; @@ -68,7 +81,7 @@ fn decodable_substructure(cx: &mut ExtCtxt<'_>, let decoder = substr.nonself_args[0].clone(); let recurse = vec![cx.ident_of(krate), cx.ident_of("Decodable"), cx.ident_of("decode")]; let exprdecode = cx.expr_path(cx.path_global(trait_span, recurse)); - // throw an underscore in front to suppress unused variable warnings + // Throw an underscore in front to suppress unused variable warnings. 
let blkarg = cx.ident_of("_d"); let blkdecoder = cx.expr_ident(trait_span, blkarg); @@ -83,21 +96,31 @@ fn decodable_substructure(cx: &mut ExtCtxt<'_>, let path = cx.path_ident(trait_span, substr.type_ident); let result = decode_static_fields(cx, trait_span, path, summary, |cx, span, name, field| { - cx.expr_try(span, - cx.expr_method_call(span, - blkdecoder.clone(), - read_struct_field, - vec![cx.expr_str(span, name), - cx.expr_usize(span, field), - exprdecode.clone()])) + cx.expr_try( + span, + cx.expr_method_call( + span, + blkdecoder.clone(), + read_struct_field, + vec![ + cx.expr_str(span, name), + cx.expr_usize(span, field), + exprdecode.clone(), + ] + ), + ) }); let result = cx.expr_ok(trait_span, result); - cx.expr_method_call(trait_span, - decoder, - cx.ident_of("read_struct"), - vec![cx.expr_str(trait_span, substr.type_ident.name), - cx.expr_usize(trait_span, nfields), - cx.lambda1(trait_span, result, blkarg)]) + cx.expr_method_call( + trait_span, + decoder, + cx.ident_of("read_struct"), + vec![ + cx.expr_str(trait_span, substr.type_ident.name), + cx.expr_usize(trait_span, nfields), + cx.lambda1(trait_span, result, blkarg), + ], + ) } StaticEnum(_, ref fields) => { let variant = cx.ident_of("i"); @@ -112,11 +135,15 @@ fn decodable_substructure(cx: &mut ExtCtxt<'_>, let path = cx.path(trait_span, vec![substr.type_ident, ident]); let decoded = decode_static_fields(cx, v_span, path, parts, |cx, span, _, field| { let idx = cx.expr_usize(span, field); - cx.expr_try(span, - cx.expr_method_call(span, - blkdecoder.clone(), - rvariant_arg, - vec![idx, exprdecode.clone()])) + cx.expr_try( + span, + cx.expr_method_call( + span, + blkdecoder.clone(), + rvariant_arg, + vec![idx, exprdecode.clone()], + ), + ) }); arms.push(cx.arm(v_span, cx.pat_lit(v_span, cx.expr_usize(v_span, i)), decoded)); @@ -124,23 +151,29 @@ fn decodable_substructure(cx: &mut ExtCtxt<'_>, arms.push(cx.arm_unreachable(trait_span)); - let result = - cx.expr_ok(trait_span, - 
cx.expr_match(trait_span, cx.expr_ident(trait_span, variant), arms)); + let result = cx.expr_ok( + trait_span, cx.expr_match(trait_span, cx.expr_ident(trait_span, variant), arms) + ); let lambda = cx.lambda(trait_span, vec![blkarg, variant], result); let variant_vec = cx.expr_vec(trait_span, variants); let variant_vec = cx.expr_addr_of(trait_span, variant_vec); - let result = cx.expr_method_call(trait_span, - blkdecoder, - cx.ident_of("read_enum_variant"), - vec![variant_vec, lambda]); - cx.expr_method_call(trait_span, - decoder, - cx.ident_of("read_enum"), - vec![cx.expr_str(trait_span, substr.type_ident.name), - cx.lambda1(trait_span, result, blkarg)]) + let result = cx.expr_method_call( + trait_span, + blkdecoder, + cx.ident_of("read_enum_variant"), + vec![variant_vec, lambda], + ); + cx.expr_method_call( + trait_span, + decoder, + cx.ident_of("read_enum"), + vec![ + cx.expr_str(trait_span, substr.type_ident.name), + cx.lambda1(trait_span, result, blkarg), + ], + ) } - _ => cx.bug("expected StaticEnum or StaticStruct in derive(Decodable)"), + _ => cx.bug("expected `StaticEnum` or `StaticStruct` in `derive(Decodable)`"), }; } @@ -172,7 +205,7 @@ fn decode_static_fields(cx: &mut ExtCtxt<'_>, } } Named(ref fields) => { - // use the field's span to get nicer error messages. + // Use the field's span to get nicer error messages. let fields = fields.iter() .enumerate() .map(|(i, &(ident, span))| { diff --git a/src/libsyntax_ext/deriving/encodable.rs b/src/libsyntax_ext/deriving/encodable.rs index 8b18fb25e90c..e2cb92c070f5 100644 --- a/src/libsyntax_ext/deriving/encodable.rs +++ b/src/libsyntax_ext/deriving/encodable.rs @@ -1,6 +1,7 @@ -//! The compiler code necessary to implement the `#[derive(RustcEncodable)]` -//! (and `RustcDecodable`, in `decodable.rs`) extension. The idea here is that -//! type-defining items may be tagged with +//! Expands the `#[derive(RustcEncodable)]` attribute, which implements the `Encodable` trait for +//! types.
See `decodable.rs` for the converse `RustcDecodable` attribute. +//! +//! The idea here is that type-defining items may be tagged with //! `#[derive(RustcEncodable, RustcDecodable)]`. //! //! For example, a type like: @@ -102,12 +103,13 @@ pub fn expand_deriving_rustc_encodable(cx: &mut ExtCtxt<'_>, push: &mut dyn FnMut(Annotatable)) { let krate = "rustc_serialize"; let typaram = "__S"; + let encodable_path = Path::new_(vec![krate, "Encodable"], None, vec![], PathKind::Global); let trait_def = TraitDef { span, attributes: Vec::new(), - path: Path::new_(vec![krate, "Encodable"], None, vec![], PathKind::Global), - additional_bounds: Vec::new(), + path: encodable_path.clone(), + additional_bounds: vec![Ty::Literal(encodable_path)], generics: LifetimeBounds::empty(), is_unsafe: false, supports_unions: false, @@ -123,7 +125,7 @@ pub fn expand_deriving_rustc_encodable(cx: &mut ExtCtxt<'_>, }, explicit_self: borrowed_explicit_self(), args: vec![(Ptr(Box::new(Literal(Path::new_local(typaram))), - Borrowed(None, Mutability::Mutable)), "s")], + Borrowed(None, Mutability::Mutable)), "s")], ret_ty: Literal(Path::new_( pathvec_std!(cx, result::Result), None, @@ -152,7 +154,7 @@ fn encodable_substructure(cx: &mut ExtCtxt<'_>, krate: &'static str) -> P { let encoder = substr.nonself_args[0].clone(); - // throw an underscore in front to suppress unused variable warnings + // Throw an underscore in front to suppress unused variable warnings. let blkarg = cx.ident_of("_e"); let blkencoder = cx.expr_ident(trait_span, blkarg); let fn_path = cx.expr_path(cx.path_global(trait_span, @@ -179,7 +181,7 @@ fn encodable_substructure(cx: &mut ExtCtxt<'_>, cx.expr_usize(span, i), lambda]); - // last call doesn't need a try! + // Last call doesn't need a try expression! 
let last = fields.len() - 1; let call = if i != last { cx.expr_try(span, call) @@ -191,7 +193,7 @@ fn encodable_substructure(cx: &mut ExtCtxt<'_>, stmts.push(stmt); } - // unit structs have no fields and need to return Ok() + // Unit structs have no fields and need to return `Ok()`. let blk = if stmts.is_empty() { let ok = cx.expr_ok(trait_span, cx.expr_tuple(trait_span, vec![])); cx.lambda1(trait_span, ok, blkarg) @@ -199,12 +201,16 @@ fn encodable_substructure(cx: &mut ExtCtxt<'_>, cx.lambda_stmts_1(trait_span, stmts, blkarg) }; - cx.expr_method_call(trait_span, - encoder, - cx.ident_of("emit_struct"), - vec![cx.expr_str(trait_span, substr.type_ident.name), - cx.expr_usize(trait_span, fields.len()), - blk]) + cx.expr_method_call( + trait_span, + encoder, + cx.ident_of("emit_struct"), + vec![ + cx.expr_str(trait_span, substr.type_ident.name), + cx.expr_usize(trait_span, fields.len()), + blk, + ], + ) } EnumMatching(idx, _, variant, ref fields) => { @@ -220,13 +226,18 @@ fn encodable_substructure(cx: &mut ExtCtxt<'_>, let last = fields.len() - 1; for (i, &FieldInfo { ref self_, span, .. 
}) in fields.iter().enumerate() { let self_ref = cx.expr_addr_of(span, self_.clone()); - let enc = - cx.expr_call(span, fn_path.clone(), vec![self_ref, blkencoder.clone()]); + let enc = cx.expr_call( + span, + fn_path.clone(), + vec![self_ref, blkencoder.clone()], + ); let lambda = cx.lambda1(span, enc, blkarg); - let call = cx.expr_method_call(span, - blkencoder.clone(), - emit_variant_arg, - vec![cx.expr_usize(span, i), lambda]); + let call = cx.expr_method_call( + span, + blkencoder.clone(), + emit_variant_arg, + vec![cx.expr_usize(span, i), lambda], + ); let call = if i != last { cx.expr_try(span, call) } else { @@ -242,22 +253,30 @@ fn encodable_substructure(cx: &mut ExtCtxt<'_>, let blk = cx.lambda_stmts_1(trait_span, stmts, blkarg); let name = cx.expr_str(trait_span, variant.ident.name); - let call = cx.expr_method_call(trait_span, - blkencoder, - cx.ident_of("emit_enum_variant"), - vec![name, - cx.expr_usize(trait_span, idx), - cx.expr_usize(trait_span, fields.len()), - blk]); + let call = cx.expr_method_call( + trait_span, + blkencoder, + cx.ident_of("emit_enum_variant"), + vec![ + name, + cx.expr_usize(trait_span, idx), + cx.expr_usize(trait_span, fields.len()), + blk, + ], + ); let blk = cx.lambda1(trait_span, call, blkarg); - let ret = cx.expr_method_call(trait_span, - encoder, - cx.ident_of("emit_enum"), - vec![cx.expr_str(trait_span ,substr.type_ident.name), - blk]); + let ret = cx.expr_method_call( + trait_span, + encoder, + cx.ident_of("emit_enum"), + vec![ + cx.expr_str(trait_span, substr.type_ident.name), + blk, + ], + ); cx.expr_block(cx.block(trait_span, vec![me, cx.stmt_expr(ret)])) } - _ => cx.bug("expected Struct or EnumMatching in derive(Encodable)"), + _ => cx.bug("expected `Struct` or `EnumMatching` in `derive(Encodable)`"), }; } diff --git a/src/libsyntax_ext/deriving/generic/mod.rs b/src/libsyntax_ext/deriving/generic/mod.rs index 893d89f06a16..74d558cfe667 100644 --- a/src/libsyntax_ext/deriving/generic/mod.rs +++ 
b/src/libsyntax_ext/deriving/generic/mod.rs @@ -201,25 +201,25 @@ use crate::deriving; pub mod ty; pub struct TraitDef<'a> { - /// The span for the current #[derive(Foo)] header. + /// The span for the current `#[derive(Foo)]` header. pub span: Span, pub attributes: Vec, - /// Path of the trait, including any type parameters + /// The path of the trait, including any type parameters. pub path: Path<'a>, /// Additional bounds required of any type parameters of the type, - /// other than the current trait + /// other than the current trait. pub additional_bounds: Vec>, - /// Any extra lifetimes and/or bounds, e.g., `D: serialize::Decoder` + /// Any extra lifetimes and/or bounds, e.g., `D: serialize::Decoder`. pub generics: LifetimeBounds<'a>, - /// Is it an `unsafe` trait? + /// `true` if it is an `unsafe` trait. pub is_unsafe: bool, - /// Can this trait be derived for unions? + /// `true` if this trait can be derived for unions. pub supports_unions: bool, pub methods: Vec>, @@ -229,28 +229,27 @@ pub struct MethodDef<'a> { - /// name of the method + /// The name of the method. pub name: &'a str, - /// List of generics, e.g., `R: rand::Rng` + /// A list of generics, e.g., `R: rand::Rng`. pub generics: LifetimeBounds<'a>, - /// Whether there is a self argument (outer Option) i.e., whether - /// this is a static function, and whether it is a pointer (inner - /// Option) + /// `true` if there is a self argument (outer `Option`), i.e., whether + /// this is a static function, and whether it is a pointer (inner `Option`). pub explicit_self: Option>>, - /// Arguments other than the self argument + /// Arguments other than the `self` argument. pub args: Vec<(Ty<'a>, &'a str)>, - /// Returns type + /// The return type. pub ret_ty: Ty<'a>, pub attributes: Vec, - // Is it an `unsafe fn`? + // `true` if it is an `unsafe fn`. pub is_unsafe: bool, - /// Can we combine fieldless variants for enums into a single match arm?
+ /// `true` if we can combine fieldless variants for enums into a single match arm. pub unify_fieldless_variants: bool, pub combine_substructure: RefCell>, @@ -258,13 +257,13 @@ pub struct MethodDef<'a> { /// All the data about the data structure/method being derived upon. pub struct Substructure<'a> { - /// ident of self + /// The `Ident` of `self`. pub type_ident: Ident, - /// ident of the method + /// The `Ident` of the method. pub method_ident: Ident, - /// dereferenced access to any `Self_` or `Ptr(Self_, _)` arguments + /// Dereferenced access to any `Self_` or `Ptr(Self_, _)` arguments. pub self_args: &'a [P], - /// verbatim access to any other arguments + /// Verbatim access to any other arguments. pub nonself_args: &'a [P], pub fields: &'a SubstructureFields<'a>, } @@ -272,7 +271,7 @@ pub struct Substructure<'a> { /// Summary of the relevant parts of a struct/enum field. pub struct FieldInfo<'a> { pub span: Span, - /// None for tuple structs/normal enum variants, Some for normal + /// `None` for tuple structs/normal enum variants, `Some` for normal /// structs/struct enum variants. pub name: Option, /// The expression corresponding to this field of `self` @@ -281,14 +280,14 @@ pub struct FieldInfo<'a> { /// The expressions corresponding to references to this field in /// the other `Self` arguments. pub other: Vec>, - /// The attributes on the field + /// The attributes on the field. pub attrs: &'a [ast::Attribute], } -/// Fields for a static method +/// Fields for a static method. pub enum StaticFields { /// Tuple and unit structs/enum variants like this. - Unnamed(Vec, bool /*is tuple*/), + Unnamed(Vec, bool /* is tuple */), /// Normal structs/struct variants. Named(Vec<(Ident, Span)>), } @@ -315,8 +314,6 @@ pub enum SubstructureFields<'a> { StaticEnum(&'a ast::EnumDef, Vec<(Ident, Span, StaticFields)>), } - - /// Combine the values of all the fields together. The last argument is /// all the fields of all the structures. 
pub type CombineSubstructureFunc<'a> = @@ -421,7 +418,7 @@ impl<'a> TraitDef<'a> { _ => { // Non-ADT derive is an error, but it should have been // set earlier; see - // libsyntax/ext/expand.rs:MacroExpander::expand() + // `libsyntax/ext/expand.rs:MacroExpander::expand()`. return; } }; @@ -460,7 +457,7 @@ impl<'a> TraitDef<'a> { _ => unreachable!(), }; // Keep the lint attributes of the previous item to control how the - // generated implementations are linted + // generated implementations are linted. let mut attrs = newitem.attrs.clone(); attrs.extend(item.attrs .iter() @@ -472,9 +469,9 @@ impl<'a> TraitDef<'a> { push(Annotatable::Item(P(ast::Item { attrs: attrs, ..(*newitem).clone() }))) } _ => { - // Non-Item derive is an error, but it should have been + // Non-`Item` derive is an error, but it should have been // set earlier; see - // libsyntax/ext/expand.rs:MacroExpander::expand() + // `libsyntax/ext/expand.rs:MacroExpander::expand()`. return; } } @@ -520,7 +517,7 @@ impl<'a> TraitDef<'a> { -> P { let trait_path = self.path.to_path(cx, self.span, type_ident, generics); - // Transform associated types from `deriving::ty::Ty` into `ast::ImplItem` + // Transform associated types from `deriving::ty::Ty` into `ast::ImplItem`. let associated_types = self.associated_types.iter().map(|&(ident, ref type_def)| { ast::ImplItem { id: ast::DUMMY_NODE_ID, @@ -539,22 +536,22 @@ impl<'a> TraitDef<'a> { let Generics { mut params, mut where_clause, span } = self.generics .to_generics(cx, self.span, type_ident, generics); - // Create the generic parameters + // Create the generic parameters. params.extend(generics.params.iter().map(|param| match param.kind { GenericParamKind::Lifetime { .. } => param.clone(), GenericParamKind::Type { .. } => { // I don't think this can be moved out of the loop, since - // a GenericBound requires an ast id + // a `GenericBound` requires an AST ID. 
let bounds: Vec<_> = - // extra restrictions on the generics parameters to the - // type being derived upon + // Extra restrictions on the generics parameters to the + // type being derived upon. self.additional_bounds.iter().map(|p| { cx.trait_bound(p.to_path(cx, self.span, type_ident, generics)) }).chain( - // require the current trait + // Require the current trait. iter::once(cx.trait_bound(trait_path.clone())) ).chain( - // also add in any bounds from the declaration + // Also add in any bounds from the declaration. param.bounds.iter().cloned() ).collect(); @@ -563,7 +560,7 @@ impl<'a> TraitDef<'a> { GenericParamKind::Const { .. } => param.clone(), })); - // and similarly for where clauses + // Create the where-clause. where_clause.predicates.extend(generics.where_clause.predicates.iter().map(|clause| { match *clause { ast::WherePredicate::BoundPredicate(ref wb) => { @@ -593,7 +590,7 @@ impl<'a> TraitDef<'a> { })); { - // Extra scope required here so ty_params goes out of scope before params is moved + // Extra scope required here so `ty_params` goes out of scope before params is moved. let mut ty_params = params.iter() .filter_map(|param| match param.kind { @@ -611,7 +608,7 @@ impl<'a> TraitDef<'a> { let tys = find_type_parameters(&field_ty, &ty_param_names, self.span, cx); for ty in tys { - // if we have already handled this type, skip it + // If we have already handled this type, skip it. if let ast::TyKind::Path(_, ref p) = ty.node { if p.segments.len() == 1 && ty_param_names.contains(&p.segments[0].ident.name) { @@ -625,7 +622,7 @@ impl<'a> TraitDef<'a> { }) .collect(); - // require the current trait + // Require the current trait. 
bounds.push(cx.trait_bound(trait_path.clone())); let predicate = ast::WhereBoundPredicate { @@ -668,7 +665,7 @@ impl<'a> TraitDef<'a> { let self_type = cx.ty_path(path); let attr = cx.attribute(cx.meta_word(self.span, sym::automatically_derived)); - // Just mark it now since we know that it'll end up used downstream + // Just mark it now since we know that it'll end up used downstream. attr::mark_used(&attr); let opt_trait_ref = Some(trait_ref); let unused_qual = { @@ -896,8 +893,8 @@ impl<'a> MethodDef<'a> { let arg_expr = cx.expr_ident(trait_.span, ident); match *ty { - // for static methods, just treat any Self - // arguments as a normal arg + // For static methods, just treat any `Self` + // arguments as a normal arg. Self_ if nonstatic => { self_args.push(arg_expr); } @@ -1030,7 +1027,7 @@ impl<'a> MethodDef<'a> { raw_fields.push(ident_expr); } - // transpose raw_fields + // Transpose `raw_fields`. let fields = if !raw_fields.is_empty() { let mut raw_fields = raw_fields.into_iter().map(|v| v.into_iter()); let first_field = raw_fields.next().unwrap(); @@ -1057,7 +1054,7 @@ impl<'a> MethodDef<'a> { `derive`") }; - // body of the inner most destructuring match + // Make the body of the innermost destructuring match. let mut body = self.call_substructure_method(cx, trait_, type_ident, @@ -1065,7 +1062,7 @@ impl<'a> MethodDef<'a> { nonself_args, &Struct(struct_def, fields)); - // make a series of nested matches, to destructure the + // Make a series of nested matches, to destructure the // structs. This is actually right-to-left, but it shouldn't // matter. for (arg_expr, pat) in self_args.iter().zip(patterns) { @@ -1226,7 +1223,7 @@ impl<'a> MethodDef<'a> { // (Variant1, Variant1, ...) => Body1 // (Variant2, Variant2, ...) => Body2 // ... - // where each tuple has length = self_args.len() + // where each tuple has length of `self_args.len()`. 
let mut match_arms: Vec = variants.iter() .enumerate() .filter(|&(_, v)| !(self.unify_fieldless_variants && v.data.fields().is_empty())) @@ -1240,8 +1237,8 @@ impl<'a> MethodDef<'a> { (cx.pat(sp, PatKind::Ref(p, ast::Mutability::Immutable)), idents) }; - // A single arm has form (&VariantK, &VariantK, ...) => BodyK - // (see "Final wrinkle" note below for why.) + // A single arm has form `(&VariantK, &VariantK, ...) => BodyK` + // (see "Final wrinkle" note below for why). let mut subpats = Vec::with_capacity(self_arg_names.len()); let mut self_pats_idents = Vec::with_capacity(self_arg_names.len() - 1); let first_self_pat_idents = { @@ -1255,24 +1252,24 @@ impl<'a> MethodDef<'a> { self_pats_idents.push(idents); } - // Here is the pat = `(&VariantK, &VariantK, ...)` + // Here is the pat = `(&VariantK, &VariantK, ...)`. let single_pat = cx.pat_tuple(sp, subpats); // For the BodyK, we need to delegate to our caller, - // passing it an EnumMatching to indicate which case + // passing it an `EnumMatching` to indicate which case // we are in. // All of the Self args have the same variant in these // cases. So we transpose the info in self_pats_idents // to gather the getter expressions together, in the - // form that EnumMatching expects. + // form that `EnumMatching` expects. // The transposition is driven by walking across the // arg fields of the variant for the first self pat. let field_tuples = first_self_pat_idents.into_iter().enumerate() - // For each arg field of self, pull out its getter expr ... + // For each arg field of `self`, pull out its getter expr... .map(|(field_index, (sp, opt_ident, self_getter_expr, attrs))| { - // ... but FieldInfo also wants getter expr + // ... 
but `FieldInfo` also wants getter expr // for matching other arguments of Self type; // so walk across the *other* self_pats_idents // and pull out getter for same field in each @@ -1282,8 +1279,8 @@ impl<'a> MethodDef<'a> { let (_, _opt_ident, ref other_getter_expr, _) = fields[field_index]; - // All Self args have same variant, so - // opt_idents are the same. (Assert + // All `Self` args have same variant, so + // `opt_idents` are the same. (Assert // here to make it self-evident that // it is okay to ignore `_opt_ident`.) assert!(opt_ident == _opt_ident); @@ -1299,9 +1296,9 @@ impl<'a> MethodDef<'a> { } }).collect::>>(); - // Now, for some given VariantK, we have built up + // Now, for some given `VariantK`, we have built up // expressions for referencing every field of every - // Self arg, assuming all are instances of VariantK. + // Self arg, assuming all are instances of `VariantK`. // Build up code associated with such a case. let substructure = EnumMatching(index, variants.len(), variant, field_tuples); let arm_expr = self.call_substructure_method(cx, @@ -1319,7 +1316,7 @@ impl<'a> MethodDef<'a> { Some(v) if self.unify_fieldless_variants => { // We need a default case that handles the fieldless variants. // The index and actual variant aren't meaningful in this case, - // so just use whatever + // so just use whatever. let substructure = EnumMatching(0, variants.len(), v, Vec::new()); Some(self.call_substructure_method(cx, trait_, @@ -1331,7 +1328,7 @@ impl<'a> MethodDef<'a> { _ if variants.len() > 1 && self_args.len() > 1 => { // Since we know that all the arguments will match if we reach // the match expression we add the unreachable intrinsics as the - // result of the catch all which should help llvm in optimizing it + // result of the catch all which should help LLVM in optimizing it. 
Some(deriving::call_intrinsic(cx, sp, "unreachable", vec![])) } _ => None, @@ -1344,9 +1341,9 @@ impl<'a> MethodDef<'a> { // tuples `(VariantK, VariantK, ...)` for each VariantK of the // enum. But: // - // * when there is only one Self arg, the arms above suffice + // * when there is only one `Self` arg, the arms above suffice // (and the deriving we call back into may not be prepared to - // handle EnumNonMatchCollapsed), and, + // handle `EnumNonMatchCollapsed`), and, // // * when the enum has only one variant, the single arm that // is already present always suffices. @@ -1356,10 +1353,10 @@ impl<'a> MethodDef<'a> { // unreachable-pattern error. // if variants.len() > 1 && self_args.len() > 1 { - // Build a series of let statements mapping each self_arg + // Build a series of let statements mapping each `self_arg` // to its discriminant value. If this is a C-style enum // with a specific repr type, then casts the values to - // that type. Otherwise casts to `i32` (the default repr + // that type. Otherwise, casts to `i32` (the default repr // type). // // i.e., for `enum E { A, B(1), C(T, T) }`, and a deriving @@ -1416,7 +1413,7 @@ impl<'a> MethodDef<'a> { nonself_args, &catch_all_substructure); - // Final wrinkle: the self_args are expressions that deref + // Final wrinkle: the `self_args` are expressions that deref // down to desired places, but we cannot actually deref // them when they are fed as r-values into a tuple // expression; here add a layer of borrowing, turning @@ -1529,7 +1526,7 @@ impl<'a> MethodDef<'a> { } } -// general helper methods. 
+// General helper methods impl<'a> TraitDef<'a> { fn summarise_struct(&self, cx: &mut ExtCtxt<'_>, struct_def: &VariantData) -> StaticFields { let mut named_idents = Vec::new(); @@ -1546,8 +1543,7 @@ impl<'a> TraitDef<'a> { match (just_spans.is_empty(), named_idents.is_empty()) { (false, false) => { cx.span_bug(self.span, - "a struct with named and unnamed \ - fields in generic `derive`") + "a struct with named and unnamed fields in generic `derive`") } // named fields (_, false) => Named(named_idents), @@ -1646,13 +1642,13 @@ impl<'a> TraitDef<'a> { -> (P, Vec<(Span, Option, P, &'a [ast::Attribute])>) { let sp = variant.span.with_ctxt(self.span.ctxt()); let variant_path = cx.path(sp, vec![enum_ident, variant.ident]); - let use_temporaries = false; // enums can't be repr(packed) + let use_temporaries = false; // Enums can't be `repr(packed)`. self.create_struct_pattern(cx, variant_path, &variant.data, prefix, mutbl, use_temporaries) } } -// helpful premade recipes +// Helpful premade recipes pub fn cs_fold_fields<'a, F>(use_foldl: bool, mut f: F, @@ -1773,7 +1769,7 @@ pub fn cs_fold1(use_foldl: bool, } /// Returns `true` if the type has no value fields -/// (for an enum, no variant has any fields) +/// (for an enum, no variant has any fields). pub fn is_type_without_fields(item: &Annotatable) -> bool { if let Annotatable::Item(ref item) = *item { match item.node { diff --git a/src/libsyntax_ext/deriving/generic/ty.rs b/src/libsyntax_ext/deriving/generic/ty.rs index cb1c7b21fee0..2e9e92682816 100644 --- a/src/libsyntax_ext/deriving/generic/ty.rs +++ b/src/libsyntax_ext/deriving/generic/ty.rs @@ -1,4 +1,4 @@ -//! A mini version of ast::Ty, which is easier to use, and features an explicit `Self` type to use +//! A mini version of `ast::Ty`, which is easier to use, and features an explicit `Self` type to use //! when specifying impls to be derived. 
pub use PtrTy::*; @@ -11,12 +11,12 @@ use syntax::ptr::P; use syntax_pos::Span; use syntax_pos::symbol::kw; -/// The types of pointers +/// The types of pointers. #[derive(Clone)] pub enum PtrTy<'a> { - /// &'lifetime mut + /// `&'lifetime mut` Borrowed(Option<&'a str>, ast::Mutability), - /// *mut + /// `*mut` #[allow(dead_code)] Raw(ast::Mutability), } @@ -94,16 +94,16 @@ impl<'a> Path<'a> { } } -/// A type. Supports pointers, Self, and literals. +/// A type. Supports pointers, `Self`, and literals. #[derive(Clone)] pub enum Ty<'a> { Self_, - /// &/Box/ Ty + /// `&/Box/ Ty`. Ptr(Box>, PtrTy<'a>), - /// mod::mod::Type<[lifetime], [Params...]>, including a plain type - /// parameter, and things like `i32` + /// `mod::mod::Type<[lifetime], [Params...]>`, including a plain type + /// parameter, and things like `i32`. Literal(Path<'a>), - /// includes unit + /// Includes unit. Tuple(Vec>), } @@ -194,7 +194,6 @@ impl<'a> Ty<'a> { } } - fn mk_ty_param(cx: &ExtCtxt<'_>, span: Span, name: &str, @@ -223,7 +222,7 @@ fn mk_generics(params: Vec, span: Span) -> Generics { } } -/// Lifetimes and bounds on type parameters +/// Lifetimes and bounds on type parameters. #[derive(Clone)] pub struct LifetimeBounds<'a> { pub lifetimes: Vec<(&'a str, Vec<&'a str>)>, @@ -267,7 +266,7 @@ pub fn get_explicit_self(cx: &ExtCtxt<'_>, span: Span, self_ptr: &Option>) -> (P, ast::ExplicitSelf) { - // this constructs a fresh `self` path + // This constructs a fresh `self` path. 
let self_path = cx.expr_self(span); match *self_ptr { None => (self_path, respan(span, SelfKind::Value(ast::Mutability::Immutable))), diff --git a/src/libsyntax_ext/deriving/hash.rs b/src/libsyntax_ext/deriving/hash.rs index 2fc594abd705..a1a1c9093479 100644 --- a/src/libsyntax_ext/deriving/hash.rs +++ b/src/libsyntax_ext/deriving/hash.rs @@ -13,7 +13,6 @@ pub fn expand_deriving_hash(cx: &mut ExtCtxt<'_>, mitem: &MetaItem, item: &Annotatable, push: &mut dyn FnMut(Annotatable)) { - let path = Path::new_(pathvec_std!(cx, hash::Hash), None, vec![], PathKind::Std); let typaram = "__H"; diff --git a/src/libsyntax_pos/lib.rs b/src/libsyntax_pos/lib.rs index 9a296f17aaf4..3f10107de4e3 100644 --- a/src/libsyntax_pos/lib.rs +++ b/src/libsyntax_pos/lib.rs @@ -891,7 +891,7 @@ pub struct OffsetOverflowError; /// A single source in the `SourceMap`. #[derive(Clone)] pub struct SourceFile { - /// The name of the file that the source came from, source that doesn't + /// The name of the file that the source came from. Source that doesn't /// originate from files has names between angle brackets by convention /// (e.g., ``). pub name: FileName, @@ -929,9 +929,9 @@ impl Encodable for SourceFile { s.emit_struct_field("name", 0, |s| self.name.encode(s))?; s.emit_struct_field("name_was_remapped", 1, |s| self.name_was_remapped.encode(s))?; s.emit_struct_field("src_hash", 2, |s| self.src_hash.encode(s))?; - s.emit_struct_field("start_pos", 4, |s| self.start_pos.encode(s))?; - s.emit_struct_field("end_pos", 5, |s| self.end_pos.encode(s))?; - s.emit_struct_field("lines", 6, |s| { + s.emit_struct_field("start_pos", 3, |s| self.start_pos.encode(s))?; + s.emit_struct_field("end_pos", 4, |s| self.end_pos.encode(s))?; + s.emit_struct_field("lines", 5, |s| { let lines = &self.lines[..]; // Store the length. 
s.emit_u32(lines.len() as u32)?; @@ -977,13 +977,13 @@ impl Encodable for SourceFile { Ok(()) })?; - s.emit_struct_field("multibyte_chars", 7, |s| { + s.emit_struct_field("multibyte_chars", 6, |s| { self.multibyte_chars.encode(s) })?; - s.emit_struct_field("non_narrow_chars", 8, |s| { + s.emit_struct_field("non_narrow_chars", 7, |s| { self.non_narrow_chars.encode(s) })?; - s.emit_struct_field("name_hash", 9, |s| { + s.emit_struct_field("name_hash", 8, |s| { self.name_hash.encode(s) }) }) @@ -992,7 +992,6 @@ impl Encodable for SourceFile { impl Decodable for SourceFile { fn decode(d: &mut D) -> Result { - d.read_struct("SourceFile", 8, |d| { let name: FileName = d.read_struct_field("name", 0, |d| Decodable::decode(d))?; let name_was_remapped: bool = @@ -1000,9 +999,9 @@ impl Decodable for SourceFile { let src_hash: u128 = d.read_struct_field("src_hash", 2, |d| Decodable::decode(d))?; let start_pos: BytePos = - d.read_struct_field("start_pos", 4, |d| Decodable::decode(d))?; - let end_pos: BytePos = d.read_struct_field("end_pos", 5, |d| Decodable::decode(d))?; - let lines: Vec = d.read_struct_field("lines", 6, |d| { + d.read_struct_field("start_pos", 3, |d| Decodable::decode(d))?; + let end_pos: BytePos = d.read_struct_field("end_pos", 4, |d| Decodable::decode(d))?; + let lines: Vec = d.read_struct_field("lines", 5, |d| { let num_lines: u32 = Decodable::decode(d)?; let mut lines = Vec::with_capacity(num_lines as usize); @@ -1031,18 +1030,18 @@ impl Decodable for SourceFile { Ok(lines) })?; let multibyte_chars: Vec = - d.read_struct_field("multibyte_chars", 7, |d| Decodable::decode(d))?; + d.read_struct_field("multibyte_chars", 6, |d| Decodable::decode(d))?; let non_narrow_chars: Vec = - d.read_struct_field("non_narrow_chars", 8, |d| Decodable::decode(d))?; + d.read_struct_field("non_narrow_chars", 7, |d| Decodable::decode(d))?; let name_hash: u128 = - d.read_struct_field("name_hash", 9, |d| Decodable::decode(d))?; + d.read_struct_field("name_hash", 8, |d| 
Decodable::decode(d))?; Ok(SourceFile { name, name_was_remapped, unmapped_path: None, // `crate_of_origin` has to be set by the importer. - // This value matches up with rustc::hir::def_id::INVALID_CRATE. - // That constant is not available here unfortunately :( + // This value matches up with `rustc::hir::def_id::INVALID_CRATE`. + // That constant is not available here, unfortunately. crate_of_origin: std::u32::MAX - 1, start_pos, end_pos, diff --git a/src/libsyntax_pos/span_encoding.rs b/src/libsyntax_pos/span_encoding.rs index 525ec1362328..f9f22302e36e 100644 --- a/src/libsyntax_pos/span_encoding.rs +++ b/src/libsyntax_pos/span_encoding.rs @@ -68,7 +68,7 @@ const LEN_TAG: u16 = 0b1000_0000_0000_0000; const MAX_LEN: u32 = 0b0111_1111_1111_1111; const MAX_CTXT: u32 = 0b1111_1111_1111_1111; -/// Dummy span, both position and length are zero, syntax context is zero as well. +/// Dummy span: both position and length are zero; syntax context is zero (empty) as well. pub const DUMMY_SP: Span = Span { base_or_index: 0, len_or_tag: 0, ctxt_or_zero: 0 }; impl Span { diff --git a/src/libsyntax_pos/symbol.rs b/src/libsyntax_pos/symbol.rs index ab32d4461ef8..b906d0241fc1 100644 --- a/src/libsyntax_pos/symbol.rs +++ b/src/libsyntax_pos/symbol.rs @@ -102,7 +102,7 @@ symbols! { } // Symbols that can be referred to with syntax_pos::sym::*. The symbol is - // the stringified identifier unless otherwise specified (e.g. + // the stringified identifier unless otherwise specified (e.g., // `proc_dash_macro` represents "proc-macro"). 
// // As well as the symbols listed, there are symbols for the the strings diff --git a/src/libtest/lib.rs b/src/libtest/lib.rs index 09d5fcc89520..bd1c3f9ad55f 100644 --- a/src/libtest/lib.rs +++ b/src/libtest/lib.rs @@ -74,9 +74,9 @@ use std::time::{Duration, Instant}; mod tests; const TEST_WARN_TIMEOUT_S: u64 = 60; -const QUIET_MODE_MAX_COLUMN: usize = 100; // insert a '\n' after 100 tests in quiet mode +const QUIET_MODE_MAX_COLUMN: usize = 100; // Insert a '\n' after 100 tests in quiet mode. -// to be used by rustc to compile tests in libtest +// To be used by rustc to compile tests in libtest. pub mod test { pub use crate::{ assert_test_result, filter_tests, parse_opts, run_test, test_main, test_main_static, @@ -91,7 +91,7 @@ pub mod stats; use crate::formatters::{JsonFormatter, OutputFormatter, PrettyFormatter, TerseFormatter}; -/// Whether to execute tests concurrently or not +/// Whether to execute tests concurrently or not. #[derive(Copy, Clone, Debug, PartialEq, Eq)] pub enum Concurrent { Yes, @@ -102,7 +102,6 @@ pub enum Concurrent { // paths; i.e., it should be a series of identifiers separated by double // colons. This way if some test runner wants to arrange the tests // hierarchically it may. - #[derive(Clone, PartialEq, Eq, Hash, Debug)] pub enum TestName { StaticTestName(&'static str), @@ -225,8 +224,7 @@ pub enum ShouldPanic { YesWithMessage(&'static str), } -// The definition of a single test. A test runner will run a list of -// these. +/// Definition of a single test. A test runner will run a list of these. #[derive(Clone, Debug, PartialEq, Eq, Hash)] pub struct TestDesc { pub name: TestName, @@ -272,8 +270,8 @@ impl Options { } } -// The default console test runner. It accepts the command line -// arguments and a vector of test_descs. +/// The default console test runner. Accepts the command-line arguments and a vector of +/// test descriptions. 
pub fn test_main(args: &[String], tests: Vec, options: Option) { let mut opts = match parse_opts(args) { Some(Ok(o)) => o, @@ -288,7 +286,7 @@ pub fn test_main(args: &[String], tests: Vec, options: Option, options: Option {} Ok(false) => process::exit(101), Err(e) => { - eprintln!("error: io error when listing tests: {:?}", e); + eprintln!("error: I/O error when listing tests: {:?}", e); process::exit(101); } } } } -// A variant optimized for invocation with a static test vector. -// This will panic (intentionally) when fed any dynamic tests, because -// it is copying the static values out into a dynamic vector and cannot -// copy dynamic values. It is doing this because from this point on -// a Vec is used in order to effect ownership-transfer -// semantics into parallel test runners, which in turn requires a Vec<> -// rather than a &[]. +/// A variant optimized for invocation with a static test vector. +/// This will panic (intentionally) when fed any dynamic tests, because +/// it is copying the static values out into a dynamic vector and cannot +/// copy dynamic values. It is doing this because from this point on +/// a `Vec` is used in order to effect ownership-transfer +/// semantics into parallel test runners, which in turn requires a `Vec<>` +/// rather than a `&[]`. pub fn test_main_static(tests: &[&TestDescAndFn]) { let args = env::args().collect::>(); let owned_tests = tests @@ -498,17 +496,17 @@ Test Attributes: ); } -// FIXME: Copied from libsyntax until linkage errors are resolved. Issue #47566 +// FIXME(#47566): Copied from libsyntax until linkage errors are resolved. fn is_nightly() -> bool { - // Whether this is a feature-staged build, i.e., on the beta or stable channel + // `true` if this is a feature-staged build, i.e., on the beta or stable channel. 
let disable_unstable_features = option_env!("CFG_DISABLE_UNSTABLE_FEATURES").is_some(); - // Whether we should enable unstable features for bootstrapping + // `true` if we should enable unstable features for bootstrapping. let bootstrap = env::var("RUSTC_BOOTSTRAP").is_ok(); bootstrap || !disable_unstable_features } -// Parses command line arguments into test options +// Parses command-line arguments into test options. pub fn parse_opts(args: &[String]) -> Option { let mut allow_unstable = false; let opts = optgroups(); @@ -869,7 +867,7 @@ pub fn list_tests_console(opts: &TestOpts, tests: Vec) -> io::Res Ok(()) } -// A simple console test runner +/// A simple console test runner. pub fn run_tests_console(opts: &TestOpts, tests: Vec) -> io::Result { fn callback( event: &TestEvent, @@ -1313,20 +1311,20 @@ pub fn filter_tests(opts: &TestOpts, tests: Vec) -> Vec { filtered @@ -1342,14 +1340,14 @@ pub fn filter_tests(opts: &TestOpts, tests: Vec) -> Vec {} } - // Sort the tests alphabetically + // Sort the tests alphabetically. filtered.sort_by(|t1, t2| t1.desc.name.as_slice().cmp(t2.desc.name.as_slice())); filtered } pub fn convert_benchmarks_to_tests(tests: Vec) -> Vec { - // convert benchmarks to tests, if we're not benchmarking them + // Convert benchmarks to tests if we're not benchmarking them. tests .into_iter() .map(|x| { @@ -1395,7 +1393,7 @@ pub fn run_test( testfn: Box, concurrency: Concurrent, ) { - // Buffer for capturing standard I/O + // Buffer for capturing standard I/O. let data = Arc::new(Mutex::new(Vec::new())); let data2 = data.clone(); @@ -1424,9 +1422,8 @@ pub fn run_test( .unwrap(); }; - // If the platform is single-threaded we're just going to run - // the test synchronously, regardless of the concurrency - // level. + // If the platform is single-threaded, we're just going to run + // the test synchronously, regardless of the concurrency level. 
let supports_threads = !cfg!(target_os = "emscripten") && !cfg!(target_arch = "wasm32"); if concurrency == Concurrent::Yes && supports_threads { let cfg = thread::Builder::new().name(name.as_slice().to_owned()); @@ -1500,7 +1497,7 @@ impl MetricMap { MetricMap(BTreeMap::new()) } - /// Insert a named `value` (+/- `noise`) metric into the map. The value + /// Inserts a named `value` (+/- `noise`) metric into the map. The value /// must be non-negative. The `noise` indicates the uncertainty of the /// metric, which doubles as the "noise range" of acceptable /// pairwise-regressions on this named value, when comparing from one @@ -1582,7 +1579,7 @@ where let ns_target_total = 1_000_000; // 1ms let mut n = ns_target_total / cmp::max(1, ns_single); - // if the first run took more than 1ms we don't want to just + // If the first run took more than 1ms we don't want to just // be left doing 0 iterations on every loop. The unfortunate // side effect of not being able to do as many runs is // automatically handled by the statistical analysis below @@ -1629,7 +1626,7 @@ where // If we overflow here just return the results so far. We check a // multiplier of 10 because we're about to multiply by 2 and the // next iteration of the loop will also multiply by 5 (to calculate - // the summ5 result) + // the `summ5` result). n = match n.checked_mul(10) { Some(_) => n * 2, None => { @@ -1677,7 +1674,7 @@ pub mod bench { }; let test_result = match result { - //bs.bench(f) { + // bs.bench(f) { Ok(Some(ns_iter_summ)) => { let ns_iter = cmp::max(ns_iter_summ.median as u64, 1); let mb_s = bs.bytes * 1000 / ns_iter; @@ -1689,7 +1686,7 @@ pub mod bench { TestResult::TrBench(bs) } Ok(None) => { - // iter not called, so no data. + // Iter not called, so no data. // FIXME: error in this case? 
let samples: &mut [f64] = &mut [0.0_f64; 1]; let bs = BenchSamples { diff --git a/src/test/mir-opt/inline-retag.rs b/src/test/mir-opt/inline-retag.rs index 6cdbcfdb0add..72732a49da6f 100644 --- a/src/test/mir-opt/inline-retag.rs +++ b/src/test/mir-opt/inline-retag.rs @@ -1,6 +1,6 @@ // compile-flags: -Z span_free_formats -Z mir-emit-retag -// Tests that MIR inliner fixes up `Retag`'s `fn_entry` flag +// Tests that MIR inliner fixes up `Retag`'s `fn_entry` flag. fn main() { println!("{}", bar()); diff --git a/src/test/ui/nll/polonius/storagedead-kills-loans.rs b/src/test/ui/nll/polonius/storagedead-kills-loans.rs index ff801cbf9f35..1f2523537b68 100644 --- a/src/test/ui/nll/polonius/storagedead-kills-loans.rs +++ b/src/test/ui/nll/polonius/storagedead-kills-loans.rs @@ -1,4 +1,4 @@ -// Whenever a `StorageDead` MIR statement destroys a value `x`, +// Whenever a `StorageDead` MIR statement destroys a value `x`, // we should kill all loans of `x`. This is extracted from `rand 0.4.6`, // is correctly accepted by NLL but was incorrectly rejected by // Polonius because of these missing `killed` facts.