From 81bac88e9344e80594207f0bfc38c57b287f6bc6 Mon Sep 17 00:00:00 2001 From: Camille GILLOT Date: Sat, 9 Apr 2022 18:59:21 +0200 Subject: [PATCH 1/8] Directly encode ImplPolarity in metadata. --- compiler/rustc_metadata/src/lib.rs | 1 + compiler/rustc_metadata/src/rmeta/decoder.rs | 6 ++++ compiler/rustc_metadata/src/rmeta/encoder.rs | 2 +- compiler/rustc_metadata/src/rmeta/mod.rs | 2 +- compiler/rustc_metadata/src/rmeta/table.rs | 37 ++++++++++++++++++++ 5 files changed, 46 insertions(+), 2 deletions(-) diff --git a/compiler/rustc_metadata/src/lib.rs b/compiler/rustc_metadata/src/lib.rs index 06658aadbac27..5c7a1ccf6c276 100644 --- a/compiler/rustc_metadata/src/lib.rs +++ b/compiler/rustc_metadata/src/lib.rs @@ -5,6 +5,7 @@ #![feature(nll)] #![feature(once_cell)] #![feature(proc_macro_internals)] +#![feature(macro_metavar_expr)] #![feature(min_specialization)] #![feature(try_blocks)] #![feature(never_type)] diff --git a/compiler/rustc_metadata/src/rmeta/decoder.rs b/compiler/rustc_metadata/src/rmeta/decoder.rs index 3402acccf3f92..4d06a079f526c 100644 --- a/compiler/rustc_metadata/src/rmeta/decoder.rs +++ b/compiler/rustc_metadata/src/rmeta/decoder.rs @@ -292,6 +292,12 @@ trait LazyQueryDecodable<'a, 'tcx, T> { ) -> T; } +impl<'a, 'tcx, T> LazyQueryDecodable<'a, 'tcx, T> for Option { + fn decode_query(self, _: CrateMetadataRef<'a>, _: TyCtxt<'tcx>, err: impl FnOnce() -> !) 
-> T { + if let Some(l) = self { l } else { err() } + } +} + impl<'a, 'tcx, T> LazyQueryDecodable<'a, 'tcx, T> for Option> where T: Decodable>, diff --git a/compiler/rustc_metadata/src/rmeta/encoder.rs b/compiler/rustc_metadata/src/rmeta/encoder.rs index 6c758b8e5b633..2b989ee2952f0 100644 --- a/compiler/rustc_metadata/src/rmeta/encoder.rs +++ b/compiler/rustc_metadata/src/rmeta/encoder.rs @@ -1472,7 +1472,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { } let polarity = self.tcx.impl_polarity(def_id); - record!(self.tables.impl_polarity[def_id] <- polarity); + self.tables.impl_polarity.set(def_id.index, polarity); EntryKind::Impl } diff --git a/compiler/rustc_metadata/src/rmeta/mod.rs b/compiler/rustc_metadata/src/rmeta/mod.rs index 15e8693d71282..ffd21767f5ce0 100644 --- a/compiler/rustc_metadata/src/rmeta/mod.rs +++ b/compiler/rustc_metadata/src/rmeta/mod.rs @@ -310,7 +310,7 @@ define_tables! { promoted_mir: Table>)>, thir_abstract_const: Table])>, impl_parent: Table, - impl_polarity: Table, + impl_polarity: Table, impl_constness: Table, impl_defaultness: Table, // FIXME(eddyb) perhaps compute this on the fly if cheap enough? diff --git a/compiler/rustc_metadata/src/rmeta/table.rs b/compiler/rustc_metadata/src/rmeta/table.rs index 265ca5a6d8d13..ca76afeffe0c5 100644 --- a/compiler/rustc_metadata/src/rmeta/table.rs +++ b/compiler/rustc_metadata/src/rmeta/table.rs @@ -76,6 +76,43 @@ impl FixedSizeEncoding for u32 { } } +macro_rules! 
fixed_size_enum { + ($ty:ty { $(($($pat:tt)*))* }) => { + impl FixedSizeEncoding for Option<$ty> { + fixed_size_encoding_byte_len_and_defaults!(1); + + #[inline] + fn from_bytes(b: &[u8]) -> Self { + use $ty::*; + if b[0] == 0 { + return None; + } + match b[0] - 1 { + $(${index()} => Some($($pat)*),)* + _ => panic!("Unexpected ImplPolarity code: {:?}", b[0]), + } + } + + #[inline] + fn write_to_bytes(self, b: &mut [u8]) { + use $ty::*; + b[0] = match self { + None => 0, + $(Some($($pat)*) => 1 + ${index()},)* + } + } + } + } +} + +fixed_size_enum! { + ty::ImplPolarity { + ( Positive ) + ( Negative ) + ( Reservation ) + } +} + // NOTE(eddyb) there could be an impl for `usize`, which would enable a more // generic `Lazy` impl, but in the general case we might not need / want to // fit every `usize` in `u32`. From ec7f80036d7191529ba4b1d2af29fed1c5ff5d48 Mon Sep 17 00:00:00 2001 From: Camille GILLOT Date: Sat, 9 Apr 2022 19:03:34 +0200 Subject: [PATCH 2/8] Directly encode Constness in metadata. 
--- compiler/rustc_metadata/src/rmeta/encoder.rs | 18 +++++++++--------- compiler/rustc_metadata/src/rmeta/mod.rs | 2 +- compiler/rustc_metadata/src/rmeta/table.rs | 7 +++++++ 3 files changed, 17 insertions(+), 10 deletions(-) diff --git a/compiler/rustc_metadata/src/rmeta/encoder.rs b/compiler/rustc_metadata/src/rmeta/encoder.rs index 2b989ee2952f0..41bd63d68a5fb 100644 --- a/compiler/rustc_metadata/src/rmeta/encoder.rs +++ b/compiler/rustc_metadata/src/rmeta/encoder.rs @@ -1048,7 +1048,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { }; record!(self.tables.kind[def_id] <- EntryKind::Variant(self.lazy(data))); - record!(self.tables.impl_constness[def_id] <- hir::Constness::Const); + self.tables.impl_constness.set(def_id.index, hir::Constness::Const); record!(self.tables.children[def_id] <- variant.fields.iter().map(|f| { assert!(f.did.is_local()); f.did.index @@ -1078,7 +1078,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { }; record!(self.tables.kind[def_id] <- EntryKind::Variant(self.lazy(data))); - record!(self.tables.impl_constness[def_id] <- hir::Constness::Const); + self.tables.impl_constness.set(def_id.index, hir::Constness::Const); self.encode_item_type(def_id); if variant.ctor_kind == CtorKind::Fn { record!(self.tables.fn_sig[def_id] <- tcx.fn_sig(def_id)); @@ -1157,7 +1157,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { }; record!(self.tables.repr_options[def_id] <- adt_def.repr()); - record!(self.tables.impl_constness[def_id] <- hir::Constness::Const); + self.tables.impl_constness.set(def_id.index, hir::Constness::Const); record!(self.tables.kind[def_id] <- EntryKind::Struct(self.lazy(data))); self.encode_item_type(def_id); if variant.ctor_kind == CtorKind::Fn { @@ -1208,7 +1208,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { } }; record!(self.tables.asyncness[def_id] <- m_sig.header.asyncness); - record!(self.tables.impl_constness[def_id] <- hir::Constness::NotConst); + self.tables.impl_constness.set(def_id.index, hir::Constness::NotConst); 
record!(self.tables.kind[def_id] <- EntryKind::AssocFn(self.lazy(AssocFnData { container, has_self: trait_item.fn_has_self_parameter, @@ -1273,7 +1273,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { } else { hir::Constness::NotConst }; - record!(self.tables.impl_constness[def_id] <- constness); + self.tables.impl_constness.set(def_id.index, constness); record!(self.tables.kind[def_id] <- EntryKind::AssocFn(self.lazy(AssocFnData { container, has_self: impl_item.fn_has_self_parameter, @@ -1396,7 +1396,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { hir::ItemKind::Fn(ref sig, .., body) => { record!(self.tables.asyncness[def_id] <- sig.header.asyncness); record!(self.tables.fn_arg_names[def_id] <- self.tcx.hir().body_param_names(body)); - record!(self.tables.impl_constness[def_id] <- sig.header.constness); + self.tables.impl_constness.set(def_id.index, sig.header.constness); EntryKind::Fn } hir::ItemKind::Macro(ref macro_def, _) => { @@ -1420,7 +1420,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { hir::ItemKind::Struct(ref struct_def, _) => { let adt_def = self.tcx.adt_def(def_id); record!(self.tables.repr_options[def_id] <- adt_def.repr()); - record!(self.tables.impl_constness[def_id] <- hir::Constness::Const); + self.tables.impl_constness.set(def_id.index, hir::Constness::Const); // Encode def_ids for each field and method // for methods, write all the stuff get_trait_method @@ -1451,7 +1451,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { } hir::ItemKind::Impl(hir::Impl { defaultness, constness, .. 
}) => { record!(self.tables.impl_defaultness[def_id] <- defaultness); - record!(self.tables.impl_constness[def_id] <- constness); + self.tables.impl_constness.set(def_id.index, constness); let trait_ref = self.tcx.impl_trait_ref(def_id); if let Some(trait_ref) = trait_ref { @@ -1893,7 +1893,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { } else { hir::Constness::NotConst }; - record!(self.tables.impl_constness[def_id] <- constness); + self.tables.impl_constness.set(def_id.index, constness); record!(self.tables.kind[def_id] <- EntryKind::ForeignFn); } hir::ForeignItemKind::Static(..) => { diff --git a/compiler/rustc_metadata/src/rmeta/mod.rs b/compiler/rustc_metadata/src/rmeta/mod.rs index ffd21767f5ce0..4dd000eaefeba 100644 --- a/compiler/rustc_metadata/src/rmeta/mod.rs +++ b/compiler/rustc_metadata/src/rmeta/mod.rs @@ -311,7 +311,7 @@ define_tables! { thir_abstract_const: Table])>, impl_parent: Table, impl_polarity: Table, - impl_constness: Table, + impl_constness: Table, impl_defaultness: Table, // FIXME(eddyb) perhaps compute this on the fly if cheap enough? coerce_unsized_info: Table, diff --git a/compiler/rustc_metadata/src/rmeta/table.rs b/compiler/rustc_metadata/src/rmeta/table.rs index ca76afeffe0c5..6bb07a34e0f37 100644 --- a/compiler/rustc_metadata/src/rmeta/table.rs +++ b/compiler/rustc_metadata/src/rmeta/table.rs @@ -113,6 +113,13 @@ fixed_size_enum! { } } +fixed_size_enum! { + hir::Constness { + ( NotConst ) + ( Const ) + } +} + // NOTE(eddyb) there could be an impl for `usize`, which would enable a more // generic `Lazy` impl, but in the general case we might not need / want to // fit every `usize` in `u32`. From 42820daf910d1db6ff4b41bc0e4df9e2ff61b4d0 Mon Sep 17 00:00:00 2001 From: Camille GILLOT Date: Sat, 9 Apr 2022 19:05:43 +0200 Subject: [PATCH 3/8] Directly encode Defaultness in metadata. 
--- compiler/rustc_metadata/src/rmeta/encoder.rs | 2 +- compiler/rustc_metadata/src/rmeta/mod.rs | 2 +- compiler/rustc_metadata/src/rmeta/table.rs | 8 ++++++++ 3 files changed, 10 insertions(+), 2 deletions(-) diff --git a/compiler/rustc_metadata/src/rmeta/encoder.rs b/compiler/rustc_metadata/src/rmeta/encoder.rs index 41bd63d68a5fb..c33001b926265 100644 --- a/compiler/rustc_metadata/src/rmeta/encoder.rs +++ b/compiler/rustc_metadata/src/rmeta/encoder.rs @@ -1450,7 +1450,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { })) } hir::ItemKind::Impl(hir::Impl { defaultness, constness, .. }) => { - record!(self.tables.impl_defaultness[def_id] <- defaultness); + self.tables.impl_defaultness.set(def_id.index, defaultness); self.tables.impl_constness.set(def_id.index, constness); let trait_ref = self.tcx.impl_trait_ref(def_id); diff --git a/compiler/rustc_metadata/src/rmeta/mod.rs b/compiler/rustc_metadata/src/rmeta/mod.rs index 4dd000eaefeba..c112430ca1870 100644 --- a/compiler/rustc_metadata/src/rmeta/mod.rs +++ b/compiler/rustc_metadata/src/rmeta/mod.rs @@ -312,7 +312,7 @@ define_tables! { impl_parent: Table, impl_polarity: Table, impl_constness: Table, - impl_defaultness: Table, + impl_defaultness: Table, // FIXME(eddyb) perhaps compute this on the fly if cheap enough? coerce_unsized_info: Table, mir_const_qualif: Table, diff --git a/compiler/rustc_metadata/src/rmeta/table.rs b/compiler/rustc_metadata/src/rmeta/table.rs index 6bb07a34e0f37..3f28a7ed4214a 100644 --- a/compiler/rustc_metadata/src/rmeta/table.rs +++ b/compiler/rustc_metadata/src/rmeta/table.rs @@ -120,6 +120,14 @@ fixed_size_enum! { } } +fixed_size_enum! { + hir::Defaultness { + ( Final ) + ( Default { has_value: false } ) + ( Default { has_value: true } ) + } +} + // NOTE(eddyb) there could be an impl for `usize`, which would enable a more // generic `Lazy` impl, but in the general case we might not need / want to // fit every `usize` in `u32`. 
From 2129866dc0d49f5b1197278643102a53da60536a Mon Sep 17 00:00:00 2001 From: Camille GILLOT Date: Sat, 9 Apr 2022 19:08:27 +0200 Subject: [PATCH 4/8] Directly encode IsAsync in metadata. --- compiler/rustc_metadata/src/rmeta/encoder.rs | 8 ++++---- compiler/rustc_metadata/src/rmeta/mod.rs | 2 +- compiler/rustc_metadata/src/rmeta/table.rs | 7 +++++++ 3 files changed, 12 insertions(+), 5 deletions(-) diff --git a/compiler/rustc_metadata/src/rmeta/encoder.rs b/compiler/rustc_metadata/src/rmeta/encoder.rs index c33001b926265..d0cdc85e1709b 100644 --- a/compiler/rustc_metadata/src/rmeta/encoder.rs +++ b/compiler/rustc_metadata/src/rmeta/encoder.rs @@ -1207,7 +1207,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { record!(self.tables.fn_arg_names[def_id] <- self.tcx.hir().body_param_names(body)) } }; - record!(self.tables.asyncness[def_id] <- m_sig.header.asyncness); + self.tables.asyncness.set(def_id.index, m_sig.header.asyncness); self.tables.impl_constness.set(def_id.index, hir::Constness::NotConst); record!(self.tables.kind[def_id] <- EntryKind::AssocFn(self.lazy(AssocFnData { container, @@ -1265,7 +1265,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { } ty::AssocKind::Fn => { let hir::ImplItemKind::Fn(ref sig, body) = ast_item.kind else { bug!() }; - record!(self.tables.asyncness[def_id] <- sig.header.asyncness); + self.tables.asyncness.set(def_id.index, sig.header.asyncness); record!(self.tables.fn_arg_names[def_id] <- self.tcx.hir().body_param_names(body)); // Can be inside `impl const Trait`, so using sig.header.constness is not reliable let constness = if self.tcx.is_const_fn_raw(def_id) { @@ -1394,7 +1394,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { EntryKind::Const } hir::ItemKind::Fn(ref sig, .., body) => { - record!(self.tables.asyncness[def_id] <- sig.header.asyncness); + self.tables.asyncness.set(def_id.index, sig.header.asyncness); record!(self.tables.fn_arg_names[def_id] <- self.tcx.hir().body_param_names(body)); self.tables.impl_constness.set(def_id.index, 
sig.header.constness); EntryKind::Fn @@ -1886,7 +1886,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { match nitem.kind { hir::ForeignItemKind::Fn(_, ref names, _) => { - record!(self.tables.asyncness[def_id] <- hir::IsAsync::NotAsync); + self.tables.asyncness.set(def_id.index, hir::IsAsync::NotAsync); record!(self.tables.fn_arg_names[def_id] <- *names); let constness = if self.tcx.is_const_fn_raw(def_id) { hir::Constness::Const diff --git a/compiler/rustc_metadata/src/rmeta/mod.rs b/compiler/rustc_metadata/src/rmeta/mod.rs index c112430ca1870..f4caa126ab43f 100644 --- a/compiler/rustc_metadata/src/rmeta/mod.rs +++ b/compiler/rustc_metadata/src/rmeta/mod.rs @@ -317,7 +317,7 @@ define_tables! { coerce_unsized_info: Table, mir_const_qualif: Table, rendered_const: Table, - asyncness: Table, + asyncness: Table, fn_arg_names: Table, generator_kind: Table, trait_def: Table, diff --git a/compiler/rustc_metadata/src/rmeta/table.rs b/compiler/rustc_metadata/src/rmeta/table.rs index 3f28a7ed4214a..3c5c551f0e09c 100644 --- a/compiler/rustc_metadata/src/rmeta/table.rs +++ b/compiler/rustc_metadata/src/rmeta/table.rs @@ -128,6 +128,13 @@ fixed_size_enum! { } } +fixed_size_enum! { + hir::IsAsync { + ( NotAsync ) + ( Async ) + } +} + // NOTE(eddyb) there could be an impl for `usize`, which would enable a more // generic `Lazy` impl, but in the general case we might not need / want to // fit every `usize` in `u32`. From 72be5b81df7d038411ea0f88f6ef351e386aca9c Mon Sep 17 00:00:00 2001 From: Camille GILLOT Date: Sun, 10 Apr 2022 01:04:08 +0200 Subject: [PATCH 5/8] Directly encode DefKind in metadata. 
--- compiler/rustc_metadata/src/rmeta/decoder.rs | 24 +++++----- compiler/rustc_metadata/src/rmeta/encoder.rs | 6 +-- compiler/rustc_metadata/src/rmeta/mod.rs | 2 +- compiler/rustc_metadata/src/rmeta/table.rs | 46 ++++++++++++++++++++ 4 files changed, 64 insertions(+), 14 deletions(-) diff --git a/compiler/rustc_metadata/src/rmeta/decoder.rs b/compiler/rustc_metadata/src/rmeta/decoder.rs index 4d06a079f526c..ef1debf4344e8 100644 --- a/compiler/rustc_metadata/src/rmeta/decoder.rs +++ b/compiler/rustc_metadata/src/rmeta/decoder.rs @@ -292,6 +292,12 @@ trait LazyQueryDecodable<'a, 'tcx, T> { ) -> T; } +impl<'a, 'tcx, T> LazyQueryDecodable<'a, 'tcx, T> for T { + fn decode_query(self, _: CrateMetadataRef<'a>, _: TyCtxt<'tcx>, _: impl FnOnce() -> !) -> T { + self + } +} + impl<'a, 'tcx, T> LazyQueryDecodable<'a, 'tcx, T> for Option { fn decode_query(self, _: CrateMetadataRef<'a>, _: TyCtxt<'tcx>, err: impl FnOnce() -> !) -> T { if let Some(l) = self { l } else { err() } @@ -862,16 +868,14 @@ impl<'a, 'tcx> CrateMetadataRef<'a> { } fn def_kind(self, item_id: DefIndex) -> DefKind { - self.root.tables.opt_def_kind.get(self, item_id).map(|k| k.decode(self)).unwrap_or_else( - || { - bug!( - "CrateMetadata::def_kind({:?}): id not found, in crate {:?} with number {}", - item_id, - self.root.name, - self.cnum, - ) - }, - ) + self.root.tables.opt_def_kind.get(self, item_id).unwrap_or_else(|| { + bug!( + "CrateMetadata::def_kind({:?}): id not found, in crate {:?} with number {}", + item_id, + self.root.name, + self.cnum, + ) + }) } fn get_span(self, index: DefIndex, sess: &Session) -> Span { diff --git a/compiler/rustc_metadata/src/rmeta/encoder.rs b/compiler/rustc_metadata/src/rmeta/encoder.rs index d0cdc85e1709b..818d51cc66a97 100644 --- a/compiler/rustc_metadata/src/rmeta/encoder.rs +++ b/compiler/rustc_metadata/src/rmeta/encoder.rs @@ -988,7 +988,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { let def_id = local_id.to_def_id(); let def_kind = tcx.opt_def_kind(local_id); let 
Some(def_kind) = def_kind else { continue }; - record!(self.tables.opt_def_kind[def_id] <- def_kind); + self.tables.opt_def_kind.set(def_id.index, def_kind); record!(self.tables.def_span[def_id] <- tcx.def_span(def_id)); record!(self.tables.attributes[def_id] <- tcx.get_attrs(def_id)); record!(self.tables.expn_that_defined[def_id] <- self.tcx.expn_that_defined(def_id)); @@ -1644,7 +1644,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { self.tables.proc_macro_quoted_spans.set(i, span); } - record!(self.tables.opt_def_kind[LOCAL_CRATE.as_def_id()] <- DefKind::Mod); + self.tables.opt_def_kind.set(LOCAL_CRATE.as_def_id().index, DefKind::Mod); record!(self.tables.def_span[LOCAL_CRATE.as_def_id()] <- tcx.def_span(LOCAL_CRATE.as_def_id())); record!(self.tables.attributes[LOCAL_CRATE.as_def_id()] <- tcx.get_attrs(LOCAL_CRATE.as_def_id())); record!(self.tables.visibility[LOCAL_CRATE.as_def_id()] <- tcx.visibility(LOCAL_CRATE.as_def_id())); @@ -1685,7 +1685,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { def_key.disambiguated_data.data = DefPathData::MacroNs(name); let def_id = id.to_def_id(); - record!(self.tables.opt_def_kind[def_id] <- DefKind::Macro(macro_kind)); + self.tables.opt_def_kind.set(def_id.index, DefKind::Macro(macro_kind)); record!(self.tables.kind[def_id] <- EntryKind::ProcMacro(macro_kind)); record!(self.tables.attributes[def_id] <- attrs); record!(self.tables.def_keys[def_id] <- def_key); diff --git a/compiler/rustc_metadata/src/rmeta/mod.rs b/compiler/rustc_metadata/src/rmeta/mod.rs index f4caa126ab43f..192857ce57722 100644 --- a/compiler/rustc_metadata/src/rmeta/mod.rs +++ b/compiler/rustc_metadata/src/rmeta/mod.rs @@ -286,7 +286,7 @@ define_tables! 
{ attributes: Table>, children: Table>, - opt_def_kind: Table>, + opt_def_kind: Table, visibility: Table>, def_span: Table>, def_ident_span: Table>, diff --git a/compiler/rustc_metadata/src/rmeta/table.rs b/compiler/rustc_metadata/src/rmeta/table.rs index 3c5c551f0e09c..bbf6bde95e599 100644 --- a/compiler/rustc_metadata/src/rmeta/table.rs +++ b/compiler/rustc_metadata/src/rmeta/table.rs @@ -1,8 +1,10 @@ use crate::rmeta::*; +use rustc_hir::def::{CtorKind, CtorOf}; use rustc_index::vec::Idx; use rustc_serialize::opaque::Encoder; use rustc_serialize::Encoder as _; +use rustc_span::hygiene::MacroKind; use std::convert::TryInto; use std::marker::PhantomData; use std::num::NonZeroUsize; @@ -105,6 +107,50 @@ macro_rules! fixed_size_enum { } } +fixed_size_enum! { + DefKind { + ( Mod ) + ( Struct ) + ( Union ) + ( Enum ) + ( Variant ) + ( Trait ) + ( TyAlias ) + ( ForeignTy ) + ( TraitAlias ) + ( AssocTy ) + ( TyParam ) + ( Fn ) + ( Const ) + ( ConstParam ) + ( AssocFn ) + ( AssocConst ) + ( ExternCrate ) + ( Use ) + ( ForeignMod ) + ( AnonConst ) + ( InlineConst ) + ( OpaqueTy ) + ( Field ) + ( LifetimeParam ) + ( GlobalAsm ) + ( Impl ) + ( Closure ) + ( Generator ) + ( Static(ast::Mutability::Not) ) + ( Static(ast::Mutability::Mut) ) + ( Ctor(CtorOf::Struct, CtorKind::Fn) ) + ( Ctor(CtorOf::Struct, CtorKind::Const) ) + ( Ctor(CtorOf::Struct, CtorKind::Fictive) ) + ( Ctor(CtorOf::Variant, CtorKind::Fn) ) + ( Ctor(CtorOf::Variant, CtorKind::Const) ) + ( Ctor(CtorOf::Variant, CtorKind::Fictive) ) + ( Macro(MacroKind::Bang) ) + ( Macro(MacroKind::Attr) ) + ( Macro(MacroKind::Derive) ) + } +} + fixed_size_enum! { ty::ImplPolarity { ( Positive ) From 6142f50845171bba9975be096f0fd4ea2a056fdf Mon Sep 17 00:00:00 2001 From: Camille GILLOT Date: Sun, 10 Apr 2022 11:14:58 +0200 Subject: [PATCH 6/8] Directly encode DefPathHash in metadata. 
--- compiler/rustc_metadata/src/rmeta/decoder.rs | 6 +++--- compiler/rustc_metadata/src/rmeta/encoder.rs | 5 ++--- compiler/rustc_metadata/src/rmeta/mod.rs | 2 +- compiler/rustc_metadata/src/rmeta/table.rs | 19 +++++++++++++++++++ 4 files changed, 25 insertions(+), 7 deletions(-) diff --git a/compiler/rustc_metadata/src/rmeta/decoder.rs b/compiler/rustc_metadata/src/rmeta/decoder.rs index ef1debf4344e8..7b500f1565da2 100644 --- a/compiler/rustc_metadata/src/rmeta/decoder.rs +++ b/compiler/rustc_metadata/src/rmeta/decoder.rs @@ -1459,9 +1459,9 @@ impl<'a, 'tcx> CrateMetadataRef<'a> { index: DefIndex, def_path_hashes: &mut FxHashMap, ) -> DefPathHash { - *def_path_hashes.entry(index).or_insert_with(|| { - self.root.tables.def_path_hashes.get(self, index).unwrap().decode(self) - }) + *def_path_hashes + .entry(index) + .or_insert_with(|| self.root.tables.def_path_hashes.get(self, index).unwrap()) } #[inline] diff --git a/compiler/rustc_metadata/src/rmeta/encoder.rs b/compiler/rustc_metadata/src/rmeta/encoder.rs index 818d51cc66a97..be10ad490e11e 100644 --- a/compiler/rustc_metadata/src/rmeta/encoder.rs +++ b/compiler/rustc_metadata/src/rmeta/encoder.rs @@ -461,16 +461,15 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { .chain(self.tcx.resolutions(()).proc_macros.iter().map(|p| p.local_def_index)) { let def_key = self.lazy(table.def_key(def_index)); - let def_path_hash = self.lazy(table.def_path_hash(def_index)); + let def_path_hash = table.def_path_hash(def_index); self.tables.def_keys.set(def_index, def_key); self.tables.def_path_hashes.set(def_index, def_path_hash); } } else { for (def_index, def_key, def_path_hash) in table.enumerated_keys_and_path_hashes() { let def_key = self.lazy(def_key); - let def_path_hash = self.lazy(def_path_hash); self.tables.def_keys.set(def_index, def_key); - self.tables.def_path_hashes.set(def_index, def_path_hash); + self.tables.def_path_hashes.set(def_index, *def_path_hash); } } } diff --git a/compiler/rustc_metadata/src/rmeta/mod.rs 
b/compiler/rustc_metadata/src/rmeta/mod.rs index 192857ce57722..37340ee326841 100644 --- a/compiler/rustc_metadata/src/rmeta/mod.rs +++ b/compiler/rustc_metadata/src/rmeta/mod.rs @@ -332,7 +332,7 @@ define_tables! { // `DefPathTable` up front, since we may only ever use a few // definitions from any given crate. def_keys: Table>, - def_path_hashes: Table>, + def_path_hashes: Table, proc_macro_quoted_spans: Table>, } diff --git a/compiler/rustc_metadata/src/rmeta/table.rs b/compiler/rustc_metadata/src/rmeta/table.rs index bbf6bde95e599..3e0aa0728df43 100644 --- a/compiler/rustc_metadata/src/rmeta/table.rs +++ b/compiler/rustc_metadata/src/rmeta/table.rs @@ -1,5 +1,6 @@ use crate::rmeta::*; +use rustc_data_structures::fingerprint::Fingerprint; use rustc_hir::def::{CtorKind, CtorOf}; use rustc_index::vec::Idx; use rustc_serialize::opaque::Encoder; @@ -181,6 +182,24 @@ fixed_size_enum! { } } +// We directly encode `DefPathHash` because a `Lazy` would incur a 25% cost. +impl FixedSizeEncoding for Option { + fixed_size_encoding_byte_len_and_defaults!(16); + + #[inline] + fn from_bytes(b: &[u8]) -> Self { + Some(DefPathHash(Fingerprint::from_le_bytes(b.try_into().unwrap()))) + } + + #[inline] + fn write_to_bytes(self, b: &mut [u8]) { + let Some(DefPathHash(fingerprint)) = self else { + panic!("Trying to encode absent DefPathHash.") + }; + b[..Self::BYTE_LEN].copy_from_slice(&fingerprint.to_le_bytes()); + } +} + // NOTE(eddyb) there could be an impl for `usize`, which would enable a more // generic `Lazy` impl, but in the general case we might not need / want to // fit every `usize` in `u32`. From b9287a83c5691568827f056bea0241a0bdb72f18 Mon Sep 17 00:00:00 2001 From: Camille GILLOT Date: Sun, 10 Apr 2022 00:16:55 +0200 Subject: [PATCH 7/8] Directly encode DefId in metadata. 
--- compiler/rustc_metadata/src/rmeta/decoder.rs | 22 ++++++++++++-- compiler/rustc_metadata/src/rmeta/encoder.rs | 7 ++--- compiler/rustc_metadata/src/rmeta/mod.rs | 30 ++++++++++++++++++-- compiler/rustc_metadata/src/rmeta/table.rs | 28 ++++++++++++++++++ 4 files changed, 77 insertions(+), 10 deletions(-) diff --git a/compiler/rustc_metadata/src/rmeta/decoder.rs b/compiler/rustc_metadata/src/rmeta/decoder.rs index 7b500f1565da2..3933a0d19a4ad 100644 --- a/compiler/rustc_metadata/src/rmeta/decoder.rs +++ b/compiler/rustc_metadata/src/rmeta/decoder.rs @@ -388,6 +388,17 @@ impl<'a, 'tcx> LazyQueryDecodable<'a, 'tcx, Option> } } +impl<'a, 'tcx> LazyQueryDecodable<'a, 'tcx, Option> for Option { + fn decode_query( + self, + cdata: CrateMetadataRef<'a>, + _: TyCtxt<'tcx>, + _: impl FnOnce() -> !, + ) -> Option { + self.map(|raw_def_id| raw_def_id.decode(cdata)) + } +} + impl<'a, 'tcx> DecodeContext<'a, 'tcx> { #[inline] fn tcx(&self) -> TyCtxt<'tcx> { @@ -406,8 +417,9 @@ impl<'a, 'tcx> DecodeContext<'a, 'tcx> { self.cdata.unwrap() } + #[inline] fn map_encoded_cnum_to_current(&self, cnum: CrateNum) -> CrateNum { - if cnum == LOCAL_CRATE { self.cdata().cnum } else { self.cdata().cnum_map[cnum] } + self.cdata().map_encoded_cnum_to_current(cnum) } fn read_lazy_with_meta(&mut self, meta: T::Meta) -> Lazy { @@ -718,8 +730,7 @@ impl<'a, 'tcx, T: Decodable>> Decodable>> Decodable> - for Lazy> +impl<'a, 'tcx, I: Idx, T> Decodable> for Lazy> where Option: FixedSizeEncoding, { @@ -856,6 +867,11 @@ impl<'a, 'tcx> CrateMetadataRef<'a> { self.root.tables.kind.get(self, item_id).map(|k| k.decode(self)) } + #[inline] + pub(super) fn map_encoded_cnum_to_current(self, cnum: CrateNum) -> CrateNum { + if cnum == LOCAL_CRATE { self.cnum } else { self.cnum_map[cnum] } + } + fn kind(self, item_id: DefIndex) -> EntryKind { self.maybe_kind(item_id).unwrap_or_else(|| { bug!( diff --git a/compiler/rustc_metadata/src/rmeta/encoder.rs b/compiler/rustc_metadata/src/rmeta/encoder.rs index 
be10ad490e11e..e967750aebb52 100644 --- a/compiler/rustc_metadata/src/rmeta/encoder.rs +++ b/compiler/rustc_metadata/src/rmeta/encoder.rs @@ -147,8 +147,7 @@ impl<'a, 'tcx, T: Encodable>> Encodable>> Encodable> - for Lazy> +impl<'a, 'tcx, I: Idx, T> Encodable> for Lazy> where Option: FixedSizeEncoding, { @@ -1285,7 +1284,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { self.encode_ident_span(def_id, impl_item.ident(self.tcx)); self.encode_item_type(def_id); if let Some(trait_item_def_id) = impl_item.trait_item_def_id { - record!(self.tables.trait_item_def_id[def_id] <- trait_item_def_id); + self.tables.trait_item_def_id.set(def_id.index, trait_item_def_id.into()); } if impl_item.kind == ty::AssocKind::Fn { record!(self.tables.fn_sig[def_id] <- tcx.fn_sig(def_id)); @@ -1457,7 +1456,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { let trait_def = self.tcx.trait_def(trait_ref.def_id); if let Some(mut an) = trait_def.ancestors(self.tcx, def_id).ok() { if let Some(specialization_graph::Node::Impl(parent)) = an.nth(1) { - record!(self.tables.impl_parent[def_id] <- parent); + self.tables.impl_parent.set(def_id.index, parent.into()); } } diff --git a/compiler/rustc_metadata/src/rmeta/mod.rs b/compiler/rustc_metadata/src/rmeta/mod.rs index 37340ee326841..43ccfc64e0563 100644 --- a/compiler/rustc_metadata/src/rmeta/mod.rs +++ b/compiler/rustc_metadata/src/rmeta/mod.rs @@ -1,3 +1,4 @@ +use crate::creader::CrateMetadataRef; use decoder::Metadata; use def_path_hash_map::DefPathHashMapRef; use table::{Table, TableBuilder}; @@ -8,7 +9,7 @@ use rustc_data_structures::svh::Svh; use rustc_data_structures::sync::MetadataRef; use rustc_hir as hir; use rustc_hir::def::{CtorKind, DefKind}; -use rustc_hir::def_id::{DefId, DefIndex, DefPathHash, StableCrateId}; +use rustc_hir::def_id::{CrateNum, DefId, DefIndex, DefPathHash, StableCrateId}; use rustc_hir::definitions::DefKey; use rustc_hir::lang_items; use rustc_index::{bit_set::FiniteBitSet, vec::IndexVec}; @@ -237,6 +238,29 @@ crate struct 
CrateRoot<'tcx> { symbol_mangling_version: SymbolManglingVersion, } +/// On-disk representation of `DefId`. +/// This creates a type-safe way to enforce that we remap the CrateNum between the on-disk +/// representation and the compilation session. +#[derive(Copy, Clone)] +crate struct RawDefId { + krate: u32, + index: u32, +} + +impl Into for DefId { + fn into(self) -> RawDefId { + RawDefId { krate: self.krate.as_u32(), index: self.index.as_u32() } + } +} + +impl RawDefId { + fn decode(self, cdata: CrateMetadataRef<'_>) -> DefId { + let krate = CrateNum::from_u32(self.krate); + let krate = cdata.map_encoded_cnum_to_current(krate); + DefId { krate, index: DefIndex::from_u32(self.index) } + } +} + #[derive(Encodable, Decodable)] crate struct CrateDep { pub name: Symbol, @@ -309,7 +333,7 @@ define_tables! { mir_for_ctfe: Table)>, promoted_mir: Table>)>, thir_abstract_const: Table])>, - impl_parent: Table, + impl_parent: Table, impl_polarity: Table, impl_constness: Table, impl_defaultness: Table, @@ -322,7 +346,7 @@ define_tables! { generator_kind: Table, trait_def: Table, - trait_item_def_id: Table>, + trait_item_def_id: Table, inherent_impls: Table>, expn_that_defined: Table>, unused_generic_params: Table>>, diff --git a/compiler/rustc_metadata/src/rmeta/table.rs b/compiler/rustc_metadata/src/rmeta/table.rs index 3e0aa0728df43..b336649d36670 100644 --- a/compiler/rustc_metadata/src/rmeta/table.rs +++ b/compiler/rustc_metadata/src/rmeta/table.rs @@ -200,6 +200,34 @@ impl FixedSizeEncoding for Option { } } +// We directly encode RawDefId because using a `Lazy` would incur a 50% overhead in the worst case. 
+impl FixedSizeEncoding for Option { + fixed_size_encoding_byte_len_and_defaults!(2 * u32::BYTE_LEN); + + #[inline] + fn from_bytes(b: &[u8]) -> Self { + let krate = u32::from_bytes(&b[0..4]); + let index = u32::from_bytes(&b[4..8]); + if krate == 0 { + return None; + } + Some(RawDefId { krate: krate - 1, index }) + } + + #[inline] + fn write_to_bytes(self, b: &mut [u8]) { + match self { + None => 0u32.write_to_bytes(b), + Some(RawDefId { krate, index }) => { + // CrateNum is less than `CrateNum::MAX_AS_U32`. + debug_assert!(krate < u32::MAX); + (1 + krate).write_to_bytes(&mut b[0..4]); + index.write_to_bytes(&mut b[4..8]); + } + } + } +} + // NOTE(eddyb) there could be an impl for `usize`, which would enable a more // generic `Lazy` impl, but in the general case we might not need / want to // fit every `usize` in `u32`. From b4cf2cdf870512373a656393f393bce84eb78d80 Mon Sep 17 00:00:00 2001 From: Camille GILLOT Date: Sun, 10 Apr 2022 15:39:12 +0200 Subject: [PATCH 8/8] Simplify FixedSizeEncoding using const generics. 
--- compiler/rustc_metadata/src/lib.rs | 1 + compiler/rustc_metadata/src/rmeta/table.rs | 192 +++++++++------------ 2 files changed, 86 insertions(+), 107 deletions(-) diff --git a/compiler/rustc_metadata/src/lib.rs b/compiler/rustc_metadata/src/lib.rs index 5c7a1ccf6c276..aebd293f6c211 100644 --- a/compiler/rustc_metadata/src/lib.rs +++ b/compiler/rustc_metadata/src/lib.rs @@ -7,6 +7,7 @@ #![feature(proc_macro_internals)] #![feature(macro_metavar_expr)] #![feature(min_specialization)] +#![feature(slice_as_chunks)] #![feature(try_blocks)] #![feature(never_type)] #![recursion_limit = "256"] diff --git a/compiler/rustc_metadata/src/rmeta/table.rs b/compiler/rustc_metadata/src/rmeta/table.rs index b336649d36670..7a23cba536a0a 100644 --- a/compiler/rustc_metadata/src/rmeta/table.rs +++ b/compiler/rustc_metadata/src/rmeta/table.rs @@ -16,76 +16,34 @@ use tracing::debug; /// Unchecked invariant: `Self::default()` should encode as `[0; BYTE_LEN]`, /// but this has no impact on safety. pub(super) trait FixedSizeEncoding: Default { - const BYTE_LEN: usize; - - // FIXME(eddyb) convert to and from `[u8; Self::BYTE_LEN]` instead, - // once that starts being allowed by the compiler (i.e. lazy normalization). - fn from_bytes(b: &[u8]) -> Self; - fn write_to_bytes(self, b: &mut [u8]); - - // FIXME(eddyb) make these generic functions, or at least defaults here. - // (same problem as above, needs `[u8; Self::BYTE_LEN]`) - // For now, a macro (`fixed_size_encoding_byte_len_and_defaults`) is used. - - /// Read a `Self` value (encoded as `Self::BYTE_LEN` bytes), - /// from `&b[i * Self::BYTE_LEN..]`, returning `None` if `i` - /// is not in bounds, or `Some(Self::from_bytes(...))` otherwise. - fn maybe_read_from_bytes_at(b: &[u8], i: usize) -> Option; - /// Write a `Self` value (encoded as `Self::BYTE_LEN` bytes), - /// at `&mut b[i * Self::BYTE_LEN..]`, using `Self::write_to_bytes`. 
- fn write_to_bytes_at(self, b: &mut [u8], i: usize); -} + /// This should be `[u8; BYTE_LEN]`; + type ByteArray; -// HACK(eddyb) this shouldn't be needed (see comments on the methods above). -macro_rules! fixed_size_encoding_byte_len_and_defaults { - ($byte_len:expr) => { - const BYTE_LEN: usize = $byte_len; - fn maybe_read_from_bytes_at(b: &[u8], i: usize) -> Option { - const BYTE_LEN: usize = $byte_len; - // HACK(eddyb) ideally this would be done with fully safe code, - // but slicing `[u8]` with `i * N..` is optimized worse, due to the - // possibility of `i * N` overflowing, than indexing `[[u8; N]]`. - let b = unsafe { - std::slice::from_raw_parts(b.as_ptr() as *const [u8; BYTE_LEN], b.len() / BYTE_LEN) - }; - b.get(i).map(|b| FixedSizeEncoding::from_bytes(b)) - } - fn write_to_bytes_at(self, b: &mut [u8], i: usize) { - const BYTE_LEN: usize = $byte_len; - // HACK(eddyb) ideally this would be done with fully safe code, - // see similar comment in `read_from_bytes_at` for why it can't yet. - let b = unsafe { - std::slice::from_raw_parts_mut( - b.as_mut_ptr() as *mut [u8; BYTE_LEN], - b.len() / BYTE_LEN, - ) - }; - self.write_to_bytes(&mut b[i]); - } - }; + fn from_bytes(b: &Self::ByteArray) -> Self; + fn write_to_bytes(self, b: &mut Self::ByteArray); } impl FixedSizeEncoding for u32 { - fixed_size_encoding_byte_len_and_defaults!(4); + type ByteArray = [u8; 4]; - fn from_bytes(b: &[u8]) -> Self { - let mut bytes = [0; Self::BYTE_LEN]; - bytes.copy_from_slice(&b[..Self::BYTE_LEN]); - Self::from_le_bytes(bytes) + #[inline] + fn from_bytes(b: &[u8; 4]) -> Self { + Self::from_le_bytes(*b) } - fn write_to_bytes(self, b: &mut [u8]) { - b[..Self::BYTE_LEN].copy_from_slice(&self.to_le_bytes()); + #[inline] + fn write_to_bytes(self, b: &mut [u8; 4]) { + *b = self.to_le_bytes(); } } macro_rules! 
fixed_size_enum { ($ty:ty { $(($($pat:tt)*))* }) => { impl FixedSizeEncoding for Option<$ty> { - fixed_size_encoding_byte_len_and_defaults!(1); + type ByteArray = [u8;1]; #[inline] - fn from_bytes(b: &[u8]) -> Self { + fn from_bytes(b: &[u8;1]) -> Self { use $ty::*; if b[0] == 0 { return None; } @@ -97,7 +55,7 @@ macro_rules! fixed_size_enum { } #[inline] - fn write_to_bytes(self, b: &mut [u8]) { + fn write_to_bytes(self, b: &mut [u8;1]) { use $ty::*; b[0] = match self { None => 0, @@ -184,30 +142,30 @@ fixed_size_enum! { // We directly encode `DefPathHash` because a `Lazy` would incur a 25% cost. impl FixedSizeEncoding for Option { - fixed_size_encoding_byte_len_and_defaults!(16); + type ByteArray = [u8; 16]; #[inline] - fn from_bytes(b: &[u8]) -> Self { - Some(DefPathHash(Fingerprint::from_le_bytes(b.try_into().unwrap()))) + fn from_bytes(b: &[u8; 16]) -> Self { + Some(DefPathHash(Fingerprint::from_le_bytes(*b))) } #[inline] - fn write_to_bytes(self, b: &mut [u8]) { + fn write_to_bytes(self, b: &mut [u8; 16]) { let Some(DefPathHash(fingerprint)) = self else { panic!("Trying to encode absent DefPathHash.") }; - b[..Self::BYTE_LEN].copy_from_slice(&fingerprint.to_le_bytes()); + *b = fingerprint.to_le_bytes(); } } // We directly encode RawDefId because using a `Lazy` would incur a 50% overhead in the worst case. 
impl FixedSizeEncoding for Option { - fixed_size_encoding_byte_len_and_defaults!(2 * u32::BYTE_LEN); + type ByteArray = [u8; 8]; #[inline] - fn from_bytes(b: &[u8]) -> Self { - let krate = u32::from_bytes(&b[0..4]); - let index = u32::from_bytes(&b[4..8]); + fn from_bytes(b: &[u8; 8]) -> Self { + let krate = u32::from_le_bytes(b[0..4].try_into().unwrap()); + let index = u32::from_le_bytes(b[4..8].try_into().unwrap()); if krate == 0 { return None; } @@ -215,14 +173,14 @@ impl FixedSizeEncoding for Option { } #[inline] - fn write_to_bytes(self, b: &mut [u8]) { + fn write_to_bytes(self, b: &mut [u8; 8]) { match self { - None => 0u32.write_to_bytes(b), + None => *b = [0; 8], Some(RawDefId { krate, index }) => { // CrateNum is less than `CrateNum::MAX_AS_U32`. debug_assert!(krate < u32::MAX); - (1 + krate).write_to_bytes(&mut b[0..4]); - index.write_to_bytes(&mut b[4..8]); + b[0..4].copy_from_slice(&(1 + krate).to_le_bytes()); + b[4..8].copy_from_slice(&index.to_le_bytes()); } } } @@ -232,44 +190,51 @@ impl FixedSizeEncoding for Option { // generic `Lazy` impl, but in the general case we might not need / want to // fit every `usize` in `u32`. 
impl FixedSizeEncoding for Option> { - fixed_size_encoding_byte_len_and_defaults!(u32::BYTE_LEN); + type ByteArray = [u8; 4]; - fn from_bytes(b: &[u8]) -> Self { - Some(Lazy::from_position(NonZeroUsize::new(u32::from_bytes(b) as usize)?)) + #[inline] + fn from_bytes(b: &[u8; 4]) -> Self { + let position = NonZeroUsize::new(u32::from_bytes(b) as usize)?; + Some(Lazy::from_position(position)) } - fn write_to_bytes(self, b: &mut [u8]) { + #[inline] + fn write_to_bytes(self, b: &mut [u8; 4]) { let position = self.map_or(0, |lazy| lazy.position.get()); let position: u32 = position.try_into().unwrap(); - position.write_to_bytes(b) } } impl FixedSizeEncoding for Option> { - fixed_size_encoding_byte_len_and_defaults!(u32::BYTE_LEN * 2); + type ByteArray = [u8; 8]; - fn from_bytes(b: &[u8]) -> Self { - Some(Lazy::from_position_and_meta( - >>::from_bytes(b)?.position, - u32::from_bytes(&b[u32::BYTE_LEN..]) as usize, - )) + #[inline] + fn from_bytes(b: &[u8; 8]) -> Self { + let ([ref position_bytes, ref meta_bytes],[])= b.as_chunks::<4>() else { panic!() }; + let position = NonZeroUsize::new(u32::from_bytes(position_bytes) as usize)?; + let len = u32::from_bytes(meta_bytes) as usize; + Some(Lazy::from_position_and_meta(position, len)) } - fn write_to_bytes(self, b: &mut [u8]) { - self.map(|lazy| Lazy::::from_position(lazy.position)).write_to_bytes(b); + #[inline] + fn write_to_bytes(self, b: &mut [u8; 8]) { + let ([ref mut position_bytes, ref mut meta_bytes],[])= b.as_chunks_mut::<4>() else { panic!() }; + + let position = self.map_or(0, |lazy| lazy.position.get()); + let position: u32 = position.try_into().unwrap(); + position.write_to_bytes(position_bytes); let len = self.map_or(0, |lazy| lazy.meta); let len: u32 = len.try_into().unwrap(); - - len.write_to_bytes(&mut b[u32::BYTE_LEN..]); + len.write_to_bytes(meta_bytes); } } /// Random-access table (i.e. 
offering constant-time `get`/`set`), similar to /// `Vec>`, but without requiring encoding or decoding all the values /// eagerly and in-order. -/// A total of `(max_idx + 1) * as FixedSizeEncoding>::BYTE_LEN` bytes +/// A total of `(max_idx + 1)` times `Option as FixedSizeEncoding>::ByteArray` /// are used for a table, where `max_idx` is the largest index passed to /// `TableBuilder::set`. pub(super) struct Table @@ -287,12 +252,8 @@ pub(super) struct TableBuilder where Option: FixedSizeEncoding, { - // FIXME(eddyb) use `IndexVec>::BYTE_LEN]>` instead of - // `Vec`, once that starts working (i.e. lazy normalization). - // Then again, that has the downside of not allowing `TableBuilder::encode` to - // obtain a `&[u8]` entirely in safe code, for writing the bytes out. - bytes: Vec, - _marker: PhantomData<(fn(&I), T)>, + blocks: IndexVec as FixedSizeEncoding>::ByteArray>, + _marker: PhantomData, } impl Default for TableBuilder @@ -300,7 +261,7 @@ where Option: FixedSizeEncoding, { fn default() -> Self { - TableBuilder { bytes: vec![], _marker: PhantomData } + TableBuilder { blocks: Default::default(), _marker: PhantomData } } } @@ -308,25 +269,29 @@ impl TableBuilder where Option: FixedSizeEncoding, { - pub(crate) fn set(&mut self, i: I, value: T) { + pub(crate) fn set(&mut self, i: I, value: T) + where + Option: FixedSizeEncoding, + { // FIXME(eddyb) investigate more compact encodings for sparse tables. // On the PR @michaelwoerister mentioned: // > Space requirements could perhaps be optimized by using the HAMT `popcnt` // > trick (i.e. divide things into buckets of 32 or 64 items and then // > store bit-masks of which item in each bucket is actually serialized). 
- let i = i.index(); - let needed = (i + 1) * >::BYTE_LEN; - if self.bytes.len() < needed { - self.bytes.resize(needed, 0); - } - - Some(value).write_to_bytes_at(&mut self.bytes, i); + self.blocks.ensure_contains_elem(i, || [0; N]); + Some(value).write_to_bytes(&mut self.blocks[i]); } - pub(crate) fn encode(&self, buf: &mut Encoder) -> Lazy> { + pub(crate) fn encode(&self, buf: &mut Encoder) -> Lazy> + where + Option: FixedSizeEncoding, + { let pos = buf.position(); - buf.emit_raw_bytes(&self.bytes).unwrap(); - Lazy::from_position_and_meta(NonZeroUsize::new(pos as usize).unwrap(), self.bytes.len()) + for block in &self.blocks { + buf.emit_raw_bytes(block).unwrap(); + } + let num_bytes = self.blocks.len() * N; + Lazy::from_position_and_meta(NonZeroUsize::new(pos as usize).unwrap(), num_bytes) } } @@ -334,6 +299,7 @@ impl LazyMeta for Table where Option: FixedSizeEncoding, { + /// Number of bytes in the data stream. type Meta = usize; } @@ -343,16 +309,28 @@ where { /// Given the metadata, extract out the value at a particular index (if any). #[inline(never)] - pub(super) fn get<'a, 'tcx, M: Metadata<'a, 'tcx>>(&self, metadata: M, i: I) -> Option { + pub(super) fn get<'a, 'tcx, M: Metadata<'a, 'tcx>, const N: usize>( + &self, + metadata: M, + i: I, + ) -> Option + where + Option: FixedSizeEncoding, + { debug!("Table::lookup: index={:?} len={:?}", i, self.meta); let start = self.position.get(); let bytes = &metadata.blob()[start..start + self.meta]; - >::maybe_read_from_bytes_at(bytes, i.index())? + let (bytes, []) = bytes.as_chunks::() else { panic!() }; + let bytes = bytes.get(i.index())?; + FixedSizeEncoding::from_bytes(bytes) } /// Size of the table in entries, including possible gaps. - pub(super) fn size(&self) -> usize { - self.meta / >::BYTE_LEN + pub(super) fn size(&self) -> usize + where + Option: FixedSizeEncoding, + { + self.meta / N } }