diff --git a/src/codegen/mod.rs b/src/codegen/mod.rs
index cf4fad5050..c5c8ebee7f 100644
--- a/src/codegen/mod.rs
+++ b/src/codegen/mod.rs
@@ -3275,8 +3275,6 @@ impl CodeGenerator for ObjCInterface {
}
}
-
-
pub fn codegen(context: &mut BindgenContext) -> Vec<P<ast::Item>> {
context.gen(|context| {
let counter = Cell::new(0);
@@ -3284,10 +3282,10 @@ pub fn codegen(context: &mut BindgenContext) -> Vec<P<ast::Item>> {
debug!("codegen: {:?}", context.options());
- let whitelisted_items: ItemSet = context.whitelisted_items().collect();
+ let whitelisted_items = context.whitelisted_items();
if context.options().emit_ir {
- for &id in whitelisted_items.iter() {
+ for &id in whitelisted_items {
let item = context.resolve_item(id);
println!("ir: {:?} = {:#?}", id, item);
}
@@ -3301,7 +3299,7 @@ pub fn codegen(context: &mut BindgenContext) -> Vec<P<ast::Item>> {
}
context.resolve_item(context.root_module())
- .codegen(context, &mut result, &whitelisted_items, &());
+ .codegen(context, &mut result, whitelisted_items, &());
result.items
})
diff --git a/src/ir/cant_derive_debug.rs b/src/ir/analysis/derive_debug.rs
similarity index 60%
rename from src/ir/cant_derive_debug.rs
rename to src/ir/analysis/derive_debug.rs
index f6a7fab7a5..6e72da1fb8 100644
--- a/src/ir/cant_derive_debug.rs
+++ b/src/ir/analysis/derive_debug.rs
@@ -1,9 +1,9 @@
//! Determining which types for which we can emit `#[derive(Debug)]`.
-use super::analysis::MonotoneFramework;
-use ir::context::{BindgenContext, ItemId};
-use ir::item::ItemSet;
+
+use super::{ConstrainResult, MonotoneFramework};
use std::collections::HashSet;
use std::collections::HashMap;
+use ir::context::{BindgenContext, ItemId};
use ir::traversal::EdgeKind;
use ir::ty::RUST_DERIVE_IN_ARRAY_LIMIT;
use ir::ty::TypeKind;
@@ -16,7 +16,7 @@ use ir::comp::CompKind;
/// An analysis that finds for each IR item whether debug cannot be derived.
///
-/// We use the monotone constraint function `cant_derive_debug`, defined as
+/// We use the monotone constraint function `cannot_derive_debug`, defined as
/// follows:
///
/// * If T is Opaque and layout of the type is known, get this layout as opaque
@@ -34,17 +34,17 @@ use ir::comp::CompKind;
/// derived debug if any of the template arguments or template definition
/// cannot derive debug.
#[derive(Debug, Clone)]
-pub struct CantDeriveDebugAnalysis<'ctx, 'gen>
+pub struct CannotDeriveDebug<'ctx, 'gen>
where 'gen: 'ctx
{
ctx: &'ctx BindgenContext<'gen>,
// The incremental result of this analysis's computation. Everything in this
// set cannot derive debug.
- cant_derive_debug: HashSet<ItemId>,
+ cannot_derive_debug: HashSet<ItemId>,
// Dependencies saying that if a key ItemId has been inserted into the
- // `cant_derive_debug` set, then each of the ids in Vec<ItemId> need to be
+ // `cannot_derive_debug` set, then each of the ids in Vec<ItemId> need to be
// considered again.
//
// This is a subset of the natural IR graph with reversed edges, where we
@@ -53,7 +53,7 @@ pub struct CantDeriveDebugAnalysis<'ctx, 'gen>
dependencies: HashMap<ItemId, Vec<ItemId>>,
}
-impl<'ctx, 'gen> CantDeriveDebugAnalysis<'ctx, 'gen> {
+impl<'ctx, 'gen> CannotDeriveDebug<'ctx, 'gen> {
fn consider_edge(kind: EdgeKind) -> bool {
match kind {
// These are the only edges that can affect whether a type can derive
@@ -77,79 +77,69 @@ impl<'ctx, 'gen> CantDeriveDebugAnalysis<'ctx, 'gen> {
}
}
- fn insert(&mut self, id: ItemId) -> bool {
- let was_already_in = self.cant_derive_debug.insert(id);
+ fn insert(&mut self, id: ItemId) -> ConstrainResult {
+ let was_not_already_in_set = self.cannot_derive_debug.insert(id);
assert!(
- was_already_in,
- format!("We shouldn't try and insert twice because if it was already in the set, \
- `constrain` would have exited early.: {:?}", id)
+ was_not_already_in_set,
+ "We shouldn't try and insert {:?} twice because if it was \
+ already in the set, `constrain` should have exited early.",
+ id
);
- true
+ ConstrainResult::Changed
}
}
-impl<'ctx, 'gen> MonotoneFramework for CantDeriveDebugAnalysis<'ctx, 'gen> {
+impl<'ctx, 'gen> MonotoneFramework for CannotDeriveDebug<'ctx, 'gen> {
type Node = ItemId;
type Extra = &'ctx BindgenContext<'gen>;
type Output = HashSet<ItemId>;
- fn new(ctx: &'ctx BindgenContext<'gen>) -> CantDeriveDebugAnalysis<'ctx, 'gen> {
- let cant_derive_debug = HashSet::new();
+ fn new(ctx: &'ctx BindgenContext<'gen>) -> CannotDeriveDebug<'ctx, 'gen> {
+ let cannot_derive_debug = HashSet::new();
let mut dependencies = HashMap::new();
- let whitelisted_items: HashSet<_> = ctx.whitelisted_items().collect();
-
- let whitelisted_and_blacklisted_items: ItemSet = whitelisted_items.iter()
- .cloned()
- .flat_map(|i| {
- let mut reachable = vec![i];
- i.trace(ctx, &mut |s, _| {
- reachable.push(s);
- }, &());
- reachable
- })
- .collect();
- for item in whitelisted_and_blacklisted_items {
+ for &item in ctx.whitelisted_items() {
dependencies.entry(item).or_insert(vec![]);
{
// We reverse our natural IR graph edges to find dependencies
// between nodes.
item.trace(ctx, &mut |sub_item: ItemId, edge_kind| {
- if Self::consider_edge(edge_kind) {
- dependencies.entry(sub_item)
- .or_insert(vec![])
- .push(item);
+ if ctx.whitelisted_items().contains(&sub_item) &&
+ Self::consider_edge(edge_kind) {
+ dependencies.entry(sub_item)
+ .or_insert(vec![])
+ .push(item);
}
}, &());
}
}
- CantDeriveDebugAnalysis {
- ctx: ctx,
- cant_derive_debug: cant_derive_debug,
- dependencies: dependencies,
+ CannotDeriveDebug {
+ ctx,
+ cannot_derive_debug,
+ dependencies,
}
}
fn initial_worklist(&self) -> Vec<ItemId> {
- self.ctx.whitelisted_items().collect()
+ self.ctx.whitelisted_items().iter().cloned().collect()
}
- fn constrain(&mut self, id: ItemId) -> bool {
- if self.cant_derive_debug.contains(&id) {
- return false;
+ fn constrain(&mut self, id: ItemId) -> ConstrainResult {
+ if self.cannot_derive_debug.contains(&id) {
+ return ConstrainResult::Same;
}
let item = self.ctx.resolve_item(id);
let ty = match item.as_type() {
- None => return false,
+ None => return ConstrainResult::Same,
Some(ty) => ty
};
match *ty.kind() {
- // handle the simple case
- // These can derive debug without further information
+ // Handle the simple cases. These can derive debug without further
+ // information.
TypeKind::Void |
TypeKind::NullPtr |
TypeKind::Int(..) |
@@ -164,97 +154,113 @@ impl<'ctx, 'gen> MonotoneFramework for CantDeriveDebugAnalysis<'ctx, 'gen> {
TypeKind::ObjCInterface(..) |
TypeKind::ObjCId |
TypeKind::ObjCSel => {
- return false;
+ ConstrainResult::Same
},
+
TypeKind::Opaque => {
if ty.layout(self.ctx)
.map_or(true, |l| l.opaque().can_trivially_derive_debug(self.ctx, ())) {
- return false;
+ ConstrainResult::Same
} else {
- return self.insert(id);
+ self.insert(id)
}
},
+
TypeKind::Array(t, len) => {
+ if self.cannot_derive_debug.contains(&t) {
+ return self.insert(id);
+ }
+
if len <= RUST_DERIVE_IN_ARRAY_LIMIT {
- if self.cant_derive_debug.contains(&t) {
- return self.insert(id);
- }
- return false;
+ ConstrainResult::Same
} else {
- return self.insert(id);
+ self.insert(id)
}
},
+
TypeKind::ResolvedTypeRef(t) |
TypeKind::TemplateAlias(t, _) |
TypeKind::Alias(t) => {
- if self.cant_derive_debug.contains(&t) {
- return self.insert(id);
+ if self.cannot_derive_debug.contains(&t) {
+ self.insert(id)
+ } else {
+ ConstrainResult::Same
}
- return false;
},
+
TypeKind::Comp(ref info) => {
if info.has_non_type_template_params() {
- if ty.layout(self.ctx).map_or(true,
- |l| l.opaque().can_trivially_derive_debug(self.ctx, ())) {
- return false;
+ if ty.layout(self.ctx)
+ .map_or(true,
+ |l| l.opaque().can_trivially_derive_debug(self.ctx, ())) {
+ return ConstrainResult::Same;
} else {
return self.insert(id);
}
}
+
if info.kind() == CompKind::Union {
if self.ctx.options().unstable_rust {
return self.insert(id);
}
- if ty.layout(self.ctx).map_or(true,
- |l| l.opaque().can_trivially_derive_debug(self.ctx, ())) {
- return false;
+ if ty.layout(self.ctx)
+ .map_or(true,
+ |l| l.opaque().can_trivially_derive_debug(self.ctx, ())) {
+ return ConstrainResult::Same;
} else {
return self.insert(id);
}
}
- let bases_cant_derive = info.base_members()
+
+ let bases_cannot_derive = info.base_members()
.iter()
- .any(|base| self.cant_derive_debug.contains(&base.ty));
- if bases_cant_derive {
+ .any(|base| self.cannot_derive_debug.contains(&base.ty));
+ if bases_cannot_derive {
return self.insert(id);
}
- let fields_cant_derive = info.fields()
+
+ let fields_cannot_derive = info.fields()
.iter()
.any(|f| {
- match f {
- &Field::DataMember(ref data) => self.cant_derive_debug.contains(&data.ty()),
- &Field::Bitfields(ref bfu) => bfu.bitfields()
- .iter().any(|b| {
- self.cant_derive_debug.contains(&b.ty())
- })
+ match *f {
+ Field::DataMember(ref data) => {
+ self.cannot_derive_debug.contains(&data.ty())
+ }
+ Field::Bitfields(ref bfu) => {
+ bfu.bitfields()
+ .iter().any(|b| {
+ self.cannot_derive_debug.contains(&b.ty())
+ })
+ }
}
});
- if fields_cant_derive {
+ if fields_cannot_derive {
return self.insert(id);
}
- false
+
+ ConstrainResult::Same
},
+
TypeKind::Pointer(inner) => {
- let inner_type = self.ctx.resolve_type(inner);
- if let TypeKind::Function(ref sig) =
- *inner_type.canonical_type(self.ctx).kind() {
- if sig.can_trivially_derive_debug(&self.ctx, ()) {
- return false;
- } else {
- return self.insert(id);
- }
+ let inner_type = self.ctx.resolve_type(inner).canonical_type(self.ctx);
+ if let TypeKind::Function(ref sig) = *inner_type.kind() {
+ if !sig.can_trivially_derive_debug(&self.ctx, ()) {
+ return self.insert(id);
}
- false
+ }
+ ConstrainResult::Same
},
+
TypeKind::TemplateInstantiation(ref template) => {
- let args_cant_derive = template.template_arguments()
+ let args_cannot_derive = template.template_arguments()
.iter()
- .any(|arg| self.cant_derive_debug.contains(&arg));
- if args_cant_derive {
+ .any(|arg| self.cannot_derive_debug.contains(&arg));
+ if args_cannot_derive {
return self.insert(id);
}
- let ty_cant_derive = template.template_definition()
+
+ let ty_cannot_derive = template.template_definition()
.into_resolver()
.through_type_refs()
.through_type_aliases()
@@ -268,7 +274,11 @@ impl<'ctx, 'gen> MonotoneFramework for CantDeriveDebugAnalysis<'ctx, 'gen> {
// idea of the layout than the definition does.
if c.has_non_type_template_params() {
let opaque = ty.layout(self.ctx)
- .or_else(|| self.ctx.resolve_type(template.template_definition()).layout(self.ctx))
+ .or_else(|| {
+ self.ctx
+ .resolve_type(template.template_definition())
+ .layout(self.ctx)
+ })
.unwrap_or(Layout::zero())
.opaque();
Some(!opaque.can_trivially_derive_debug(&self.ctx, ()))
@@ -276,11 +286,14 @@ impl<'ctx, 'gen> MonotoneFramework for CantDeriveDebugAnalysis<'ctx, 'gen> {
None
}
})
- .unwrap_or_else(|| self.cant_derive_debug.contains(&template.template_definition()));
- if ty_cant_derive {
+ .unwrap_or_else(|| {
+ self.cannot_derive_debug.contains(&template.template_definition())
+ });
+ if ty_cannot_derive {
return self.insert(id);
}
- false
+
+ ConstrainResult::Same
},
}
}
@@ -297,8 +310,8 @@ impl<'ctx, 'gen> MonotoneFramework for CantDeriveDebugAnalysis<'ctx, 'gen> {
}
}
-impl<'ctx, 'gen> From<CantDeriveDebugAnalysis<'ctx, 'gen>> for HashSet<ItemId> {
- fn from(analysis: CantDeriveDebugAnalysis<'ctx, 'gen>) -> Self {
- analysis.cant_derive_debug
+impl<'ctx, 'gen> From<CannotDeriveDebug<'ctx, 'gen>> for HashSet<ItemId> {
+ fn from(analysis: CannotDeriveDebug<'ctx, 'gen>) -> Self {
+ analysis.cannot_derive_debug
}
}
diff --git a/src/ir/analysis.rs b/src/ir/analysis/mod.rs
similarity index 73%
rename from src/ir/analysis.rs
rename to src/ir/analysis/mod.rs
index 28425a2c53..5fe6785bcc 100644
--- a/src/ir/analysis.rs
+++ b/src/ir/analysis/mod.rs
@@ -1,4 +1,48 @@
-//! Fix-point analyses on the IR using the monotone framework.
+//! Fix-point analyses on the IR using the "monotone framework".
+//!
+//! A lattice is a set with a partial ordering between elements, where there is
+//! a single least upper bound and a single greatest least bound for every
+//! subset. We are dealing with finite lattices, which means that it has a
+//! finite number of elements, and it follows that there exists a single top and
+//! a single bottom member of the lattice. For example, the power set of a
+//! finite set forms a finite lattice where partial ordering is defined by set
+//! inclusion, that is `a <= b` if `a` is a subset of `b`. Here is the finite
+//! lattice constructed from the set {0,1,2}:
+//!
+//! ```text
+//! .----- Top = {0,1,2} -----.
+//! / | \
+//! / | \
+//! / | \
+//! {0,1} -------. {0,2} .--------- {1,2}
+//! | \ / \ / |
+//! | / \ |
+//! | / \ / \ |
+//! {0} --------' {1} `---------- {2}
+//! \ | /
+//! \ | /
+//! \ | /
+//! `------ Bottom = {} ------'
+//! ```
+//!
+//! A monotone function `f` is a function where if `x <= y`, then it holds that
+//! `f(x) <= f(y)`. It should be clear that running a monotone function to a
+//! fix-point on a finite lattice will always terminate: `f` can only "move"
+//! along the lattice in a single direction, and therefore can only either find
+//! a fix-point in the middle of the lattice or continue to the top or bottom
+//! depending if it is ascending or descending the lattice respectively.
+//!
+//! For a deeper introduction to the general form of this kind of analysis, see
+//! [Static Program Analysis by Anders Møller and Michael I. Schwartzbach][spa].
+//!
+//! [spa]: https://cs.au.dk/~amoeller/spa/spa.pdf
+
+// Re-export individual analyses.
+mod template_params;
+pub use self::template_params::UsedTemplateParameters;
+mod derive_debug;
+pub use self::derive_debug::CannotDeriveDebug;
+
use std::fmt;
/// An analysis in the monotone framework.
@@ -45,10 +89,10 @@ pub trait MonotoneFramework: Sized + fmt::Debug {
///
/// If this results in changing our internal state (ie, we discovered that
/// we have not reached a fix-point and iteration should continue), return
- /// `true`. Otherwise, return `false`. When `constrain` returns false for
- /// all nodes in the set, we have reached a fix-point and the analysis is
- /// complete.
- fn constrain(&mut self, node: Self::Node) -> bool;
+ /// `ConstrainResult::Changed`. Otherwise, return `ConstrainResult::Same`.
+ /// When `constrain` returns `ConstrainResult::Same` for all nodes in the
+ /// set, we have reached a fix-point and the analysis is complete.
+ fn constrain(&mut self, node: Self::Node) -> ConstrainResult;
/// For each node `d` that depends on the given `node`'s current answer when
/// running `constrain(d)`, call `f(d)`. This informs us which new nodes to
@@ -58,6 +102,17 @@ pub trait MonotoneFramework: Sized + fmt::Debug {
where F: FnMut(Self::Node);
}
+/// Whether an analysis's `constrain` function modified the incremental results
+/// or not.
+pub enum ConstrainResult {
+ /// The incremental results were updated, and the fix-point computation
+ /// should continue.
+ Changed,
+
+ /// The incremental results were not updated.
+ Same,
+}
+
/// Run an analysis in the monotone framework.
pub fn analyze<Analysis>(extra: Analysis::Extra) -> Analysis::Output
where Analysis: MonotoneFramework,
@@ -66,7 +121,7 @@ pub fn analyze<Analysis>(extra: Analysis::Extra) -> Analysis::Output
let mut worklist = analysis.initial_worklist();
while let Some(node) = worklist.pop() {
- if analysis.constrain(node) {
+ if let ConstrainResult::Changed = analysis.constrain(node) {
analysis.each_depending_on(node, |needs_work| {
worklist.push(needs_work);
});
@@ -183,7 +238,7 @@ mod tests {
self.graph.0.keys().cloned().collect()
}
- fn constrain(&mut self, node: Node) -> bool {
+ fn constrain(&mut self, node: Node) -> ConstrainResult {
// The set of nodes reachable from a node `x` is
//
// reachable(x) = s_0 U s_1 U ... U reachable(s_0) U reachable(s_1) U ...
@@ -210,7 +265,11 @@ mod tests {
}
let new_size = self.reachable[&node].len();
- original_size != new_size
+ if original_size != new_size {
+ ConstrainResult::Changed
+ } else {
+ ConstrainResult::Same
+ }
}
fn each_depending_on<F>(&self, node: Node, mut f: F)
diff --git a/src/ir/named.rs b/src/ir/analysis/template_params.rs
similarity index 86%
rename from src/ir/named.rs
rename to src/ir/analysis/template_params.rs
index 5351f30951..44878d9e16 100644
--- a/src/ir/named.rs
+++ b/src/ir/analysis/template_params.rs
@@ -75,63 +75,25 @@
//! union of its sub-item's used template parameters) and iterate to a
//! fixed-point.
//!
-//! We use the "monotone framework" for this fix-point analysis where our
-//! lattice is the mapping from each IR item to the powerset of the template
-//! parameters that appear in the input C++ header, our join function is set
-//! union, and we use the `ir::traversal::Trace` trait to implement the
-//! work-list optimization so we don't have to revisit every node in the graph
-//! when for every iteration towards the fix-point.
+//! We use the `ir::analysis::MonotoneFramework` infrastructure for this
+//! fix-point analysis, where our lattice is the mapping from each IR item to
+//! the powerset of the template parameters that appear in the input C++ header,
+//! our join function is set union. The set of template parameters appearing in
+//! the program is finite, as is the number of IR items. We start at our
+//! lattice's bottom element: every item mapping to an empty set of template
+//! parameters. Our analysis only adds members to each item's set of used
+//! template parameters, never removes them, so it is monotone. Because our
+//! lattice is finite and our constraint function is monotone, iteration to a
+//! fix-point will terminate.
//!
-//! A lattice is a set with a partial ordering between elements, where there is
-//! a single least upper bound and a single greatest least bound for every
-//! subset. We are dealing with finite lattices, which means that it has a
-//! finite number of elements, and it follows that there exists a single top and
-//! a single bottom member of the lattice. For example, the power set of a
-//! finite set forms a finite lattice where partial ordering is defined by set
-//! inclusion, that is `a <= b` if `a` is a subset of `b`. Here is the finite
-//! lattice constructed from the set {0,1,2}:
-//!
-//! ```text
-//! .----- Top = {0,1,2} -----.
-//! / | \
-//! / | \
-//! / | \
-//! {0,1} -------. {0,2} .--------- {1,2}
-//! | \ / \ / |
-//! | / \ |
-//! | / \ / \ |
-//! {0} --------' {1} `---------- {2}
-//! \ | /
-//! \ | /
-//! \ | /
-//! `------ Bottom = {} ------'
-//! ```
-//!
-//! A monotone function `f` is a function where if `x <= y`, then it holds that
-//! `f(x) <= f(y)`. It should be clear that running a monotone function to a
-//! fix-point on a finite lattice will always terminate: `f` can only "move"
-//! along the lattice in a single direction, and therefore can only either find
-//! a fix-point in the middle of the lattice or continue to the top or bottom
-//! depending if it is ascending or descending the lattice respectively.
-//!
-//! For our analysis, we are collecting the set of template parameters used by
-//! any given IR node. The set of template parameters appearing in the program
-//! is finite. Our lattice is their powerset. We start at the bottom element,
-//! the empty set. Our analysis only adds members to the set of used template
-//! parameters, never removes them, so it is monotone, and therefore iteration
-//! to a fix-point will terminate.
-//!
-//! For a deeper introduction to the general form of this kind of analysis, see
-//! [Static Program Analysis by Anders Møller and Michael I. Schwartzbach][spa].
-//!
-//! [spa]: https://cs.au.dk/~amoeller/spa/spa.pdf
-
-use super::analysis::MonotoneFramework;
-use super::context::{BindgenContext, ItemId};
-use super::item::{Item, ItemSet};
-use super::template::{TemplateInstantiation, TemplateParameters};
-use super::traversal::{EdgeKind, Trace};
-use super::ty::TypeKind;
+//! See `src/ir/analysis.rs` for more.
+
+use super::{ConstrainResult, MonotoneFramework};
+use ir::context::{BindgenContext, ItemId};
+use ir::item::{Item, ItemSet};
+use ir::template::{TemplateInstantiation, TemplateParameters};
+use ir::traversal::{EdgeKind, Trace};
+use ir::ty::TypeKind;
use std::collections::{HashMap, HashSet};
/// An analysis that finds for each IR item its set of template parameters that
@@ -388,7 +350,10 @@ impl<'ctx, 'gen> MonotoneFramework for UsedTemplateParameters<'ctx, 'gen> {
-> UsedTemplateParameters<'ctx, 'gen> {
let mut used = HashMap::new();
let mut dependencies = HashMap::new();
- let whitelisted_items: HashSet<_> = ctx.whitelisted_items().collect();
+ let whitelisted_items: HashSet<_> = ctx.whitelisted_items()
+ .iter()
+ .cloned()
+ .collect();
let whitelisted_and_blacklisted_items: ItemSet = whitelisted_items.iter()
.cloned()
@@ -491,6 +456,8 @@ impl<'ctx, 'gen> MonotoneFramework for UsedTemplateParameters<'ctx, 'gen> {
// blacklisted items.
self.ctx
.whitelisted_items()
+ .iter()
+ .cloned()
.flat_map(|i| {
let mut reachable = vec![i];
i.trace(self.ctx, &mut |s, _| {
@@ -501,7 +468,7 @@ impl<'ctx, 'gen> MonotoneFramework for UsedTemplateParameters<'ctx, 'gen> {
.collect()
}
- fn constrain(&mut self, id: ItemId) -> bool {
+ fn constrain(&mut self, id: ItemId) -> ConstrainResult {
// Invariant: all hash map entries' values are `Some` upon entering and
// exiting this method.
extra_assert!(self.used.values().all(|v| v.is_some()));
@@ -553,7 +520,11 @@ impl<'ctx, 'gen> MonotoneFramework for UsedTemplateParameters<'ctx, 'gen> {
self.used.insert(id, Some(used_by_this_id));
extra_assert!(self.used.values().all(|v| v.is_some()));
- new_len != original_len
+ if new_len != original_len {
+ ConstrainResult::Changed
+ } else {
+ ConstrainResult::Same
+ }
}
fn each_depending_on<F>(&self, item: ItemId, mut f: F)
diff --git a/src/ir/context.rs b/src/ir/context.rs
index a0c76186a4..ec79577f3f 100644
--- a/src/ir/context.rs
+++ b/src/ir/context.rs
@@ -5,12 +5,10 @@ use super::int::IntKind;
use super::item::{IsOpaque, Item, ItemAncestors, ItemCanonicalPath, ItemSet};
use super::item_kind::ItemKind;
use super::module::{Module, ModuleKind};
-use super::named::UsedTemplateParameters;
-use super::analysis::analyze;
+use super::analysis::{analyze, UsedTemplateParameters, CannotDeriveDebug};
use super::template::{TemplateInstantiation, TemplateParameters};
use super::traversal::{self, Edge, ItemTraversal};
use super::ty::{FloatKind, Type, TypeKind};
-use super::cant_derive_debug::CantDeriveDebugAnalysis;
use BindgenOptions;
use cexpr;
use callbacks::ParseCallbacks;
@@ -159,6 +157,11 @@ pub struct BindgenContext<'ctx> {
/// Whether a bindgen complex was generated
generated_bindegen_complex: Cell<bool>,
+ /// The set of `ItemId`s that are whitelisted for code generation. This the
+ /// very first thing computed after parsing our IR, and before running any
+ /// of our analyses.
+ whitelisted: Option<ItemSet>,
+
/// Map from an item's id to the set of template parameter items that it
/// uses. See `ir::named` for more details. Always `Some` during the codegen
/// phase.
@@ -171,9 +174,10 @@ pub struct BindgenContext<'ctx> {
/// Whether we need the mangling hack which removes the prefixing underscore.
needs_mangling_hack: bool,
- /// Set of ItemId that can't derive debug.
- /// Populated when we enter codegen by `compute_can_derive_debug`; always `None`
- /// before that and `Some` after.
+ /// The set of (`ItemId`s of) types that can't derive debug.
+ ///
+ /// This is populated when we enter codegen by `compute_can_derive_debug`
+ /// and is always `None` before that and `Some` after.
cant_derive_debug: Option<HashSet<ItemId>>,
}
@@ -298,6 +302,7 @@ impl<'ctx> BindgenContext<'ctx> {
translation_unit: translation_unit,
options: options,
generated_bindegen_complex: Cell::new(false),
+ whitelisted: None,
used_template_parameters: None,
need_bitfield_allocation: Default::default(),
needs_mangling_hack: needs_mangling_hack,
@@ -758,6 +763,15 @@ impl<'ctx> BindgenContext<'ctx> {
self.process_replacements();
}
+ // And assert once again, because resolving type refs and processing
+ // replacements both mutate the IR graph.
+ self.assert_no_dangling_references();
+
+ // Compute the whitelisted set after processing replacements and
+ // resolving type refs, as those are the final mutations of the IR
+ // graph, and their completion means that the IR graph is now frozen.
+ self.compute_whitelisted_items();
+
// Make sure to do this after processing replacements, since that messes
// with the parentage and module children, and we want to assert that it
// messes with them correctly.
@@ -838,7 +852,7 @@ impl<'ctx> BindgenContext<'ctx> {
// If you aren't recursively whitelisting, then we can't really make
// any sense of template parameter usage, and you're on your own.
let mut used_params = HashMap::new();
- for id in self.whitelisted_items() {
+ for &id in self.whitelisted_items() {
used_params.entry(id)
.or_insert(id.self_template_params(self)
.map_or(Default::default(),
@@ -1568,79 +1582,91 @@ impl<'ctx> BindgenContext<'ctx> {
///
/// If no items are explicitly whitelisted, then all items are considered
/// whitelisted.
- pub fn whitelisted_items<'me>(&'me self) -> WhitelistedItems<'me, 'ctx> {
+ pub fn whitelisted_items(&self) -> &ItemSet {
assert!(self.in_codegen_phase());
assert!(self.current_module == self.root_module);
- let roots = self.items()
- // Only consider items that are enabled for codegen.
- .filter(|&(_, item)| item.is_enabled_for_codegen(self))
- .filter(|&(_, item)| {
- // If nothing is explicitly whitelisted, then everything is fair
- // game.
- if self.options().whitelisted_types.is_empty() &&
- self.options().whitelisted_functions.is_empty() &&
- self.options().whitelisted_vars.is_empty() {
- return true;
- }
+ self.whitelisted.as_ref().unwrap()
+ }
- // If this is a type that explicitly replaces another, we assume
- // you know what you're doing.
- if item.annotations().use_instead_of().is_some() {
- return true;
- }
+ /// Compute the whitelisted items set and populate `self.whitelisted`.
+ fn compute_whitelisted_items(&mut self) {
+ assert!(self.in_codegen_phase());
+ assert!(self.current_module == self.root_module);
+ assert!(self.whitelisted.is_none());
+
+ self.whitelisted = Some({
+ let roots = self.items()
+ // Only consider items that are enabled for codegen.
+ .filter(|&(_, item)| item.is_enabled_for_codegen(self))
+ .filter(|&(_, item)| {
+ // If nothing is explicitly whitelisted, then everything is fair
+ // game.
+ if self.options().whitelisted_types.is_empty() &&
+ self.options().whitelisted_functions.is_empty() &&
+ self.options().whitelisted_vars.is_empty() {
+ return true;
+ }
- let name = item.canonical_path(self)[1..].join("::");
- debug!("whitelisted_items: testing {:?}", name);
- match *item.kind() {
- ItemKind::Module(..) => true,
- ItemKind::Function(_) => {
- self.options().whitelisted_functions.matches(&name)
+ // If this is a type that explicitly replaces another, we assume
+ // you know what you're doing.
+ if item.annotations().use_instead_of().is_some() {
+ return true;
}
- ItemKind::Var(_) => {
- self.options().whitelisted_vars.matches(&name)
- }
- ItemKind::Type(ref ty) => {
- if self.options().whitelisted_types.matches(&name) {
- return true;
+
+ let name = item.canonical_path(self)[1..].join("::");
+ debug!("whitelisted_items: testing {:?}", name);
+ match *item.kind() {
+ ItemKind::Module(..) => true,
+ ItemKind::Function(_) => {
+ self.options().whitelisted_functions.matches(&name)
}
+ ItemKind::Var(_) => {
+ self.options().whitelisted_vars.matches(&name)
+ }
+ ItemKind::Type(ref ty) => {
+ if self.options().whitelisted_types.matches(&name) {
+ return true;
+ }
- let parent = self.resolve_item(item.parent_id());
- if parent.is_module() {
- let mut prefix_path = parent.canonical_path(self);
-
- // Unnamed top-level enums are special and we
- // whitelist them via the `whitelisted_vars` filter,
- // since they're effectively top-level constants,
- // and there's no way for them to be referenced
- // consistently.
- if let TypeKind::Enum(ref enum_) = *ty.kind() {
- if ty.name().is_none() &&
- enum_.variants().iter().any(|variant| {
- prefix_path.push(variant.name().into());
- let name = prefix_path[1..].join("::");
- prefix_path.pop().unwrap();
- self.options()
- .whitelisted_vars
- .matches(&name)
- }) {
- return true;
+ let parent = self.resolve_item(item.parent_id());
+ if parent.is_module() {
+ let mut prefix_path = parent.canonical_path(self);
+
+ // Unnamed top-level enums are special and we
+ // whitelist them via the `whitelisted_vars` filter,
+ // since they're effectively top-level constants,
+ // and there's no way for them to be referenced
+ // consistently.
+ if let TypeKind::Enum(ref enum_) = *ty.kind() {
+ if ty.name().is_none() &&
+ enum_.variants().iter().any(|variant| {
+ prefix_path.push(variant.name().into());
+ let name = prefix_path[1..].join("::");
+ prefix_path.pop().unwrap();
+ self.options()
+ .whitelisted_vars
+ .matches(&name)
+ }) {
+ return true;
+ }
}
}
- }
- false
+ false
+ }
}
- }
- })
- .map(|(&id, _)| id);
+ })
+ .map(|(&id, _)| id);
+
+ // The reversal preserves the expected ordering of traversal, resulting
+ // in more stable-ish bindgen-generated names for anonymous types (like
+ // unions).
+ let mut roots: Vec<_> = roots.collect();
+ roots.reverse();
- // The reversal preserves the expected ordering of traversal, resulting
- // in more stable-ish bindgen-generated names for anonymous types (like
- // unions).
- let mut roots: Vec<_> = roots.collect();
- roots.reverse();
- WhitelistedItems::new(self, roots)
+ WhitelistedItems::new(self, roots).collect()
+ });
}
/// Convenient method for getting the prefix to use for most traits in
@@ -1666,7 +1692,7 @@ impl<'ctx> BindgenContext<'ctx> {
/// Compute whether we can derive debug.
fn compute_cant_derive_debug(&mut self) {
assert!(self.cant_derive_debug.is_none());
- self.cant_derive_debug = Some(analyze::<CantDeriveDebugAnalysis>(self));
+ self.cant_derive_debug = Some(analyze::<CannotDeriveDebug>(self));
}
/// Look up whether the item with `id` can
diff --git a/src/ir/mod.rs b/src/ir/mod.rs
index f8b9edfa63..93894f94db 100644
--- a/src/ir/mod.rs
+++ b/src/ir/mod.rs
@@ -17,10 +17,8 @@ pub mod item;
pub mod item_kind;
pub mod layout;
pub mod module;
-pub mod named;
pub mod template;
pub mod traversal;
pub mod ty;
pub mod var;
pub mod objc;
-pub mod cant_derive_debug;
diff --git a/src/uses.rs b/src/uses.rs
index fee2be24ed..fc7c126e20 100644
--- a/src/uses.rs
+++ b/src/uses.rs
@@ -72,6 +72,8 @@ pub fn generate_dummy_uses(ctx: &mut BindgenContext,
try!(writeln!(dest, ""));
let type_items = ctx.whitelisted_items()
+ .iter()
+ .cloned()
.map(|id| ctx.resolve_item(id))
.filter(|item| {
// We only want type items.