Commit 3522703

policy: reduce error paths in Concrete::extract_key to one
The Taproot compiler has a couple of algorithms used to find an optimal internal key to extract from a policy.

One is `to_tapleaf_prob_vec`, a fundamental part of the compiler, which iterates over the disjunction-only root of the tree and returns every branch along with its probability. This is currently implemented using both allocations and recursion. By pulling the logic out into an iterator we get a clearer, faster algorithm that cannot stack-overflow.

This algorithm is then fed into Concrete::extract_key, whose goal is to find the highest-probability tapbranch consisting of just a single key. This algorithm inexplicably works by:

* Lifting the policy to a semantic policy, maybe returning an error.
* Iterating over the entire policy, extracting a list of every key.
* Calling to_tapleaf_prob_vec to get a vector of tapleaves, which it then iterates over to find the key-only branches, collecting them into a BTreeMap mapping each key to its probability.
* Iterating over the extracted list of keys and, for each key, reducing the semantic policy by that key and comparing the result to Trivial (logically, this asks "is this key in a tree of disjunctions from the root?").
* For each such key that it finds, looking it up in the map, potentially returning an error (in fact this error path is impossible to hit).
* Manually maximizing over the looked-up probabilities.

With to_tapleaf_prob_vec replaced by an iterator there is a simpler and more direct algorithm:

* Iterate through all the tapbranch/probability pairs, filtering for key-only branches, and maximize by probability.

This can only fail if there are no key-only branches, and this is reflected by only having one error branch.
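As a rough illustration of the new shape (a self-contained toy sketch, not the crate's code: `ToyPolicy` and `LeafProbIter` are made-up stand-ins for the real `Policy` and `TapleafProbabilityIter` shown in the diff below), the disjunction-only top of the tree is walked with an explicit stack rather than recursion, and the internal key is then a single filter-and-maximize pass over the yielded (probability, leaf) pairs:

// Hypothetical stand-ins for illustration only (not the miniscript types).
#[derive(Debug)]
enum ToyPolicy {
    Key(String),
    // (relative odds, sub-policy) pairs, as in `or(3@A,1@B)`.
    Or(Vec<(usize, ToyPolicy)>),
    Older(u32),
}

// Depth-first walk over the disjunction-only top of the tree, driven by an
// explicit stack of (probability, node) pairs instead of recursion.
struct LeafProbIter<'p> {
    stack: Vec<(f64, &'p ToyPolicy)>,
}

impl<'p> Iterator for LeafProbIter<'p> {
    type Item = (f64, &'p ToyPolicy);

    fn next(&mut self) -> Option<Self::Item> {
        loop {
            let (prob, top) = self.stack.pop()?;
            match top {
                // Split a disjunction, weighting each child by its odds.
                // Children are pushed in reverse so they pop in source order.
                ToyPolicy::Or(subs) => {
                    let total: usize = subs.iter().map(|(odds, _)| *odds).sum();
                    for (odds, sub) in subs.iter().rev() {
                        self.stack.push((prob * *odds as f64 / total as f64, sub));
                    }
                }
                // Anything else is a tapleaf; yield it with its probability.
                _ => return Some((prob, top)),
            }
        }
    }
}

fn main() {
    // or(3@alice, 1@or(bob, older(144)))
    let policy = ToyPolicy::Or(vec![
        (3, ToyPolicy::Key("alice".into())),
        (
            1,
            ToyPolicy::Or(vec![
                (1, ToyPolicy::Key("bob".into())),
                (1, ToyPolicy::Older(144)),
            ]),
        ),
    ]);

    // Single pass: keep only key-only leaves and take the most probable one.
    // The only possible failure is "no key-only leaf at all" (a None here).
    let internal_key = LeafProbIter { stack: vec![(1.0, &policy)] }
        .filter_map(|(prob, pol)| match pol {
            ToyPolicy::Key(pk) => Some((prob, pk)),
            _ => None,
        })
        .max_by(|a, b| a.0.partial_cmp(&b.0).expect("probabilities are finite"));

    // alice wins: her leaf has probability 3/4 versus 1/8 for bob.
    assert_eq!(internal_key.map(|(_, pk)| pk.as_str()), Some("alice"));
}

In the real code below, the f64 probability is wrapped in OrdF64 so that max_by_key can be used directly.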
1 parent 9f53c01 commit 3522703

1 file changed: +51 -58 lines changed


src/policy/concrete.rs

Lines changed: 51 additions & 58 deletions
@@ -12,9 +12,7 @@ use bitcoin::absolute;
 use {
     crate::descriptor::TapTree,
     crate::miniscript::ScriptContext,
-    crate::policy::compiler::CompilerError,
-    crate::policy::compiler::OrdF64,
-    crate::policy::{compiler, Concrete, Liftable, Semantic},
+    crate::policy::compiler::{self, CompilerError, OrdF64},
     crate::Descriptor,
     crate::Miniscript,
     crate::Tap,
@@ -126,8 +124,41 @@ impl error::Error for PolicyError {
     }
 }
 
+#[cfg(feature = "compiler")]
+struct TapleafProbabilityIter<'p, Pk: MiniscriptKey> {
+    stack: Vec<(f64, &'p Policy<Pk>)>,
+}
+
+#[cfg(feature = "compiler")]
+impl<'p, Pk: MiniscriptKey> Iterator for TapleafProbabilityIter<'p, Pk> {
+    type Item = (f64, &'p Policy<Pk>);
+
+    fn next(&mut self) -> Option<Self::Item> {
+        loop {
+            let (top_prob, top) = self.stack.pop()?;
+
+            match top {
+                Policy::Or(ref subs) => {
+                    let total_sub_prob = subs.iter().map(|prob_sub| prob_sub.0).sum::<usize>();
+                    for (sub_prob, sub) in subs.iter().rev() {
+                        let ratio = *sub_prob as f64 / total_sub_prob as f64;
+                        self.stack.push((top_prob * ratio, sub));
+                    }
+                }
+                Policy::Thresh(ref thresh) if thresh.is_or() => {
+                    let n64 = thresh.n() as f64;
+                    for sub in thresh.iter().rev() {
+                        self.stack.push((top_prob / n64, sub));
+                    }
+                }
+                _ => return Some((top_prob, top)),
+            }
+        }
+    }
+}
+
 impl<Pk: MiniscriptKey> Policy<Pk> {
-    /// Flattens the [`Policy`] tree structure into a vector of tuples `(leaf script, leaf probability)`
+    /// Flattens the [`Policy`] tree structure into an iterator of tuples `(leaf script, leaf probability)`
     /// with leaf probabilities corresponding to odds for each sub-branch in the policy.
     /// We calculate the probability of selecting the sub-branch at every level and calculate the
     /// leaf probabilities as the probability of traversing through required branches to reach the
@@ -150,60 +181,22 @@ impl<Pk: MiniscriptKey> Policy<Pk> {
     /// Since this splitting might lead to exponential blow-up, we constrain the number of
     /// leaf-nodes to [`MAX_COMPILATION_LEAVES`].
     #[cfg(feature = "compiler")]
-    fn to_tapleaf_prob_vec(&self, prob: f64) -> Vec<(f64, Policy<Pk>)> {
-        match self {
-            Policy::Or(ref subs) => {
-                let total_odds: usize = subs.iter().map(|(ref k, _)| k).sum();
-                subs.iter()
-                    .flat_map(|(k, ref policy)| {
-                        policy.to_tapleaf_prob_vec(prob * *k as f64 / total_odds as f64)
-                    })
-                    .collect::<Vec<_>>()
-            }
-            Policy::Thresh(ref thresh) if thresh.is_or() => {
-                let total_odds = thresh.n();
-                thresh
-                    .iter()
-                    .flat_map(|policy| policy.to_tapleaf_prob_vec(prob / total_odds as f64))
-                    .collect::<Vec<_>>()
-            }
-            x => vec![(prob, x.clone())],
-        }
+    fn tapleaf_probability_iter(&self) -> TapleafProbabilityIter<Pk> {
+        TapleafProbabilityIter { stack: vec![(1.0, self)] }
     }
 
     /// Extracts the internal_key from this policy tree.
     #[cfg(feature = "compiler")]
     fn extract_key(self, unspendable_key: Option<Pk>) -> Result<(Pk, Policy<Pk>), Error> {
-        let mut internal_key: Option<Pk> = None;
-        {
-            let mut prob = 0.;
-            let semantic_policy = self.lift()?;
-            let concrete_keys = self.keys();
-            let key_prob_map: BTreeMap<_, _> = self
-                .to_tapleaf_prob_vec(1.0)
-                .into_iter()
-                .filter(|(_, ref pol)| matches!(pol, Concrete::Key(..)))
-                .map(|(prob, key)| (key, prob))
-                .collect();
-
-            for key in concrete_keys.into_iter() {
-                if semantic_policy
-                    .clone()
-                    .satisfy_constraint(&Semantic::Key(key.clone()), true)
-                    == Semantic::Trivial
-                {
-                    match key_prob_map.get(&Concrete::Key(key.clone())) {
-                        Some(val) => {
-                            if *val > prob {
-                                prob = *val;
-                                internal_key = Some(key.clone());
-                            }
-                        }
-                        None => return Err(errstr("Key should have existed in the BTreeMap!")),
-                    }
-                }
-            }
-        }
+        let internal_key = self
+            .tapleaf_probability_iter()
+            .filter_map(|(prob, ref pol)| match pol {
+                Policy::Key(pk) => Some((OrdF64(prob), pk)),
+                _ => None,
+            })
+            .max_by_key(|(prob, _)| *prob)
+            .map(|(_, pk)| pk.clone());
+
         match (internal_key, unspendable_key) {
             (Some(ref key), _) => Ok((key.clone(), self.translate_unsatisfiable_pk(key))),
             (_, Some(key)) => Ok((key, self)),
@@ -242,14 +235,13 @@ impl<Pk: MiniscriptKey> Policy<Pk> {
                     match policy {
                         Policy::Trivial => None,
                         policy => {
-                            let vec_policies: Vec<_> = policy.to_tapleaf_prob_vec(1.0);
                             let mut leaf_compilations: Vec<(OrdF64, Miniscript<Pk, Tap>)> = vec![];
-                            for (prob, pol) in vec_policies {
+                            for (prob, pol) in policy.tapleaf_probability_iter() {
                                 // policy corresponding to the key (replaced by unsatisfiable) is skipped
-                                if pol == Policy::Unsatisfiable {
+                                if *pol == Policy::Unsatisfiable {
                                     continue;
                                 }
-                                let compilation = compiler::best_compilation::<Pk, Tap>(&pol)?;
+                                let compilation = compiler::best_compilation::<Pk, Tap>(pol)?;
                                 compilation.sanity_check()?;
                                 leaf_compilations.push((OrdF64(prob), compilation));
                             }
@@ -371,7 +363,7 @@ impl<Pk: MiniscriptKey> Policy<Pk> {
     ///
     /// This function is supposed to incrementally expand i.e. represent the policy as
     /// disjunction over sub-policies output by it. The probability calculations are similar
-    /// to [`Policy::to_tapleaf_prob_vec`].
+    /// to [`Policy::tapleaf_probability_iter`].
     #[cfg(feature = "compiler")]
     fn enumerate_pol(&self, prob: f64) -> Vec<(f64, Arc<Self>)> {
         match self {
@@ -1085,6 +1077,7 @@ mod compiler_tests {
     use core::str::FromStr;
 
     use super::*;
+    use crate::policy::Concrete;
 
     #[test]
     fn test_gen_comb() {
