diff --git a/src/iter/mod.rs b/src/iter/mod.rs
index 02e59b6e2..7f35605a8 100644
--- a/src/iter/mod.rs
+++ b/src/iter/mod.rs
@@ -15,7 +15,7 @@ pub use tree::{
 };
 
 use crate::sync::Arc;
-use crate::{Miniscript, MiniscriptKey, ScriptContext, Terminal};
+use crate::{policy, Miniscript, MiniscriptKey, ScriptContext, Terminal};
 
 impl<'a, Pk: MiniscriptKey, Ctx: ScriptContext> TreeLike for &'a Miniscript<Pk, Ctx> {
     fn as_node(&self) -> Tree<Self> {
@@ -68,3 +68,29 @@ impl<Pk: MiniscriptKey, Ctx: ScriptContext> TreeLike for Arc<Miniscript<Pk, Ctx>> {
         }
     }
 }
+
+impl<'a, Pk: MiniscriptKey> TreeLike for &'a policy::Concrete<Pk> {
+    fn as_node(&self) -> Tree<Self> {
+        use policy::Concrete::*;
+        match *self {
+            Unsatisfiable | Trivial | Key(_) | After(_) | Older(_) | Sha256(_) | Hash256(_)
+            | Ripemd160(_) | Hash160(_) => Tree::Nullary,
+            And(ref subs) => Tree::Nary(subs.iter().map(Arc::as_ref).collect()),
+            Or(ref v) => Tree::Nary(v.iter().map(|(_, p)| Arc::as_ref(p)).collect()),
+            Threshold(_, ref subs) => Tree::Nary(subs.iter().map(Arc::as_ref).collect()),
+        }
+    }
+}
+
+impl<Pk: MiniscriptKey> TreeLike for Arc<policy::Concrete<Pk>> {
+    fn as_node(&self) -> Tree<Self> {
+        use policy::Concrete::*;
+        match self.as_ref() {
+            Unsatisfiable | Trivial | Key(_) | After(_) | Older(_) | Sha256(_) | Hash256(_)
+            | Ripemd160(_) | Hash160(_) => Tree::Nullary,
+            And(ref subs) => Tree::Nary(subs.iter().map(Arc::clone).collect()),
+            Or(ref v) => Tree::Nary(v.iter().map(|(_, p)| Arc::clone(p)).collect()),
+            Threshold(_, ref subs) => Tree::Nary(subs.iter().map(Arc::clone).collect()),
+        }
+    }
+}
diff --git a/src/policy/compiler.rs b/src/policy/compiler.rs
index 33423314d..bb1701223 100644
--- a/src/policy/compiler.rs
+++ b/src/policy/compiler.rs
@@ -871,7 +871,7 @@ where
             let rw = subs[1].0 as f64 / total;
 
             //and-or
-            if let (Concrete::And(x), _) = (&subs[0].1, &subs[1].1) {
+            if let (Concrete::And(x), _) = (&subs[0].1.as_ref(), &subs[1].1) {
                 let mut a1 = best_compilations(
                     policy_cache,
                     &x[0],
@@ -894,7 +894,7 @@ where
                 compile_tern!(&mut a1, &mut b2, &mut c, [lw, rw]);
                 compile_tern!(&mut b1, &mut a2, &mut c, [lw, rw]);
             };
-            if let (_, Concrete::And(x)) = (&subs[0].1, &subs[1].1) {
+            if let (_, Concrete::And(x)) = (&subs[0].1, &subs[1].1.as_ref()) {
                 let mut a1 = best_compilations(
                     policy_cache,
                     &x[0],
@@ -1005,7 +1005,7 @@ where
             let key_vec: Vec<Pk> = subs
                 .iter()
                 .filter_map(|s| {
-                    if let Concrete::Key(ref pk) = *s {
+                    if let Concrete::Key(ref pk) = s.as_ref() {
                         Some(pk.clone())
                     } else {
                         None
@@ -1025,7 +1025,9 @@ where
             _ if k == subs.len() => {
                 let mut it = subs.iter();
                 let mut policy = it.next().expect("No sub policy in thresh() ?").clone();
-                policy = it.fold(policy, |acc, pol| Concrete::And(vec![acc, pol.clone()]));
+                policy = it.fold(policy, |acc, pol| {
+                    Arc::new(Concrete::And(vec![acc, pol.clone()]))
+                });
 
                 ret = best_compilations(policy_cache, &policy, sat_prob, dissat_prob)?;
             }
@@ -1239,8 +1241,11 @@ mod tests {
     fn compile_timelocks() {
         // artificially create a policy that is problematic and try to compile
        let pol: SPolicy = Concrete::And(vec![
-            Concrete::Key("A".to_string()),
-            Concrete::And(vec![Concrete::after(9), Concrete::after(1000_000_000)]),
+            Arc::new(Concrete::Key("A".to_string())),
+            Arc::new(Concrete::And(vec![
+                Arc::new(Concrete::after(9)),
+                Arc::new(Concrete::after(1000_000_000)),
+            ])),
         ]);
         assert!(pol.compile::<Segwitv0>().is_err());
 
@@ -1310,7 +1315,7 @@ mod tests {
     #[test]
     fn compile_misc() {
         let (keys, sig) = pubkeys_and_a_sig(10);
-        let key_pol: Vec<BPolicy> = keys.iter().map(|k| Concrete::Key(*k)).collect();
+        let key_pol: Vec<Arc<BPolicy>> = keys.iter().map(|k| Arc::new(Concrete::Key(*k))).collect();
 
         let policy: BPolicy = Concrete::Key(keys[0].clone());
         let ms: SegwitMiniScript = policy.compile().unwrap();
@@ -1346,13 +1351,16 @@ mod tests {
 
         // Liquid policy
         let policy: BPolicy = Concrete::Or(vec![
-            (127, Concrete::Threshold(3, key_pol[0..5].to_owned())),
+            (
+                127,
+                Arc::new(Concrete::Threshold(3, key_pol[0..5].to_owned())),
+            ),
             (
                 1,
-                Concrete::And(vec![
-                    Concrete::Older(Sequence::from_height(10000)),
-                    Concrete::Threshold(2, key_pol[5..8].to_owned()),
-                ]),
+                Arc::new(Concrete::And(vec![
+                    Arc::new(Concrete::Older(Sequence::from_height(10000))),
+                    Concrete::Threshold(2, key_pol[5..8].to_owned()).into(),
+                ])),
             ),
         ]);
 
@@ -1471,8 +1479,10 @@ mod tests {
         // and to a ms thresh otherwise.
         // k = 1 (or 2) does not compile, see https://github.com/rust-bitcoin/rust-miniscript/issues/114
         for k in &[10, 15, 21] {
-            let pubkeys: Vec<Concrete<bitcoin::PublicKey>> =
-                keys.iter().map(|pubkey| Concrete::Key(*pubkey)).collect();
+            let pubkeys: Vec<Arc<Concrete<bitcoin::PublicKey>>> = keys
+                .iter()
+                .map(|pubkey| Concrete::Key(*pubkey).into())
+                .collect();
             let big_thresh = Concrete::Threshold(*k, pubkeys);
             let big_thresh_ms: SegwitMiniScript = big_thresh.compile().unwrap();
             if *k == 21 {
@@ -1499,18 +1509,18 @@ mod tests {
         // or(thresh(52, [pubkey; 52]), thresh(52, [pubkey; 52])) results in a 3642-bytes long
         // witness script with only 54 stack elements
         let (keys, _) = pubkeys_and_a_sig(104);
-        let keys_a: Vec<Concrete<bitcoin::PublicKey>> = keys[..keys.len() / 2]
+        let keys_a: Vec<Arc<Concrete<bitcoin::PublicKey>>> = keys[..keys.len() / 2]
             .iter()
-            .map(|pubkey| Concrete::Key(*pubkey))
+            .map(|pubkey| Concrete::Key(*pubkey).into())
             .collect();
-        let keys_b: Vec<Concrete<bitcoin::PublicKey>> = keys[keys.len() / 2..]
+        let keys_b: Vec<Arc<Concrete<bitcoin::PublicKey>>> = keys[keys.len() / 2..]
             .iter()
-            .map(|pubkey| Concrete::Key(*pubkey))
+            .map(|pubkey| Concrete::Key(*pubkey).into())
             .collect();
 
         let thresh_res: Result<SegwitMiniScript, _> = Concrete::Or(vec![
-            (1, Concrete::Threshold(keys_a.len(), keys_a)),
-            (1, Concrete::Threshold(keys_b.len(), keys_b)),
+            (1, Concrete::Threshold(keys_a.len(), keys_a.into()).into()),
+            (1, Concrete::Threshold(keys_b.len(), keys_b.into()).into()),
         ])
         .compile();
         let script_size = thresh_res.clone().and_then(|m| Ok(m.script_size()));
@@ -1523,8 +1533,10 @@ mod tests {
 
         // Hit the maximum witness stack elements limit
         let (keys, _) = pubkeys_and_a_sig(100);
-        let keys: Vec<Concrete<bitcoin::PublicKey>> =
-            keys.iter().map(|pubkey| Concrete::Key(*pubkey)).collect();
+        let keys: Vec<Arc<Concrete<bitcoin::PublicKey>>> = keys
+            .iter()
+            .map(|pubkey| Concrete::Key(*pubkey).into())
+            .collect();
         let thresh_res: Result<SegwitMiniScript, _> =
             Concrete::Threshold(keys.len(), keys).compile();
         let n_elements = thresh_res
@@ -1542,8 +1554,10 @@ mod tests {
     fn shared_limits() {
         // Test the maximum number of OPs with a 67-of-68 multisig
         let (keys, _) = pubkeys_and_a_sig(68);
-        let keys: Vec<Concrete<bitcoin::PublicKey>> =
-            keys.iter().map(|pubkey| Concrete::Key(*pubkey)).collect();
+        let keys: Vec<Arc<Concrete<bitcoin::PublicKey>>> = keys
+            .iter()
+            .map(|pubkey| Concrete::Key(*pubkey).into())
+            .collect();
         let thresh_res: Result<SegwitMiniScript, _> =
             Concrete::Threshold(keys.len() - 1, keys).compile();
         let ops_count = thresh_res.clone().and_then(|m| Ok(m.ext.ops.op_count()));
@@ -1555,8 +1569,10 @@ mod tests {
 
         // For legacy too..
         let (keys, _) = pubkeys_and_a_sig(68);
-        let keys: Vec<Concrete<bitcoin::PublicKey>> =
-            keys.iter().map(|pubkey| Concrete::Key(*pubkey)).collect();
+        let keys: Vec<Arc<Concrete<bitcoin::PublicKey>>> = keys
+            .iter()
+            .map(|pubkey| Concrete::Key(*pubkey).into())
+            .collect();
         let thresh_res = Concrete::Threshold(keys.len() - 1, keys).compile::<Legacy>();
         let ops_count = thresh_res.clone().and_then(|m| Ok(m.ext.ops.op_count()));
         assert_eq!(
@@ -1568,7 +1584,7 @@ mod tests {
         // Test that we refuse to compile policies with duplicated keys
         let (keys, _) = pubkeys_and_a_sig(1);
-        let key = Concrete::Key(keys[0]);
+        let key = Arc::new(Concrete::Key(keys[0]));
         let res = Concrete::Or(vec![(1, key.clone()), (1, key.clone())]).compile::<Segwitv0>();
         assert_eq!(
             res,
             Err(CompilerError::PolicyError(
@@ -1577,7 +1593,7 @@ mod tests {
             ))
         );
         // Same for legacy
-        let res = Concrete::Or(vec![(1, key.clone()), (1, key)]).compile::<Legacy>();
+        let res = Concrete::Or(vec![(1, key.clone()), (1, key.clone())]).compile::<Legacy>();
         assert_eq!(
             res,
             Err(CompilerError::PolicyError(
diff --git a/src/policy/concrete.rs b/src/policy/concrete.rs
index 7a6d6a2e5..47d223995 100644
--- a/src/policy/concrete.rs
+++ b/src/policy/concrete.rs
@@ -8,6 +8,7 @@ use core::{fmt, str};
 use std::error;
 
 use bitcoin::{absolute, Sequence};
+use sync::Arc;
 #[cfg(feature = "compiler")]
 use {
     crate::descriptor::TapTree,
@@ -19,11 +20,11 @@ use {
     crate::Miniscript,
     crate::Tap,
     core::cmp::Reverse,
-    sync::Arc,
 };
 
 use super::ENTAILMENT_MAX_TERMINALS;
 use crate::expression::{self, FromTree};
+use crate::iter::TreeLike;
 use crate::miniscript::types::extra_props::TimelockInfo;
 use crate::prelude::*;
 #[cfg(all(doc, not(feature = "compiler")))]
@@ -58,12 +59,12 @@ pub enum Policy<Pk: MiniscriptKey> {
     /// A HASH160 whose preimage must be provided to satisfy the descriptor.
     Hash160(Pk::Hash160),
     /// A list of sub-policies, all of which must be satisfied.
-    And(Vec<Policy<Pk>>),
+    And(Vec<Arc<Policy<Pk>>>),
     /// A list of sub-policies, one of which must be satisfied, along with
     /// relative probabilities for each one.
-    Or(Vec<(usize, Policy<Pk>)>),
+    Or(Vec<(usize, Arc<Policy<Pk>>)>),
     /// A set of descriptors, satisfactions must be provided for `k` of them.
-    Threshold(usize, Vec<Policy<Pk>>),
+    Threshold(usize, Vec<Arc<Policy<Pk>>>),
 }
 
 impl<Pk> Policy<Pk>
@@ -83,105 +84,6 @@ where
     }
 }
 
-/// Lightweight repr of Concrete policy which corresponds directly to a
-/// Miniscript structure, and whose disjunctions are annotated with satisfaction
-/// probabilities to assist the compiler
-#[cfg(feature = "compiler")]
-#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
-enum PolicyArc<Pk: MiniscriptKey> {
-    /// Unsatisfiable
-    Unsatisfiable,
-    /// Trivially satisfiable
-    Trivial,
-    /// A public key which must sign to satisfy the descriptor
-    Key(Pk),
-    /// An absolute locktime restriction
-    After(AbsLockTime),
-    /// A relative locktime restriction
-    Older(u32),
-    /// A SHA256 whose preimage must be provided to satisfy the descriptor
-    Sha256(Pk::Sha256),
-    /// A SHA256d whose preimage must be provided to satisfy the descriptor
-    Hash256(Pk::Hash256),
-    /// A RIPEMD160 whose preimage must be provided to satisfy the descriptor
-    Ripemd160(Pk::Ripemd160),
-    /// A HASH160 whose preimage must be provided to satisfy the descriptor
-    Hash160(Pk::Hash160),
-    /// A list of sub-policies' references, all of which must be satisfied
-    And(Vec<Arc<PolicyArc<Pk>>>),
-    /// A list of sub-policies's references, one of which must be satisfied,
-    /// along with relative probabilities for each one
-    Or(Vec<(usize, Arc<PolicyArc<Pk>>)>),
-    /// A set of descriptors' references, satisfactions must be provided for `k` of them
-    Threshold(usize, Vec<Arc<PolicyArc<Pk>>>),
-}
-
-#[cfg(feature = "compiler")]
-impl<Pk: MiniscriptKey> From<PolicyArc<Pk>> for Policy<Pk> {
-    fn from(p: PolicyArc<Pk>) -> Self {
-        match p {
-            PolicyArc::Unsatisfiable => Policy::Unsatisfiable,
-            PolicyArc::Trivial => Policy::Trivial,
-            PolicyArc::Key(pk) => Policy::Key(pk),
-            PolicyArc::After(t) => Policy::After(t),
-            PolicyArc::Older(t) => Policy::Older(Sequence::from_consensus(t)),
-            PolicyArc::Sha256(hash) => Policy::Sha256(hash),
-            PolicyArc::Hash256(hash) => Policy::Hash256(hash),
-            PolicyArc::Ripemd160(hash) => Policy::Ripemd160(hash),
-            PolicyArc::Hash160(hash) => Policy::Hash160(hash),
-            PolicyArc::And(subs) => Policy::And(
-                subs.into_iter()
-                    .map(|pol| Self::from((*pol).clone()))
-                    .collect(),
-            ),
-            PolicyArc::Or(subs) => Policy::Or(
-                subs.into_iter()
-                    .map(|(odds, sub)| (odds, Self::from((*sub).clone())))
-                    .collect(),
-            ),
-            PolicyArc::Threshold(k, subs) => Policy::Threshold(
-                k,
-                subs.into_iter()
-                    .map(|pol| Self::from((*pol).clone()))
-                    .collect(),
-            ),
-        }
-    }
-}
-
-#[cfg(feature = "compiler")]
-impl<Pk: MiniscriptKey> From<Policy<Pk>> for PolicyArc<Pk> {
-    fn from(p: Policy<Pk>) -> Self {
-        match p {
-            Policy::Unsatisfiable => PolicyArc::Unsatisfiable,
-            Policy::Trivial => PolicyArc::Trivial,
-            Policy::Key(pk) => PolicyArc::Key(pk),
-            Policy::After(lock_time) => PolicyArc::After(lock_time),
-            Policy::Older(Sequence(t)) => PolicyArc::Older(t),
-            Policy::Sha256(hash) => PolicyArc::Sha256(hash),
-            Policy::Hash256(hash) => PolicyArc::Hash256(hash),
-            Policy::Ripemd160(hash) => PolicyArc::Ripemd160(hash),
-            Policy::Hash160(hash) => PolicyArc::Hash160(hash),
-            Policy::And(subs) => PolicyArc::And(
-                subs.iter()
-                    .map(|sub| Arc::new(Self::from(sub.clone())))
-                    .collect(),
-            ),
-            Policy::Or(subs) => PolicyArc::Or(
-                subs.iter()
-                    .map(|(odds, sub)| (*odds, Arc::new(Self::from(sub.clone()))))
-                    .collect(),
-            ),
-            Policy::Threshold(k, subs) => PolicyArc::Threshold(
-                k,
-                subs.iter()
-                    .map(|sub| Arc::new(Self::from(sub.clone())))
-                    .collect(),
-            ),
-        }
-    }
-}
-
 /// Detailed error type for concrete policies.
 #[derive(Copy, Clone, PartialEq, Eq, Debug)]
 pub enum PolicyError {
@@ -449,17 +351,12 @@ impl<Pk: MiniscriptKey> Policy<Pk> {
         match policy {
             Policy::Trivial => None,
             policy => {
-                let pol = PolicyArc::from(policy);
-                let leaf_compilations: Vec<_> = pol
+                let leaf_compilations: Vec<_> = policy
                     .enumerate_policy_tree(1.0)
                     .into_iter()
-                    .filter(|x| x.1 != Arc::new(PolicyArc::Unsatisfiable))
+                    .filter(|x| *x.1 != Policy::Unsatisfiable)
                     .map(|(prob, ref pol)| {
-                        let converted_pol = Policy::<Pk>::from((**pol).clone());
-                        (
-                            OrdF64(prob),
-                            compiler::best_compilation(&converted_pol).unwrap(),
-                        )
+                        (OrdF64(prob), compiler::best_compilation(pol).unwrap())
                     })
                     .collect();
                 let tap_tree = with_huffman_tree::<Pk>(leaf_compilations).unwrap();
@@ -522,7 +419,7 @@ impl<Pk: MiniscriptKey> Policy<Pk> {
 }
 
 #[cfg(feature = "compiler")]
-impl<Pk: MiniscriptKey> PolicyArc<Pk> {
+impl<Pk: MiniscriptKey> Policy<Pk> {
     /// Returns a vector of policies whose disjunction is isomorphic to the initial one.
     ///
     /// This function is supposed to incrementally expand i.e. represent the policy as
@@ -531,21 +428,19 @@ impl<Pk: MiniscriptKey> PolicyArc<Pk> {
     #[cfg(feature = "compiler")]
     fn enumerate_pol(&self, prob: f64) -> Vec<(f64, Arc<Self>)> {
         match self {
-            PolicyArc::Or(subs) => {
+            Policy::Or(subs) => {
                 let total_odds = subs.iter().fold(0, |acc, x| acc + x.0);
                 subs.iter()
                     .map(|(odds, pol)| (prob * *odds as f64 / total_odds as f64, pol.clone()))
                     .collect::<Vec<_>>()
             }
-            PolicyArc::Threshold(k, subs) if *k == 1 => {
+            Policy::Threshold(k, subs) if *k == 1 => {
                 let total_odds = subs.len();
                 subs.iter()
                     .map(|pol| (prob / total_odds as f64, pol.clone()))
                     .collect::<Vec<_>>()
             }
-            PolicyArc::Threshold(k, subs) if *k != subs.len() => {
-                generate_combination(subs, prob, *k)
-            }
+            Policy::Threshold(k, subs) if *k != subs.len() => generate_combination(subs, prob, *k),
             pol => vec![(prob, Arc::new(pol.clone()))],
         }
     }
@@ -583,7 +478,7 @@ impl<Pk: MiniscriptKey> PolicyArc<Pk> {
         'outer: loop {
             //--- FIND a plausible node ---
             let mut prob: Reverse<OrdF64> = Reverse(OrdF64(0.0));
-            let mut curr_policy: Arc<Self> = Arc::new(PolicyArc::Unsatisfiable);
+            let mut curr_policy: Arc<Self> = Arc::new(Policy::Unsatisfiable);
             let mut curr_pol_replace_vec: Vec<(f64, Arc<Self>)> = vec![];
 
             let mut no_more_enum = false;
@@ -656,30 +551,21 @@
 
 impl<Pk: MiniscriptKey> ForEachKey<Pk> for Policy<Pk> {
     fn for_each_key<'a, F: FnMut(&'a Pk) -> bool>(&'a self, mut pred: F) -> bool {
-        self.real_for_each_key(&mut pred)
-    }
-}
-
-impl<Pk: MiniscriptKey> Policy<Pk> {
-    fn real_for_each_key<'a, F: FnMut(&'a Pk) -> bool>(&'a self, pred: &mut F) -> bool {
-        match *self {
-            Policy::Unsatisfiable | Policy::Trivial => true,
-            Policy::Key(ref pk) => pred(pk),
-            Policy::Sha256(..)
-            | Policy::Hash256(..)
-            | Policy::Ripemd160(..)
-            | Policy::Hash160(..)
-            | Policy::After(..)
-            | Policy::Older(..) => true,
-            Policy::Threshold(_, ref subs) | Policy::And(ref subs) => {
-                subs.iter().all(|sub| sub.real_for_each_key(&mut *pred))
+        for policy in self.pre_order_iter() {
+            match policy {
+                Policy::Key(ref pk) => {
+                    if !pred(pk) {
+                        return false;
+                    }
+                }
+                _ => {}
             }
-            Policy::Or(ref subs) => subs
-                .iter()
-                .all(|(_, sub)| sub.real_for_each_key(&mut *pred)),
         }
+        true
     }
+}
 
+impl<Pk: MiniscriptKey> Policy<Pk> {
     /// Converts a policy using one kind of public key to another type of public key.
     ///
     /// For example usage please see [`crate::policy::semantic::Policy::translate_pk`].
@@ -688,81 +574,70 @@ impl<Pk: MiniscriptKey> Policy<Pk> {
         T: Translator<Pk, Q, E>,
         Q: MiniscriptKey,
     {
-        self._translate_pk(t)
-    }
-
-    fn _translate_pk<Q, E, T>(&self, t: &mut T) -> Result<Policy<Q>, E>
-    where
-        T: Translator<Pk, Q, E>,
-        Q: MiniscriptKey,
-    {
-        match *self {
-            Policy::Unsatisfiable => Ok(Policy::Unsatisfiable),
-            Policy::Trivial => Ok(Policy::Trivial),
-            Policy::Key(ref pk) => t.pk(pk).map(Policy::Key),
-            Policy::Sha256(ref h) => t.sha256(h).map(Policy::Sha256),
-            Policy::Hash256(ref h) => t.hash256(h).map(Policy::Hash256),
-            Policy::Ripemd160(ref h) => t.ripemd160(h).map(Policy::Ripemd160),
-            Policy::Hash160(ref h) => t.hash160(h).map(Policy::Hash160),
-            Policy::Older(n) => Ok(Policy::Older(n)),
-            Policy::After(n) => Ok(Policy::After(n)),
-            Policy::Threshold(k, ref subs) => {
-                let new_subs: Result<Vec<Policy<Q>>, _> =
-                    subs.iter().map(|sub| sub._translate_pk(t)).collect();
-                new_subs.map(|ok| Policy::Threshold(k, ok))
-            }
-            Policy::And(ref subs) => Ok(Policy::And(
-                subs.iter()
-                    .map(|sub| sub._translate_pk(t))
-                    .collect::<Result<Vec<Policy<Q>>, E>>()?,
-            )),
-            Policy::Or(ref subs) => Ok(Policy::Or(
-                subs.iter()
-                    .map(|(prob, sub)| Ok((*prob, sub._translate_pk(t)?)))
-                    .collect::<Result<Vec<(usize, Policy<Q>)>, E>>()?,
-            )),
+        use Policy::*;
+
+        let mut translated = vec![];
+        for data in Arc::new(self.clone()).post_order_iter() {
+            // convenience method to reduce typing
+            let child_n = |n| Arc::clone(&translated[data.child_indices[n]]);
+
+            let new_policy = match data.node.as_ref() {
+                Unsatisfiable => Unsatisfiable,
+                Trivial => Trivial,
+                Key(ref pk) => t.pk(pk).map(Key)?,
+                Sha256(ref h) => t.sha256(h).map(Sha256)?,
+                Hash256(ref h) => t.hash256(h).map(Hash256)?,
+                Ripemd160(ref h) => t.ripemd160(h).map(Ripemd160)?,
+                Hash160(ref h) => t.hash160(h).map(Hash160)?,
+                Older(n) => Older(*n),
+                After(n) => After(*n),
+                Threshold(k, ref subs) => Threshold(*k, (0..subs.len()).map(child_n).collect()),
+                And(ref subs) => And((0..subs.len()).map(child_n).collect()),
+                Or(ref subs) => Or((0..subs.len()).map(|i| (i, child_n(i))).collect()),
+            };
+            translated.push(Arc::new(new_policy));
         }
+
+        Ok(Arc::try_unwrap(translated.pop().unwrap()).unwrap())
     }
 
     /// Translates `Concrete::Key(key)` to `Concrete::Unsatisfiable` when extracting `TapKey`.
     pub fn translate_unsatisfiable_pk(self, key: &Pk) -> Policy<Pk> {
-        match self {
-            Policy::Key(ref k) if k.clone() == *key => Policy::Unsatisfiable,
-            Policy::And(subs) => Policy::And(
-                subs.into_iter()
-                    .map(|sub| sub.translate_unsatisfiable_pk(key))
-                    .collect::<Vec<_>>(),
-            ),
-            Policy::Or(subs) => Policy::Or(
-                subs.into_iter()
-                    .map(|(k, sub)| (k, sub.translate_unsatisfiable_pk(key)))
-                    .collect::<Vec<_>>(),
-            ),
-            Policy::Threshold(k, subs) => Policy::Threshold(
-                k,
-                subs.into_iter()
-                    .map(|sub| sub.translate_unsatisfiable_pk(key))
-                    .collect::<Vec<_>>(),
-            ),
-            x => x,
+        use Policy::*;
+
+        let mut translated = vec![];
+        for data in Arc::new(self.clone()).post_order_iter() {
+            // convenience method to reduce typing
+            let child_n = |n| Arc::clone(&translated[data.child_indices[n]]);
+
+            let new_policy = match data.node.as_ref() {
+                Policy::Key(ref k) if k.clone() == *key => Some(Policy::Unsatisfiable),
+                Threshold(k, ref subs) => {
+                    Some(Threshold(*k, (0..subs.len()).map(child_n).collect()))
+                }
+                And(ref subs) => Some(And((0..subs.len()).map(child_n).collect())),
+                Or(ref subs) => Some(Or((0..subs.len()).map(|i| (i, child_n(i))).collect())),
+                _ => None,
+            };
+            match new_policy {
+                Some(new_policy) => translated.push(Arc::new(new_policy)),
+                None => translated.push(Arc::clone(&data.node)),
+            }
         }
+
+        Arc::try_unwrap(translated.pop().unwrap()).unwrap()
     }
 
     /// Gets all keys in the policy.
     pub fn keys(&self) -> Vec<&Pk> {
-        match *self {
-            Policy::Key(ref pk) => vec![pk],
-            Policy::Threshold(_k, ref subs) => {
-                subs.iter().flat_map(|sub| sub.keys()).collect::<Vec<_>>()
+        let mut keys = vec![];
+        for policy in self.pre_order_iter() {
+            match policy {
+                Policy::Key(ref pk) => keys.push(pk),
+                _ => {}
             }
-            Policy::And(ref subs) => subs.iter().flat_map(|sub| sub.keys()).collect::<Vec<_>>(),
-            Policy::Or(ref subs) => subs
-                .iter()
-                .flat_map(|(ref _k, ref sub)| sub.keys())
-                .collect::<Vec<_>>(),
-            // map all hashes and time
-            _ => vec![],
         }
+        keys
     }
 
     /// Gets the number of [TapLeaf](`TapTree::Leaf`)s considering exhaustive root-level [`Policy::Or`]
@@ -1144,7 +1019,7 @@ impl_block_str!(
                 for arg in &top.args {
                     subs.push(Policy::from_tree(arg)?);
                 }
-                Ok(Policy::And(subs))
+                Ok(Policy::And(subs.into_iter().map(|p| Arc::new(p)).collect()))
             }
             ("or", _) => {
                 if top.args.len() != 2 {
@@ -1154,7 +1029,7 @@ impl_block_str!(
                 for arg in &top.args {
                     subs.push(Policy::from_tree_prob(arg, true)?);
                 }
-                Ok(Policy::Or(subs))
+                Ok(Policy::Or(subs.into_iter().map(|(probability, policy)| (probability, Arc::new(policy))).collect()))
             }
             ("thresh", nsubs) => {
                 if top.args.is_empty() || !top.args[0].args.is_empty() {
@@ -1170,7 +1045,7 @@ impl_block_str!(
                 for arg in &top.args[1..] {
                     subs.push(Policy::from_tree(arg)?);
                 }
-                Ok(Policy::Threshold(thresh as usize, subs))
+                Ok(Policy::Threshold(thresh as usize, subs.into_iter().map(|p| Arc::new(p)).collect()))
             }
             _ => Err(errstr(top.name)),
         }
@@ -1222,22 +1097,22 @@ fn with_huffman_tree<Pk: MiniscriptKey>(
 /// any one of the conditions exclusively.
 #[cfg(feature = "compiler")]
 fn generate_combination<Pk: MiniscriptKey>(
-    policy_vec: &Vec<Arc<PolicyArc<Pk>>>,
+    policy_vec: &Vec<Arc<Policy<Pk>>>,
     prob: f64,
     k: usize,
-) -> Vec<(f64, Arc<PolicyArc<Pk>>)> {
+) -> Vec<(f64, Arc<Policy<Pk>>)> {
     debug_assert!(k <= policy_vec.len());
 
-    let mut ret: Vec<(f64, Arc<PolicyArc<Pk>>)> = vec![];
+    let mut ret: Vec<(f64, Arc<Policy<Pk>>)> = vec![];
 
     for i in 0..policy_vec.len() {
-        let policies: Vec<Arc<PolicyArc<Pk>>> = policy_vec
+        let policies: Vec<Arc<Policy<Pk>>> = policy_vec
             .iter()
             .enumerate()
             .filter_map(|(j, sub)| if j != i { Some(Arc::clone(sub)) } else { None })
             .collect();
         ret.push((
             prob / policy_vec.len() as f64,
-            Arc::new(PolicyArc::Threshold(k, policies)),
+            Arc::new(Policy::Threshold(k, policies)),
         ));
     }
     ret
@@ -1250,7 +1125,7 @@ mod compiler_tests {
     use sync::Arc;
 
     use super::Concrete;
-    use crate::policy::concrete::{generate_combination, PolicyArc};
+    use crate::policy::concrete::{generate_combination, Policy};
     use crate::prelude::*;
 
     #[test]
@@ -1261,46 +1136,46 @@ mod compiler_tests {
             .collect();
         let policy_vec = policies
             .into_iter()
-            .map(|pol| Arc::new(PolicyArc::from(pol)))
+            .map(|pol| Arc::new(Policy::from(pol)))
             .collect::<Vec<_>>();
         let combinations = generate_combination(&policy_vec, 1.0, 2);
 
-        let comb_a: Vec<Arc<PolicyArc<String>>> = vec![
+        let comb_a: Vec<Arc<Policy<String>>> = vec![
             policy_str!("pk(B)"),
             policy_str!("pk(C)"),
             policy_str!("pk(D)"),
         ]
         .into_iter()
-        .map(|pol| Arc::new(PolicyArc::from(pol)))
+        .map(|pol| Arc::new(Policy::from(pol)))
         .collect();
-        let comb_b: Vec<Arc<PolicyArc<String>>> = vec![
+        let comb_b: Vec<Arc<Policy<String>>> = vec![
             policy_str!("pk(A)"),
             policy_str!("pk(C)"),
             policy_str!("pk(D)"),
         ]
         .into_iter()
-        .map(|pol| Arc::new(PolicyArc::from(pol)))
+        .map(|pol| Arc::new(Policy::from(pol)))
        .collect();
-        let comb_c: Vec<Arc<PolicyArc<String>>> = vec![
+        let comb_c: Vec<Arc<Policy<String>>> = vec![
             policy_str!("pk(A)"),
             policy_str!("pk(B)"),
             policy_str!("pk(D)"),
         ]
         .into_iter()
-        .map(|pol| Arc::new(PolicyArc::from(pol)))
+        .map(|pol| Arc::new(Policy::from(pol)))
         .collect();
-        let comb_d: Vec<Arc<PolicyArc<String>>> = vec![
+        let comb_d: Vec<Arc<Policy<String>>> = vec![
             policy_str!("pk(A)"),
             policy_str!("pk(B)"),
             policy_str!("pk(C)"),
         ]
         .into_iter()
-        .map(|pol| Arc::new(PolicyArc::from(pol)))
+        .map(|pol| Arc::new(Policy::from(pol)))
         .collect();
 
         let expected_comb = vec![comb_a, comb_b, comb_c, comb_d]
             .into_iter()
-            .map(|sub_pol| (0.25, Arc::new(PolicyArc::Threshold(2, sub_pol))))
+            .map(|sub_pol| (0.25, Arc::new(Policy::Threshold(2, sub_pol))))
             .collect::<Vec<_>>();
         assert_eq!(combinations, expected_comb);
     }
diff --git a/src/policy/mod.rs b/src/policy/mod.rs
index 27165c2e2..e29930df1 100644
--- a/src/policy/mod.rs
+++ b/src/policy/mod.rs
@@ -198,7 +198,8 @@ impl<Pk: MiniscriptKey> Liftable<Pk> for Concrete<Pk> {
             Concrete::Ripemd160(ref h) => Semantic::Ripemd160(h.clone()),
             Concrete::Hash160(ref h) => Semantic::Hash160(h.clone()),
             Concrete::And(ref subs) => {
-                let semantic_subs: Result<_, Error> = subs.iter().map(Liftable::lift).collect();
+                let semantic_subs: Result<_, Error> =
+                    subs.iter().map(|p| Liftable::lift(p.as_ref())).collect();
                 Semantic::Threshold(2, semantic_subs?)
             }
             Concrete::Or(ref subs) => {
@@ -207,7 +208,8 @@ impl<Pk: MiniscriptKey> Liftable<Pk> for Concrete<Pk> {
                 Semantic::Threshold(1, semantic_subs?)
             }
             Concrete::Threshold(k, ref subs) => {
-                let semantic_subs: Result<_, Error> = subs.iter().map(Liftable::lift).collect();
+                let semantic_subs: Result<_, Error> =
+                    subs.iter().map(|p| Liftable::lift(p.as_ref())).collect();
                 Semantic::Threshold(k, semantic_subs?)
             }
         }