Contribution improvements

Author: Alekos Filini
Date: 2020-02-17 14:22:53 +01:00
Parent: 914719ccf4
Commit: f605f4b34d
GPG signature: key ID 5E8AFC3034FDFA4F (no known key found for this signature in database)

9 changed files with 460 additions and 221 deletions

View File

@@ -13,7 +13,7 @@ base64 = "^0.11"
 # Optional dependencies
 sled = { version = "0.31.0", optional = true }
-electrum-client = { version = "0.1.0-beta.1", optional = true }
+electrum-client = { version = "0.1.0-beta.5", optional = true }

 [features]
 minimal = []

View File

@@ -17,13 +17,12 @@ fn main() {
     let extended_desc = ExtendedDescriptor::from_str(desc).unwrap();
     println!("{:?}", extended_desc);

+    let policy = extended_desc.extract_policy().unwrap();
+    println!("policy: {}", serde_json::to_string(&policy).unwrap());
+
     let derived_desc = extended_desc.derive(42).unwrap();
     println!("{:?}", derived_desc);

-    if let Descriptor::Wsh(x) = &derived_desc {
-        println!("{}", serde_json::to_string(&x.extract_policy()).unwrap());
-    }
-
     let addr = derived_desc.address(Network::Testnet).unwrap();
     println!("{}", addr);

View File

@@ -304,11 +304,10 @@ fn main() {
            let psbt: PartiallySignedTransaction = deserialize(&psbt).unwrap();
            let (psbt, finalized) = wallet.sign(psbt).unwrap();

-           println!("PSBT: {}", base64::encode(&serialize(&psbt)));
            println!("Finalized: {}", finalized);
            if finalized {
                println!("Extracted: {}", serialize_hex(&psbt.extract_tx()));
+           } else {
+               println!("PSBT: {}", base64::encode(&serialize(&psbt)));
            }
        }
    };

View File

@@ -6,6 +6,8 @@ pub enum Error {
     MalformedInput,
     KeyParsingError(String),

+    Policy(crate::descriptor::policy::PolicyError),
+
     InputIndexDoesntExist,
     MissingPublicKey,
     MissingDetails,
@@ -32,3 +34,4 @@ impl_error!(bitcoin::util::base58::Error, Base58);
 impl_error!(bitcoin::util::key::Error, PK);
 impl_error!(miniscript::Error, Miniscript);
 impl_error!(bitcoin::hashes::hex::Error, Hex);
+impl_error!(crate::descriptor::policy::PolicyError, Policy);

View File

@@ -75,7 +75,6 @@ impl DescriptorExtendedKey {
         final_path.into()
     }
-
     pub fn derive<C: secp256k1::Verification + secp256k1::Signing>(
         &self,
         ctx: &secp256k1::Secp256k1<C>,

View File

@@ -10,7 +10,7 @@ use bitcoin::util::bip32::{DerivationPath, ExtendedPrivKey, Fingerprint};
 use bitcoin::util::psbt::PartiallySignedTransaction as PSBT;
 use bitcoin::{PrivateKey, PublicKey, Script};

-pub use miniscript::{descriptor::Descriptor, Miniscript};
+pub use miniscript::{Descriptor, Miniscript, MiniscriptKey, Terminal};

 use serde::{Deserialize, Serialize};
@@ -27,11 +27,14 @@ pub use self::extended_key::{DerivationIndex, DescriptorExtendedKey};
 pub use self::policy::Policy;

 trait MiniscriptExtractPolicy {
-    fn extract_policy(&self, lookup_map: &BTreeMap<String, Box<dyn Key>>) -> Option<Policy>;
+    fn extract_policy(
+        &self,
+        lookup_map: &BTreeMap<String, Box<dyn Key>>,
+    ) -> Result<Option<Policy>, Error>;
 }

 pub trait ExtractPolicy {
-    fn extract_policy(&self) -> Option<Policy>;
+    fn extract_policy(&self) -> Result<Option<Policy>, Error>;
 }

 #[derive(Debug, Clone, Hash, PartialEq, PartialOrd, Eq, Ord, Default)]
@@ -228,6 +231,12 @@ impl std::clone::Clone for ExtendedDescriptor {
     }
 }

+impl std::convert::AsRef<StringDescriptor> for ExtendedDescriptor {
+    fn as_ref(&self) -> &StringDescriptor {
+        &self.internal
+    }
+}
+
 impl ExtendedDescriptor {
     fn parse_string(string: &str) -> Result<(String, Box<dyn Key>), Error> {
         if let Ok(pk) = PublicKey::from_str(string) {
@@ -271,13 +280,18 @@ impl ExtendedDescriptor {
         &self,
         miniscript: Miniscript<PublicKey>,
     ) -> Result<DerivedDescriptor, Error> {
-        // TODO: make sure they are "equivalent"
-        match self.internal {
-            Descriptor::Bare(_) => Ok(Descriptor::Bare(miniscript)),
-            Descriptor::Sh(_) => Ok(Descriptor::Sh(miniscript)),
-            Descriptor::Wsh(_) => Ok(Descriptor::Wsh(miniscript)),
-            Descriptor::ShWsh(_) => Ok(Descriptor::ShWsh(miniscript)),
-            _ => Err(Error::CantDeriveWithMiniscript),
+        let derived_desc = match self.internal {
+            Descriptor::Bare(_) => Descriptor::Bare(miniscript),
+            Descriptor::Sh(_) => Descriptor::Sh(miniscript),
+            Descriptor::Wsh(_) => Descriptor::Wsh(miniscript),
+            Descriptor::ShWsh(_) => Descriptor::ShWsh(miniscript),
+            _ => return Err(Error::CantDeriveWithMiniscript),
+        };
+
+        if !self.same_structure(&derived_desc) {
+            Err(Error::CantDeriveWithMiniscript)
+        } else {
+            Ok(derived_desc)
         }
     }
@@ -388,10 +402,29 @@ impl ExtendedDescriptor {
     pub fn is_fixed(&self) -> bool {
         self.keys.iter().all(|(_, key)| key.is_fixed())
     }
+
+    pub fn same_structure<K: MiniscriptKey>(&self, other: &Descriptor<K>) -> bool {
+        // Translate all the public keys to () and then check if the two descriptors are equal.
+        // TODO: translate hashes to their default value before checking for ==
+        let func_string = |_string: &String| -> Result<_, Error> { Ok(DummyKey::default()) };
+
+        let func_generic_pk = |_data: &K| -> Result<_, Error> { Ok(DummyKey::default()) };
+        let func_generic_pkh =
+            |_data: &<K as MiniscriptKey>::Hash| -> Result<_, Error> { Ok(DummyKey::default()) };
+
+        let translated_a = self.internal.translate_pk(func_string, func_string);
+        let translated_b = other.translate_pk(func_generic_pk, func_generic_pkh);
+
+        match (translated_a, translated_b) {
+            (Ok(a), Ok(b)) => a == b,
+            _ => false,
+        }
+    }
 }

 impl ExtractPolicy for ExtendedDescriptor {
-    fn extract_policy(&self) -> Option<Policy> {
+    fn extract_policy(&self) -> Result<Option<Policy>, Error> {
         self.internal.extract_policy(&self.keys)
     }
 }
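
The `same_structure` helper added above is what the wallet constructors later in this commit rely on to reject a change descriptor whose template does not match the external one. A minimal sketch of its behaviour, assuming the `ExtendedDescriptor` API from this file (the inline public keys are arbitrary valid secp256k1 points chosen so the strings parse, not values taken from the commit):

    use std::str::FromStr;

    // Same multisig template wrapped in sh(...) vs wsh(...): the key set is identical,
    // but the outer structure differs, so the second comparison should fail.
    let a = ExtendedDescriptor::from_str(
        "sh(multi(1,0279be667ef9dcbbac55a06295ce870b07029bfcdbce28d959f2815b16f81798,02c6047f9441ed7d6d3045406e95c07cd85c778e4b8cef3ca7abac09b95c709ee5))",
    ).unwrap();
    let b = ExtendedDescriptor::from_str(
        "wsh(multi(1,0279be667ef9dcbbac55a06295ce870b07029bfcdbce28d959f2815b16f81798,02c6047f9441ed7d6d3045406e95c07cd85c778e4b8cef3ca7abac09b95c709ee5))",
    ).unwrap();

    assert!(a.same_structure(a.as_ref()));  // trivially the same template
    assert!(!a.same_structure(b.as_ref())); // sh(...) vs wsh(...): different structure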
@@ -479,7 +512,10 @@ mod test {
             .to_string(),
             "mqwpxxvfv3QbM8PU8uBx2jaNt9btQqvQNx"
         );
-        assert_eq!(desc.get_secret_keys().into_iter().collect::<Vec<_>>().len(), 1);
+        assert_eq!(
+            desc.get_secret_keys().into_iter().collect::<Vec<_>>().len(),
+            1
+        );
     }

     #[test]
@@ -503,7 +539,10 @@ mod test {
             .to_string(),
             "mqwpxxvfv3QbM8PU8uBx2jaNt9btQqvQNx"
         );
-        assert_eq!(desc.get_secret_keys().into_iter().collect::<Vec<_>>().len(), 0);
+        assert_eq!(
+            desc.get_secret_keys().into_iter().collect::<Vec<_>>().len(),
+            0
+        );
     }

     #[test]

View File

@@ -1,22 +1,30 @@
-use std::collections::{BTreeMap, HashSet};
+use std::cmp::max;
+use std::collections::{BTreeMap, HashSet, VecDeque};

-use serde::Serialize;
+use serde::ser::SerializeMap;
+use serde::{Serialize, Serializer};

 use bitcoin::hashes::*;
 use bitcoin::secp256k1::Secp256k1;
 use bitcoin::util::bip32::Fingerprint;
-use bitcoin::util::psbt;
 use bitcoin::PublicKey;

 use miniscript::{Descriptor, Miniscript, Terminal};

-use descriptor::{Key, MiniscriptExtractPolicy};
+#[allow(unused_imports)]
+use log::{debug, error, info, trace};

-#[derive(Debug, Serialize)]
+use super::error::Error;
+use crate::descriptor::{Key, MiniscriptExtractPolicy};
+use crate::psbt::PSBTSatisfier;
+
+#[derive(Debug, Clone, Default, Serialize)]
 pub struct PKOrF {
     #[serde(skip_serializing_if = "Option::is_none")]
     pubkey: Option<PublicKey>,
     #[serde(skip_serializing_if = "Option::is_none")]
+    pubkey_hash: Option<hash160::Hash>,
+    #[serde(skip_serializing_if = "Option::is_none")]
     fingerprint: Option<Fingerprint>,
 }
@@ -28,28 +36,23 @@ impl PKOrF {
         if let Some(fing) = k.fingerprint(&secp) {
             PKOrF {
                 fingerprint: Some(fing),
-                pubkey: None,
+                ..Default::default()
             }
         } else {
             PKOrF {
-                fingerprint: None,
                 pubkey: Some(pubkey),
+                ..Default::default()
             }
         }
     }
 }

-#[derive(Debug, Serialize)]
+#[derive(Debug, Clone, Serialize)]
 #[serde(tag = "type", rename_all = "UPPERCASE")]
 pub enum SatisfiableItem {
     // Leaves
     Signature(PKOrF),
-    SignatureKey {
-        #[serde(skip_serializing_if = "Option::is_none")]
-        fingerprint: Option<Fingerprint>,
-        #[serde(skip_serializing_if = "Option::is_none")]
-        pubkey_hash: Option<hash160::Hash>,
-    },
+    SignatureKey(PKOrF),
     SHA256Preimage {
         hash: sha256::Hash,
     },
@@ -90,95 +93,240 @@ impl SatisfiableItem {
             _ => true,
         }
     }
-
-    fn satisfy(&self, _input: &psbt::Input) -> Satisfaction {
-        Satisfaction::None
-    }
 }

-#[derive(Debug, Clone, PartialEq, Eq, Serialize)]
+fn combinations(vec: &Vec<usize>, size: usize) -> Vec<Vec<usize>> {
+    assert!(vec.len() >= size);
+
+    let mut answer = Vec::new();
+
+    let mut queue = VecDeque::new();
+    for (index, val) in vec.iter().enumerate() {
+        let mut new_vec = Vec::with_capacity(size);
+        new_vec.push(*val);
+        queue.push_back((index, new_vec));
+    }
+
+    while let Some((index, vals)) = queue.pop_front() {
+        if vals.len() >= size {
+            answer.push(vals);
+        } else {
+            for (new_index, val) in vec.iter().skip(index + 1).enumerate() {
+                let mut cloned = vals.clone();
+                cloned.push(*val);
+                queue.push_front((new_index, cloned));
+            }
+        }
+    }
+
+    answer
+}
+
+fn mix<T: Clone>(vec: Vec<Vec<T>>) -> Vec<Vec<T>> {
+    if vec.is_empty() || vec.iter().any(Vec::is_empty) {
+        return vec![];
+    }
+
+    let mut answer = Vec::new();
+    let size = vec.len();
+
+    let mut queue = VecDeque::new();
+    for i in &vec[0] {
+        let mut new_vec = Vec::with_capacity(size);
+        new_vec.push(i.clone());
+        queue.push_back(new_vec);
+    }
+
+    while let Some(vals) = queue.pop_front() {
+        if vals.len() >= size {
+            answer.push(vals);
+        } else {
+            let level = vals.len();
+            for i in &vec[level] {
+                let mut cloned = vals.clone();
+                cloned.push(i.clone());
+                queue.push_front(cloned);
+            }
+        }
+    }
+
+    answer
+}
+
+pub type ConditionMap = BTreeMap<usize, HashSet<Condition>>;
+pub type FoldedConditionMap = BTreeMap<Vec<usize>, HashSet<Condition>>;
+
+fn serialize_folded_cond_map<S>(
+    input_map: &FoldedConditionMap,
+    serializer: S,
+) -> Result<S::Ok, S::Error>
+where
+    S: Serializer,
+{
+    let mut map = serializer.serialize_map(Some(input_map.len()))?;
+    for (k, v) in input_map {
+        let k_string = format!("{:?}", k);
+        map.serialize_entry(&k_string, v)?;
+    }
+    map.end()
+}
+
+#[derive(Debug, Clone, Serialize)]
 #[serde(tag = "type", rename_all = "UPPERCASE")]
 pub enum Satisfaction {
-    Complete {
-        #[serde(skip_serializing_if = "PathRequirements::is_null")]
-        condition: PathRequirements,
-    },
     Partial {
-        m: usize,
         n: usize,
-        completed: HashSet<usize>,
+        m: usize,
+        items: Vec<usize>,
+        #[serde(skip_serializing_if = "BTreeMap::is_empty")]
+        conditions: ConditionMap,
+    },
+    PartialComplete {
+        n: usize,
+        m: usize,
+        items: Vec<usize>,
+        #[serde(
+            serialize_with = "serialize_folded_cond_map",
+            skip_serializing_if = "BTreeMap::is_empty"
+        )]
+        conditions: FoldedConditionMap,
+    },
+    Complete {
+        condition: Condition,
     },
     None,
 }

 impl Satisfaction {
-    fn from_items_threshold(items: HashSet<usize>, threshold: usize) -> Satisfaction {
-        Satisfaction::Partial {
-            m: items.len(),
-            n: threshold,
-            completed: items,
-        }
-    }
-}
-
-impl<'a> std::ops::Add<&'a Satisfaction> for Satisfaction {
-    type Output = Satisfaction;
-
-    fn add(self, other: &'a Satisfaction) -> Satisfaction {
-        &self + other
-    }
-}
-
-impl<'a, 'b> std::ops::Add<&'b Satisfaction> for &'a Satisfaction {
-    type Output = Satisfaction;
-
-    fn add(self, other: &'b Satisfaction) -> Satisfaction {
-        match (self, other) {
-            // complete-complete
-            (
-                Satisfaction::Complete { condition: mut a },
-                Satisfaction::Complete { condition: b },
-            ) => {
-                a.merge(&b).unwrap();
-                Satisfaction::Complete { condition: a }
-            }
-            // complete-<any>
-            (Satisfaction::Complete { condition }, _) => Satisfaction::Complete {
-                condition: *condition,
-            },
-            (_, Satisfaction::Complete { condition }) => Satisfaction::Complete {
-                condition: *condition,
-            },
-            // none-<any>
-            (Satisfaction::None, any) => any.clone(),
-            (any, Satisfaction::None) => any.clone(),
-            // partial-partial
-            (
-                Satisfaction::Partial {
-                    m: _,
-                    n: a_n,
-                    completed: a_items,
-                },
-                Satisfaction::Partial {
-                    m: _,
-                    n: _,
-                    completed: b_items,
-                },
-            ) => {
-                let union: HashSet<_> = a_items.union(&b_items).cloned().collect();
-                Satisfaction::Partial {
-                    m: union.len(),
-                    n: *a_n,
-                    completed: union,
-                }
-            }
-        }
-    }
-}
+    pub fn is_leaf(&self) -> bool {
+        match self {
+            Satisfaction::None | Satisfaction::Complete { .. } => true,
+            Satisfaction::PartialComplete { .. } | Satisfaction::Partial { .. } => false,
+        }
+    }
+
+    // add `inner` as one of self's partial items. this only makes sense on partials
+    fn add(&mut self, inner: &Satisfaction, inner_index: usize) -> Result<(), PolicyError> {
+        match self {
+            Satisfaction::None | Satisfaction::Complete { .. } => Err(PolicyError::AddOnLeaf),
+            Satisfaction::PartialComplete { .. } => Err(PolicyError::AddOnPartialComplete),
+            Satisfaction::Partial {
+                n,
+                ref mut conditions,
+                ref mut items,
+                ..
+            } => {
+                if inner_index >= *n || items.contains(&inner_index) {
+                    return Err(PolicyError::IndexOutOfRange(inner_index));
+                }
+
+                match inner {
+                    // not relevant if not completed yet
+                    Satisfaction::None | Satisfaction::Partial { .. } => return Ok(()),
+                    Satisfaction::Complete { condition } => {
+                        items.push(inner_index);
+                        conditions.insert(inner_index, vec![*condition].into_iter().collect());
+                    }
+                    Satisfaction::PartialComplete {
+                        conditions: other_conditions,
+                        ..
+                    } => {
+                        items.push(inner_index);
+                        let conditions_set = other_conditions
+                            .values()
+                            .fold(HashSet::new(), |set, i| set.union(&i).cloned().collect());
+                        conditions.insert(inner_index, conditions_set);
+                    }
+                }
+
+                Ok(())
+            }
+        }
+    }
+
+    fn finalize(&mut self) -> Result<(), PolicyError> {
+        // if partial try to bump it to a partialcomplete
+        if let Satisfaction::Partial {
+            n,
+            m,
+            items,
+            conditions,
+        } = self
+        {
+            if items.len() >= *m {
+                let mut map = BTreeMap::new();
+
+                let indexes = combinations(items, *m);
+                // `indexes` at this point is a Vec<Vec<usize>>, with the "n choose k" of items (m of n)
+                indexes
+                    .into_iter()
+                    // .inspect(|x| println!("--- orig --- {:?}", x))
+                    // we map each of the combinations of elements into a tuple of ([choosen items], [conditions]). unfortunately, those items have potentially more than one
+                    // condition (think about ORs), so we also use `mix` to expand those, i.e. [[0], [1, 2]] becomes [[0, 1], [0, 2]]. This is necessary to make sure that we
+                    // consider every possibile options and check whether or not they are compatible.
+                    .map(|i_vec| {
+                        mix(i_vec
+                            .iter()
+                            .map(|i| {
+                                conditions
+                                    .get(i)
+                                    .and_then(|set| Some(set.clone().into_iter().collect()))
+                                    .unwrap_or(vec![])
+                            })
+                            .collect())
+                        .into_iter()
+                        .map(|x| (i_vec.clone(), x))
+                        .collect::<Vec<(Vec<usize>, Vec<Condition>)>>()
+                    })
+                    // .inspect(|x: &Vec<(Vec<usize>, Vec<Condition>)>| println!("fetch {:?}", x))
+                    // since the previous step can turn one item of the iterator into multiple ones, we call flatten to expand them out
+                    .flatten()
+                    // .inspect(|x| println!("flat {:?}", x))
+                    // try to fold all the conditions for this specific combination of indexes/options. if they are not compatibile, try_fold will be Err
+                    .map(|(key, val)| {
+                        (
+                            key,
+                            val.into_iter()
+                                .try_fold(Condition::default(), |acc, v| acc.merge(&v)),
+                        )
+                    })
+                    // .inspect(|x| println!("try_fold {:?}", x))
+                    // filter out all the incompatible combinations
+                    .filter(|(_, val)| val.is_ok())
+                    // .inspect(|x| println!("filter {:?}", x))
+                    // push them into the map
+                    .for_each(|(key, val)| {
+                        map.entry(key)
+                            .or_insert_with(HashSet::new)
+                            .insert(val.unwrap());
+                    });
+                // TODO: if the map is empty, the conditions are not compatible, return an error?
+
+                *self = Satisfaction::PartialComplete {
+                    n: *n,
+                    m: *m,
+                    items: items.clone(),
+                    conditions: map,
+                };
+            }
+        }
+
+        Ok(())
+    }
+}
+
+impl From<bool> for Satisfaction {
+    fn from(other: bool) -> Self {
+        if other {
+            Satisfaction::Complete {
+                condition: Default::default(),
+            }
+        } else {
+            Satisfaction::None
+        }
+    }
+}

-#[derive(Debug, Serialize)]
+#[derive(Debug, Clone, Serialize)]
 pub struct Policy {
     #[serde(flatten)]
     item: SatisfiableItem,
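
To make the two helpers introduced at the top of this hunk easier to follow: `combinations` enumerates the m-of-n index subsets of the already-satisfiable items, `mix` expands one choice per item when an item carries several alternative conditions, and `finalize` chains them before merging conditions into the `PartialComplete` map. A small illustrative sketch of the expected shapes (hypothetical test code inside policy.rs, since both functions are private; not part of the commit):

    // Every 2-of-3 subset of the indexes 0, 1, 2 -- three subsets in total
    // (the traversal uses a queue, so the ordering is not guaranteed).
    let pairs = combinations(&vec![0, 1, 2], 2);
    assert_eq!(pairs.len(), 3);

    // One pick per inner vector, as in the comment inside `finalize`:
    // [[0], [1, 2]] expands to [[0, 1], [0, 2]].
    let expanded = mix(vec![vec![0], vec![1, 2]]);
    assert_eq!(expanded.len(), 2);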
@@ -186,39 +334,39 @@ pub struct Policy {
     contribution: Satisfaction,
 }

-#[derive(Debug, Default, Eq, PartialEq, Clone, Copy, Serialize)]
-pub struct PathRequirements {
+#[derive(Hash, Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Default, Serialize)]
+pub struct Condition {
     #[serde(skip_serializing_if = "Option::is_none")]
     pub csv: Option<u32>,
     #[serde(skip_serializing_if = "Option::is_none")]
     pub timelock: Option<u32>,
 }

-impl PathRequirements {
-    pub fn merge(&mut self, other: &Self) -> Result<(), PolicyError> {
-        if other.is_null() {
-            return Ok(());
-        }
-
-        match (self.csv, other.csv) {
-            (Some(old), Some(new)) if old != new => Err(PolicyError::DifferentCSV(old, new)),
-            _ => {
-                self.csv = self.csv.or(other.csv);
-                Ok(())
-            }
-        }?;
-
-        match (self.timelock, other.timelock) {
-            // TODO: we could actually set the timelock to the highest of the two, but we would
-            // have to first check that they are both in the same "unit" (blocks vs time)
-            (Some(old), Some(new)) if old != new => Err(PolicyError::DifferentTimelock(old, new)),
-            _ => {
-                self.timelock = self.timelock.or(other.timelock);
-                Ok(())
-            }
-        }?;
-
-        Ok(())
+impl Condition {
+    fn merge_timelock(a: u32, b: u32) -> Result<u32, PolicyError> {
+        const BLOCKS_TIMELOCK_THRESHOLD: u32 = 500000000;
+
+        if (a < BLOCKS_TIMELOCK_THRESHOLD) != (b < BLOCKS_TIMELOCK_THRESHOLD) {
+            Err(PolicyError::MixedTimelockUnits)
+        } else {
+            Ok(max(a, b))
+        }
+    }
+
+    fn merge(mut self, other: &Condition) -> Result<Self, PolicyError> {
+        match (self.csv, other.csv) {
+            (Some(a), Some(b)) => self.csv = Some(Self::merge_timelock(a, b)?),
+            (None, any) => self.csv = any,
+            _ => {}
+        }
+
+        match (self.timelock, other.timelock) {
+            (Some(a), Some(b)) => self.timelock = Some(Self::merge_timelock(a, b)?),
+            (None, any) => self.timelock = any,
+            _ => {}
+        }
+
+        Ok(self)
     }

     pub fn is_null(&self) -> bool {
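
Compared to the old `PathRequirements::merge`, the new `Condition::merge` no longer rejects two different lock values outright: it keeps the higher (stricter) one and only fails when one value is a block height and the other a unix timestamp, using the 500000000 consensus threshold. A short sketch of the expected behaviour, assuming the `Condition` type exactly as defined in this hunk (hypothetical test code, since `merge` is private to the module):

    let a = Condition { csv: Some(100), timelock: Some(500_000) };
    let b = Condition { csv: Some(144), timelock: None };

    // Same unit: the higher CSV wins and the existing timelock is carried over.
    let merged = a.merge(&b).unwrap();
    assert_eq!(merged.csv, Some(144));
    assert_eq!(merged.timelock, Some(500_000));

    // A block-height timelock cannot be merged with a unix-timestamp one.
    let c = Condition { csv: None, timelock: Some(1_600_000_000) };
    let d = Condition { csv: None, timelock: Some(500_000) };
    assert!(d.merge(&c).is_err());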
@@ -230,9 +378,11 @@ impl PathRequirements {
 pub enum PolicyError {
     NotEnoughItemsSelected(usize),
     TooManyItemsSelected(usize),
-    IndexOutOfRange(usize, usize),
-    DifferentCSV(u32, u32),
-    DifferentTimelock(u32, u32),
+    IndexOutOfRange(usize),
+    AddOnLeaf,
+    AddOnPartialComplete,
+    MixedTimelockUnits,
+    IncompatibleConditions,
 }

 impl Policy {
@@ -244,71 +394,95 @@ impl Policy {
         }
     }

-    pub fn make_and(a: Option<Policy>, b: Option<Policy>) -> Option<Policy> {
+    pub fn make_and(a: Option<Policy>, b: Option<Policy>) -> Result<Option<Policy>, PolicyError> {
         match (a, b) {
-            (None, None) => None,
-            (Some(x), None) | (None, Some(x)) => Some(x),
+            (None, None) => Ok(None),
+            (Some(x), None) | (None, Some(x)) => Ok(Some(x)),
             (Some(a), Some(b)) => Self::make_thresh(vec![a, b], 2),
         }
     }

-    pub fn make_or(a: Option<Policy>, b: Option<Policy>) -> Option<Policy> {
+    pub fn make_or(a: Option<Policy>, b: Option<Policy>) -> Result<Option<Policy>, PolicyError> {
         match (a, b) {
-            (None, None) => None,
-            (Some(x), None) | (None, Some(x)) => Some(x),
+            (None, None) => Ok(None),
+            (Some(x), None) | (None, Some(x)) => Ok(Some(x)),
             (Some(a), Some(b)) => Self::make_thresh(vec![a, b], 1),
         }
     }

-    pub fn make_thresh(items: Vec<Policy>, threshold: usize) -> Option<Policy> {
+    pub fn make_thresh(
+        items: Vec<Policy>,
+        threshold: usize,
+    ) -> Result<Option<Policy>, PolicyError> {
         if threshold == 0 {
-            return None;
+            return Ok(None);
         }

-        let contribution = items.iter().fold(
-            Satisfaction::Partial {
-                m: 0,
-                n: threshold,
-                completed: HashSet::new(),
-            },
-            |acc, x| acc + &x.contribution,
-        );
+        let mut contribution = Satisfaction::Partial {
+            n: items.len(),
+            m: threshold,
+            items: vec![],
+            conditions: Default::default(),
+        };
+        for (index, item) in items.iter().enumerate() {
+            contribution.add(&item.contribution, index)?;
+        }
+        contribution.finalize()?;

         let mut policy: Policy = SatisfiableItem::Thresh { items, threshold }.into();
         policy.contribution = contribution;

-        Some(policy)
+        Ok(Some(policy))
     }

-    fn make_multisig(keys: Vec<Option<&Box<dyn Key>>>, threshold: usize) -> Option<Policy> {
+    fn make_multisig(
+        keys: Vec<Option<&Box<dyn Key>>>,
+        threshold: usize,
+    ) -> Result<Option<Policy>, PolicyError> {
+        if threshold == 0 {
+            return Ok(None);
+        }
+
         let parsed_keys = keys.iter().map(|k| PKOrF::from_key(k.unwrap())).collect();

+        let mut contribution = Satisfaction::Partial {
+            n: keys.len(),
+            m: threshold,
+            items: vec![],
+            conditions: Default::default(),
+        };
+        for (index, key) in keys.iter().enumerate() {
+            let val = if key.is_some() && key.unwrap().has_secret() {
+                Satisfaction::Complete {
+                    condition: Default::default(),
+                }
+            } else {
+                Satisfaction::None
+            };
+            contribution.add(&val, index)?;
+        }
+        contribution.finalize()?;
+
         let mut policy: Policy = SatisfiableItem::Multisig {
             keys: parsed_keys,
             threshold,
         }
         .into();
-        let our_keys = keys
-            .iter()
-            .enumerate()
-            .filter(|(_, x)| x.is_some() && x.unwrap().has_secret())
-            .map(|(k, _)| k)
-            .collect();
-        policy.contribution = Satisfaction::from_items_threshold(our_keys, threshold);
+        policy.contribution = contribution;

-        Some(policy)
+        Ok(Some(policy))
     }

-    pub fn satisfy(&mut self, input: &psbt::Input) {
-        self.satisfaction = self.item.satisfy(input);
+    pub fn satisfy(&mut self, _satisfier: &PSBTSatisfier, _desc_node: &Terminal<PublicKey>) {
+        //self.satisfaction = self.item.satisfy(satisfier, desc_node);
+        //self.contribution += &self.satisfaction;
     }

     pub fn requires_path(&self) -> bool {
         self.get_requirements(&vec![]).is_err()
     }

-    pub fn get_requirements(
-        &self,
-        path: &Vec<Vec<usize>>,
-    ) -> Result<PathRequirements, PolicyError> {
+    pub fn get_requirements(&self, path: &Vec<Vec<usize>>) -> Result<Condition, PolicyError> {
         self.recursive_get_requirements(path, 0)
     }
@@ -316,7 +490,7 @@ impl Policy {
         &self,
         path: &Vec<Vec<usize>>,
         index: usize,
-    ) -> Result<PathRequirements, PolicyError> {
+    ) -> Result<Condition, PolicyError> {
         // if items.len() == threshold, selected can be omitted and we take all of them by default
         let default = match &self.item {
             SatisfiableItem::Thresh { items, threshold } if items.len() == *threshold => {
@@ -339,8 +513,8 @@ impl Policy {
                 // if all the requirements are null we don't care about `selected` because there
                 // are no requirements
-                if mapped_req.iter().all(PathRequirements::is_null) {
-                    return Ok(PathRequirements::default());
+                if mapped_req.iter().all(Condition::is_null) {
+                    return Ok(Condition::default());
                 }

                 // if we have something, make sure we have enough items. note that the user can set
@@ -351,27 +525,27 @@ impl Policy {
                 }

                 // check the selected items, see if there are conflicting requirements
-                let mut requirements = PathRequirements::default();
+                let mut requirements = Condition::default();
                 for item_index in selected {
-                    requirements.merge(
+                    requirements = requirements.merge(
                         mapped_req
                             .get(*item_index)
-                            .ok_or(PolicyError::IndexOutOfRange(*item_index, index))?,
+                            .ok_or(PolicyError::IndexOutOfRange(*item_index))?,
                     )?;
                 }

                 Ok(requirements)
             }
             _ if !selected.is_empty() => Err(PolicyError::TooManyItemsSelected(index)),
-            SatisfiableItem::AbsoluteTimelock { value } => Ok(PathRequirements {
+            SatisfiableItem::AbsoluteTimelock { value } => Ok(Condition {
                 csv: None,
                 timelock: Some(*value),
             }),
-            SatisfiableItem::RelativeTimelock { value } => Ok(PathRequirements {
+            SatisfiableItem::RelativeTimelock { value } => Ok(Condition {
                 csv: Some(*value),
                 timelock: None,
             }),
-            _ => Ok(PathRequirements::default()),
+            _ => Ok(Condition::default()),
         }
     }
 }
@@ -403,15 +577,15 @@ fn signature_key_from_string(key: Option<&Box<dyn Key>>) -> Option<Policy> {
     key.map(|k| {
         let pubkey = k.as_public_key(&secp, None).unwrap();
         let mut policy: Policy = if let Some(fing) = k.fingerprint(&secp) {
-            SatisfiableItem::SignatureKey {
+            SatisfiableItem::SignatureKey(PKOrF {
                 fingerprint: Some(fing),
-                pubkey_hash: None,
-            }
+                ..Default::default()
+            })
         } else {
-            SatisfiableItem::SignatureKey {
-                fingerprint: None,
+            SatisfiableItem::SignatureKey(PKOrF {
                 pubkey_hash: Some(hash160::Hash::hash(&pubkey.to_bytes())),
-            }
+                ..Default::default()
+            })
         }
         .into();
         policy.contribution = if k.has_secret() {
@@ -427,8 +601,11 @@ fn signature_key_from_string(key: Option<&Box<dyn Key>>) -> Option<Policy> {
 }

 impl MiniscriptExtractPolicy for Miniscript<String> {
-    fn extract_policy(&self, lookup_map: &BTreeMap<String, Box<dyn Key>>) -> Option<Policy> {
-        match &self.node {
+    fn extract_policy(
+        &self,
+        lookup_map: &BTreeMap<String, Box<dyn Key>>,
+    ) -> Result<Option<Policy>, Error> {
+        Ok(match &self.node {
             // Leaves
             Terminal::True | Terminal::False => None,
             Terminal::Pk(pubkey) => signature_from_string(lookup_map.get(pubkey)),
@@ -436,9 +613,9 @@ impl MiniscriptExtractPolicy for Miniscript<String> {
             Terminal::After(value) => {
                 let mut policy: Policy = SatisfiableItem::AbsoluteTimelock { value: *value }.into();
                 policy.contribution = Satisfaction::Complete {
-                    condition: PathRequirements {
-                        csv: None,
+                    condition: Condition {
                         timelock: Some(*value),
+                        csv: None,
                     },
                 };
@@ -447,9 +624,9 @@ impl MiniscriptExtractPolicy for Miniscript<String> {
             Terminal::Older(value) => {
                 let mut policy: Policy = SatisfiableItem::RelativeTimelock { value: *value }.into();
                 policy.contribution = Satisfaction::Complete {
-                    condition: PathRequirements {
-                        csv: Some(*value),
+                    condition: Condition {
                         timelock: None,
+                        csv: Some(*value),
                     },
                 };
@@ -466,7 +643,7 @@ impl MiniscriptExtractPolicy for Miniscript<String> {
                 Some(SatisfiableItem::HASH160Preimage { hash: *hash }.into())
             }
             Terminal::ThreshM(k, pks) => {
-                Policy::make_multisig(pks.iter().map(|s| lookup_map.get(s)).collect(), *k)
+                Policy::make_multisig(pks.iter().map(|s| lookup_map.get(s)).collect(), *k)?
             }
             // Identities
             Terminal::Alt(inner)
@@ -475,52 +652,58 @@ impl MiniscriptExtractPolicy for Miniscript<String> {
             | Terminal::DupIf(inner)
             | Terminal::Verify(inner)
             | Terminal::NonZero(inner)
-            | Terminal::ZeroNotEqual(inner) => inner.extract_policy(lookup_map),
+            | Terminal::ZeroNotEqual(inner) => inner.extract_policy(lookup_map)?,
             // Complex policies
             Terminal::AndV(a, b) | Terminal::AndB(a, b) => {
-                Policy::make_and(a.extract_policy(lookup_map), b.extract_policy(lookup_map))
+                Policy::make_and(a.extract_policy(lookup_map)?, b.extract_policy(lookup_map)?)?
             }
             Terminal::AndOr(x, y, z) => Policy::make_or(
-                Policy::make_and(x.extract_policy(lookup_map), y.extract_policy(lookup_map)),
-                z.extract_policy(lookup_map),
-            ),
+                Policy::make_and(x.extract_policy(lookup_map)?, y.extract_policy(lookup_map)?)?,
+                z.extract_policy(lookup_map)?,
+            )?,
             Terminal::OrB(a, b)
             | Terminal::OrD(a, b)
            | Terminal::OrC(a, b)
             | Terminal::OrI(a, b) => {
-                Policy::make_or(a.extract_policy(lookup_map), b.extract_policy(lookup_map))
+                Policy::make_or(a.extract_policy(lookup_map)?, b.extract_policy(lookup_map)?)?
             }
             Terminal::Thresh(k, nodes) => {
                 let mut threshold = *k;
                 let mapped: Vec<_> = nodes
                     .iter()
-                    .filter_map(|n| n.extract_policy(lookup_map))
+                    .map(|n| n.extract_policy(lookup_map))
+                    .collect::<Result<Vec<_>, _>>()?
+                    .into_iter()
+                    .filter_map(|x| x)
                     .collect();

                 if mapped.len() < nodes.len() {
                     threshold = match threshold.checked_sub(nodes.len() - mapped.len()) {
-                        None => return None,
+                        None => return Ok(None),
                         Some(x) => x,
                     };
                 }

-                Policy::make_thresh(mapped, threshold)
+                Policy::make_thresh(mapped, threshold)?
             }
-        }
+        })
     }
 }

 impl MiniscriptExtractPolicy for Descriptor<String> {
-    fn extract_policy(&self, lookup_map: &BTreeMap<String, Box<dyn Key>>) -> Option<Policy> {
+    fn extract_policy(
+        &self,
+        lookup_map: &BTreeMap<String, Box<dyn Key>>,
+    ) -> Result<Option<Policy>, Error> {
         match self {
             Descriptor::Pk(pubkey)
             | Descriptor::Pkh(pubkey)
             | Descriptor::Wpkh(pubkey)
-            | Descriptor::ShWpkh(pubkey) => signature_from_string(lookup_map.get(pubkey)),
+            | Descriptor::ShWpkh(pubkey) => Ok(signature_from_string(lookup_map.get(pubkey))),
             Descriptor::Bare(inner)
             | Descriptor::Sh(inner)
             | Descriptor::Wsh(inner)
-            | Descriptor::ShWsh(inner) => inner.extract_policy(lookup_map),
+            | Descriptor::ShWsh(inner) => Ok(inner.extract_policy(lookup_map)?),
         }
     }
 }

View File

@@ -14,6 +14,7 @@ pub enum Error {
     DifferentTransactions,
     ChecksumMismatch,
+    DifferentDescriptorStructure,

     SpendingPolicyRequired,
     InvalidPolicyPathError(crate::descriptor::policy::PolicyError),

View File

@@ -9,7 +9,6 @@ use std::time::{Instant, SystemTime, UNIX_EPOCH};
 use bitcoin::blockdata::opcodes;
 use bitcoin::blockdata::script::Builder;
 use bitcoin::consensus::encode::serialize;
-use bitcoin::secp256k1::{All, Secp256k1};
 use bitcoin::util::bip32::{ChildNumber, DerivationPath};
 use bitcoin::util::psbt::PartiallySignedTransaction as PSBT;
 use bitcoin::{
@@ -45,8 +44,7 @@ pub struct Wallet<S: Read + Write, D: BatchDatabase> {
     network: Network,

     client: Option<RefCell<Client<S>>>,
-    database: RefCell<D>, // TODO: save descriptor checksum and check when loading
-    _secp: Secp256k1<All>,
+    database: RefCell<D>,
 }

 // offline actions, always available
@@ -72,13 +70,17 @@ where
                     ScriptType::Internal,
                     get_checksum(desc)?.as_bytes(),
                 )?;
-                Some(ExtendedDescriptor::from_str(desc)?)
+
+                let parsed = ExtendedDescriptor::from_str(desc)?;
+                if !parsed.same_structure(descriptor.as_ref()) {
+                    return Err(Error::DifferentDescriptorStructure);
+                }
+
+                Some(parsed)
             }
             None => None,
         };

-        // TODO: make sure that both descriptor have the same structure
         Ok(Wallet {
             descriptor,
             change_descriptor,
@@ -86,7 +88,6 @@ where
             client: None,
             database: RefCell::new(database),
-            _secp: Secp256k1::gen_new(),
         })
     }
@@ -132,13 +133,11 @@ where
         utxos: Option<Vec<OutPoint>>,
         unspendable: Option<Vec<OutPoint>>,
     ) -> Result<(PSBT, TransactionDetails), Error> {
-        let policy = self.descriptor.extract_policy().unwrap();
+        let policy = self.descriptor.extract_policy()?.unwrap();
         if policy.requires_path() && policy_path.is_none() {
             return Err(Error::SpendingPolicyRequired);
         }
-        let requirements = policy_path.map_or(Ok(Default::default()), |path| {
-            policy.get_requirements(&path)
-        })?;
+        let requirements = policy.get_requirements(&policy_path.unwrap_or(vec![]))?;
         debug!("requirements: {:?}", requirements);

         let mut tx = Transaction {
@@ -197,9 +196,14 @@ where
             input_witness_weight,
             fee_val,
         )?;
-        inputs
-            .iter_mut()
-            .for_each(|i| i.sequence = requirements.csv.unwrap_or(0xFFFFFFFF));
+        let n_sequence = if let Some(csv) = requirements.csv {
+            csv
+        } else if requirements.timelock.is_some() {
+            0xFFFFFFFE
+        } else {
+            0xFFFFFFFF
+        };
+        inputs.iter_mut().for_each(|i| i.sequence = n_sequence);
         tx.input.append(&mut inputs);

         // prepare the change output
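
The nSequence choice above encodes the requirements computed earlier: a CSV value is carried directly in the input's sequence field (BIP 68), an absolute timelock needs at least one non-final input for nLockTime to be enforced (hence 0xFFFFFFFE), and otherwise the input stays final. The same decision, restated as a standalone helper for clarity (a sketch, not code from the commit):

    // Mirrors the if/else chain in the hunk above.
    fn n_sequence_for(csv: Option<u32>, timelock: Option<u32>) -> u32 {
        match (csv, timelock) {
            // BIP 68: a relative locktime lives in the sequence field itself.
            (Some(csv), _) => csv,
            // nLockTime is only enforced when not every input's sequence is 0xFFFFFFFF.
            (None, Some(_)) => 0xFFFFFFFE,
            // No time constraints: leave the input final.
            (None, None) => 0xFFFFFFFF,
        }
    }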
@@ -300,9 +304,8 @@ where
         Ok((psbt, transaction_details))
     }

-    // TODO: define an enum for signing errors
-    pub fn sign(&self, mut psbt: PSBT) -> Result<(PSBT, bool), Error> {
-        let tx = &psbt.global.unsigned_tx;
+    // TODO: move down to the "internals"
+    fn add_hd_keypaths(&self, psbt: &mut PSBT) -> Result<(), Error> {
         let mut input_utxos = Vec::with_capacity(psbt.inputs.len());
         for n in 0..psbt.inputs.len() {
             input_utxos.push(psbt.get_utxo_for(n).clone());
@@ -339,6 +342,16 @@ where
             }
         }

+        Ok(())
+    }
+
+    // TODO: define an enum for signing errors
+    pub fn sign(&self, mut psbt: PSBT) -> Result<(PSBT, bool), Error> {
+        // this helps us doing our job later
+        self.add_hd_keypaths(&mut psbt)?;
+
+        let tx = &psbt.global.unsigned_tx;
+
         let mut signer = PSBTSigner::from_descriptor(&psbt.global.unsigned_tx, &self.descriptor)?;
         if let Some(desc) = &self.change_descriptor {
             let change_signer = PSBTSigner::from_descriptor(&psbt.global.unsigned_tx, desc)?;
@@ -480,9 +493,9 @@ where
     pub fn policies(&self, script_type: ScriptType) -> Result<Option<Policy>, Error> {
         match (script_type, self.change_descriptor.as_ref()) {
-            (ScriptType::External, _) => Ok(self.descriptor.extract_policy()),
+            (ScriptType::External, _) => Ok(self.descriptor.extract_policy()?),
             (ScriptType::Internal, None) => Ok(None),
-            (ScriptType::Internal, Some(desc)) => Ok(desc.extract_policy()),
+            (ScriptType::Internal, Some(desc)) => Ok(desc.extract_policy()?),
         }
     }
@@ -688,13 +701,17 @@ where
                     ScriptType::Internal,
                     get_checksum(desc)?.as_bytes(),
                 )?;
-                Some(ExtendedDescriptor::from_str(desc)?)
+
+                let parsed = ExtendedDescriptor::from_str(desc)?;
+                if !parsed.same_structure(descriptor.as_ref()) {
+                    return Err(Error::DifferentDescriptorStructure);
+                }
+
+                Some(parsed)
             }
             None => None,
         };

-        // TODO: make sure that both descriptor have the same structure
         Ok(Wallet {
             descriptor,
             change_descriptor,
@@ -702,7 +719,6 @@ where
             client: Some(RefCell::new(client)),
             database: RefCell::new(database),
-            _secp: Secp256k1::gen_new(),
         })
     }
@@ -944,7 +960,7 @@ where
                 .as_ref()
                 .unwrap()
                 .borrow_mut()
-                .batch_script_get_history(chunk.iter().collect::<Vec<_>>())?; // TODO: fix electrum client
+                .batch_script_get_history(chunk.iter())?;

            for (script, history) in chunk.into_iter().zip(call_result.into_iter()) {
                trace!("received history for {:?}, size {}", script, history.len());