Merge commit 'refs/pull/285/head' of github.com:bitcoindevkit/bdk
Commit 1cbd47b988
@@ -97,7 +97,7 @@ fn main() -> Result<(), Box<dyn Error>> {
 
     let network = matches
         .value_of("network")
-        .and_then(|n| Some(Network::from_str(n)))
+        .map(|n| Network::from_str(n))
        .transpose()
         .unwrap()
         .unwrap_or(Network::Testnet);
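Note: the change above is the usual clippy fix for an `and_then` closure that always returns `Some` (the `bind_instead_of_map` lint): such a closure is just a `map`. A minimal standalone sketch of the same parse-then-default chain, using a hypothetical `parse_or_default` helper over plain strings instead of the example's clap matches:

use std::str::FromStr;

fn parse_or_default(arg: Option<&str>) -> u32 {
    arg
        // `map` instead of `and_then(|n| Some(...))`: no extra `Some` wrapping is needed.
        .map(|n| u32::from_str(n))
        // Option<Result<u32, _>> -> Result<Option<u32>, _>
        .transpose()
        // panic on an unparsable value, exactly like the example's `.unwrap()`
        .unwrap()
        // fall back to a default when the argument was absent
        .unwrap_or(42)
}

fn main() {
    assert_eq!(parse_or_default(Some("7")), 7);
    assert_eq!(parse_or_default(None), 42);
}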
scripts/cargo-check.sh (new executable file, 31 lines)
@@ -0,0 +1,31 @@
+#!/bin/bash
+#
+# Run various invocations of cargo check
+
+features=( "default" "compiler" "electrum" "esplora" "compact_filters" "key-value-db" "async-interface" "all-keys" "keys-bip39" )
+toolchains=( "+stable" "+1.45" "+nightly" )
+
+main() {
+    check_src
+    check_all_targets
+}
+
+# Check with all features, with various toolchains.
+check_src() {
+    for toolchain in "${toolchains[@]}"; do
+        cmd="cargo $toolchain clippy --all-targets --no-default-features"
+
+        for feature in "${features[@]}"; do
+            touch_files
+            $cmd --features "$feature"
+        done
+    done
+}
+
+# Touch files to prevent cached warnings from not showing up.
+touch_files() {
+    touch $(find . -name *.rs)
+}
+
+main
+exit 0
@@ -239,6 +239,7 @@ impl Blockchain for CompactFiltersBlockchain {
         vec![Capability::FullHistory].into_iter().collect()
     }
 
+    #[allow(clippy::mutex_atomic)] // Mutex is easier to understand than a CAS loop.
     fn setup<D: BatchDatabase, P: 'static + Progress>(
         &self,
         _stop_gap: Option<usize>, // TODO: move to electrum and esplora only
@@ -46,6 +46,8 @@ use bitcoin::BlockHash;
 use bitcoin::BlockHeader;
 use bitcoin::Network;
 
+use lazy_static::lazy_static;
+
 use super::CompactFiltersError;
 
 lazy_static! {
@@ -119,7 +121,7 @@ where
     }
 
     fn deserialize(data: &[u8]) -> Result<Self, CompactFiltersError> {
-        Ok(deserialize(data).map_err(|_| CompactFiltersError::DataCorruption)?)
+        deserialize(data).map_err(|_| CompactFiltersError::DataCorruption)
     }
 }
 
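Note: this hunk, and several of the descriptor-template hunks further down, apply the same clippy cleanup (`needless_question_mark`): wrapping an expression in `Ok(...)` only to immediately apply `?` is redundant once the error types already line up, so the inner `Result` is returned as-is. A tiny before/after sketch with an invented `ParseError` type:

#[derive(Debug)]
struct ParseError;

// Before the cleanup this body would read `Ok(s.parse().map_err(|_| ParseError)?)`;
// the `Ok( ... ?)` pair adds nothing, so the Result is returned directly.
fn parse_one(s: &str) -> Result<u32, ParseError> {
    s.parse().map_err(|_| ParseError)
}

fn main() {
    assert_eq!(parse_one("42").unwrap(), 42);
    assert!(parse_one("nope").is_err());
}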
@@ -436,15 +438,14 @@ impl ChainStore<Full> {
 
         let key = StoreEntry::BlockHeaderIndex(Some(*block_hash)).get_key();
         let data = read_store.get_pinned_cf(cf_handle, key)?;
-        Ok(data
-            .map(|data| {
-                Ok::<_, CompactFiltersError>(usize::from_be_bytes(
-                    data.as_ref()
-                        .try_into()
-                        .map_err(|_| CompactFiltersError::DataCorruption)?,
-                ))
-            })
-            .transpose()?)
+        data.map(|data| {
+            Ok::<_, CompactFiltersError>(usize::from_be_bytes(
+                data.as_ref()
+                    .try_into()
+                    .map_err(|_| CompactFiltersError::DataCorruption)?,
+            ))
+        })
+        .transpose()
     }
 
     pub fn get_block_hash(&self, height: usize) -> Result<Option<BlockHash>, CompactFiltersError> {
@@ -453,13 +454,12 @@ impl ChainStore<Full> {
 
         let key = StoreEntry::BlockHeader(Some(height)).get_key();
         let data = read_store.get_pinned_cf(cf_handle, key)?;
-        Ok(data
-            .map(|data| {
-                let (header, _): (BlockHeader, Uint256) =
-                    deserialize(&data).map_err(|_| CompactFiltersError::DataCorruption)?;
-                Ok::<_, CompactFiltersError>(header.block_hash())
-            })
-            .transpose()?)
+        data.map(|data| {
+            let (header, _): (BlockHeader, Uint256) =
+                deserialize(&data).map_err(|_| CompactFiltersError::DataCorruption)?;
+            Ok::<_, CompactFiltersError>(header.block_hash())
+        })
+        .transpose()
     }
 
     pub fn save_full_block(&self, block: &Block, height: usize) -> Result<(), CompactFiltersError> {
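Note: both getters rely on the same `Option::map` + `transpose` idiom: the store lookup yields an `Option` of raw bytes, the closure turns each value into a `Result`, and `transpose` flips `Option<Result<T, E>>` into the `Result<Option<T>, E>` the signature wants, which is what makes the old outer `Ok(...?)` unnecessary. A self-contained sketch with a toy store and error type (names invented for illustration, not bdk's API):

use std::convert::TryInto;

#[derive(Debug)]
enum StoreError {
    DataCorruption,
}

// Stand-in for the raw bytes read from the database for a key.
fn read_raw(key: u8) -> Option<Vec<u8>> {
    if key == 1 {
        Some(vec![0, 0, 0, 7])
    } else {
        None
    }
}

fn get_u32(key: u8) -> Result<Option<u32>, StoreError> {
    read_raw(key)
        .map(|data| {
            let bytes: [u8; 4] = data
                .as_slice()
                .try_into()
                .map_err(|_| StoreError::DataCorruption)?;
            Ok::<_, StoreError>(u32::from_be_bytes(bytes))
        })
        // Option<Result<u32, _>> -> Result<Option<u32>, _>
        .transpose()
}

fn main() -> Result<(), StoreError> {
    assert_eq!(get_u32(1)?, Some(7));
    assert_eq!(get_u32(0)?, None);
    Ok(())
}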
@@ -475,10 +475,10 @@ impl ChainStore<Full> {
         let key = StoreEntry::Block(Some(height)).get_key();
         let opt_block = read_store.get_pinned(key)?;
 
-        Ok(opt_block
+        opt_block
             .map(|data| deserialize(&data))
             .transpose()
-            .map_err(|_| CompactFiltersError::DataCorruption)?)
+            .map_err(|_| CompactFiltersError::DataCorruption)
     }
 
     pub fn delete_blocks_until(&self, height: usize) -> Result<(), CompactFiltersError> {
@@ -565,14 +565,14 @@ impl<T: StoreType> ChainStore<T> {
         let prefix = StoreEntry::BlockHeader(None).get_key();
         let iterator = read_store.prefix_iterator_cf(cf_handle, prefix);
 
-        Ok(iterator
+        iterator
             .last()
             .map(|(_, v)| -> Result<_, CompactFiltersError> {
                 let (header, _): (BlockHeader, Uint256) = SerializeDb::deserialize(&v)?;
 
                 Ok(header.block_hash())
             })
-            .transpose()?)
+            .transpose()
     }
 
     pub fn apply(
@@ -716,11 +716,11 @@ impl CFStore {
 
         // FIXME: we have to filter manually because rocksdb sometimes returns stuff that doesn't
         // have the right prefix
-        Ok(iterator
+        iterator
             .filter(|(k, _)| k.starts_with(&prefix))
             .skip(1)
             .map(|(_, data)| Ok::<_, CompactFiltersError>(BundleEntry::deserialize(&data)?.1))
-            .collect::<Result<_, _>>()?)
+            .collect::<Result<_, _>>()
     }
 
     pub fn replace_checkpoints(
@@ -312,24 +312,17 @@ impl BatchDatabase for AnyDatabase {
         }
     }
     fn commit_batch(&mut self, batch: Self::Batch) -> Result<(), Error> {
-        // TODO: refactor once `move_ref_pattern` is stable
-        #[allow(irrefutable_let_patterns)]
         match self {
-            AnyDatabase::Memory(db) => {
-                if let AnyBatch::Memory(batch) = batch {
-                    db.commit_batch(batch)
-                } else {
-                    unimplemented!()
-                }
-            }
+            AnyDatabase::Memory(db) => match batch {
+                AnyBatch::Memory(batch) => db.commit_batch(batch),
+                #[cfg(feature = "key-value-db")]
+                _ => unimplemented!("Sled batch shouldn't be used with Memory db."),
+            },
             #[cfg(feature = "key-value-db")]
-            AnyDatabase::Sled(db) => {
-                if let AnyBatch::Sled(batch) = batch {
-                    db.commit_batch(batch)
-                } else {
-                    unimplemented!()
-                }
-            }
+            AnyDatabase::Sled(db) => match batch {
+                AnyBatch::Sled(batch) => db.commit_batch(batch),
+                _ => unimplemented!("Memory batch shouldn't be used with Sled db."),
+            },
         }
     }
 }
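Note: the rewrite drops the `irrefutable_let_patterns` allowance by matching on the batch directly, so each database variant only accepts its own batch type and anything else hits an explicit `unimplemented!` message. The same shape in miniature (the enums and the stored `Vec<String>` payloads below are made up purely for illustration):

enum Db {
    Memory(Vec<String>),
    Sled(Vec<String>),
}

enum Batch {
    Memory(String),
    Sled(String),
}

impl Db {
    fn commit_batch(&mut self, batch: Batch) {
        match self {
            // Match on the batch inside each arm instead of an if-let/else chain.
            Db::Memory(entries) => match batch {
                Batch::Memory(entry) => entries.push(entry),
                _ => unimplemented!("Sled batch shouldn't be used with Memory db."),
            },
            Db::Sled(entries) => match batch {
                Batch::Sled(entry) => entries.push(entry),
                _ => unimplemented!("Memory batch shouldn't be used with Sled db."),
            },
        }
    }
}

fn main() {
    let mut mem = Db::Memory(Vec::new());
    mem.commit_batch(Batch::Memory("key=value".into()));

    let mut sled = Db::Sled(Vec::new());
    sled.commit_batch(Batch::Sled("key=value".into()));
}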
@@ -227,7 +227,7 @@ pub mod test {
         );
         assert_eq!(
             tree.get_path_from_script_pubkey(&script).unwrap(),
-            Some((keychain, path.clone()))
+            Some((keychain, path))
         );
     }
 
@@ -256,7 +256,7 @@ pub mod test {
         );
         assert_eq!(
             tree.get_path_from_script_pubkey(&script).unwrap(),
-            Some((keychain, path.clone()))
+            Some((keychain, path))
         );
     }
 
@@ -228,10 +228,11 @@ macro_rules! impl_sortedmulti {
         use $crate::keys::IntoDescriptorKey;
         let secp = $crate::bitcoin::secp256k1::Secp256k1::new();
 
-        let mut keys = vec![];
+        let keys = vec![
         $(
-            keys.push($key.into_descriptor_key());
+            $key.into_descriptor_key(),
         )*
+        ];
 
         keys.into_iter().collect::<Result<Vec<_>, _>>()
             .map_err($crate::descriptor::DescriptorError::Key)
@@ -656,10 +657,11 @@ macro_rules! fragment {
         use $crate::keys::IntoDescriptorKey;
         let secp = $crate::bitcoin::secp256k1::Secp256k1::new();
 
-        let mut keys = vec![];
+        let keys = vec![
         $(
-            keys.push($key.into_descriptor_key());
+            $key.into_descriptor_key(),
        )*
+        ];
 
         keys.into_iter().collect::<Result<Vec<_>, _>>()
             .map_err($crate::descriptor::DescriptorError::Key)
@@ -968,7 +970,7 @@ mod test {
     fn test_valid_networks() {
         let xprv = bip32::ExtendedPrivKey::from_str("tprv8ZgxMBicQKsPcx5nBGsR63Pe8KnRUqmbJNENAfGftF3yuXoMMoVJJcYeUw5eVkm9WBPjWYt6HMWYJNesB5HaNVBaFc1M6dRjWSYnmewUMYy").unwrap();
         let path = bip32::DerivationPath::from_str("m/0").unwrap();
-        let desc_key = (xprv, path.clone()).into_descriptor_key().unwrap();
+        let desc_key = (xprv, path).into_descriptor_key().unwrap();
 
         let (_desc, _key_map, valid_networks) = descriptor!(pkh(desc_key)).unwrap();
         assert_eq!(
@@ -978,7 +980,7 @@ mod test {
 
         let xprv = bip32::ExtendedPrivKey::from_str("xprv9s21ZrQH143K3QTDL4LXw2F7HEK3wJUD2nW2nRk4stbPy6cq3jPPqjiChkVvvNKmPGJxWUtg6LnF5kejMRNNU3TGtRBeJgk33yuGBxrMPHi").unwrap();
         let path = bip32::DerivationPath::from_str("m/10/20/30/40").unwrap();
-        let desc_key = (xprv, path.clone()).into_descriptor_key().unwrap();
+        let desc_key = (xprv, path).into_descriptor_key().unwrap();
 
         let (_desc, _key_map, valid_networks) = descriptor!(wpkh(desc_key)).unwrap();
         assert_eq!(valid_networks, [Bitcoin].iter().cloned().collect());
@@ -1005,12 +1007,9 @@ mod test {
             descriptor!(sh(wsh(multi(2, desc_key1, desc_key2, desc_key3)))).unwrap();
         assert_eq!(key_map.len(), 3);
 
-        let desc_key1: DescriptorKey<Segwitv0> =
-            (xprv1, path1.clone()).into_descriptor_key().unwrap();
-        let desc_key2: DescriptorKey<Segwitv0> =
-            (xprv2, path2.clone()).into_descriptor_key().unwrap();
-        let desc_key3: DescriptorKey<Segwitv0> =
-            (xprv3, path3.clone()).into_descriptor_key().unwrap();
+        let desc_key1: DescriptorKey<Segwitv0> = (xprv1, path1).into_descriptor_key().unwrap();
+        let desc_key2: DescriptorKey<Segwitv0> = (xprv2, path2).into_descriptor_key().unwrap();
+        let desc_key3: DescriptorKey<Segwitv0> = (xprv3, path3).into_descriptor_key().unwrap();
 
         let (key1, _key_map, _valid_networks) = desc_key1.extract(&secp).unwrap();
         let (key2, _key_map, _valid_networks) = desc_key2.extract(&secp).unwrap();
@@ -1026,7 +1025,7 @@ mod test {
         // this compiles
         let xprv = bip32::ExtendedPrivKey::from_str("tprv8ZgxMBicQKsPcx5nBGsR63Pe8KnRUqmbJNENAfGftF3yuXoMMoVJJcYeUw5eVkm9WBPjWYt6HMWYJNesB5HaNVBaFc1M6dRjWSYnmewUMYy").unwrap();
         let path = bip32::DerivationPath::from_str("m/0").unwrap();
-        let desc_key: DescriptorKey<Legacy> = (xprv, path.clone()).into_descriptor_key().unwrap();
+        let desc_key: DescriptorKey<Legacy> = (xprv, path).into_descriptor_key().unwrap();
 
         let (desc, _key_map, _valid_networks) = descriptor!(pkh(desc_key)).unwrap();
         assert_eq!(desc.to_string(), "pkh(tpubD6NzVbkrYhZ4WR7a4vY1VT3khMJMeAxVsfq9TBJyJWrNk247zCJtV7AWf6UJP7rAVsn8NNKdJi3gFyKPTmWZS9iukb91xbn2HbFSMQm2igY/0/*)#yrnz9pp2");
@@ -938,7 +938,7 @@ mod test {
             .unwrap();
 
         assert!(
-            matches!(&policy.item, Signature(pk_or_f) if &pk_or_f.fingerprint.unwrap() == &fingerprint)
+            matches!(&policy.item, Signature(pk_or_f) if pk_or_f.fingerprint.unwrap() == fingerprint)
         );
         assert!(matches!(&policy.contribution, Satisfaction::None));
 
@@ -953,7 +953,7 @@ mod test {
             .unwrap();
 
         assert!(
-            matches!(&policy.item, Signature(pk_or_f) if &pk_or_f.fingerprint.unwrap() == &fingerprint)
+            matches!(&policy.item, Signature(pk_or_f) if pk_or_f.fingerprint.unwrap() == fingerprint)
        );
         assert!(
             matches!(&policy.contribution, Satisfaction::Complete {condition} if condition.csv == None && condition.timelock == None)
@@ -1039,8 +1039,8 @@ mod test {
 
         assert!(
             matches!(&policy.item, Multisig { keys, threshold } if threshold == &1
-                && &keys[0].fingerprint.unwrap() == &fingerprint0
-                && &keys[1].fingerprint.unwrap() == &fingerprint1)
+                && keys[0].fingerprint.unwrap() == fingerprint0
+                && keys[1].fingerprint.unwrap() == fingerprint1)
         );
         assert!(
             matches!(&policy.contribution, Satisfaction::PartialComplete { n, m, items, conditions, .. } if n == &2
@@ -1071,8 +1071,8 @@ mod test {
 
         assert!(
             matches!(&policy.item, Multisig { keys, threshold } if threshold == &2
-                && &keys[0].fingerprint.unwrap() == &fingerprint0
-                && &keys[1].fingerprint.unwrap() == &fingerprint1)
+                && keys[0].fingerprint.unwrap() == fingerprint0
+                && keys[1].fingerprint.unwrap() == fingerprint1)
         );
 
         assert!(
@@ -1103,7 +1103,7 @@ mod test {
             .unwrap();
 
         assert!(
-            matches!(&policy.item, Signature(pk_or_f) if &pk_or_f.fingerprint.unwrap() == &fingerprint)
+            matches!(&policy.item, Signature(pk_or_f) if pk_or_f.fingerprint.unwrap() == fingerprint)
         );
         assert!(matches!(&policy.contribution, Satisfaction::None));
 
@@ -1119,7 +1119,7 @@ mod test {
             .unwrap();
 
         assert!(
-            matches!(&policy.item, Signature(pk_or_f) if &pk_or_f.fingerprint.unwrap() == &fingerprint)
+            matches!(&policy.item, Signature(pk_or_f) if pk_or_f.fingerprint.unwrap() == fingerprint)
         );
         assert!(
             matches!(&policy.contribution, Satisfaction::Complete {condition} if condition.csv == None && condition.timelock == None)
@@ -1147,8 +1147,8 @@ mod test {
 
         assert!(
             matches!(&policy.item, Multisig { keys, threshold } if threshold == &1
-                && &keys[0].fingerprint.unwrap() == &fingerprint0
-                && &keys[1].fingerprint.unwrap() == &fingerprint1)
+                && keys[0].fingerprint.unwrap() == fingerprint0
+                && keys[1].fingerprint.unwrap() == fingerprint1)
         );
         assert!(
             matches!(&policy.contribution, Satisfaction::PartialComplete { n, m, items, conditions, .. } if n == &2
@@ -75,7 +75,7 @@ impl<T: DescriptorTemplate> IntoWalletDescriptor for T {
         secp: &SecpCtx,
         network: Network,
     ) -> Result<(ExtendedDescriptor, KeyMap), DescriptorError> {
-        Ok(self.build()?.into_wallet_descriptor(secp, network)?)
+        self.build()?.into_wallet_descriptor(secp, network)
     }
 }
 
@@ -108,7 +108,7 @@ pub struct P2PKH<K: IntoDescriptorKey<Legacy>>(pub K);
 
 impl<K: IntoDescriptorKey<Legacy>> DescriptorTemplate for P2PKH<K> {
     fn build(self) -> Result<DescriptorTemplateOut, DescriptorError> {
-        Ok(descriptor!(pkh(self.0))?)
+        descriptor!(pkh(self.0))
     }
 }
 
@@ -142,7 +142,7 @@ pub struct P2WPKH_P2SH<K: IntoDescriptorKey<Segwitv0>>(pub K);
 
 impl<K: IntoDescriptorKey<Segwitv0>> DescriptorTemplate for P2WPKH_P2SH<K> {
     fn build(self) -> Result<DescriptorTemplateOut, DescriptorError> {
-        Ok(descriptor!(sh(wpkh(self.0)))?)
+        descriptor!(sh(wpkh(self.0)))
     }
 }
 
@@ -175,7 +175,7 @@ pub struct P2WPKH<K: IntoDescriptorKey<Segwitv0>>(pub K);
 
 impl<K: IntoDescriptorKey<Segwitv0>> DescriptorTemplate for P2WPKH<K> {
     fn build(self) -> Result<DescriptorTemplateOut, DescriptorError> {
-        Ok(descriptor!(wpkh(self.0))?)
+        descriptor!(wpkh(self.0))
     }
 }
 
@@ -210,7 +210,7 @@ pub struct BIP44<K: DerivableKey<Legacy>>(pub K, pub KeychainKind);
 
 impl<K: DerivableKey<Legacy>> DescriptorTemplate for BIP44<K> {
     fn build(self) -> Result<DescriptorTemplateOut, DescriptorError> {
-        Ok(P2PKH(legacy::make_bipxx_private(44, self.0, self.1)?).build()?)
+        P2PKH(legacy::make_bipxx_private(44, self.0, self.1)?).build()
     }
 }
 
@@ -249,7 +249,7 @@ pub struct BIP44Public<K: DerivableKey<Legacy>>(pub K, pub bip32::Fingerprint, p
 
 impl<K: DerivableKey<Legacy>> DescriptorTemplate for BIP44Public<K> {
     fn build(self) -> Result<DescriptorTemplateOut, DescriptorError> {
-        Ok(P2PKH(legacy::make_bipxx_public(44, self.0, self.1, self.2)?).build()?)
+        P2PKH(legacy::make_bipxx_public(44, self.0, self.1, self.2)?).build()
     }
 }
 
@@ -284,7 +284,7 @@ pub struct BIP49<K: DerivableKey<Segwitv0>>(pub K, pub KeychainKind);
 
 impl<K: DerivableKey<Segwitv0>> DescriptorTemplate for BIP49<K> {
     fn build(self) -> Result<DescriptorTemplateOut, DescriptorError> {
-        Ok(P2WPKH_P2SH(segwit_v0::make_bipxx_private(49, self.0, self.1)?).build()?)
+        P2WPKH_P2SH(segwit_v0::make_bipxx_private(49, self.0, self.1)?).build()
     }
 }
 
@@ -323,7 +323,7 @@ pub struct BIP49Public<K: DerivableKey<Segwitv0>>(pub K, pub bip32::Fingerprint,
 
 impl<K: DerivableKey<Segwitv0>> DescriptorTemplate for BIP49Public<K> {
     fn build(self) -> Result<DescriptorTemplateOut, DescriptorError> {
-        Ok(P2WPKH_P2SH(segwit_v0::make_bipxx_public(49, self.0, self.1, self.2)?).build()?)
+        P2WPKH_P2SH(segwit_v0::make_bipxx_public(49, self.0, self.1, self.2)?).build()
     }
 }
 
@@ -358,7 +358,7 @@ pub struct BIP84<K: DerivableKey<Segwitv0>>(pub K, pub KeychainKind);
 
 impl<K: DerivableKey<Segwitv0>> DescriptorTemplate for BIP84<K> {
     fn build(self) -> Result<DescriptorTemplateOut, DescriptorError> {
-        Ok(P2WPKH(segwit_v0::make_bipxx_private(84, self.0, self.1)?).build()?)
+        P2WPKH(segwit_v0::make_bipxx_private(84, self.0, self.1)?).build()
     }
 }
 
@@ -397,7 +397,7 @@ pub struct BIP84Public<K: DerivableKey<Segwitv0>>(pub K, pub bip32::Fingerprint,
 
 impl<K: DerivableKey<Segwitv0>> DescriptorTemplate for BIP84Public<K> {
     fn build(self) -> Result<DescriptorTemplateOut, DescriptorError> {
-        Ok(P2WPKH(segwit_v0::make_bipxx_public(84, self.0, self.1, self.2)?).build()?)
+        P2WPKH(segwit_v0::make_bipxx_public(84, self.0, self.1, self.2)?).build()
     }
 }
 
@@ -440,11 +440,11 @@ macro_rules! expand_make_bipxx {
             KeychainKind::Internal => vec![bip32::ChildNumber::from_normal_idx(1)?].into(),
         };
 
-        let mut source_path = Vec::with_capacity(3);
-        source_path.push(bip32::ChildNumber::from_hardened_idx(bip)?);
-        source_path.push(bip32::ChildNumber::from_hardened_idx(0)?);
-        source_path.push(bip32::ChildNumber::from_hardened_idx(0)?);
-        let source_path: bip32::DerivationPath = source_path.into();
+        let source_path = bip32::DerivationPath::from(vec![
+            bip32::ChildNumber::from_hardened_idx(bip)?,
+            bip32::ChildNumber::from_hardened_idx(0)?,
+            bip32::ChildNumber::from_hardened_idx(0)?,
+        ]);
 
         Ok((key, (parent_fingerprint, source_path), derivation_path))
     }
@@ -733,7 +733,7 @@ fn expand_multi_keys<Pk: IntoDescriptorKey<Ctx>, Ctx: ScriptContext>(
 ) -> Result<(Vec<DescriptorPublicKey>, KeyMap, ValidNetworks), KeyError> {
     let (pks, key_maps_networks): (Vec<_>, Vec<_>) = pks
         .into_iter()
-        .map(|key| Ok::<_, KeyError>(key.into_descriptor_key()?.extract(secp)?))
+        .map(|key| key.into_descriptor_key()?.extract(secp))
         .collect::<Result<Vec<_>, _>>()?
         .into_iter()
         .map(|(a, b, c)| (a, (b, c)))
@@ -225,7 +225,6 @@ extern crate async_trait;
 extern crate bdk_macros;
 
 #[cfg(feature = "compact_filters")]
-#[macro_use]
 extern crate lazy_static;
 
 #[cfg(feature = "electrum")]
@@ -815,7 +815,7 @@ mod test {
             .coin_select(
                 &database,
                 vec![],
-                utxos.clone(),
+                utxos,
                 FeeRate::from_sat_per_vb(1.0),
                 99932, // first utxo's effective value
                 0.0,
@@ -1970,7 +1970,7 @@ mod test {
 
         assert_eq!(psbt.inputs[0].bip32_derivation.len(), 1);
         assert_eq!(
-            psbt.inputs[0].bip32_derivation.values().nth(0).unwrap(),
+            psbt.inputs[0].bip32_derivation.values().next().unwrap(),
             &(
                 Fingerprint::from_str("d34db33f").unwrap(),
                 DerivationPath::from_str("m/44'/0'/0'/0/0").unwrap()
@@ -1996,7 +1996,7 @@ mod test {
 
         assert_eq!(psbt.outputs[0].bip32_derivation.len(), 1);
         assert_eq!(
-            psbt.outputs[0].bip32_derivation.values().nth(0).unwrap(),
+            psbt.outputs[0].bip32_derivation.values().next().unwrap(),
             &(
                 Fingerprint::from_str("d34db33f").unwrap(),
                 DerivationPath::from_str("m/44'/0'/0'/0/5").unwrap()
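Note: `.nth(0)` advances the iterator by zero elements and then yields the next one, so it is just a roundabout `.next()`; that is all clippy's `iter_nth_zero` lint asks for here. A trivial sketch:

fn main() {
    let values = vec![10, 20, 30];
    let mut iter = values.iter();

    // `iter.nth(0)` and `iter.next()` both yield the first remaining element;
    // `next()` states the intent directly.
    assert_eq!(iter.next(), Some(&10));
    assert_eq!(iter.nth(0), Some(&20)); // equivalent, but flagged by clippy
}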
@@ -3429,15 +3429,18 @@ mod test {
         let (mut psbt, _) = builder.finish().unwrap();
 
         // add another input to the psbt that is at least passable.
-        let mut dud_input = bitcoin::util::psbt::Input::default();
-        dud_input.witness_utxo = Some(TxOut {
-            value: 100_000,
-            script_pubkey: miniscript::Descriptor::<bitcoin::PublicKey>::from_str(
-                "wpkh(025476c2e83188368da1ff3e292e7acafcdb3566bb0ad253f62fc70f07aeee6357)",
-            )
-            .unwrap()
-            .script_pubkey(),
-        });
+        let dud_input = bitcoin::util::psbt::Input {
+            witness_utxo: Some(TxOut {
+                value: 100_000,
+                script_pubkey: miniscript::Descriptor::<bitcoin::PublicKey>::from_str(
+                    "wpkh(025476c2e83188368da1ff3e292e7acafcdb3566bb0ad253f62fc70f07aeee6357)",
+                )
+                .unwrap()
+                .script_pubkey(),
+            }),
+            ..Default::default()
+        };
 
         psbt.inputs.push(dud_input);
         psbt.global.unsigned_tx.input.push(bitcoin::TxIn::default());
         let (psbt, is_final) = wallet.sign(psbt, None).unwrap();
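Note: rather than creating a default `Input` and mutating one field afterwards, the test now uses struct update syntax, spelling out only the field it cares about and taking the rest from `Default::default()`. The same pattern on a stand-in struct (the field types below are invented for the sketch, not the real `psbt::Input` layout):

#[derive(Debug, Default)]
struct DudInput {
    witness_utxo: Option<u64>,
    redeem_script: Option<Vec<u8>>,
    final_script_sig: Option<Vec<u8>>,
}

fn main() {
    // Only the interesting field is set; everything else keeps its Default value.
    let dud_input = DudInput {
        witness_utxo: Some(100_000),
        ..Default::default()
    };

    assert!(dud_input.redeem_script.is_none());
    assert!(dud_input.final_script_sig.is_none());
    println!("{:?}", dud_input);
}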
@@ -569,6 +569,11 @@ mod signers_container_tests {
     use miniscript::ScriptContext;
     use std::str::FromStr;
 
+    fn is_equal(this: &Arc<dyn Signer>, that: &Arc<DummySigner>) -> bool {
+        let secp = Secp256k1::new();
+        this.id(&secp) == that.id(&secp)
+    }
+
     // Signers added with the same ordering (like `Ordering::default`) created from `KeyMap`
     // should be preserved and not overwritten.
     // This happens usually when a set of signers is created from a descriptor with private keys.
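Note: the new helper compares signers by the `SignerId` they report rather than by `Arc` pointer identity, which is what lets the tests below distinguish `DummySigner` instances through their new `number` field. A minimal sketch of comparing a type-erased `Arc<dyn Trait>` against a concrete value through such an id method (the trait and struct here are invented for illustration, not bdk's types):

use std::sync::Arc;

trait Signer {
    fn id(&self) -> u64;
}

struct DummySigner {
    number: u64,
}

impl Signer for DummySigner {
    fn id(&self) -> u64 {
        self.number
    }
}

// Pointer equality would fail for two distinct allocations, so compare the ids instead.
fn is_equal(this: &Arc<dyn Signer>, that: &Arc<DummySigner>) -> bool {
    this.id() == that.id()
}

fn main() {
    let concrete = Arc::new(DummySigner { number: 1 });
    let erased: Arc<dyn Signer> = Arc::new(DummySigner { number: 1 });
    assert!(is_equal(&erased, &concrete));
}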
@@ -593,73 +598,58 @@ mod signers_container_tests {
     #[test]
     fn signers_sorted_by_ordering() {
         let mut signers = SignersContainer::new();
-        let signer1 = Arc::new(DummySigner);
-        let signer2 = Arc::new(DummySigner);
-        let signer3 = Arc::new(DummySigner);
+        let signer1 = Arc::new(DummySigner { number: 1 });
+        let signer2 = Arc::new(DummySigner { number: 2 });
+        let signer3 = Arc::new(DummySigner { number: 3 });
 
-        signers.add_external(
-            SignerId::Fingerprint(b"cafe"[..].into()),
-            SignerOrdering(1),
-            signer1.clone(),
-        );
-        signers.add_external(
-            SignerId::Fingerprint(b"babe"[..].into()),
-            SignerOrdering(2),
-            signer2.clone(),
-        );
-        signers.add_external(
-            SignerId::Fingerprint(b"feed"[..].into()),
-            SignerOrdering(3),
-            signer3.clone(),
-        );
+        // Mixed order insertions verifies we are not inserting at head or tail.
+        signers.add_external(SignerId::Dummy(2), SignerOrdering(2), signer2.clone());
+        signers.add_external(SignerId::Dummy(1), SignerOrdering(1), signer1.clone());
+        signers.add_external(SignerId::Dummy(3), SignerOrdering(3), signer3.clone());
 
         // Check that signers are sorted from lowest to highest ordering
         let signers = signers.signers();
-        assert_eq!(Arc::as_ptr(signers[0]), Arc::as_ptr(&signer1));
-        assert_eq!(Arc::as_ptr(signers[1]), Arc::as_ptr(&signer2));
-        assert_eq!(Arc::as_ptr(signers[2]), Arc::as_ptr(&signer3));
+
+        assert!(is_equal(signers[0], &signer1));
+        assert!(is_equal(signers[1], &signer2));
+        assert!(is_equal(signers[2], &signer3));
     }
 
     #[test]
     fn find_signer_by_id() {
         let mut signers = SignersContainer::new();
-        let signer1: Arc<dyn Signer> = Arc::new(DummySigner);
-        let signer2: Arc<dyn Signer> = Arc::new(DummySigner);
-        let signer3: Arc<dyn Signer> = Arc::new(DummySigner);
-        let signer4: Arc<dyn Signer> = Arc::new(DummySigner);
+        let signer1 = Arc::new(DummySigner { number: 1 });
+        let signer2 = Arc::new(DummySigner { number: 2 });
+        let signer3 = Arc::new(DummySigner { number: 3 });
+        let signer4 = Arc::new(DummySigner { number: 3 }); // Same ID as `signer3` but will use lower ordering.
 
-        let id1 = SignerId::Fingerprint(b"cafe"[..].into());
-        let id2 = SignerId::Fingerprint(b"babe"[..].into());
-        let id3 = SignerId::Fingerprint(b"feed"[..].into());
-        let id_nonexistent = SignerId::Fingerprint(b"fefe"[..].into());
+        let id1 = SignerId::Dummy(1);
+        let id2 = SignerId::Dummy(2);
+        let id3 = SignerId::Dummy(3);
+        let id_nonexistent = SignerId::Dummy(999);
 
         signers.add_external(id1.clone(), SignerOrdering(1), signer1.clone());
         signers.add_external(id2.clone(), SignerOrdering(2), signer2.clone());
         signers.add_external(id3.clone(), SignerOrdering(3), signer3.clone());
 
-        assert!(
-            matches!(signers.find(id1), Some(signer) if Arc::as_ptr(&signer1) == Arc::as_ptr(signer))
-        );
-        assert!(
-            matches!(signers.find(id2), Some(signer) if Arc::as_ptr(&signer2) == Arc::as_ptr(signer))
-        );
-        assert!(
-            matches!(signers.find(id3.clone()), Some(signer) if Arc::as_ptr(&signer3) == Arc::as_ptr(signer))
-        );
+        assert!(matches!(signers.find(id1), Some(signer) if is_equal(signer, &signer1)));
+        assert!(matches!(signers.find(id2), Some(signer) if is_equal(signer, &signer2)));
+        assert!(matches!(signers.find(id3.clone()), Some(signer) if is_equal(signer, &signer3)));
 
         // The `signer4` has the same ID as `signer3` but lower ordering.
         // It should be found by `id3` instead of `signer3`.
         signers.add_external(id3.clone(), SignerOrdering(2), signer4.clone());
-        assert!(
-            matches!(signers.find(id3), Some(signer) if Arc::as_ptr(&signer4) == Arc::as_ptr(signer))
-        );
+        assert!(matches!(signers.find(id3), Some(signer) if is_equal(signer, &signer4)));
 
         // Can't find anything with ID that doesn't exist
         assert!(matches!(signers.find(id_nonexistent), None));
     }
 
-    #[derive(Debug)]
-    struct DummySigner;
+    #[derive(Debug, Clone, Copy)]
+    struct DummySigner {
+        number: u64,
+    }
 
     impl Signer for DummySigner {
         fn sign(
             &self,
@@ -671,7 +661,7 @@ mod signers_container_tests {
         }
 
         fn id(&self, _secp: &SecpCtx) -> SignerId {
-            SignerId::Dummy(42)
+            SignerId::Dummy(self.number)
         }
 
         fn sign_whole_tx(&self) -> bool {
@@ -721,12 +721,12 @@ impl ChangeSpendPolicy {
 
 #[cfg(test)]
 mod test {
-    const ORDERING_TEST_TX: &'static str = "0200000003c26f3eb7932f7acddc5ddd26602b77e7516079b03090a16e2c2f54\
+    const ORDERING_TEST_TX: &str = "0200000003c26f3eb7932f7acddc5ddd26602b77e7516079b03090a16e2c2f54\
                                     85d1fd600f0100000000ffffffffc26f3eb7932f7acddc5ddd26602b77e75160\
                                     79b03090a16e2c2f5485d1fd600f0000000000ffffffff571fb3e02278217852\
                                     dd5d299947e2b7354a639adc32ec1fa7b82cfb5dec530e0500000000ffffffff\
                                     03e80300000000000002aaeee80300000000000001aa200300000000000001ff\
                                     00000000";
     macro_rules! ordering_test_tx {
         () => {
             deserialize::<bitcoin::Transaction>(&Vec::<u8>::from_hex(ORDERING_TEST_TX).unwrap())
@@ -770,7 +770,7 @@ mod test {
         use std::str::FromStr;
 
         let original_tx = ordering_test_tx!();
-        let mut tx = original_tx.clone();
+        let mut tx = original_tx;
 
         TxOrdering::BIP69Lexicographic.sort_tx(&mut tx);
 
@@ -828,9 +828,9 @@ mod test {
         let filtered = get_test_utxos()
             .into_iter()
             .filter(|u| change_spend_policy.is_satisfied_by(u))
-            .collect::<Vec<_>>();
+            .count();
 
-        assert_eq!(filtered.len(), 2);
+        assert_eq!(filtered, 2);
     }
 
     #[test]
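Note: collecting into a `Vec` just to call `.len()` allocates for nothing; `count()` consumes the iterator directly, which is clippy's `needless_collect` suggestion applied above. In miniature:

fn main() {
    let utxo_values = vec![1_000u64, 25_000, 300, 7_500];

    // Before: .filter(...).collect::<Vec<_>>() followed by .len()
    // After: count the matching items without building an intermediate vector.
    let filtered = utxo_values
        .iter()
        .filter(|&&value| value >= 1_000)
        .count();

    assert_eq!(filtered, 3);
}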
@@ -156,8 +156,8 @@ pub struct ChunksIterator<I: Iterator> {
     size: usize,
 }
 
+#[cfg(any(feature = "electrum", feature = "esplora"))]
 impl<I: Iterator> ChunksIterator<I> {
-    #[allow(dead_code)]
     pub fn new(iter: I, size: usize) -> Self {
         ChunksIterator { iter, size }
     }
@@ -60,13 +60,13 @@ fn get_auth() -> Auth {
         ),
         _ => Auth::CookieFile(PathBuf::from(
             env::var("BDK_RPC_COOKIEFILE")
-                .unwrap_or("/home/user/.bitcoin/regtest/.cookie".to_string()),
+                .unwrap_or_else(|_| "/home/user/.bitcoin/regtest/.cookie".to_string()),
         )),
     }
 }
 
 pub fn get_electrum_url() -> String {
-    env::var("BDK_ELECTRUM_URL").unwrap_or("tcp://127.0.0.1:50001".to_string())
+    env::var("BDK_ELECTRUM_URL").unwrap_or_else(|_| "tcp://127.0.0.1:50001".to_string())
 }
 
 pub struct TestClient {
@@ -311,8 +311,8 @@ where
 
 impl TestClient {
     pub fn new() -> Self {
-        let url = env::var("BDK_RPC_URL").unwrap_or("127.0.0.1:18443".to_string());
-        let wallet = env::var("BDK_RPC_WALLET").unwrap_or("bdk-test".to_string());
+        let url = env::var("BDK_RPC_URL").unwrap_or_else(|_| "127.0.0.1:18443".to_string());
+        let wallet = env::var("BDK_RPC_WALLET").unwrap_or_else(|_| "bdk-test".to_string());
         let client =
             RpcClient::new(format!("http://{}/wallet/{}", url, wallet), get_auth()).unwrap();
         let electrum = ElectrumClient::new(&get_electrum_url()).unwrap();
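Note: `unwrap_or` evaluates its fallback eagerly even when the `Result` is `Ok`, so for fallbacks that allocate (like the `to_string()` calls here) clippy's `or_fun_call` lint prefers the lazy `unwrap_or_else` form used above. A minimal sketch with an invented environment variable name:

use std::env;

fn main() {
    // The closure only runs, and the fallback String is only allocated,
    // when the variable is actually unset.
    let url = env::var("EXAMPLE_RPC_URL").unwrap_or_else(|_| "127.0.0.1:18443".to_string());
    println!("using RPC url {}", url);
}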
@@ -349,7 +349,7 @@ impl TestClient {
 
     pub fn receive(&mut self, meta_tx: TestIncomingTx) -> Txid {
         assert!(
-            meta_tx.output.len() > 0,
+            !meta_tx.output.is_empty(),
             "can't create a transaction with no outputs"
         );
 