Merge commit 'refs/pull/285/head' of github.com:bitcoindevkit/bdk

Alekos Filini 2021-02-26 10:14:01 +01:00
commit 1cbd47b988
GPG Key ID: 431401E4A4530061
17 changed files with 173 additions and 157 deletions


@ -97,7 +97,7 @@ fn main() -> Result<(), Box<dyn Error>> {
let network = matches
.value_of("network")
.and_then(|n| Some(Network::from_str(n)))
.map(|n| Network::from_str(n))
.transpose()
.unwrap()
.unwrap_or(Network::Testnet);
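Most of the hunks in this merge are the same clippy-driven cleanup: `.and_then(|x| Some(f(x)))` becomes `.map(f)`, and redundant `Ok(expr?)` wrappers become a plain `expr`. A minimal standalone sketch of the Option/Result flip that the `.map(..).transpose()` call sites rely on (illustrative only, not part of the commit):

use std::str::FromStr;

// `map` turns an Option<&str> into an Option<Result<u32, _>>;
// `transpose` flips that into a Result<Option<u32>, _>, which the
// caller can then `?`, unwrap, or default as needed.
fn parse_opt(s: Option<&str>) -> Result<Option<u32>, std::num::ParseIntError> {
    s.map(u32::from_str).transpose()
}

fn main() {
    assert_eq!(parse_opt(Some("7")), Ok(Some(7)));
    assert_eq!(parse_opt(None), Ok(None));
    assert!(parse_opt(Some("x")).is_err());
}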

scripts/cargo-check.sh Executable file

@ -0,0 +1,31 @@
#!/bin/bash
#
# Run various invocations of cargo check

features=( "default" "compiler" "electrum" "esplora" "compact_filters" "key-value-db" "async-interface" "all-keys" "keys-bip39" )
toolchains=( "+stable" "+1.45" "+nightly" )

main() {
    check_src
    check_all_targets
}

# Check with all features, with various toolchains.
check_src() {
    for toolchain in "${toolchains[@]}"; do
        cmd="cargo $toolchain clippy --all-targets --no-default-features"

        for feature in "${features[@]}"; do
            touch_files
            $cmd --features "$feature"
        done
    done
}

# Touch files to prevent cached warnings from not showing up.
touch_files() {
    touch $(find . -name *.rs)
}

main
exit 0
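For reference, a single pass of the inner loop above expands to an invocation like the following, with the toolchain and feature substituted on each iteration:

cargo +stable clippy --all-targets --no-default-features --features "electrum"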


@ -239,6 +239,7 @@ impl Blockchain for CompactFiltersBlockchain {
vec![Capability::FullHistory].into_iter().collect()
}
#[allow(clippy::mutex_atomic)] // Mutex is easier to understand than a CAS loop.
fn setup<D: BatchDatabase, P: 'static + Progress>(
&self,
_stop_gap: Option<usize>, // TODO: move to electrum and esplora only


@ -46,6 +46,8 @@ use bitcoin::BlockHash;
use bitcoin::BlockHeader;
use bitcoin::Network;
use lazy_static::lazy_static;
use super::CompactFiltersError;
lazy_static! {
@ -119,7 +121,7 @@ where
}
fn deserialize(data: &[u8]) -> Result<Self, CompactFiltersError> {
Ok(deserialize(data).map_err(|_| CompactFiltersError::DataCorruption)?)
deserialize(data).map_err(|_| CompactFiltersError::DataCorruption)
}
}
@ -436,15 +438,14 @@ impl ChainStore<Full> {
let key = StoreEntry::BlockHeaderIndex(Some(*block_hash)).get_key();
let data = read_store.get_pinned_cf(cf_handle, key)?;
Ok(data
.map(|data| {
data.map(|data| {
Ok::<_, CompactFiltersError>(usize::from_be_bytes(
data.as_ref()
.try_into()
.map_err(|_| CompactFiltersError::DataCorruption)?,
))
})
.transpose()?)
.transpose()
}
pub fn get_block_hash(&self, height: usize) -> Result<Option<BlockHash>, CompactFiltersError> {
@ -453,13 +454,12 @@ impl ChainStore<Full> {
let key = StoreEntry::BlockHeader(Some(height)).get_key();
let data = read_store.get_pinned_cf(cf_handle, key)?;
Ok(data
.map(|data| {
data.map(|data| {
let (header, _): (BlockHeader, Uint256) =
deserialize(&data).map_err(|_| CompactFiltersError::DataCorruption)?;
Ok::<_, CompactFiltersError>(header.block_hash())
})
.transpose()?)
.transpose()
}
pub fn save_full_block(&self, block: &Block, height: usize) -> Result<(), CompactFiltersError> {
@ -475,10 +475,10 @@ impl ChainStore<Full> {
let key = StoreEntry::Block(Some(height)).get_key();
let opt_block = read_store.get_pinned(key)?;
Ok(opt_block
opt_block
.map(|data| deserialize(&data))
.transpose()
.map_err(|_| CompactFiltersError::DataCorruption)?)
.map_err(|_| CompactFiltersError::DataCorruption)
}
pub fn delete_blocks_until(&self, height: usize) -> Result<(), CompactFiltersError> {
@ -565,14 +565,14 @@ impl<T: StoreType> ChainStore<T> {
let prefix = StoreEntry::BlockHeader(None).get_key();
let iterator = read_store.prefix_iterator_cf(cf_handle, prefix);
Ok(iterator
iterator
.last()
.map(|(_, v)| -> Result<_, CompactFiltersError> {
let (header, _): (BlockHeader, Uint256) = SerializeDb::deserialize(&v)?;
Ok(header.block_hash())
})
.transpose()?)
.transpose()
}
pub fn apply(
@ -716,11 +716,11 @@ impl CFStore {
// FIXME: we have to filter manually because rocksdb sometimes returns stuff that doesn't
// have the right prefix
Ok(iterator
iterator
.filter(|(k, _)| k.starts_with(&prefix))
.skip(1)
.map(|(_, data)| Ok::<_, CompactFiltersError>(BundleEntry::deserialize(&data)?.1))
.collect::<Result<_, _>>()?)
.collect::<Result<_, _>>()
}
pub fn replace_checkpoints(


@ -312,24 +312,17 @@ impl BatchDatabase for AnyDatabase {
}
}
fn commit_batch(&mut self, batch: Self::Batch) -> Result<(), Error> {
// TODO: refactor once `move_ref_pattern` is stable
#[allow(irrefutable_let_patterns)]
match self {
AnyDatabase::Memory(db) => {
if let AnyBatch::Memory(batch) = batch {
db.commit_batch(batch)
} else {
unimplemented!()
}
}
AnyDatabase::Memory(db) => match batch {
AnyBatch::Memory(batch) => db.commit_batch(batch),
#[cfg(feature = "key-value-db")]
AnyDatabase::Sled(db) => {
if let AnyBatch::Sled(batch) = batch {
db.commit_batch(batch)
} else {
unimplemented!()
}
}
_ => unimplemented!("Sled batch shouldn't be used with Memory db."),
},
#[cfg(feature = "key-value-db")]
AnyDatabase::Sled(db) => match batch {
AnyBatch::Sled(batch) => db.commit_batch(batch),
_ => unimplemented!("Memory batch shouldn't be used with Sled db."),
},
}
}
}


@ -227,7 +227,7 @@ pub mod test {
);
assert_eq!(
tree.get_path_from_script_pubkey(&script).unwrap(),
Some((keychain, path.clone()))
Some((keychain, path))
);
}
@ -256,7 +256,7 @@ pub mod test {
);
assert_eq!(
tree.get_path_from_script_pubkey(&script).unwrap(),
Some((keychain, path.clone()))
Some((keychain, path))
);
}


@ -228,10 +228,11 @@ macro_rules! impl_sortedmulti {
use $crate::keys::IntoDescriptorKey;
let secp = $crate::bitcoin::secp256k1::Secp256k1::new();
let mut keys = vec![];
let keys = vec![
$(
keys.push($key.into_descriptor_key());
$key.into_descriptor_key(),
)*
];
keys.into_iter().collect::<Result<Vec<_>, _>>()
.map_err($crate::descriptor::DescriptorError::Key)
@ -656,10 +657,11 @@ macro_rules! fragment {
use $crate::keys::IntoDescriptorKey;
let secp = $crate::bitcoin::secp256k1::Secp256k1::new();
let mut keys = vec![];
let keys = vec![
$(
keys.push($key.into_descriptor_key());
$key.into_descriptor_key(),
)*
];
keys.into_iter().collect::<Result<Vec<_>, _>>()
.map_err($crate::descriptor::DescriptorError::Key)
@ -968,7 +970,7 @@ mod test {
fn test_valid_networks() {
let xprv = bip32::ExtendedPrivKey::from_str("tprv8ZgxMBicQKsPcx5nBGsR63Pe8KnRUqmbJNENAfGftF3yuXoMMoVJJcYeUw5eVkm9WBPjWYt6HMWYJNesB5HaNVBaFc1M6dRjWSYnmewUMYy").unwrap();
let path = bip32::DerivationPath::from_str("m/0").unwrap();
let desc_key = (xprv, path.clone()).into_descriptor_key().unwrap();
let desc_key = (xprv, path).into_descriptor_key().unwrap();
let (_desc, _key_map, valid_networks) = descriptor!(pkh(desc_key)).unwrap();
assert_eq!(
@ -978,7 +980,7 @@ mod test {
let xprv = bip32::ExtendedPrivKey::from_str("xprv9s21ZrQH143K3QTDL4LXw2F7HEK3wJUD2nW2nRk4stbPy6cq3jPPqjiChkVvvNKmPGJxWUtg6LnF5kejMRNNU3TGtRBeJgk33yuGBxrMPHi").unwrap();
let path = bip32::DerivationPath::from_str("m/10/20/30/40").unwrap();
let desc_key = (xprv, path.clone()).into_descriptor_key().unwrap();
let desc_key = (xprv, path).into_descriptor_key().unwrap();
let (_desc, _key_map, valid_networks) = descriptor!(wpkh(desc_key)).unwrap();
assert_eq!(valid_networks, [Bitcoin].iter().cloned().collect());
@ -1005,12 +1007,9 @@ mod test {
descriptor!(sh(wsh(multi(2, desc_key1, desc_key2, desc_key3)))).unwrap();
assert_eq!(key_map.len(), 3);
let desc_key1: DescriptorKey<Segwitv0> =
(xprv1, path1.clone()).into_descriptor_key().unwrap();
let desc_key2: DescriptorKey<Segwitv0> =
(xprv2, path2.clone()).into_descriptor_key().unwrap();
let desc_key3: DescriptorKey<Segwitv0> =
(xprv3, path3.clone()).into_descriptor_key().unwrap();
let desc_key1: DescriptorKey<Segwitv0> = (xprv1, path1).into_descriptor_key().unwrap();
let desc_key2: DescriptorKey<Segwitv0> = (xprv2, path2).into_descriptor_key().unwrap();
let desc_key3: DescriptorKey<Segwitv0> = (xprv3, path3).into_descriptor_key().unwrap();
let (key1, _key_map, _valid_networks) = desc_key1.extract(&secp).unwrap();
let (key2, _key_map, _valid_networks) = desc_key2.extract(&secp).unwrap();
@ -1026,7 +1025,7 @@ mod test {
// this compiles
let xprv = bip32::ExtendedPrivKey::from_str("tprv8ZgxMBicQKsPcx5nBGsR63Pe8KnRUqmbJNENAfGftF3yuXoMMoVJJcYeUw5eVkm9WBPjWYt6HMWYJNesB5HaNVBaFc1M6dRjWSYnmewUMYy").unwrap();
let path = bip32::DerivationPath::from_str("m/0").unwrap();
let desc_key: DescriptorKey<Legacy> = (xprv, path.clone()).into_descriptor_key().unwrap();
let desc_key: DescriptorKey<Legacy> = (xprv, path).into_descriptor_key().unwrap();
let (desc, _key_map, _valid_networks) = descriptor!(pkh(desc_key)).unwrap();
assert_eq!(desc.to_string(), "pkh(tpubD6NzVbkrYhZ4WR7a4vY1VT3khMJMeAxVsfq9TBJyJWrNk247zCJtV7AWf6UJP7rAVsn8NNKdJi3gFyKPTmWZS9iukb91xbn2HbFSMQm2igY/0/*)#yrnz9pp2");


@ -938,7 +938,7 @@ mod test {
.unwrap();
assert!(
matches!(&policy.item, Signature(pk_or_f) if &pk_or_f.fingerprint.unwrap() == &fingerprint)
matches!(&policy.item, Signature(pk_or_f) if pk_or_f.fingerprint.unwrap() == fingerprint)
);
assert!(matches!(&policy.contribution, Satisfaction::None));
@ -953,7 +953,7 @@ mod test {
.unwrap();
assert!(
matches!(&policy.item, Signature(pk_or_f) if &pk_or_f.fingerprint.unwrap() == &fingerprint)
matches!(&policy.item, Signature(pk_or_f) if pk_or_f.fingerprint.unwrap() == fingerprint)
);
assert!(
matches!(&policy.contribution, Satisfaction::Complete {condition} if condition.csv == None && condition.timelock == None)
@ -1039,8 +1039,8 @@ mod test {
assert!(
matches!(&policy.item, Multisig { keys, threshold } if threshold == &1
&& &keys[0].fingerprint.unwrap() == &fingerprint0
&& &keys[1].fingerprint.unwrap() == &fingerprint1)
&& keys[0].fingerprint.unwrap() == fingerprint0
&& keys[1].fingerprint.unwrap() == fingerprint1)
);
assert!(
matches!(&policy.contribution, Satisfaction::PartialComplete { n, m, items, conditions, .. } if n == &2
@ -1071,8 +1071,8 @@ mod test {
assert!(
matches!(&policy.item, Multisig { keys, threshold } if threshold == &2
&& &keys[0].fingerprint.unwrap() == &fingerprint0
&& &keys[1].fingerprint.unwrap() == &fingerprint1)
&& keys[0].fingerprint.unwrap() == fingerprint0
&& keys[1].fingerprint.unwrap() == fingerprint1)
);
assert!(
@ -1103,7 +1103,7 @@ mod test {
.unwrap();
assert!(
matches!(&policy.item, Signature(pk_or_f) if &pk_or_f.fingerprint.unwrap() == &fingerprint)
matches!(&policy.item, Signature(pk_or_f) if pk_or_f.fingerprint.unwrap() == fingerprint)
);
assert!(matches!(&policy.contribution, Satisfaction::None));
@ -1119,7 +1119,7 @@ mod test {
.unwrap();
assert!(
matches!(&policy.item, Signature(pk_or_f) if &pk_or_f.fingerprint.unwrap() == &fingerprint)
matches!(&policy.item, Signature(pk_or_f) if pk_or_f.fingerprint.unwrap() == fingerprint)
);
assert!(
matches!(&policy.contribution, Satisfaction::Complete {condition} if condition.csv == None && condition.timelock == None)
@ -1147,8 +1147,8 @@ mod test {
assert!(
matches!(&policy.item, Multisig { keys, threshold } if threshold == &1
&& &keys[0].fingerprint.unwrap() == &fingerprint0
&& &keys[1].fingerprint.unwrap() == &fingerprint1)
&& keys[0].fingerprint.unwrap() == fingerprint0
&& keys[1].fingerprint.unwrap() == fingerprint1)
);
assert!(
matches!(&policy.contribution, Satisfaction::PartialComplete { n, m, items, conditions, .. } if n == &2


@ -75,7 +75,7 @@ impl<T: DescriptorTemplate> IntoWalletDescriptor for T {
secp: &SecpCtx,
network: Network,
) -> Result<(ExtendedDescriptor, KeyMap), DescriptorError> {
Ok(self.build()?.into_wallet_descriptor(secp, network)?)
self.build()?.into_wallet_descriptor(secp, network)
}
}
@ -108,7 +108,7 @@ pub struct P2PKH<K: IntoDescriptorKey<Legacy>>(pub K);
impl<K: IntoDescriptorKey<Legacy>> DescriptorTemplate for P2PKH<K> {
fn build(self) -> Result<DescriptorTemplateOut, DescriptorError> {
Ok(descriptor!(pkh(self.0))?)
descriptor!(pkh(self.0))
}
}
@ -142,7 +142,7 @@ pub struct P2WPKH_P2SH<K: IntoDescriptorKey<Segwitv0>>(pub K);
impl<K: IntoDescriptorKey<Segwitv0>> DescriptorTemplate for P2WPKH_P2SH<K> {
fn build(self) -> Result<DescriptorTemplateOut, DescriptorError> {
Ok(descriptor!(sh(wpkh(self.0)))?)
descriptor!(sh(wpkh(self.0)))
}
}
@ -175,7 +175,7 @@ pub struct P2WPKH<K: IntoDescriptorKey<Segwitv0>>(pub K);
impl<K: IntoDescriptorKey<Segwitv0>> DescriptorTemplate for P2WPKH<K> {
fn build(self) -> Result<DescriptorTemplateOut, DescriptorError> {
Ok(descriptor!(wpkh(self.0))?)
descriptor!(wpkh(self.0))
}
}
@ -210,7 +210,7 @@ pub struct BIP44<K: DerivableKey<Legacy>>(pub K, pub KeychainKind);
impl<K: DerivableKey<Legacy>> DescriptorTemplate for BIP44<K> {
fn build(self) -> Result<DescriptorTemplateOut, DescriptorError> {
Ok(P2PKH(legacy::make_bipxx_private(44, self.0, self.1)?).build()?)
P2PKH(legacy::make_bipxx_private(44, self.0, self.1)?).build()
}
}
@ -249,7 +249,7 @@ pub struct BIP44Public<K: DerivableKey<Legacy>>(pub K, pub bip32::Fingerprint, p
impl<K: DerivableKey<Legacy>> DescriptorTemplate for BIP44Public<K> {
fn build(self) -> Result<DescriptorTemplateOut, DescriptorError> {
Ok(P2PKH(legacy::make_bipxx_public(44, self.0, self.1, self.2)?).build()?)
P2PKH(legacy::make_bipxx_public(44, self.0, self.1, self.2)?).build()
}
}
@ -284,7 +284,7 @@ pub struct BIP49<K: DerivableKey<Segwitv0>>(pub K, pub KeychainKind);
impl<K: DerivableKey<Segwitv0>> DescriptorTemplate for BIP49<K> {
fn build(self) -> Result<DescriptorTemplateOut, DescriptorError> {
Ok(P2WPKH_P2SH(segwit_v0::make_bipxx_private(49, self.0, self.1)?).build()?)
P2WPKH_P2SH(segwit_v0::make_bipxx_private(49, self.0, self.1)?).build()
}
}
@ -323,7 +323,7 @@ pub struct BIP49Public<K: DerivableKey<Segwitv0>>(pub K, pub bip32::Fingerprint,
impl<K: DerivableKey<Segwitv0>> DescriptorTemplate for BIP49Public<K> {
fn build(self) -> Result<DescriptorTemplateOut, DescriptorError> {
Ok(P2WPKH_P2SH(segwit_v0::make_bipxx_public(49, self.0, self.1, self.2)?).build()?)
P2WPKH_P2SH(segwit_v0::make_bipxx_public(49, self.0, self.1, self.2)?).build()
}
}
@ -358,7 +358,7 @@ pub struct BIP84<K: DerivableKey<Segwitv0>>(pub K, pub KeychainKind);
impl<K: DerivableKey<Segwitv0>> DescriptorTemplate for BIP84<K> {
fn build(self) -> Result<DescriptorTemplateOut, DescriptorError> {
Ok(P2WPKH(segwit_v0::make_bipxx_private(84, self.0, self.1)?).build()?)
P2WPKH(segwit_v0::make_bipxx_private(84, self.0, self.1)?).build()
}
}
@ -397,7 +397,7 @@ pub struct BIP84Public<K: DerivableKey<Segwitv0>>(pub K, pub bip32::Fingerprint,
impl<K: DerivableKey<Segwitv0>> DescriptorTemplate for BIP84Public<K> {
fn build(self) -> Result<DescriptorTemplateOut, DescriptorError> {
Ok(P2WPKH(segwit_v0::make_bipxx_public(84, self.0, self.1, self.2)?).build()?)
P2WPKH(segwit_v0::make_bipxx_public(84, self.0, self.1, self.2)?).build()
}
}
@ -440,11 +440,11 @@ macro_rules! expand_make_bipxx {
KeychainKind::Internal => vec![bip32::ChildNumber::from_normal_idx(1)?].into(),
};
let mut source_path = Vec::with_capacity(3);
source_path.push(bip32::ChildNumber::from_hardened_idx(bip)?);
source_path.push(bip32::ChildNumber::from_hardened_idx(0)?);
source_path.push(bip32::ChildNumber::from_hardened_idx(0)?);
let source_path: bip32::DerivationPath = source_path.into();
let source_path = bip32::DerivationPath::from(vec![
bip32::ChildNumber::from_hardened_idx(bip)?,
bip32::ChildNumber::from_hardened_idx(0)?,
bip32::ChildNumber::from_hardened_idx(0)?,
]);
Ok((key, (parent_fingerprint, source_path), derivation_path))
}


@ -733,7 +733,7 @@ fn expand_multi_keys<Pk: IntoDescriptorKey<Ctx>, Ctx: ScriptContext>(
) -> Result<(Vec<DescriptorPublicKey>, KeyMap, ValidNetworks), KeyError> {
let (pks, key_maps_networks): (Vec<_>, Vec<_>) = pks
.into_iter()
.map(|key| Ok::<_, KeyError>(key.into_descriptor_key()?.extract(secp)?))
.map(|key| key.into_descriptor_key()?.extract(secp))
.collect::<Result<Vec<_>, _>>()?
.into_iter()
.map(|(a, b, c)| (a, (b, c)))


@ -225,7 +225,6 @@ extern crate async_trait;
extern crate bdk_macros;
#[cfg(feature = "compact_filters")]
#[macro_use]
extern crate lazy_static;
#[cfg(feature = "electrum")]


@ -815,7 +815,7 @@ mod test {
.coin_select(
&database,
vec![],
utxos.clone(),
utxos,
FeeRate::from_sat_per_vb(1.0),
99932, // first utxo's effective value
0.0,


@ -1970,7 +1970,7 @@ mod test {
assert_eq!(psbt.inputs[0].bip32_derivation.len(), 1);
assert_eq!(
psbt.inputs[0].bip32_derivation.values().nth(0).unwrap(),
psbt.inputs[0].bip32_derivation.values().next().unwrap(),
&(
Fingerprint::from_str("d34db33f").unwrap(),
DerivationPath::from_str("m/44'/0'/0'/0/0").unwrap()
@ -1996,7 +1996,7 @@ mod test {
assert_eq!(psbt.outputs[0].bip32_derivation.len(), 1);
assert_eq!(
psbt.outputs[0].bip32_derivation.values().nth(0).unwrap(),
psbt.outputs[0].bip32_derivation.values().next().unwrap(),
&(
Fingerprint::from_str("d34db33f").unwrap(),
DerivationPath::from_str("m/44'/0'/0'/0/5").unwrap()
@ -3429,15 +3429,18 @@ mod test {
let (mut psbt, _) = builder.finish().unwrap();
// add another input to the psbt that is at least passable.
let mut dud_input = bitcoin::util::psbt::Input::default();
dud_input.witness_utxo = Some(TxOut {
let dud_input = bitcoin::util::psbt::Input {
witness_utxo: Some(TxOut {
value: 100_000,
script_pubkey: miniscript::Descriptor::<bitcoin::PublicKey>::from_str(
"wpkh(025476c2e83188368da1ff3e292e7acafcdb3566bb0ad253f62fc70f07aeee6357)",
)
.unwrap()
.script_pubkey(),
});
}),
..Default::default()
};
psbt.inputs.push(dud_input);
psbt.global.unsigned_tx.input.push(bitcoin::TxIn::default());
let (psbt, is_final) = wallet.sign(psbt, None).unwrap();
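The dud_input change above swaps field-by-field mutation of a default value for struct update syntax. A minimal standalone sketch of the same pattern, using a hypothetical Input type rather than the PSBT input from the test:

// Hypothetical stand-in for the PSBT input type used above.
#[derive(Default, Debug)]
struct Input {
    value: u64,
    label: Option<String>,
}

fn main() {
    // Set one field explicitly and take the rest from Default,
    // so no `mut` binding is needed.
    let input = Input {
        value: 100_000,
        ..Default::default()
    };
    println!("{:?}", input);
}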


@ -569,6 +569,11 @@ mod signers_container_tests {
use miniscript::ScriptContext;
use std::str::FromStr;
fn is_equal(this: &Arc<dyn Signer>, that: &Arc<DummySigner>) -> bool {
let secp = Secp256k1::new();
this.id(&secp) == that.id(&secp)
}
// Signers added with the same ordering (like `Ordering::default`) created from `KeyMap`
// should be preserved and not overwritten.
// This happens usually when a set of signers is created from a descriptor with private keys.
@ -593,73 +598,58 @@ mod signers_container_tests {
#[test]
fn signers_sorted_by_ordering() {
let mut signers = SignersContainer::new();
let signer1 = Arc::new(DummySigner);
let signer2 = Arc::new(DummySigner);
let signer3 = Arc::new(DummySigner);
let signer1 = Arc::new(DummySigner { number: 1 });
let signer2 = Arc::new(DummySigner { number: 2 });
let signer3 = Arc::new(DummySigner { number: 3 });
signers.add_external(
SignerId::Fingerprint(b"cafe"[..].into()),
SignerOrdering(1),
signer1.clone(),
);
signers.add_external(
SignerId::Fingerprint(b"babe"[..].into()),
SignerOrdering(2),
signer2.clone(),
);
signers.add_external(
SignerId::Fingerprint(b"feed"[..].into()),
SignerOrdering(3),
signer3.clone(),
);
// Mixed order insertions verifies we are not inserting at head or tail.
signers.add_external(SignerId::Dummy(2), SignerOrdering(2), signer2.clone());
signers.add_external(SignerId::Dummy(1), SignerOrdering(1), signer1.clone());
signers.add_external(SignerId::Dummy(3), SignerOrdering(3), signer3.clone());
// Check that signers are sorted from lowest to highest ordering
let signers = signers.signers();
assert_eq!(Arc::as_ptr(signers[0]), Arc::as_ptr(&signer1));
assert_eq!(Arc::as_ptr(signers[1]), Arc::as_ptr(&signer2));
assert_eq!(Arc::as_ptr(signers[2]), Arc::as_ptr(&signer3));
assert!(is_equal(signers[0], &signer1));
assert!(is_equal(signers[1], &signer2));
assert!(is_equal(signers[2], &signer3));
}
#[test]
fn find_signer_by_id() {
let mut signers = SignersContainer::new();
let signer1: Arc<dyn Signer> = Arc::new(DummySigner);
let signer2: Arc<dyn Signer> = Arc::new(DummySigner);
let signer3: Arc<dyn Signer> = Arc::new(DummySigner);
let signer4: Arc<dyn Signer> = Arc::new(DummySigner);
let signer1 = Arc::new(DummySigner { number: 1 });
let signer2 = Arc::new(DummySigner { number: 2 });
let signer3 = Arc::new(DummySigner { number: 3 });
let signer4 = Arc::new(DummySigner { number: 3 }); // Same ID as `signer3` but will use lower ordering.
let id1 = SignerId::Fingerprint(b"cafe"[..].into());
let id2 = SignerId::Fingerprint(b"babe"[..].into());
let id3 = SignerId::Fingerprint(b"feed"[..].into());
let id_nonexistent = SignerId::Fingerprint(b"fefe"[..].into());
let id1 = SignerId::Dummy(1);
let id2 = SignerId::Dummy(2);
let id3 = SignerId::Dummy(3);
let id_nonexistent = SignerId::Dummy(999);
signers.add_external(id1.clone(), SignerOrdering(1), signer1.clone());
signers.add_external(id2.clone(), SignerOrdering(2), signer2.clone());
signers.add_external(id3.clone(), SignerOrdering(3), signer3.clone());
assert!(
matches!(signers.find(id1), Some(signer) if Arc::as_ptr(&signer1) == Arc::as_ptr(signer))
);
assert!(
matches!(signers.find(id2), Some(signer) if Arc::as_ptr(&signer2) == Arc::as_ptr(signer))
);
assert!(
matches!(signers.find(id3.clone()), Some(signer) if Arc::as_ptr(&signer3) == Arc::as_ptr(signer))
);
assert!(matches!(signers.find(id1), Some(signer) if is_equal(signer, &signer1)));
assert!(matches!(signers.find(id2), Some(signer) if is_equal(signer, &signer2)));
assert!(matches!(signers.find(id3.clone()), Some(signer) if is_equal(signer, &signer3)));
// The `signer4` has the same ID as `signer3` but lower ordering.
// It should be found by `id3` instead of `signer3`.
signers.add_external(id3.clone(), SignerOrdering(2), signer4.clone());
assert!(
matches!(signers.find(id3), Some(signer) if Arc::as_ptr(&signer4) == Arc::as_ptr(signer))
);
assert!(matches!(signers.find(id3), Some(signer) if is_equal(signer, &signer4)));
// Can't find anything with ID that doesn't exist
assert!(matches!(signers.find(id_nonexistent), None));
}
#[derive(Debug)]
struct DummySigner;
#[derive(Debug, Clone, Copy)]
struct DummySigner {
number: u64,
}
impl Signer for DummySigner {
fn sign(
&self,
@ -671,7 +661,7 @@ mod signers_container_tests {
}
fn id(&self, _secp: &SecpCtx) -> SignerId {
SignerId::Dummy(42)
SignerId::Dummy(self.number)
}
fn sign_whole_tx(&self) -> bool {


@ -721,7 +721,7 @@ impl ChangeSpendPolicy {
#[cfg(test)]
mod test {
const ORDERING_TEST_TX: &'static str = "0200000003c26f3eb7932f7acddc5ddd26602b77e7516079b03090a16e2c2f54\
const ORDERING_TEST_TX: &str = "0200000003c26f3eb7932f7acddc5ddd26602b77e7516079b03090a16e2c2f54\
85d1fd600f0100000000ffffffffc26f3eb7932f7acddc5ddd26602b77e75160\
79b03090a16e2c2f5485d1fd600f0000000000ffffffff571fb3e02278217852\
dd5d299947e2b7354a639adc32ec1fa7b82cfb5dec530e0500000000ffffffff\
@ -770,7 +770,7 @@ mod test {
use std::str::FromStr;
let original_tx = ordering_test_tx!();
let mut tx = original_tx.clone();
let mut tx = original_tx;
TxOrdering::BIP69Lexicographic.sort_tx(&mut tx);
@ -828,9 +828,9 @@ mod test {
let filtered = get_test_utxos()
.into_iter()
.filter(|u| change_spend_policy.is_satisfied_by(u))
.collect::<Vec<_>>();
.count();
assert_eq!(filtered.len(), 2);
assert_eq!(filtered, 2);
}
#[test]


@ -156,8 +156,8 @@ pub struct ChunksIterator<I: Iterator> {
size: usize,
}
#[cfg(any(feature = "electrum", feature = "esplora"))]
impl<I: Iterator> ChunksIterator<I> {
#[allow(dead_code)]
pub fn new(iter: I, size: usize) -> Self {
ChunksIterator { iter, size }
}


@ -60,13 +60,13 @@ fn get_auth() -> Auth {
),
_ => Auth::CookieFile(PathBuf::from(
env::var("BDK_RPC_COOKIEFILE")
.unwrap_or("/home/user/.bitcoin/regtest/.cookie".to_string()),
.unwrap_or_else(|_| "/home/user/.bitcoin/regtest/.cookie".to_string()),
)),
}
}
pub fn get_electrum_url() -> String {
env::var("BDK_ELECTRUM_URL").unwrap_or("tcp://127.0.0.1:50001".to_string())
env::var("BDK_ELECTRUM_URL").unwrap_or_else(|_| "tcp://127.0.0.1:50001".to_string())
}
pub struct TestClient {
@ -311,8 +311,8 @@ where
impl TestClient {
pub fn new() -> Self {
let url = env::var("BDK_RPC_URL").unwrap_or("127.0.0.1:18443".to_string());
let wallet = env::var("BDK_RPC_WALLET").unwrap_or("bdk-test".to_string());
let url = env::var("BDK_RPC_URL").unwrap_or_else(|_| "127.0.0.1:18443".to_string());
let wallet = env::var("BDK_RPC_WALLET").unwrap_or_else(|_| "bdk-test".to_string());
let client =
RpcClient::new(format!("http://{}/wallet/{}", url, wallet), get_auth()).unwrap();
let electrum = ElectrumClient::new(&get_electrum_url()).unwrap();
@ -349,7 +349,7 @@ impl TestClient {
pub fn receive(&mut self, meta_tx: TestIncomingTx) -> Txid {
assert!(
meta_tx.output.len() > 0,
!meta_tx.output.is_empty(),
"can't create a transaction with no outputs"
);
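A note on the unwrap_or to unwrap_or_else changes above: with unwrap_or the fallback String is allocated even when the environment variable is set, while unwrap_or_else only runs the closure on the Err path (this is what clippy's or_fun_call lint flags). A minimal sketch reusing the same variable and default value, illustrative only:

use std::env;

fn electrum_url() -> String {
    // The default string is only allocated when BDK_ELECTRUM_URL is unset.
    env::var("BDK_ELECTRUM_URL").unwrap_or_else(|_| "tcp://127.0.0.1:50001".to_string())
}

fn main() {
    println!("{}", electrum_url());
}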