Fix cargo clippy warnings

Disabled warnings for nursery/tmp_plan, as it is going to be replaced
anyway
This commit is contained in:
Daniela Brozzoni 2023-03-02 19:08:33 +01:00
parent 1805bd35c0
commit c61b3604e1
No known key found for this signature in database
GPG Key ID: 7DE4F1FDCED0AB87
26 changed files with 139 additions and 153 deletions

View File

@ -356,26 +356,26 @@ pub(crate) trait DescriptorMeta {
fn is_witness(&self) -> bool; fn is_witness(&self) -> bool;
fn is_taproot(&self) -> bool; fn is_taproot(&self) -> bool;
fn get_extended_keys(&self) -> Vec<DescriptorXKey<ExtendedPubKey>>; fn get_extended_keys(&self) -> Vec<DescriptorXKey<ExtendedPubKey>>;
fn derive_from_hd_keypaths<'s>( fn derive_from_hd_keypaths(
&self, &self,
hd_keypaths: &HdKeyPaths, hd_keypaths: &HdKeyPaths,
secp: &'s SecpCtx, secp: &SecpCtx,
) -> Option<DerivedDescriptor>; ) -> Option<DerivedDescriptor>;
fn derive_from_tap_key_origins<'s>( fn derive_from_tap_key_origins(
&self, &self,
tap_key_origins: &TapKeyOrigins, tap_key_origins: &TapKeyOrigins,
secp: &'s SecpCtx, secp: &SecpCtx,
) -> Option<DerivedDescriptor>; ) -> Option<DerivedDescriptor>;
fn derive_from_psbt_key_origins<'s>( fn derive_from_psbt_key_origins(
&self, &self,
key_origins: BTreeMap<Fingerprint, (&DerivationPath, SinglePubKey)>, key_origins: BTreeMap<Fingerprint, (&DerivationPath, SinglePubKey)>,
secp: &'s SecpCtx, secp: &SecpCtx,
) -> Option<DerivedDescriptor>; ) -> Option<DerivedDescriptor>;
fn derive_from_psbt_input<'s>( fn derive_from_psbt_input(
&self, &self,
psbt_input: &psbt::Input, psbt_input: &psbt::Input,
utxo: Option<TxOut>, utxo: Option<TxOut>,
secp: &'s SecpCtx, secp: &SecpCtx,
) -> Option<DerivedDescriptor>; ) -> Option<DerivedDescriptor>;
} }
@ -410,10 +410,10 @@ impl DescriptorMeta for ExtendedDescriptor {
answer answer
} }
fn derive_from_psbt_key_origins<'s>( fn derive_from_psbt_key_origins(
&self, &self,
key_origins: BTreeMap<Fingerprint, (&DerivationPath, SinglePubKey)>, key_origins: BTreeMap<Fingerprint, (&DerivationPath, SinglePubKey)>,
secp: &'s SecpCtx, secp: &SecpCtx,
) -> Option<DerivedDescriptor> { ) -> Option<DerivedDescriptor> {
// Ensure that deriving `xpub` with `path` yields `expected` // Ensure that deriving `xpub` with `path` yields `expected`
let verify_key = |xpub: &DescriptorXKey<ExtendedPubKey>, let verify_key = |xpub: &DescriptorXKey<ExtendedPubKey>,
@ -497,10 +497,10 @@ impl DescriptorMeta for ExtendedDescriptor {
path_found.map(|path| self.at_derivation_index(path)) path_found.map(|path| self.at_derivation_index(path))
} }
fn derive_from_hd_keypaths<'s>( fn derive_from_hd_keypaths(
&self, &self,
hd_keypaths: &HdKeyPaths, hd_keypaths: &HdKeyPaths,
secp: &'s SecpCtx, secp: &SecpCtx,
) -> Option<DerivedDescriptor> { ) -> Option<DerivedDescriptor> {
// "Convert" an hd_keypaths map to the format required by `derive_from_psbt_key_origins` // "Convert" an hd_keypaths map to the format required by `derive_from_psbt_key_origins`
let key_origins = hd_keypaths let key_origins = hd_keypaths
@ -515,10 +515,10 @@ impl DescriptorMeta for ExtendedDescriptor {
self.derive_from_psbt_key_origins(key_origins, secp) self.derive_from_psbt_key_origins(key_origins, secp)
} }
fn derive_from_tap_key_origins<'s>( fn derive_from_tap_key_origins(
&self, &self,
tap_key_origins: &TapKeyOrigins, tap_key_origins: &TapKeyOrigins,
secp: &'s SecpCtx, secp: &SecpCtx,
) -> Option<DerivedDescriptor> { ) -> Option<DerivedDescriptor> {
// "Convert" a tap_key_origins map to the format required by `derive_from_psbt_key_origins` // "Convert" a tap_key_origins map to the format required by `derive_from_psbt_key_origins`
let key_origins = tap_key_origins let key_origins = tap_key_origins
@ -528,11 +528,11 @@ impl DescriptorMeta for ExtendedDescriptor {
self.derive_from_psbt_key_origins(key_origins, secp) self.derive_from_psbt_key_origins(key_origins, secp)
} }
fn derive_from_psbt_input<'s>( fn derive_from_psbt_input(
&self, &self,
psbt_input: &psbt::Input, psbt_input: &psbt::Input,
utxo: Option<TxOut>, utxo: Option<TxOut>,
secp: &'s SecpCtx, secp: &SecpCtx,
) -> Option<DerivedDescriptor> { ) -> Option<DerivedDescriptor> {
if let Some(derived) = self.derive_from_hd_keypaths(&psbt_input.bip32_derivation, secp) { if let Some(derived) = self.derive_from_hd_keypaths(&psbt_input.bip32_derivation, secp) {
return Some(derived); return Some(derived);

View File

@ -723,7 +723,7 @@ mod test {
fn get_test_utxos() -> Vec<WeightedUtxo> { fn get_test_utxos() -> Vec<WeightedUtxo> {
vec![ vec![
utxo(100_000, 0, ConfirmationTime::Unconfirmed), utxo(100_000, 0, ConfirmationTime::Unconfirmed),
utxo(FEE_AMOUNT as u64 - 40, 1, ConfirmationTime::Unconfirmed), utxo(FEE_AMOUNT - 40, 1, ConfirmationTime::Unconfirmed),
utxo(200_000, 2, ConfirmationTime::Unconfirmed), utxo(200_000, 2, ConfirmationTime::Unconfirmed),
] ]
} }

View File

@ -311,13 +311,12 @@ impl<D> Wallet<D> {
.last() .last()
.unwrap(), .unwrap(),
}; };
let info = AddressInfo { AddressInfo {
index, index,
address: Address::from_script(&spk, self.network) address: Address::from_script(&spk, self.network)
.expect("descriptor must have address form"), .expect("descriptor must have address form"),
keychain, keychain,
}; }
info
} }
/// Return whether or not a `script` is part of this wallet (either internal or external) /// Return whether or not a `script` is part of this wallet (either internal or external)
@ -342,7 +341,7 @@ impl<D> Wallet<D> {
.map(|(&(keychain, derivation_index), utxo)| LocalUtxo { .map(|(&(keychain, derivation_index), utxo)| LocalUtxo {
outpoint: utxo.outpoint, outpoint: utxo.outpoint,
txout: utxo.txout, txout: utxo.txout,
keychain: keychain.clone(), keychain,
is_spent: false, is_spent: false,
derivation_index, derivation_index,
confirmation_time: utxo.chain_position, confirmation_time: utxo.chain_position,
@ -1288,8 +1287,7 @@ impl<D> Wallet<D> {
// - If that also fails, it will try it on the internal descriptor, if present // - If that also fails, it will try it on the internal descriptor, if present
let desc = psbt let desc = psbt
.get_utxo_for(n) .get_utxo_for(n)
.map(|txout| self.get_descriptor_for_txout(&txout)) .and_then(|txout| self.get_descriptor_for_txout(&txout))
.flatten()
.or_else(|| { .or_else(|| {
self.keychain_tracker self.keychain_tracker
.txout_index .txout_index

View File

@ -1544,7 +1544,7 @@ fn test_bump_fee_add_input() {
}], }],
}; };
wallet wallet
.insert_tx(init_tx.clone(), wallet.transactions().last().unwrap().0) .insert_tx(init_tx, wallet.transactions().last().unwrap().0)
.unwrap(); .unwrap();
let addr = Address::from_str("2N1Ffz3WaNzbeLFBb51xyFMHYSEUXcbiSoX").unwrap(); let addr = Address::from_str("2N1Ffz3WaNzbeLFBb51xyFMHYSEUXcbiSoX").unwrap();

View File

@ -264,7 +264,7 @@ where
} }
/// Calculates the difference between self and `update` in the form of a [`ChangeSet`]. /// Calculates the difference between self and `update` in the form of a [`ChangeSet`].
pub fn determine_changeset<'a, T2>( pub fn determine_changeset<T2>(
&self, &self,
update: &ChainGraph<P, T2>, update: &ChainGraph<P, T2>,
) -> Result<ChangeSet<P, T>, UpdateError<P>> ) -> Result<ChangeSet<P, T>, UpdateError<P>>
@ -366,7 +366,7 @@ where
// evicted, return error // evicted, return error
return Err(UnresolvableConflict { return Err(UnresolvableConflict {
already_confirmed_tx: (conflicting_pos.clone(), conflicting_txid), already_confirmed_tx: (conflicting_pos.clone(), conflicting_txid),
update_tx: (update_pos.clone(), update_txid), update_tx: (update_pos, update_txid),
}); });
} }
TxHeight::Unconfirmed => { TxHeight::Unconfirmed => {
@ -516,7 +516,7 @@ impl<P: core::fmt::Debug> core::fmt::Display for NewError<P> {
f, f,
"missing full transactions for {}", "missing full transactions for {}",
missing missing
.into_iter() .iter()
.map(|txid| txid.to_string()) .map(|txid| txid.to_string())
.collect::<Vec<_>>() .collect::<Vec<_>>()
.join(", ") .join(", ")

View File

@ -8,10 +8,10 @@ use bitcoin::{
use crate::BlockId; use crate::BlockId;
pub const RAW_TX_1: &'static str = "0200000000010116d6174da7183d70d0a7d4dc314d517a7d135db79ad63515028b293a76f4f9d10000000000feffffff023a21fc8350060000160014531c405e1881ef192294b8813631e258bf98ea7a1027000000000000225120a60869f0dbcf1dc659c9cecbaf8050135ea9e8cdc487053f1dc6880949dc684c024730440220591b1a172a122da49ba79a3e79f98aaa03fd7a372f9760da18890b6a327e6010022013e82319231da6c99abf8123d7c07e13cf9bd8d76e113e18dc452e5024db156d012102318a2d558b2936c52e320decd6d92a88d7f530be91b6fe0af5caf41661e77da3ef2e0100"; pub const RAW_TX_1: &str = "0200000000010116d6174da7183d70d0a7d4dc314d517a7d135db79ad63515028b293a76f4f9d10000000000feffffff023a21fc8350060000160014531c405e1881ef192294b8813631e258bf98ea7a1027000000000000225120a60869f0dbcf1dc659c9cecbaf8050135ea9e8cdc487053f1dc6880949dc684c024730440220591b1a172a122da49ba79a3e79f98aaa03fd7a372f9760da18890b6a327e6010022013e82319231da6c99abf8123d7c07e13cf9bd8d76e113e18dc452e5024db156d012102318a2d558b2936c52e320decd6d92a88d7f530be91b6fe0af5caf41661e77da3ef2e0100";
pub const RAW_TX_2: &'static str = "02000000000101a688607020cfae91a61e7c516b5ef1264d5d77f17200c3866826c6c808ebf1620000000000feffffff021027000000000000225120a60869f0dbcf1dc659c9cecbaf8050135ea9e8cdc487053f1dc6880949dc684c20fd48ff530600001600146886c525e41d4522042bd0b159dfbade2504a6bb024730440220740ff7e665cd20565d4296b549df8d26b941be3f1e3af89a0b60e50c0dbeb69a02206213ab7030cf6edc6c90d4ccf33010644261e029950a688dc0b1a9ebe6ddcc5a012102f2ac6b396a97853cb6cd62242c8ae4842024742074475023532a51e9c53194253e760100"; pub const RAW_TX_2: &str = "02000000000101a688607020cfae91a61e7c516b5ef1264d5d77f17200c3866826c6c808ebf1620000000000feffffff021027000000000000225120a60869f0dbcf1dc659c9cecbaf8050135ea9e8cdc487053f1dc6880949dc684c20fd48ff530600001600146886c525e41d4522042bd0b159dfbade2504a6bb024730440220740ff7e665cd20565d4296b549df8d26b941be3f1e3af89a0b60e50c0dbeb69a02206213ab7030cf6edc6c90d4ccf33010644261e029950a688dc0b1a9ebe6ddcc5a012102f2ac6b396a97853cb6cd62242c8ae4842024742074475023532a51e9c53194253e760100";
pub const RAW_TX_3: &'static str = "0200000000010135d67ee47b557e68b8c6223958f597381965ed719f1207ee2b9e20432a24a5dc0100000000feffffff021027000000000000225120a82f29944d65b86ae6b5e5cc75e294ead6c59391a1edc5e016e3498c67fc7bbb62215a5055060000160014070df7671dea67a50c4799a744b5c9be8f4bac690247304402207ebf8d29f71fd03e7e6977b3ea78ca5fcc5c49a42ae822348fc401862fdd766c02201d7e4ff0684ecb008b6142f36ead1b0b4d615524c4f58c261113d361f4427e25012103e6a75e2fab85e5ecad641afc4ffba7222f998649d9f18cac92f0fcc8618883b3ee760100"; pub const RAW_TX_3: &str = "0200000000010135d67ee47b557e68b8c6223958f597381965ed719f1207ee2b9e20432a24a5dc0100000000feffffff021027000000000000225120a82f29944d65b86ae6b5e5cc75e294ead6c59391a1edc5e016e3498c67fc7bbb62215a5055060000160014070df7671dea67a50c4799a744b5c9be8f4bac690247304402207ebf8d29f71fd03e7e6977b3ea78ca5fcc5c49a42ae822348fc401862fdd766c02201d7e4ff0684ecb008b6142f36ead1b0b4d615524c4f58c261113d361f4427e25012103e6a75e2fab85e5ecad641afc4ffba7222f998649d9f18cac92f0fcc8618883b3ee760100";
pub const RAW_TX_4: &'static str = "02000000000101d00e8f76ed313e19b339ee293c0f52b0325c95e24c8f3966fa353fb2bedbcf580100000000feffffff021027000000000000225120882d74e5d0572d5a816cef0041a96b6c1de832f6f9676d9605c44d5e9a97d3dc9cda55fe53060000160014852b5864b8edd42fab4060c87f818e50780865ff0247304402201dccbb9bed7fba924b6d249c5837cc9b37470c0e3d8fbea77cb59baba3efe6fa0220700cc170916913b9bfc2bc0fefb6af776e8b542c561702f136cddc1c7aa43141012103acec3fc79dbbca745815c2a807dc4e81010c80e308e84913f59cb42a275dad97f3760100"; pub const RAW_TX_4: &str = "02000000000101d00e8f76ed313e19b339ee293c0f52b0325c95e24c8f3966fa353fb2bedbcf580100000000feffffff021027000000000000225120882d74e5d0572d5a816cef0041a96b6c1de832f6f9676d9605c44d5e9a97d3dc9cda55fe53060000160014852b5864b8edd42fab4060c87f818e50780865ff0247304402201dccbb9bed7fba924b6d249c5837cc9b37470c0e3d8fbea77cb59baba3efe6fa0220700cc170916913b9bfc2bc0fefb6af776e8b542c561702f136cddc1c7aa43141012103acec3fc79dbbca745815c2a807dc4e81010c80e308e84913f59cb42a275dad97f3760100";
pub fn tx_from_hex(s: &str) -> Transaction { pub fn tx_from_hex(s: &str) -> Transaction {
let raw = Vec::from_hex(s).expect("data must be in hex"); let raw = Vec::from_hex(s).expect("data must be in hex");

View File

@ -145,12 +145,12 @@ where
/// Returns a reference to the internal [`TxGraph`] (which is part of the [`ChainGraph`]). /// Returns a reference to the internal [`TxGraph`] (which is part of the [`ChainGraph`]).
pub fn graph(&self) -> &TxGraph<T> { pub fn graph(&self) -> &TxGraph<T> {
&self.chain_graph().graph() self.chain_graph().graph()
} }
/// Returns a reference to the internal [`SparseChain`] (which is part of the [`ChainGraph`]). /// Returns a reference to the internal [`SparseChain`] (which is part of the [`ChainGraph`]).
pub fn chain(&self) -> &SparseChain<P> { pub fn chain(&self) -> &SparseChain<P> {
&self.chain_graph().chain() self.chain_graph().chain()
} }
/// Determines the changes as result of inserting `block_id` (a height and block hash) into the /// Determines the changes as result of inserting `block_id` (a height and block hash) into the

View File

@ -10,7 +10,7 @@ use core::{fmt::Debug, ops::Deref};
use super::DerivationAdditions; use super::DerivationAdditions;
/// Maximum [BIP32](https://bips.xyz/32) derivation index. /// Maximum [BIP32](https://bips.xyz/32) derivation index.
pub const BIP32_MAX_INDEX: u32 = 1 << 31 - 1; pub const BIP32_MAX_INDEX: u32 = (1 << 31) - 1;
/// A convenient wrapper around [`SpkTxOutIndex`] that relates script pubkeys to miniscript public /// A convenient wrapper around [`SpkTxOutIndex`] that relates script pubkeys to miniscript public
/// [`Descriptor`]s. /// [`Descriptor`]s.
@ -162,7 +162,7 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
pub fn set_lookahead_for_all(&mut self, lookahead: u32) { pub fn set_lookahead_for_all(&mut self, lookahead: u32) {
for keychain in &self.keychains.keys().cloned().collect::<Vec<_>>() { for keychain in &self.keychains.keys().cloned().collect::<Vec<_>>() {
self.lookahead.insert(keychain.clone(), lookahead); self.lookahead.insert(keychain.clone(), lookahead);
self.replenish_lookahead(&keychain); self.replenish_lookahead(keychain);
} }
} }
@ -348,7 +348,7 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
let mut spks = BTreeMap::new(); let mut spks = BTreeMap::new();
for (keychain, &index) in keychains { for (keychain, &index) in keychains {
let (new_spks, new_additions) = self.reveal_to_target(&keychain, index); let (new_spks, new_additions) = self.reveal_to_target(keychain, index);
if !new_additions.is_empty() { if !new_additions.is_empty() {
spks.insert(keychain.clone(), new_spks); spks.insert(keychain.clone(), new_spks);
additions.append(new_additions); additions.append(new_additions);

View File

@ -463,12 +463,13 @@ impl<P: ChainPosition> SparseChain<P> {
where where
C: IntoIterator<Item = BlockId>, C: IntoIterator<Item = BlockId>,
{ {
let mut chain = Self::default(); Self {
chain.checkpoints = checkpoints checkpoints: checkpoints
.into_iter() .into_iter()
.map(|block_id| block_id.into()) .map(|block_id| block_id.into())
.collect(); .collect(),
chain ..Default::default()
}
} }
/// Get the checkpoint for the last known tip. /// Get the checkpoint for the last known tip.

View File

@ -248,7 +248,7 @@ impl<I: Clone + Ord> SpkTxOutIndex<I> {
if self.outputs_in_range(index..=index).next().is_some() { if self.outputs_in_range(index..=index).next().is_some() {
return false; return false;
} }
return self.unused.insert(index.clone()); self.unused.insert(index.clone())
} }
/// Returns the index associated with the script pubkey. /// Returns the index associated with the script pubkey.
@ -300,13 +300,11 @@ impl<I: Clone + Ord> SpkTxOutIndex<I> {
let input_matches = tx let input_matches = tx
.input .input
.iter() .iter()
.find(|input| self.txouts.contains_key(&input.previous_output)) .any(|input| self.txouts.contains_key(&input.previous_output));
.is_some();
let output_matches = tx let output_matches = tx
.output .output
.iter() .iter()
.find(|output| self.spk_indices.contains_key(&output.script_pubkey)) .any(|output| self.spk_indices.contains_key(&output.script_pubkey));
.is_some();
input_matches || output_matches input_matches || output_matches
} }
} }

View File

@ -276,7 +276,7 @@ impl<T: AsTransaction + Ord + Clone> TxGraph<T> {
/// ///
/// The [`Additions`] would be the set difference of `update` and `self` (transactions that /// The [`Additions`] would be the set difference of `update` and `self` (transactions that
/// exist in `update` but not in `self`). /// exist in `update` but not in `self`).
pub fn determine_additions<'a, T2>(&self, update: &'a TxGraph<T2>) -> Additions<T> pub fn determine_additions<T2>(&self, update: &TxGraph<T2>) -> Additions<T>
where where
T2: IntoOwned<T> + Clone, T2: IntoOwned<T> + Clone,
{ {
@ -299,7 +299,7 @@ impl<T: AsTransaction + Ord + Clone> TxGraph<T> {
for (&vout, update_txout) in partial { for (&vout, update_txout) in partial {
let outpoint = OutPoint::new(txid, vout); let outpoint = OutPoint::new(txid, vout);
if self.get_txout(outpoint) != Some(&update_txout) { if self.get_txout(outpoint) != Some(update_txout) {
additions.txout.insert(outpoint, update_txout.clone()); additions.txout.insert(outpoint, update_txout.clone());
} }
} }
@ -627,7 +627,7 @@ where
}; };
self.populate_stack(op_spends + 1, txid); self.populate_stack(op_spends + 1, txid);
return Some(item); Some(item)
} }
} }

View File

@ -186,7 +186,7 @@ fn update_evicts_conflicting_tx() {
let _ = cg.insert_checkpoint(cp_a).expect("should insert cp"); let _ = cg.insert_checkpoint(cp_a).expect("should insert cp");
let _ = cg.insert_checkpoint(cp_b).expect("should insert cp"); let _ = cg.insert_checkpoint(cp_b).expect("should insert cp");
let _ = cg let _ = cg
.insert_tx(tx_a.clone(), TxHeight::Confirmed(0)) .insert_tx(tx_a, TxHeight::Confirmed(0))
.expect("should insert tx"); .expect("should insert tx");
let _ = cg let _ = cg
.insert_tx(tx_b.clone(), TxHeight::Confirmed(1)) .insert_tx(tx_b.clone(), TxHeight::Confirmed(1))
@ -213,7 +213,7 @@ fn update_evicts_conflicting_tx() {
.into(), .into(),
}, },
graph: tx_graph::Additions { graph: tx_graph::Additions {
tx: [tx_b2.clone()].into(), tx: [tx_b2].into(),
txout: [].into(), txout: [].into(),
}, },
}; };
@ -460,7 +460,7 @@ fn test_apply_changes_reintroduce_tx() {
let mut cg = ChainGraph::default(); let mut cg = ChainGraph::default();
let _ = cg.insert_checkpoint(block1).unwrap(); let _ = cg.insert_checkpoint(block1).unwrap();
let _ = cg.insert_checkpoint(block2a).unwrap(); let _ = cg.insert_checkpoint(block2a).unwrap();
let _ = cg.insert_tx(tx1.clone(), TxHeight::Confirmed(1)).unwrap(); let _ = cg.insert_tx(tx1, TxHeight::Confirmed(1)).unwrap();
let _ = cg.insert_tx(tx2a.clone(), TxHeight::Confirmed(2)).unwrap(); let _ = cg.insert_tx(tx2a.clone(), TxHeight::Confirmed(2)).unwrap();
cg cg
}; };
@ -647,7 +647,7 @@ fn test_evict_descendants() {
); );
let err = cg let err = cg
.insert_tx_preview(tx_conflict.clone(), TxHeight::Unconfirmed) .insert_tx_preview(tx_conflict, TxHeight::Unconfirmed)
.expect_err("must fail due to conflicts"); .expect_err("must fail due to conflicts");
assert!(matches!(err, InsertTxError::UnresolvableConflict(_))); assert!(matches!(err, InsertTxError::UnresolvableConflict(_)));
} }

View File

@ -69,8 +69,8 @@ fn test_balance() {
let mut tracker = KeychainTracker::<Keychain, TxHeight>::default(); let mut tracker = KeychainTracker::<Keychain, TxHeight>::default();
let one = Descriptor::from_str("tr([73c5da0a/86'/0'/0']xpub6BgBgsespWvERF3LHQu6CnqdvfEvtMcQjYrcRzx53QJjSxarj2afYWcLteoGVky7D3UKDP9QyrLprQ3VCECoY49yfdDEHGCtMMj92pReUsQ/0/*)#rg247h69").unwrap(); let one = Descriptor::from_str("tr([73c5da0a/86'/0'/0']xpub6BgBgsespWvERF3LHQu6CnqdvfEvtMcQjYrcRzx53QJjSxarj2afYWcLteoGVky7D3UKDP9QyrLprQ3VCECoY49yfdDEHGCtMMj92pReUsQ/0/*)#rg247h69").unwrap();
let two = Descriptor::from_str("tr([73c5da0a/86'/0'/0']xpub6BgBgsespWvERF3LHQu6CnqdvfEvtMcQjYrcRzx53QJjSxarj2afYWcLteoGVky7D3UKDP9QyrLprQ3VCECoY49yfdDEHGCtMMj92pReUsQ/1/*)#ju05rz2a").unwrap(); let two = Descriptor::from_str("tr([73c5da0a/86'/0'/0']xpub6BgBgsespWvERF3LHQu6CnqdvfEvtMcQjYrcRzx53QJjSxarj2afYWcLteoGVky7D3UKDP9QyrLprQ3VCECoY49yfdDEHGCtMMj92pReUsQ/1/*)#ju05rz2a").unwrap();
tracker.add_keychain(Keychain::One, one.clone()); tracker.add_keychain(Keychain::One, one);
tracker.add_keychain(Keychain::Two, two.clone()); tracker.add_keychain(Keychain::Two, two);
let tx1 = Transaction { let tx1 = Transaction {
version: 0x01, version: 0x01,
@ -126,9 +126,9 @@ fn test_balance() {
}) })
.unwrap(); .unwrap();
let should_trust = |keychain: &Keychain| match keychain { let should_trust = |keychain: &Keychain| match *keychain {
&Keychain::One => false, Keychain::One => false,
&Keychain::Two => true, Keychain::Two => true,
}; };
assert_eq!(tracker.balance(should_trust), Balance::default()); assert_eq!(tracker.balance(should_trust), Balance::default());
@ -172,9 +172,7 @@ fn test_balance() {
} }
); );
let _ = tracker let _ = tracker.insert_tx(tx1, TxHeight::Confirmed(1)).unwrap();
.insert_tx(tx1.clone(), TxHeight::Confirmed(1))
.unwrap();
assert_eq!( assert_eq!(
tracker.balance(should_trust), tracker.balance(should_trust),
@ -186,9 +184,7 @@ fn test_balance() {
} }
); );
let _ = tracker let _ = tracker.insert_tx(tx2, TxHeight::Confirmed(2)).unwrap();
.insert_tx(tx2.clone(), TxHeight::Confirmed(2))
.unwrap();
assert_eq!( assert_eq!(
tracker.balance(should_trust), tracker.balance(should_trust),

View File

@ -147,8 +147,8 @@ fn invalidate_a_checkpoint_and_try_and_move_tx_when_it_wasnt_within_invalidation
chain1.determine_changeset(&chain2), chain1.determine_changeset(&chain2),
Err(UpdateError::TxInconsistent { Err(UpdateError::TxInconsistent {
txid: h!("tx0"), txid: h!("tx0"),
original_pos: TxHeight::Confirmed(0).into(), original_pos: TxHeight::Confirmed(0),
update_pos: TxHeight::Confirmed(1).into(), update_pos: TxHeight::Confirmed(1),
}) })
); );
} }

View File

@ -42,10 +42,10 @@ fn spk_txout_sent_and_received() {
output: vec![ output: vec![
TxOut { TxOut {
value: 20_000, value: 20_000,
script_pubkey: spk2.clone(), script_pubkey: spk2,
}, },
TxOut { TxOut {
script_pubkey: spk1.clone(), script_pubkey: spk1,
value: 30_000, value: 30_000,
}, },
], ],
@ -62,15 +62,15 @@ fn mark_used() {
let mut spk_index = SpkTxOutIndex::default(); let mut spk_index = SpkTxOutIndex::default();
spk_index.insert_spk(1, spk1.clone()); spk_index.insert_spk(1, spk1.clone());
spk_index.insert_spk(2, spk2.clone()); spk_index.insert_spk(2, spk2);
assert_eq!(spk_index.is_used(&1), false); assert!(!spk_index.is_used(&1));
spk_index.mark_used(&1); spk_index.mark_used(&1);
assert_eq!(spk_index.is_used(&1), true); assert!(spk_index.is_used(&1));
spk_index.unmark_used(&1); spk_index.unmark_used(&1);
assert_eq!(spk_index.is_used(&1), false); assert!(!spk_index.is_used(&1));
spk_index.mark_used(&1); spk_index.mark_used(&1);
assert_eq!(spk_index.is_used(&1), true); assert!(spk_index.is_used(&1));
let tx1 = Transaction { let tx1 = Transaction {
version: 0x02, version: 0x02,
@ -78,15 +78,14 @@ fn mark_used() {
input: vec![], input: vec![],
output: vec![TxOut { output: vec![TxOut {
value: 42_000, value: 42_000,
script_pubkey: spk1.clone(), script_pubkey: spk1,
}], }],
}; };
spk_index.scan(&tx1); spk_index.scan(&tx1);
spk_index.unmark_used(&1); spk_index.unmark_used(&1);
assert_eq!( assert!(
spk_index.is_used(&1), spk_index.is_used(&1),
true,
"even though we unmark_used it doesn't matter because there was a tx scanned that used it" "even though we unmark_used it doesn't matter because there was a tx scanned that used it"
); );
} }
@ -94,8 +93,8 @@ fn mark_used() {
#[test] #[test]
fn unmark_used_does_not_result_in_invalid_representation() { fn unmark_used_does_not_result_in_invalid_representation() {
let mut spk_index = SpkTxOutIndex::default(); let mut spk_index = SpkTxOutIndex::default();
assert_eq!(spk_index.unmark_used(&0), false); assert!(!spk_index.unmark_used(&0));
assert_eq!(spk_index.unmark_used(&1), false); assert!(!spk_index.unmark_used(&1));
assert_eq!(spk_index.unmark_used(&2), false); assert!(!spk_index.unmark_used(&2));
assert!(spk_index.unused_spks(..).collect::<Vec<_>>().is_empty()); assert!(spk_index.unused_spks(..).collect::<Vec<_>>().is_empty());
} }

View File

@ -128,10 +128,10 @@ fn insert_tx_graph_keeps_track_of_spend() {
let _ = graph1.insert_tx(tx2.clone()); let _ = graph1.insert_tx(tx2.clone());
let _ = graph2.insert_tx(tx2.clone()); let _ = graph2.insert_tx(tx2.clone());
let _ = graph2.insert_tx(tx1.clone()); let _ = graph2.insert_tx(tx1);
assert_eq!( assert_eq!(
&*graph1.outspends(op), graph1.outspends(op),
&iter::once(tx2.txid()).collect::<HashSet<_>>() &iter::once(tx2.txid()).collect::<HashSet<_>>()
); );
assert_eq!(graph2.outspends(op), graph1.outspends(op)); assert_eq!(graph2.outspends(op), graph1.outspends(op));
@ -173,7 +173,7 @@ fn insert_tx_displaces_txouts() {
vout: 0, vout: 0,
}, },
TxOut { TxOut {
value: 1337_000, value: 1_337_000,
script_pubkey: Script::default(), script_pubkey: Script::default(),
}, },
); );
@ -231,7 +231,7 @@ fn insert_txout_does_not_displace_tx() {
vout: 0, vout: 0,
}, },
TxOut { TxOut {
value: 1337_000, value: 1_337_000,
script_pubkey: Script::default(), script_pubkey: Script::default(),
}, },
); );

View File

@ -97,9 +97,8 @@ pub trait ElectrumExt {
impl ElectrumExt for Client { impl ElectrumExt for Client {
fn get_tip(&self) -> Result<(u32, BlockHash), Error> { fn get_tip(&self) -> Result<(u32, BlockHash), Error> {
// TODO: unsubscribe when added to the client, or is there a better call to use here? // TODO: unsubscribe when added to the client, or is there a better call to use here?
Ok(self self.block_headers_subscribe()
.block_headers_subscribe() .map(|data| (data.height as u32, data.header.block_hash()))
.map(|data| (data.height as u32, data.header.block_hash()))?)
} }
fn scan<K: Ord + Clone>( fn scan<K: Ord + Clone>(
@ -139,7 +138,7 @@ impl ElectrumExt for Client {
batch_size, batch_size,
) { ) {
Err(InternalError::Reorg) => continue, Err(InternalError::Reorg) => continue,
Err(InternalError::ElectrumError(e)) => return Err(e.into()), Err(InternalError::ElectrumError(e)) => return Err(e),
Ok(mut spks) => scanned_spks.append(&mut spks), Ok(mut spks) => scanned_spks.append(&mut spks),
}; };
} }
@ -152,7 +151,7 @@ impl ElectrumExt for Client {
batch_size, batch_size,
) { ) {
Err(InternalError::Reorg) => continue, Err(InternalError::Reorg) => continue,
Err(InternalError::ElectrumError(e)) => return Err(e.into()), Err(InternalError::ElectrumError(e)) => return Err(e),
Ok(spks) => scanned_spks.extend( Ok(spks) => scanned_spks.extend(
spks.into_iter() spks.into_iter()
.map(|(spk_i, spk)| ((keychain.clone(), spk_i), spk)), .map(|(spk_i, spk)| ((keychain.clone(), spk_i), spk)),
@ -163,13 +162,13 @@ impl ElectrumExt for Client {
match populate_with_txids(self, &mut update, &mut txids.iter().cloned()) { match populate_with_txids(self, &mut update, &mut txids.iter().cloned()) {
Err(InternalError::Reorg) => continue, Err(InternalError::Reorg) => continue,
Err(InternalError::ElectrumError(e)) => return Err(e.into()), Err(InternalError::ElectrumError(e)) => return Err(e),
Ok(_) => {} Ok(_) => {}
} }
match populate_with_outpoints(self, &mut update, &mut outpoints.iter().cloned()) { match populate_with_outpoints(self, &mut update, &mut outpoints.iter().cloned()) {
Err(InternalError::Reorg) => continue, Err(InternalError::Reorg) => continue,
Err(InternalError::ElectrumError(e)) => return Err(e.into()), Err(InternalError::ElectrumError(e)) => return Err(e),
Ok(_txs) => { /* [TODO] cache full txs to reduce bandwidth */ } Ok(_txs) => { /* [TODO] cache full txs to reduce bandwidth */ }
} }
@ -284,7 +283,7 @@ impl<K: Ord + Clone + Debug> ElectrumUpdate<K, TxHeight> {
.into_iter() .into_iter()
.zip( .zip(
client client
.batch_block_header(heights.clone())? .batch_block_header(heights)?
.into_iter() .into_iter()
.map(|bh| bh.time as u64), .map(|bh| bh.time as u64),
) )
@ -307,7 +306,7 @@ impl<K: Ord + Clone + Debug> ElectrumUpdate<K, TxHeight> {
Ok(ElectrumUpdate { Ok(ElectrumUpdate {
chain_update: new_update, chain_update: new_update,
last_active_indices: self.last_active_indices.clone(), last_active_indices: self.last_active_indices,
}) })
} }
} }
@ -320,15 +319,15 @@ enum InternalError {
impl From<electrum_client::Error> for InternalError { impl From<electrum_client::Error> for InternalError {
fn from(value: electrum_client::Error) -> Self { fn from(value: electrum_client::Error) -> Self {
Self::ElectrumError(value.into()) Self::ElectrumError(value)
} }
} }
fn get_tip(client: &Client) -> Result<(u32, BlockHash), Error> { fn get_tip(client: &Client) -> Result<(u32, BlockHash), Error> {
// TODO: unsubscribe when added to the client, or is there a better call to use here? // TODO: unsubscribe when added to the client, or is there a better call to use here?
Ok(client client
.block_headers_subscribe() .block_headers_subscribe()
.map(|data| (data.height as u32, data.header.block_hash()))?) .map(|data| (data.height as u32, data.header.block_hash()))
} }
/// Prepare an update sparsechain "template" based on the checkpoints of the `local_chain`. /// Prepare an update sparsechain "template" based on the checkpoints of the `local_chain`.

View File

@ -34,6 +34,7 @@ pub trait EsploraExt {
/// parallel. /// parallel.
/// ///
/// [`ChainPosition`]: bdk_chain::sparse_chain::ChainPosition /// [`ChainPosition`]: bdk_chain::sparse_chain::ChainPosition
#[allow(clippy::result_large_err)] // FIXME
fn scan<K: Ord + Clone>( fn scan<K: Ord + Clone>(
&self, &self,
local_chain: &BTreeMap<u32, BlockHash>, local_chain: &BTreeMap<u32, BlockHash>,
@ -47,6 +48,7 @@ pub trait EsploraExt {
/// Convenience method to call [`scan`] without requiring a keychain. /// Convenience method to call [`scan`] without requiring a keychain.
/// ///
/// [`scan`]: EsploraExt::scan /// [`scan`]: EsploraExt::scan
#[allow(clippy::result_large_err)] // FIXME
fn scan_without_keychain( fn scan_without_keychain(
&self, &self,
local_chain: &BTreeMap<u32, BlockHash>, local_chain: &BTreeMap<u32, BlockHash>,
@ -127,13 +129,12 @@ impl EsploraExt for esplora_client::BlockingClient {
let mut spks = spks.into_iter(); let mut spks = spks.into_iter();
let mut last_active_index = None; let mut last_active_index = None;
let mut empty_scripts = 0; let mut empty_scripts = 0;
type IndexWithTxs = (u32, Vec<esplora_client::Tx>);
loop { loop {
let handles = (0..parallel_requests) let handles = (0..parallel_requests)
.filter_map( .filter_map(
|_| -> Option< |_| -> Option<std::thread::JoinHandle<Result<IndexWithTxs, _>>> {
std::thread::JoinHandle<Result<(u32, Vec<esplora_client::Tx>), _>>,
> {
let (index, script) = spks.next()?; let (index, script) = spks.next()?;
let client = self.clone(); let client = self.clone();
Some(std::thread::spawn(move || { Some(std::thread::spawn(move || {

View File

@ -6,7 +6,7 @@ use bdk_chain::{
}; };
pub use file_store::*; pub use file_store::*;
impl<'de, K, P> PersistBackend<K, P> for KeychainStore<K, P> impl<K, P> PersistBackend<K, P> for KeychainStore<K, P>
where where
K: Ord + Clone + core::fmt::Debug, K: Ord + Clone + core::fmt::Debug,
P: ChainPosition, P: ChainPosition,

View File

@ -4,7 +4,6 @@ use bdk_chain::{
TxHeight, TxHeight,
}; };
use bdk_file_store::{FileError, IterError, KeychainStore, MAGIC_BYTES, MAGIC_BYTES_LEN}; use bdk_file_store::{FileError, IterError, KeychainStore, MAGIC_BYTES, MAGIC_BYTES_LEN};
use serde;
use std::{ use std::{
io::{Read, Write}, io::{Read, Write},
vec::Vec, vec::Vec,

View File

@ -48,7 +48,7 @@ pub struct ScanOptions {
} }
fn main() -> anyhow::Result<()> { fn main() -> anyhow::Result<()> {
let (args, keymap, mut tracker, mut db) = cli::init::<ElectrumCommands, _>()?; let (args, keymap, tracker, db) = cli::init::<ElectrumCommands, _>()?;
let electrum_url = match args.network { let electrum_url = match args.network {
Network::Bitcoin => "ssl://electrum.blockstream.info:50002", Network::Bitcoin => "ssl://electrum.blockstream.info:50002",
@ -57,10 +57,7 @@ fn main() -> anyhow::Result<()> {
Network::Signet => "tcp://signet-electrumx.wakiyamap.dev:50001", Network::Signet => "tcp://signet-electrumx.wakiyamap.dev:50001",
}; };
let config = electrum_client::Config::builder() let config = electrum_client::Config::builder()
.validate_domain(match args.network { .validate_domain(matches!(args.network, Network::Bitcoin))
Network::Bitcoin => true,
_ => false,
})
.build(); .build();
let client = electrum_client::Client::from_config(electrum_url, config)?; let client = electrum_client::Client::from_config(electrum_url, config)?;
@ -74,8 +71,8 @@ fn main() -> anyhow::Result<()> {
let _txid = client.transaction_broadcast(transaction)?; let _txid = client.transaction_broadcast(transaction)?;
Ok(()) Ok(())
}, },
&mut tracker, &tracker,
&mut db, &db,
args.network, args.network,
&keymap, &keymap,
) )
@ -148,7 +145,7 @@ fn main() -> anyhow::Result<()> {
.txout_index .txout_index
.all_spks() .all_spks()
.iter() .iter()
.map(|(k, v)| (k.clone(), v.clone())) .map(|(k, v)| (*k, v.clone()))
.collect::<Vec<_>>(); .collect::<Vec<_>>();
spks = Box::new(spks.chain(all_spks.into_iter().map(|(index, script)| { spks = Box::new(spks.chain(all_spks.into_iter().map(|(index, script)| {
eprintln!("scanning {:?}", index); eprintln!("scanning {:?}", index);
@ -159,7 +156,7 @@ fn main() -> anyhow::Result<()> {
let unused_spks = tracker let unused_spks = tracker
.txout_index .txout_index
.unused_spks(..) .unused_spks(..)
.map(|(k, v)| (k.clone(), v.clone())) .map(|(k, v)| (*k, v.clone()))
.collect::<Vec<_>>(); .collect::<Vec<_>>();
spks = Box::new(spks.chain(unused_spks.into_iter().map(|(index, script)| { spks = Box::new(spks.chain(unused_spks.into_iter().map(|(index, script)| {
eprintln!( eprintln!(

View File

@ -153,7 +153,7 @@ fn main() -> anyhow::Result<()> {
.txout_index .txout_index
.all_spks() .all_spks()
.iter() .iter()
.map(|(k, v)| (k.clone(), v.clone())) .map(|(k, v)| (*k, v.clone()))
.collect::<Vec<_>>(); .collect::<Vec<_>>();
spks = Box::new(spks.chain(all_spks.into_iter().map(|(index, script)| { spks = Box::new(spks.chain(all_spks.into_iter().map(|(index, script)| {
eprintln!("scanning {:?}", index); eprintln!("scanning {:?}", index);
@ -164,7 +164,7 @@ fn main() -> anyhow::Result<()> {
let unused_spks = tracker let unused_spks = tracker
.txout_index .txout_index
.unused_spks(..) .unused_spks(..)
.map(|(k, v)| (k.clone(), v.clone())) .map(|(k, v)| (*k, v.clone()))
.collect::<Vec<_>>(); .collect::<Vec<_>>();
spks = Box::new(spks.chain(unused_spks.into_iter().map(|(index, script)| { spks = Box::new(spks.chain(unused_spks.into_iter().map(|(index, script)| {
eprintln!( eprintln!(

View File

@ -227,7 +227,7 @@ where
false => Keychain::External, false => Keychain::External,
}; };
for (index, spk) in txout_index.revealed_spks_of_keychain(&target_keychain) { for (index, spk) in txout_index.revealed_spks_of_keychain(&target_keychain) {
let address = Address::from_script(&spk, network) let address = Address::from_script(spk, network)
.expect("should always be able to derive address"); .expect("should always be able to derive address");
println!( println!(
"{:?} {} used:{}", "{:?} {} used:{}",
@ -271,6 +271,7 @@ pub fn run_txo_cmd<K: Debug + Clone + Ord, P: ChainPosition>(
unconfirmed, unconfirmed,
} => { } => {
let tracker = tracker.lock().unwrap(); let tracker = tracker.lock().unwrap();
#[allow(clippy::type_complexity)] // FIXME
let txouts: Box<dyn Iterator<Item = (&(K, u32), FullTxOut<P>)>> = match (unspent, spent) let txouts: Box<dyn Iterator<Item = (&(K, u32), FullTxOut<P>)>> = match (unspent, spent)
{ {
(true, false) => Box::new(tracker.full_utxos()), (true, false) => Box::new(tracker.full_utxos()),
@ -282,6 +283,7 @@ pub fn run_txo_cmd<K: Debug + Clone + Ord, P: ChainPosition>(
_ => Box::new(tracker.full_txouts()), _ => Box::new(tracker.full_txouts()),
}; };
#[allow(clippy::type_complexity)] // FIXME
let txouts: Box<dyn Iterator<Item = (&(K, u32), FullTxOut<P>)>> = let txouts: Box<dyn Iterator<Item = (&(K, u32), FullTxOut<P>)>> =
match (confirmed, unconfirmed) { match (confirmed, unconfirmed) {
(true, false) => Box::new( (true, false) => Box::new(
@ -310,6 +312,7 @@ pub fn run_txo_cmd<K: Debug + Clone + Ord, P: ChainPosition>(
} }
} }
#[allow(clippy::type_complexity)] // FIXME
pub fn create_tx<P: ChainPosition>( pub fn create_tx<P: ChainPosition>(
value: u64, value: u64,
address: Address, address: Address,
@ -482,7 +485,7 @@ pub fn create_tx<P: ChainPosition>(
assert!( assert!(
requirements.signatures.sign_with_keymap( requirements.signatures.sign_with_keymap(
i, i,
&keymap, keymap,
&sighash_prevouts, &sighash_prevouts,
None, None,
None, None,
@ -539,13 +542,13 @@ where
{ {
match command { match command {
// TODO: Make these functions return stuffs // TODO: Make these functions return stuffs
Commands::Address { addr_cmd } => run_address_cmd(&tracker, &store, addr_cmd, network), Commands::Address { addr_cmd } => run_address_cmd(tracker, store, addr_cmd, network),
Commands::Balance => { Commands::Balance => {
run_balance_cmd(&tracker); run_balance_cmd(tracker);
Ok(()) Ok(())
} }
Commands::TxOut { txout_cmd } => { Commands::TxOut { txout_cmd } => {
run_txo_cmd(txout_cmd, &tracker, network); run_txo_cmd(txout_cmd, tracker, network);
Ok(()) Ok(())
} }
Commands::Send { Commands::Send {
@ -557,7 +560,7 @@ where
// take mutable ref to construct tx -- it is only open for a short time while building it. // take mutable ref to construct tx -- it is only open for a short time while building it.
let tracker = &mut *tracker.lock().unwrap(); let tracker = &mut *tracker.lock().unwrap();
let (transaction, change_info) = let (transaction, change_info) =
create_tx(value, address, coin_select, tracker, &keymap)?; create_tx(value, address, coin_select, tracker, keymap)?;
if let Some((change_derivation_changes, (change_keychain, index))) = change_info { if let Some((change_derivation_changes, (change_keychain, index))) = change_info {
// We must first persist to disk the fact that we've got a new address from the // We must first persist to disk the fact that we've got a new address from the
@ -605,7 +608,7 @@ where
// We failed to broadcast so allow our change address to be used in the future // We failed to broadcast so allow our change address to be used in the future
tracker.txout_index.unmark_used(&keychain, index); tracker.txout_index.unmark_used(&keychain, index);
} }
Err(e.into()) Err(e)
} }
} }
} }
@ -615,6 +618,7 @@ where
} }
} }
#[allow(clippy::type_complexity)] // FIXME
pub fn init<C: clap::Subcommand, P>() -> anyhow::Result<( pub fn init<C: clap::Subcommand, P>() -> anyhow::Result<(
Args<C>, Args<C>,
KeyMap, KeyMap,

View File

@ -12,10 +12,7 @@ pub enum BranchStrategy {
impl BranchStrategy { impl BranchStrategy {
pub fn will_continue(&self) -> bool { pub fn will_continue(&self) -> bool {
match self { matches!(self, Self::Continue | Self::SkipInclusion)
Self::Continue | Self::SkipInclusion => true,
_ => false,
}
} }
} }
@ -69,23 +66,20 @@ impl<'c, S: Ord> Bnb<'c, S> {
/// Attempt to backtrack to the previously selected node's omission branch, return false /// Attempt to backtrack to the previously selected node's omission branch, return false
/// otherwise (no more solutions). /// otherwise (no more solutions).
pub fn backtrack(&mut self) -> bool { pub fn backtrack(&mut self) -> bool {
(0..self.pool_pos) (0..self.pool_pos).rev().any(|pos| {
.rev() let (index, candidate) = self.pool[pos];
.find(|&pos| {
let (index, candidate) = self.pool[pos];
if self.selection.is_selected(index) { if self.selection.is_selected(index) {
// deselect last `pos`, so next round will check omission branch // deselect last `pos`, so next round will check omission branch
self.pool_pos = pos; self.pool_pos = pos;
self.selection.deselect(index); self.selection.deselect(index);
return true; true
} else { } else {
self.rem_abs += candidate.value; self.rem_abs += candidate.value;
self.rem_eff += candidate.effective_value(self.selection.opts.target_feerate); self.rem_eff += candidate.effective_value(self.selection.opts.target_feerate);
return false; false
} }
}) })
.is_some()
} }
/// Continue down this branch, skip inclusion branch if specified. /// Continue down this branch, skip inclusion branch if specified.
@ -106,7 +100,7 @@ impl<'c, S: Ord> Bnb<'c, S> {
self.best_score = score; self.best_score = score;
return true; return true;
} }
return false; false
} }
} }
@ -277,7 +271,7 @@ where
} }
// check out inclusion branch first // check out inclusion branch first
return (BranchStrategy::Continue, None); (BranchStrategy::Continue, None)
}; };
// determine sum of absolute and effective values for current selection // determine sum of absolute and effective values for current selection

View File

@ -341,7 +341,7 @@ impl<'a> CoinSelector<'a> {
}) })
})?; })?;
(selected - target_value) as u64 selected - target_value
}; };
let fee_without_drain = fee_without_drain.max(self.opts.min_absolute_fee); let fee_without_drain = fee_without_drain.max(self.opts.min_absolute_fee);
@ -427,17 +427,16 @@ pub struct SelectionError {
impl core::fmt::Display for SelectionError { impl core::fmt::Display for SelectionError {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
match self { let SelectionError {
SelectionError { selected,
selected, missing,
missing, constraint,
constraint, } = self;
} => write!( write!(
f, f,
"insufficient coins selected; selected={}, missing={}, unsatisfied_constraint={:?}", "insufficient coins selected; selected={}, missing={}, unsatisfied_constraint={:?}",
selected, missing, constraint selected, missing, constraint
), )
}
} }
} }

View File

@ -1,5 +1,6 @@
#![allow(unused)] #![allow(unused)]
#![allow(missing_docs)] #![allow(missing_docs)]
#![allow(clippy::all)] // FIXME
//! A spending plan or *plan* for short is a representation of a particular spending path on a //! A spending plan or *plan* for short is a representation of a particular spending path on a
//! descriptor. This allows us to analayze a choice of spending path without producing any //! descriptor. This allows us to analayze a choice of spending path without producing any
//! signatures or other witness data for it. //! signatures or other witness data for it.