Fix cargo clippy warnings

Disabled warnings for nursery/tmp_plan as it's going to be replaced
anyway.
This commit is contained in:
Daniela Brozzoni 2023-03-02 19:08:33 +01:00
parent 1805bd35c0
commit c61b3604e1
No known key found for this signature in database
GPG Key ID: 7DE4F1FDCED0AB87
26 changed files with 139 additions and 153 deletions

View File

@ -356,26 +356,26 @@ pub(crate) trait DescriptorMeta {
fn is_witness(&self) -> bool;
fn is_taproot(&self) -> bool;
fn get_extended_keys(&self) -> Vec<DescriptorXKey<ExtendedPubKey>>;
fn derive_from_hd_keypaths<'s>(
fn derive_from_hd_keypaths(
&self,
hd_keypaths: &HdKeyPaths,
secp: &'s SecpCtx,
secp: &SecpCtx,
) -> Option<DerivedDescriptor>;
fn derive_from_tap_key_origins<'s>(
fn derive_from_tap_key_origins(
&self,
tap_key_origins: &TapKeyOrigins,
secp: &'s SecpCtx,
secp: &SecpCtx,
) -> Option<DerivedDescriptor>;
fn derive_from_psbt_key_origins<'s>(
fn derive_from_psbt_key_origins(
&self,
key_origins: BTreeMap<Fingerprint, (&DerivationPath, SinglePubKey)>,
secp: &'s SecpCtx,
secp: &SecpCtx,
) -> Option<DerivedDescriptor>;
fn derive_from_psbt_input<'s>(
fn derive_from_psbt_input(
&self,
psbt_input: &psbt::Input,
utxo: Option<TxOut>,
secp: &'s SecpCtx,
secp: &SecpCtx,
) -> Option<DerivedDescriptor>;
}
@ -410,10 +410,10 @@ impl DescriptorMeta for ExtendedDescriptor {
answer
}
fn derive_from_psbt_key_origins<'s>(
fn derive_from_psbt_key_origins(
&self,
key_origins: BTreeMap<Fingerprint, (&DerivationPath, SinglePubKey)>,
secp: &'s SecpCtx,
secp: &SecpCtx,
) -> Option<DerivedDescriptor> {
// Ensure that deriving `xpub` with `path` yields `expected`
let verify_key = |xpub: &DescriptorXKey<ExtendedPubKey>,
@ -497,10 +497,10 @@ impl DescriptorMeta for ExtendedDescriptor {
path_found.map(|path| self.at_derivation_index(path))
}
fn derive_from_hd_keypaths<'s>(
fn derive_from_hd_keypaths(
&self,
hd_keypaths: &HdKeyPaths,
secp: &'s SecpCtx,
secp: &SecpCtx,
) -> Option<DerivedDescriptor> {
// "Convert" an hd_keypaths map to the format required by `derive_from_psbt_key_origins`
let key_origins = hd_keypaths
@ -515,10 +515,10 @@ impl DescriptorMeta for ExtendedDescriptor {
self.derive_from_psbt_key_origins(key_origins, secp)
}
fn derive_from_tap_key_origins<'s>(
fn derive_from_tap_key_origins(
&self,
tap_key_origins: &TapKeyOrigins,
secp: &'s SecpCtx,
secp: &SecpCtx,
) -> Option<DerivedDescriptor> {
// "Convert" a tap_key_origins map to the format required by `derive_from_psbt_key_origins`
let key_origins = tap_key_origins
@ -528,11 +528,11 @@ impl DescriptorMeta for ExtendedDescriptor {
self.derive_from_psbt_key_origins(key_origins, secp)
}
fn derive_from_psbt_input<'s>(
fn derive_from_psbt_input(
&self,
psbt_input: &psbt::Input,
utxo: Option<TxOut>,
secp: &'s SecpCtx,
secp: &SecpCtx,
) -> Option<DerivedDescriptor> {
if let Some(derived) = self.derive_from_hd_keypaths(&psbt_input.bip32_derivation, secp) {
return Some(derived);

View File

@ -723,7 +723,7 @@ mod test {
fn get_test_utxos() -> Vec<WeightedUtxo> {
vec![
utxo(100_000, 0, ConfirmationTime::Unconfirmed),
utxo(FEE_AMOUNT as u64 - 40, 1, ConfirmationTime::Unconfirmed),
utxo(FEE_AMOUNT - 40, 1, ConfirmationTime::Unconfirmed),
utxo(200_000, 2, ConfirmationTime::Unconfirmed),
]
}

View File

@ -311,13 +311,12 @@ impl<D> Wallet<D> {
.last()
.unwrap(),
};
let info = AddressInfo {
AddressInfo {
index,
address: Address::from_script(&spk, self.network)
.expect("descriptor must have address form"),
keychain,
};
info
}
}
/// Return whether or not a `script` is part of this wallet (either internal or external)
@ -342,7 +341,7 @@ impl<D> Wallet<D> {
.map(|(&(keychain, derivation_index), utxo)| LocalUtxo {
outpoint: utxo.outpoint,
txout: utxo.txout,
keychain: keychain.clone(),
keychain,
is_spent: false,
derivation_index,
confirmation_time: utxo.chain_position,
@ -1288,8 +1287,7 @@ impl<D> Wallet<D> {
// - If that also fails, it will try it on the internal descriptor, if present
let desc = psbt
.get_utxo_for(n)
.map(|txout| self.get_descriptor_for_txout(&txout))
.flatten()
.and_then(|txout| self.get_descriptor_for_txout(&txout))
.or_else(|| {
self.keychain_tracker
.txout_index

View File

@ -1544,7 +1544,7 @@ fn test_bump_fee_add_input() {
}],
};
wallet
.insert_tx(init_tx.clone(), wallet.transactions().last().unwrap().0)
.insert_tx(init_tx, wallet.transactions().last().unwrap().0)
.unwrap();
let addr = Address::from_str("2N1Ffz3WaNzbeLFBb51xyFMHYSEUXcbiSoX").unwrap();

View File

@ -264,7 +264,7 @@ where
}
/// Calculates the difference between self and `update` in the form of a [`ChangeSet`].
pub fn determine_changeset<'a, T2>(
pub fn determine_changeset<T2>(
&self,
update: &ChainGraph<P, T2>,
) -> Result<ChangeSet<P, T>, UpdateError<P>>
@ -366,7 +366,7 @@ where
// evicted, return error
return Err(UnresolvableConflict {
already_confirmed_tx: (conflicting_pos.clone(), conflicting_txid),
update_tx: (update_pos.clone(), update_txid),
update_tx: (update_pos, update_txid),
});
}
TxHeight::Unconfirmed => {
@ -516,7 +516,7 @@ impl<P: core::fmt::Debug> core::fmt::Display for NewError<P> {
f,
"missing full transactions for {}",
missing
.into_iter()
.iter()
.map(|txid| txid.to_string())
.collect::<Vec<_>>()
.join(", ")

View File

@ -8,10 +8,10 @@ use bitcoin::{
use crate::BlockId;
pub const RAW_TX_1: &'static str = "0200000000010116d6174da7183d70d0a7d4dc314d517a7d135db79ad63515028b293a76f4f9d10000000000feffffff023a21fc8350060000160014531c405e1881ef192294b8813631e258bf98ea7a1027000000000000225120a60869f0dbcf1dc659c9cecbaf8050135ea9e8cdc487053f1dc6880949dc684c024730440220591b1a172a122da49ba79a3e79f98aaa03fd7a372f9760da18890b6a327e6010022013e82319231da6c99abf8123d7c07e13cf9bd8d76e113e18dc452e5024db156d012102318a2d558b2936c52e320decd6d92a88d7f530be91b6fe0af5caf41661e77da3ef2e0100";
pub const RAW_TX_2: &'static str = "02000000000101a688607020cfae91a61e7c516b5ef1264d5d77f17200c3866826c6c808ebf1620000000000feffffff021027000000000000225120a60869f0dbcf1dc659c9cecbaf8050135ea9e8cdc487053f1dc6880949dc684c20fd48ff530600001600146886c525e41d4522042bd0b159dfbade2504a6bb024730440220740ff7e665cd20565d4296b549df8d26b941be3f1e3af89a0b60e50c0dbeb69a02206213ab7030cf6edc6c90d4ccf33010644261e029950a688dc0b1a9ebe6ddcc5a012102f2ac6b396a97853cb6cd62242c8ae4842024742074475023532a51e9c53194253e760100";
pub const RAW_TX_3: &'static str = "0200000000010135d67ee47b557e68b8c6223958f597381965ed719f1207ee2b9e20432a24a5dc0100000000feffffff021027000000000000225120a82f29944d65b86ae6b5e5cc75e294ead6c59391a1edc5e016e3498c67fc7bbb62215a5055060000160014070df7671dea67a50c4799a744b5c9be8f4bac690247304402207ebf8d29f71fd03e7e6977b3ea78ca5fcc5c49a42ae822348fc401862fdd766c02201d7e4ff0684ecb008b6142f36ead1b0b4d615524c4f58c261113d361f4427e25012103e6a75e2fab85e5ecad641afc4ffba7222f998649d9f18cac92f0fcc8618883b3ee760100";
pub const RAW_TX_4: &'static str = "02000000000101d00e8f76ed313e19b339ee293c0f52b0325c95e24c8f3966fa353fb2bedbcf580100000000feffffff021027000000000000225120882d74e5d0572d5a816cef0041a96b6c1de832f6f9676d9605c44d5e9a97d3dc9cda55fe53060000160014852b5864b8edd42fab4060c87f818e50780865ff0247304402201dccbb9bed7fba924b6d249c5837cc9b37470c0e3d8fbea77cb59baba3efe6fa0220700cc170916913b9bfc2bc0fefb6af776e8b542c561702f136cddc1c7aa43141012103acec3fc79dbbca745815c2a807dc4e81010c80e308e84913f59cb42a275dad97f3760100";
pub const RAW_TX_1: &str = "0200000000010116d6174da7183d70d0a7d4dc314d517a7d135db79ad63515028b293a76f4f9d10000000000feffffff023a21fc8350060000160014531c405e1881ef192294b8813631e258bf98ea7a1027000000000000225120a60869f0dbcf1dc659c9cecbaf8050135ea9e8cdc487053f1dc6880949dc684c024730440220591b1a172a122da49ba79a3e79f98aaa03fd7a372f9760da18890b6a327e6010022013e82319231da6c99abf8123d7c07e13cf9bd8d76e113e18dc452e5024db156d012102318a2d558b2936c52e320decd6d92a88d7f530be91b6fe0af5caf41661e77da3ef2e0100";
pub const RAW_TX_2: &str = "02000000000101a688607020cfae91a61e7c516b5ef1264d5d77f17200c3866826c6c808ebf1620000000000feffffff021027000000000000225120a60869f0dbcf1dc659c9cecbaf8050135ea9e8cdc487053f1dc6880949dc684c20fd48ff530600001600146886c525e41d4522042bd0b159dfbade2504a6bb024730440220740ff7e665cd20565d4296b549df8d26b941be3f1e3af89a0b60e50c0dbeb69a02206213ab7030cf6edc6c90d4ccf33010644261e029950a688dc0b1a9ebe6ddcc5a012102f2ac6b396a97853cb6cd62242c8ae4842024742074475023532a51e9c53194253e760100";
pub const RAW_TX_3: &str = "0200000000010135d67ee47b557e68b8c6223958f597381965ed719f1207ee2b9e20432a24a5dc0100000000feffffff021027000000000000225120a82f29944d65b86ae6b5e5cc75e294ead6c59391a1edc5e016e3498c67fc7bbb62215a5055060000160014070df7671dea67a50c4799a744b5c9be8f4bac690247304402207ebf8d29f71fd03e7e6977b3ea78ca5fcc5c49a42ae822348fc401862fdd766c02201d7e4ff0684ecb008b6142f36ead1b0b4d615524c4f58c261113d361f4427e25012103e6a75e2fab85e5ecad641afc4ffba7222f998649d9f18cac92f0fcc8618883b3ee760100";
pub const RAW_TX_4: &str = "02000000000101d00e8f76ed313e19b339ee293c0f52b0325c95e24c8f3966fa353fb2bedbcf580100000000feffffff021027000000000000225120882d74e5d0572d5a816cef0041a96b6c1de832f6f9676d9605c44d5e9a97d3dc9cda55fe53060000160014852b5864b8edd42fab4060c87f818e50780865ff0247304402201dccbb9bed7fba924b6d249c5837cc9b37470c0e3d8fbea77cb59baba3efe6fa0220700cc170916913b9bfc2bc0fefb6af776e8b542c561702f136cddc1c7aa43141012103acec3fc79dbbca745815c2a807dc4e81010c80e308e84913f59cb42a275dad97f3760100";
pub fn tx_from_hex(s: &str) -> Transaction {
let raw = Vec::from_hex(s).expect("data must be in hex");

View File

@ -145,12 +145,12 @@ where
/// Returns a reference to the internal [`TxGraph`] (which is part of the [`ChainGraph`]).
pub fn graph(&self) -> &TxGraph<T> {
&self.chain_graph().graph()
self.chain_graph().graph()
}
/// Returns a reference to the internal [`SparseChain`] (which is part of the [`ChainGraph`]).
pub fn chain(&self) -> &SparseChain<P> {
&self.chain_graph().chain()
self.chain_graph().chain()
}
/// Determines the changes as result of inserting `block_id` (a height and block hash) into the

View File

@ -10,7 +10,7 @@ use core::{fmt::Debug, ops::Deref};
use super::DerivationAdditions;
/// Maximum [BIP32](https://bips.xyz/32) derivation index.
pub const BIP32_MAX_INDEX: u32 = 1 << 31 - 1;
pub const BIP32_MAX_INDEX: u32 = (1 << 31) - 1;
/// A convenient wrapper around [`SpkTxOutIndex`] that relates script pubkeys to miniscript public
/// [`Descriptor`]s.
@ -162,7 +162,7 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
pub fn set_lookahead_for_all(&mut self, lookahead: u32) {
for keychain in &self.keychains.keys().cloned().collect::<Vec<_>>() {
self.lookahead.insert(keychain.clone(), lookahead);
self.replenish_lookahead(&keychain);
self.replenish_lookahead(keychain);
}
}
@ -348,7 +348,7 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
let mut spks = BTreeMap::new();
for (keychain, &index) in keychains {
let (new_spks, new_additions) = self.reveal_to_target(&keychain, index);
let (new_spks, new_additions) = self.reveal_to_target(keychain, index);
if !new_additions.is_empty() {
spks.insert(keychain.clone(), new_spks);
additions.append(new_additions);

View File

@ -463,12 +463,13 @@ impl<P: ChainPosition> SparseChain<P> {
where
C: IntoIterator<Item = BlockId>,
{
let mut chain = Self::default();
chain.checkpoints = checkpoints
.into_iter()
.map(|block_id| block_id.into())
.collect();
chain
Self {
checkpoints: checkpoints
.into_iter()
.map(|block_id| block_id.into())
.collect(),
..Default::default()
}
}
/// Get the checkpoint for the last known tip.

View File

@ -248,7 +248,7 @@ impl<I: Clone + Ord> SpkTxOutIndex<I> {
if self.outputs_in_range(index..=index).next().is_some() {
return false;
}
return self.unused.insert(index.clone());
self.unused.insert(index.clone())
}
/// Returns the index associated with the script pubkey.
@ -300,13 +300,11 @@ impl<I: Clone + Ord> SpkTxOutIndex<I> {
let input_matches = tx
.input
.iter()
.find(|input| self.txouts.contains_key(&input.previous_output))
.is_some();
.any(|input| self.txouts.contains_key(&input.previous_output));
let output_matches = tx
.output
.iter()
.find(|output| self.spk_indices.contains_key(&output.script_pubkey))
.is_some();
.any(|output| self.spk_indices.contains_key(&output.script_pubkey));
input_matches || output_matches
}
}

View File

@ -276,7 +276,7 @@ impl<T: AsTransaction + Ord + Clone> TxGraph<T> {
///
/// The [`Additions`] would be the set difference of `update` and `self` (transactions that
/// exist in `update` but not in `self`).
pub fn determine_additions<'a, T2>(&self, update: &'a TxGraph<T2>) -> Additions<T>
pub fn determine_additions<T2>(&self, update: &TxGraph<T2>) -> Additions<T>
where
T2: IntoOwned<T> + Clone,
{
@ -299,7 +299,7 @@ impl<T: AsTransaction + Ord + Clone> TxGraph<T> {
for (&vout, update_txout) in partial {
let outpoint = OutPoint::new(txid, vout);
if self.get_txout(outpoint) != Some(&update_txout) {
if self.get_txout(outpoint) != Some(update_txout) {
additions.txout.insert(outpoint, update_txout.clone());
}
}
@ -627,7 +627,7 @@ where
};
self.populate_stack(op_spends + 1, txid);
return Some(item);
Some(item)
}
}

View File

@ -186,7 +186,7 @@ fn update_evicts_conflicting_tx() {
let _ = cg.insert_checkpoint(cp_a).expect("should insert cp");
let _ = cg.insert_checkpoint(cp_b).expect("should insert cp");
let _ = cg
.insert_tx(tx_a.clone(), TxHeight::Confirmed(0))
.insert_tx(tx_a, TxHeight::Confirmed(0))
.expect("should insert tx");
let _ = cg
.insert_tx(tx_b.clone(), TxHeight::Confirmed(1))
@ -213,7 +213,7 @@ fn update_evicts_conflicting_tx() {
.into(),
},
graph: tx_graph::Additions {
tx: [tx_b2.clone()].into(),
tx: [tx_b2].into(),
txout: [].into(),
},
};
@ -460,7 +460,7 @@ fn test_apply_changes_reintroduce_tx() {
let mut cg = ChainGraph::default();
let _ = cg.insert_checkpoint(block1).unwrap();
let _ = cg.insert_checkpoint(block2a).unwrap();
let _ = cg.insert_tx(tx1.clone(), TxHeight::Confirmed(1)).unwrap();
let _ = cg.insert_tx(tx1, TxHeight::Confirmed(1)).unwrap();
let _ = cg.insert_tx(tx2a.clone(), TxHeight::Confirmed(2)).unwrap();
cg
};
@ -647,7 +647,7 @@ fn test_evict_descendants() {
);
let err = cg
.insert_tx_preview(tx_conflict.clone(), TxHeight::Unconfirmed)
.insert_tx_preview(tx_conflict, TxHeight::Unconfirmed)
.expect_err("must fail due to conflicts");
assert!(matches!(err, InsertTxError::UnresolvableConflict(_)));
}

View File

@ -69,8 +69,8 @@ fn test_balance() {
let mut tracker = KeychainTracker::<Keychain, TxHeight>::default();
let one = Descriptor::from_str("tr([73c5da0a/86'/0'/0']xpub6BgBgsespWvERF3LHQu6CnqdvfEvtMcQjYrcRzx53QJjSxarj2afYWcLteoGVky7D3UKDP9QyrLprQ3VCECoY49yfdDEHGCtMMj92pReUsQ/0/*)#rg247h69").unwrap();
let two = Descriptor::from_str("tr([73c5da0a/86'/0'/0']xpub6BgBgsespWvERF3LHQu6CnqdvfEvtMcQjYrcRzx53QJjSxarj2afYWcLteoGVky7D3UKDP9QyrLprQ3VCECoY49yfdDEHGCtMMj92pReUsQ/1/*)#ju05rz2a").unwrap();
tracker.add_keychain(Keychain::One, one.clone());
tracker.add_keychain(Keychain::Two, two.clone());
tracker.add_keychain(Keychain::One, one);
tracker.add_keychain(Keychain::Two, two);
let tx1 = Transaction {
version: 0x01,
@ -126,9 +126,9 @@ fn test_balance() {
})
.unwrap();
let should_trust = |keychain: &Keychain| match keychain {
&Keychain::One => false,
&Keychain::Two => true,
let should_trust = |keychain: &Keychain| match *keychain {
Keychain::One => false,
Keychain::Two => true,
};
assert_eq!(tracker.balance(should_trust), Balance::default());
@ -172,9 +172,7 @@ fn test_balance() {
}
);
let _ = tracker
.insert_tx(tx1.clone(), TxHeight::Confirmed(1))
.unwrap();
let _ = tracker.insert_tx(tx1, TxHeight::Confirmed(1)).unwrap();
assert_eq!(
tracker.balance(should_trust),
@ -186,9 +184,7 @@ fn test_balance() {
}
);
let _ = tracker
.insert_tx(tx2.clone(), TxHeight::Confirmed(2))
.unwrap();
let _ = tracker.insert_tx(tx2, TxHeight::Confirmed(2)).unwrap();
assert_eq!(
tracker.balance(should_trust),

View File

@ -147,8 +147,8 @@ fn invalidate_a_checkpoint_and_try_and_move_tx_when_it_wasnt_within_invalidation
chain1.determine_changeset(&chain2),
Err(UpdateError::TxInconsistent {
txid: h!("tx0"),
original_pos: TxHeight::Confirmed(0).into(),
update_pos: TxHeight::Confirmed(1).into(),
original_pos: TxHeight::Confirmed(0),
update_pos: TxHeight::Confirmed(1),
})
);
}

View File

@ -42,10 +42,10 @@ fn spk_txout_sent_and_received() {
output: vec![
TxOut {
value: 20_000,
script_pubkey: spk2.clone(),
script_pubkey: spk2,
},
TxOut {
script_pubkey: spk1.clone(),
script_pubkey: spk1,
value: 30_000,
},
],
@ -62,15 +62,15 @@ fn mark_used() {
let mut spk_index = SpkTxOutIndex::default();
spk_index.insert_spk(1, spk1.clone());
spk_index.insert_spk(2, spk2.clone());
spk_index.insert_spk(2, spk2);
assert_eq!(spk_index.is_used(&1), false);
assert!(!spk_index.is_used(&1));
spk_index.mark_used(&1);
assert_eq!(spk_index.is_used(&1), true);
assert!(spk_index.is_used(&1));
spk_index.unmark_used(&1);
assert_eq!(spk_index.is_used(&1), false);
assert!(!spk_index.is_used(&1));
spk_index.mark_used(&1);
assert_eq!(spk_index.is_used(&1), true);
assert!(spk_index.is_used(&1));
let tx1 = Transaction {
version: 0x02,
@ -78,15 +78,14 @@ fn mark_used() {
input: vec![],
output: vec![TxOut {
value: 42_000,
script_pubkey: spk1.clone(),
script_pubkey: spk1,
}],
};
spk_index.scan(&tx1);
spk_index.unmark_used(&1);
assert_eq!(
assert!(
spk_index.is_used(&1),
true,
"even though we unmark_used it doesn't matter because there was a tx scanned that used it"
);
}
@ -94,8 +93,8 @@ fn mark_used() {
#[test]
fn unmark_used_does_not_result_in_invalid_representation() {
let mut spk_index = SpkTxOutIndex::default();
assert_eq!(spk_index.unmark_used(&0), false);
assert_eq!(spk_index.unmark_used(&1), false);
assert_eq!(spk_index.unmark_used(&2), false);
assert!(!spk_index.unmark_used(&0));
assert!(!spk_index.unmark_used(&1));
assert!(!spk_index.unmark_used(&2));
assert!(spk_index.unused_spks(..).collect::<Vec<_>>().is_empty());
}

View File

@ -128,10 +128,10 @@ fn insert_tx_graph_keeps_track_of_spend() {
let _ = graph1.insert_tx(tx2.clone());
let _ = graph2.insert_tx(tx2.clone());
let _ = graph2.insert_tx(tx1.clone());
let _ = graph2.insert_tx(tx1);
assert_eq!(
&*graph1.outspends(op),
graph1.outspends(op),
&iter::once(tx2.txid()).collect::<HashSet<_>>()
);
assert_eq!(graph2.outspends(op), graph1.outspends(op));
@ -173,7 +173,7 @@ fn insert_tx_displaces_txouts() {
vout: 0,
},
TxOut {
value: 1337_000,
value: 1_337_000,
script_pubkey: Script::default(),
},
);
@ -231,7 +231,7 @@ fn insert_txout_does_not_displace_tx() {
vout: 0,
},
TxOut {
value: 1337_000,
value: 1_337_000,
script_pubkey: Script::default(),
},
);

View File

@ -97,9 +97,8 @@ pub trait ElectrumExt {
impl ElectrumExt for Client {
fn get_tip(&self) -> Result<(u32, BlockHash), Error> {
// TODO: unsubscribe when added to the client, or is there a better call to use here?
Ok(self
.block_headers_subscribe()
.map(|data| (data.height as u32, data.header.block_hash()))?)
self.block_headers_subscribe()
.map(|data| (data.height as u32, data.header.block_hash()))
}
fn scan<K: Ord + Clone>(
@ -139,7 +138,7 @@ impl ElectrumExt for Client {
batch_size,
) {
Err(InternalError::Reorg) => continue,
Err(InternalError::ElectrumError(e)) => return Err(e.into()),
Err(InternalError::ElectrumError(e)) => return Err(e),
Ok(mut spks) => scanned_spks.append(&mut spks),
};
}
@ -152,7 +151,7 @@ impl ElectrumExt for Client {
batch_size,
) {
Err(InternalError::Reorg) => continue,
Err(InternalError::ElectrumError(e)) => return Err(e.into()),
Err(InternalError::ElectrumError(e)) => return Err(e),
Ok(spks) => scanned_spks.extend(
spks.into_iter()
.map(|(spk_i, spk)| ((keychain.clone(), spk_i), spk)),
@ -163,13 +162,13 @@ impl ElectrumExt for Client {
match populate_with_txids(self, &mut update, &mut txids.iter().cloned()) {
Err(InternalError::Reorg) => continue,
Err(InternalError::ElectrumError(e)) => return Err(e.into()),
Err(InternalError::ElectrumError(e)) => return Err(e),
Ok(_) => {}
}
match populate_with_outpoints(self, &mut update, &mut outpoints.iter().cloned()) {
Err(InternalError::Reorg) => continue,
Err(InternalError::ElectrumError(e)) => return Err(e.into()),
Err(InternalError::ElectrumError(e)) => return Err(e),
Ok(_txs) => { /* [TODO] cache full txs to reduce bandwidth */ }
}
@ -284,7 +283,7 @@ impl<K: Ord + Clone + Debug> ElectrumUpdate<K, TxHeight> {
.into_iter()
.zip(
client
.batch_block_header(heights.clone())?
.batch_block_header(heights)?
.into_iter()
.map(|bh| bh.time as u64),
)
@ -307,7 +306,7 @@ impl<K: Ord + Clone + Debug> ElectrumUpdate<K, TxHeight> {
Ok(ElectrumUpdate {
chain_update: new_update,
last_active_indices: self.last_active_indices.clone(),
last_active_indices: self.last_active_indices,
})
}
}
@ -320,15 +319,15 @@ enum InternalError {
impl From<electrum_client::Error> for InternalError {
fn from(value: electrum_client::Error) -> Self {
Self::ElectrumError(value.into())
Self::ElectrumError(value)
}
}
fn get_tip(client: &Client) -> Result<(u32, BlockHash), Error> {
// TODO: unsubscribe when added to the client, or is there a better call to use here?
Ok(client
client
.block_headers_subscribe()
.map(|data| (data.height as u32, data.header.block_hash()))?)
.map(|data| (data.height as u32, data.header.block_hash()))
}
/// Prepare an update sparsechain "template" based on the checkpoints of the `local_chain`.

View File

@ -34,6 +34,7 @@ pub trait EsploraExt {
/// parallel.
///
/// [`ChainPosition`]: bdk_chain::sparse_chain::ChainPosition
#[allow(clippy::result_large_err)] // FIXME
fn scan<K: Ord + Clone>(
&self,
local_chain: &BTreeMap<u32, BlockHash>,
@ -47,6 +48,7 @@ pub trait EsploraExt {
/// Convenience method to call [`scan`] without requiring a keychain.
///
/// [`scan`]: EsploraExt::scan
#[allow(clippy::result_large_err)] // FIXME
fn scan_without_keychain(
&self,
local_chain: &BTreeMap<u32, BlockHash>,
@ -127,13 +129,12 @@ impl EsploraExt for esplora_client::BlockingClient {
let mut spks = spks.into_iter();
let mut last_active_index = None;
let mut empty_scripts = 0;
type IndexWithTxs = (u32, Vec<esplora_client::Tx>);
loop {
let handles = (0..parallel_requests)
.filter_map(
|_| -> Option<
std::thread::JoinHandle<Result<(u32, Vec<esplora_client::Tx>), _>>,
> {
|_| -> Option<std::thread::JoinHandle<Result<IndexWithTxs, _>>> {
let (index, script) = spks.next()?;
let client = self.clone();
Some(std::thread::spawn(move || {

View File

@ -6,7 +6,7 @@ use bdk_chain::{
};
pub use file_store::*;
impl<'de, K, P> PersistBackend<K, P> for KeychainStore<K, P>
impl<K, P> PersistBackend<K, P> for KeychainStore<K, P>
where
K: Ord + Clone + core::fmt::Debug,
P: ChainPosition,

View File

@ -4,7 +4,6 @@ use bdk_chain::{
TxHeight,
};
use bdk_file_store::{FileError, IterError, KeychainStore, MAGIC_BYTES, MAGIC_BYTES_LEN};
use serde;
use std::{
io::{Read, Write},
vec::Vec,

View File

@ -48,7 +48,7 @@ pub struct ScanOptions {
}
fn main() -> anyhow::Result<()> {
let (args, keymap, mut tracker, mut db) = cli::init::<ElectrumCommands, _>()?;
let (args, keymap, tracker, db) = cli::init::<ElectrumCommands, _>()?;
let electrum_url = match args.network {
Network::Bitcoin => "ssl://electrum.blockstream.info:50002",
@ -57,10 +57,7 @@ fn main() -> anyhow::Result<()> {
Network::Signet => "tcp://signet-electrumx.wakiyamap.dev:50001",
};
let config = electrum_client::Config::builder()
.validate_domain(match args.network {
Network::Bitcoin => true,
_ => false,
})
.validate_domain(matches!(args.network, Network::Bitcoin))
.build();
let client = electrum_client::Client::from_config(electrum_url, config)?;
@ -74,8 +71,8 @@ fn main() -> anyhow::Result<()> {
let _txid = client.transaction_broadcast(transaction)?;
Ok(())
},
&mut tracker,
&mut db,
&tracker,
&db,
args.network,
&keymap,
)
@ -148,7 +145,7 @@ fn main() -> anyhow::Result<()> {
.txout_index
.all_spks()
.iter()
.map(|(k, v)| (k.clone(), v.clone()))
.map(|(k, v)| (*k, v.clone()))
.collect::<Vec<_>>();
spks = Box::new(spks.chain(all_spks.into_iter().map(|(index, script)| {
eprintln!("scanning {:?}", index);
@ -159,7 +156,7 @@ fn main() -> anyhow::Result<()> {
let unused_spks = tracker
.txout_index
.unused_spks(..)
.map(|(k, v)| (k.clone(), v.clone()))
.map(|(k, v)| (*k, v.clone()))
.collect::<Vec<_>>();
spks = Box::new(spks.chain(unused_spks.into_iter().map(|(index, script)| {
eprintln!(

View File

@ -153,7 +153,7 @@ fn main() -> anyhow::Result<()> {
.txout_index
.all_spks()
.iter()
.map(|(k, v)| (k.clone(), v.clone()))
.map(|(k, v)| (*k, v.clone()))
.collect::<Vec<_>>();
spks = Box::new(spks.chain(all_spks.into_iter().map(|(index, script)| {
eprintln!("scanning {:?}", index);
@ -164,7 +164,7 @@ fn main() -> anyhow::Result<()> {
let unused_spks = tracker
.txout_index
.unused_spks(..)
.map(|(k, v)| (k.clone(), v.clone()))
.map(|(k, v)| (*k, v.clone()))
.collect::<Vec<_>>();
spks = Box::new(spks.chain(unused_spks.into_iter().map(|(index, script)| {
eprintln!(

View File

@ -227,7 +227,7 @@ where
false => Keychain::External,
};
for (index, spk) in txout_index.revealed_spks_of_keychain(&target_keychain) {
let address = Address::from_script(&spk, network)
let address = Address::from_script(spk, network)
.expect("should always be able to derive address");
println!(
"{:?} {} used:{}",
@ -271,6 +271,7 @@ pub fn run_txo_cmd<K: Debug + Clone + Ord, P: ChainPosition>(
unconfirmed,
} => {
let tracker = tracker.lock().unwrap();
#[allow(clippy::type_complexity)] // FIXME
let txouts: Box<dyn Iterator<Item = (&(K, u32), FullTxOut<P>)>> = match (unspent, spent)
{
(true, false) => Box::new(tracker.full_utxos()),
@ -282,6 +283,7 @@ pub fn run_txo_cmd<K: Debug + Clone + Ord, P: ChainPosition>(
_ => Box::new(tracker.full_txouts()),
};
#[allow(clippy::type_complexity)] // FIXME
let txouts: Box<dyn Iterator<Item = (&(K, u32), FullTxOut<P>)>> =
match (confirmed, unconfirmed) {
(true, false) => Box::new(
@ -310,6 +312,7 @@ pub fn run_txo_cmd<K: Debug + Clone + Ord, P: ChainPosition>(
}
}
#[allow(clippy::type_complexity)] // FIXME
pub fn create_tx<P: ChainPosition>(
value: u64,
address: Address,
@ -482,7 +485,7 @@ pub fn create_tx<P: ChainPosition>(
assert!(
requirements.signatures.sign_with_keymap(
i,
&keymap,
keymap,
&sighash_prevouts,
None,
None,
@ -539,13 +542,13 @@ where
{
match command {
// TODO: Make these functions return stuffs
Commands::Address { addr_cmd } => run_address_cmd(&tracker, &store, addr_cmd, network),
Commands::Address { addr_cmd } => run_address_cmd(tracker, store, addr_cmd, network),
Commands::Balance => {
run_balance_cmd(&tracker);
run_balance_cmd(tracker);
Ok(())
}
Commands::TxOut { txout_cmd } => {
run_txo_cmd(txout_cmd, &tracker, network);
run_txo_cmd(txout_cmd, tracker, network);
Ok(())
}
Commands::Send {
@ -557,7 +560,7 @@ where
// take mutable ref to construct tx -- it is only open for a short time while building it.
let tracker = &mut *tracker.lock().unwrap();
let (transaction, change_info) =
create_tx(value, address, coin_select, tracker, &keymap)?;
create_tx(value, address, coin_select, tracker, keymap)?;
if let Some((change_derivation_changes, (change_keychain, index))) = change_info {
// We must first persist to disk the fact that we've got a new address from the
@ -605,7 +608,7 @@ where
// We failed to broadcast so allow our change address to be used in the future
tracker.txout_index.unmark_used(&keychain, index);
}
Err(e.into())
Err(e)
}
}
}
@ -615,6 +618,7 @@ where
}
}
#[allow(clippy::type_complexity)] // FIXME
pub fn init<C: clap::Subcommand, P>() -> anyhow::Result<(
Args<C>,
KeyMap,

View File

@ -12,10 +12,7 @@ pub enum BranchStrategy {
impl BranchStrategy {
pub fn will_continue(&self) -> bool {
match self {
Self::Continue | Self::SkipInclusion => true,
_ => false,
}
matches!(self, Self::Continue | Self::SkipInclusion)
}
}
@ -69,23 +66,20 @@ impl<'c, S: Ord> Bnb<'c, S> {
/// Attempt to backtrack to the previously selected node's omission branch, return false
/// otherwise (no more solutions).
pub fn backtrack(&mut self) -> bool {
(0..self.pool_pos)
.rev()
.find(|&pos| {
let (index, candidate) = self.pool[pos];
(0..self.pool_pos).rev().any(|pos| {
let (index, candidate) = self.pool[pos];
if self.selection.is_selected(index) {
// deselect last `pos`, so next round will check omission branch
self.pool_pos = pos;
self.selection.deselect(index);
return true;
} else {
self.rem_abs += candidate.value;
self.rem_eff += candidate.effective_value(self.selection.opts.target_feerate);
return false;
}
})
.is_some()
if self.selection.is_selected(index) {
// deselect last `pos`, so next round will check omission branch
self.pool_pos = pos;
self.selection.deselect(index);
true
} else {
self.rem_abs += candidate.value;
self.rem_eff += candidate.effective_value(self.selection.opts.target_feerate);
false
}
})
}
/// Continue down this branch, skip inclusion branch if specified.
@ -106,7 +100,7 @@ impl<'c, S: Ord> Bnb<'c, S> {
self.best_score = score;
return true;
}
return false;
false
}
}
@ -277,7 +271,7 @@ where
}
// check out inclusion branch first
return (BranchStrategy::Continue, None);
(BranchStrategy::Continue, None)
};
// determine sum of absolute and effective values for current selection

View File

@ -341,7 +341,7 @@ impl<'a> CoinSelector<'a> {
})
})?;
(selected - target_value) as u64
selected - target_value
};
let fee_without_drain = fee_without_drain.max(self.opts.min_absolute_fee);
@ -427,17 +427,16 @@ pub struct SelectionError {
impl core::fmt::Display for SelectionError {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
match self {
SelectionError {
selected,
missing,
constraint,
} => write!(
f,
"insufficient coins selected; selected={}, missing={}, unsatisfied_constraint={:?}",
selected, missing, constraint
),
}
let SelectionError {
selected,
missing,
constraint,
} = self;
write!(
f,
"insufficient coins selected; selected={}, missing={}, unsatisfied_constraint={:?}",
selected, missing, constraint
)
}
}

View File

@ -1,5 +1,6 @@
#![allow(unused)]
#![allow(missing_docs)]
#![allow(clippy::all)] // FIXME
//! A spending plan or *plan* for short is a representation of a particular spending path on a
//! descriptor. This allows us to analayze a choice of spending path without producing any
//! signatures or other witness data for it.