Make bdk and bdk_chain work under 1.57.0

- rewrote some parts of the code to deal with the older borrow checker (see the sketch below)
- downgraded hashbrown
Steve Myers, 2023-03-02 22:05:11 -06:00 (committed by Daniela Brozzoni)
parent 3a5d727899
commit 38ef170ed1
GPG Key ID: 7DE4F1FDCED0AB87
16 changed files with 85 additions and 195 deletions
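The most frequent change in this diff is adding `move` to closures inside methods that return `impl Iterator` borrowing from `self`. Below is a minimal, hypothetical sketch of that pattern; the `SpkIndex` type and `unused_spks` method are invented for illustration and are not bdk APIs. With `move`, the closure captures the `&self` reference by value, a form the 1.57 borrow checker accepts.

```rust
// Hypothetical example of the `move`-closure pattern applied throughout this
// commit; the names here are invented for illustration only.
struct SpkIndex {
    spks: Vec<String>,
}

impl SpkIndex {
    // `move` makes the closure capture the `&self` reference by value, so the
    // returned iterator carries the `'_` borrow instead of borrowing a local.
    fn unused_spks(&self) -> impl Iterator<Item = (usize, &str)> + '_ {
        (0..self.spks.len()).map(move |i| (i, self.spks[i].as_str()))
    }
}

fn main() {
    let index = SpkIndex {
        spks: vec!["spk-0".into(), "spk-1".into()],
    };
    for (i, spk) in index.unused_spks() {
        println!("{}: {}", i, spk);
    }
}
```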

@@ -9,8 +9,8 @@ keywords = ["bitcoin", "wallet", "descriptor", "psbt"]
 readme = "README.md"
 license = "MIT OR Apache-2.0"
 authors = ["Bitcoin Dev Kit Developers"]
-edition = "2018"
+edition = "2021"
+rust-version = "1.57"
 
 [dependencies]
 log = "^0.4"

@@ -22,7 +22,7 @@ use alloc::{
 pub use bdk_chain::keychain::Balance;
 use bdk_chain::{
     chain_graph,
-    keychain::{KeychainChangeSet, KeychainScan, KeychainTracker},
+    keychain::{persist, KeychainChangeSet, KeychainScan, KeychainTracker},
     sparse_chain, BlockId, ConfirmationTime, IntoOwned,
 };
 use bitcoin::consensus::encode::serialize;

@@ -48,7 +48,6 @@ pub(crate) mod utils;
 #[cfg(feature = "hardware-signer")]
 #[cfg_attr(docsrs, doc(cfg(feature = "hardware-signer")))]
 pub mod hardwaresigner;
-pub mod persist;
 
 pub use utils::IsDust;

@@ -85,7 +84,7 @@ pub struct Wallet<D = ()> {
     signers: Arc<SignersContainer>,
     change_signers: Arc<SignersContainer>,
     keychain_tracker: KeychainTracker<KeychainKind, ConfirmationTime>,
-    persist: persist::Persist<D>,
+    persist: persist::Persist<KeychainKind, ConfirmationTime, D>,
     network: Network,
     secp: SecpCtx,
 }

@@ -196,7 +195,7 @@ impl<D> Wallet<D> {
         network: Network,
     ) -> Result<Self, NewError<D::LoadError>>
     where
-        D: persist::Backend,
+        D: persist::PersistBackend<KeychainKind, ConfirmationTime>,
     {
         let secp = Secp256k1::new();

@@ -258,7 +257,7 @@ impl<D> Wallet<D> {
     /// (i.e. does not end with /*) then the same address will always be returned for any [`AddressIndex`].
     pub fn get_address(&mut self, address_index: AddressIndex) -> AddressInfo
     where
-        D: persist::Backend,
+        D: persist::PersistBackend<KeychainKind, ConfirmationTime>,
     {
         self._get_address(address_index, KeychainKind::External)
     }

@@ -272,14 +271,14 @@ impl<D> Wallet<D> {
     /// be returned for any [`AddressIndex`].
     pub fn get_internal_address(&mut self, address_index: AddressIndex) -> AddressInfo
     where
-        D: persist::Backend,
+        D: persist::PersistBackend<KeychainKind, ConfirmationTime>,
     {
         self._get_address(address_index, KeychainKind::Internal)
     }
 
     fn _get_address(&mut self, address_index: AddressIndex, keychain: KeychainKind) -> AddressInfo
     where
-        D: persist::Backend,
+        D: persist::PersistBackend<KeychainKind, ConfirmationTime>,
     {
         let keychain = self.map_keychain(keychain);
         let txout_index = &mut self.keychain_tracker.txout_index;

@@ -614,7 +613,7 @@ impl<D> Wallet<D> {
         params: TxParams,
     ) -> Result<(psbt::PartiallySignedTransaction, TransactionDetails), Error>
     where
-        D: persist::Backend,
+        D: persist::PersistBackend<KeychainKind, ConfirmationTime>,
     {
         let external_descriptor = self
             .keychain_tracker

@@ -1689,7 +1688,7 @@ impl<D> Wallet<D> {
     /// [`commit`]: Self::commit
     pub fn apply_update<Tx>(&mut self, update: Update<Tx>) -> Result<(), UpdateError>
     where
-        D: persist::Backend,
+        D: persist::PersistBackend<KeychainKind, ConfirmationTime>,
         Tx: IntoOwned<Transaction> + Clone,
     {
         let changeset = self.keychain_tracker.apply_update(update)?;

@@ -1702,7 +1701,7 @@ impl<D> Wallet<D> {
     /// [`staged`]: Self::staged
     pub fn commit(&mut self) -> Result<(), D::WriteError>
     where
-        D: persist::Backend,
+        D: persist::PersistBackend<KeychainKind, ConfirmationTime>,
     {
         self.persist.commit()
     }

@@ -1,124 +0,0 @@
-//! Persistence for changes made to a [`Wallet`].
-//!
-//! BDK's [`Wallet`] needs somewhere to persist changes it makes during operation.
-//! Operations like giving out a new address are crucial to persist so that next time the
-//! application is loaded it can find transactions related to that address.
-//!
-//! Note that `Wallet` does not read this persisted data during operation since it always has a copy
-//! in memory
-//!
-//! [`Wallet`]: crate::Wallet
-
-use crate::KeychainKind;
-use bdk_chain::{keychain::KeychainTracker, ConfirmationTime};
-
-/// `Persist` wraps a [`Backend`] to create a convienient staging area for changes before they are
-/// persisted. Not all changes made to the [`Wallet`] need to be written to disk right away so you
-/// can use [`Persist::stage`] to *stage* it first and then [`Persist::commit`] to finally write it
-/// to disk.
-///
-/// [`Wallet`]: crate::Wallet
-#[derive(Debug)]
-pub struct Persist<P> {
-    backend: P,
-    stage: ChangeSet,
-}
-
-impl<P> Persist<P> {
-    /// Create a new `Persist` from a [`Backend`]
-    pub fn new(backend: P) -> Self {
-        Self {
-            backend,
-            stage: Default::default(),
-        }
-    }
-
-    /// Stage a `changeset` to later persistence with [`commit`].
-    ///
-    /// [`commit`]: Self::commit
-    pub fn stage(&mut self, changeset: ChangeSet) {
-        self.stage.append(changeset)
-    }
-
-    /// Get the changes that haven't been commited yet
-    pub fn staged(&self) -> &ChangeSet {
-        &self.stage
-    }
-
-    /// Commit the staged changes to the underlying persistence backend.
-    ///
-    /// Retuns a backend defined error if this fails
-    pub fn commit(&mut self) -> Result<(), P::WriteError>
-    where
-        P: Backend,
-    {
-        self.backend.append_changeset(&self.stage)?;
-        self.stage = Default::default();
-        Ok(())
-    }
-}
-
-/// A persistence backend for [`Wallet`]
-///
-/// [`Wallet`]: crate::Wallet
-pub trait Backend {
-    /// The error the backend returns when it fails to write
-    type WriteError: core::fmt::Debug;
-
-    /// The error the backend returns when it fails to load
-    type LoadError: core::fmt::Debug;
-
-    /// Appends a new changeset to the persistance backend.
-    ///
-    /// It is up to the backend what it does with this. It could store every changeset in a list or
-    /// it insert the actual changes to a more structured database. All it needs to guarantee is
-    /// that [`load_into_keychain_tracker`] restores a keychain tracker to what it should be if all
-    /// changesets had been applied sequentially.
-    ///
-    /// [`load_into_keychain_tracker`]: Self::load_into_keychain_tracker
-    fn append_changeset(&mut self, changeset: &ChangeSet) -> Result<(), Self::WriteError>;
-
-    /// Applies all the changesets the backend has received to `tracker`.
-    fn load_into_keychain_tracker(
-        &mut self,
-        tracker: &mut KeychainTracker<KeychainKind, ConfirmationTime>,
-    ) -> Result<(), Self::LoadError>;
-}
-
-#[cfg(feature = "file-store")]
-mod file_store {
-    use super::*;
-    use bdk_chain::file_store::{IterError, KeychainStore};
-
-    type FileStore = KeychainStore<KeychainKind, ConfirmationTime>;
-
-    impl Backend for FileStore {
-        type WriteError = std::io::Error;
-        type LoadError = IterError;
-
-        fn append_changeset(&mut self, changeset: &ChangeSet) -> Result<(), Self::WriteError> {
-            self.append_changeset(changeset)
-        }
-
-        fn load_into_keychain_tracker(
-            &mut self,
-            tracker: &mut KeychainTracker<KeychainKind, ConfirmationTime>,
-        ) -> Result<(), Self::LoadError> {
-            self.load_into_keychain_tracker(tracker)
-        }
-    }
-}
-
-impl Backend for () {
-    type WriteError = ();
-    type LoadError = ();
-
-    fn append_changeset(&mut self, _changeset: &ChangeSet) -> Result<(), Self::WriteError> {
-        Ok(())
-    }
-
-    fn load_into_keychain_tracker(
-        &mut self,
-        _tracker: &mut KeychainTracker<KeychainKind, ConfirmationTime>,
-    ) -> Result<(), Self::LoadError> {
-        Ok(())
-    }
-}
-
-#[cfg(feature = "file-store")]
-pub use file_store::*;
-
-use super::ChangeSet;
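The doc comments in the deleted module describe the contract that carries over to the replacement in `bdk_chain`: a backend may simply store every appended changeset, as long as replaying them in order rebuilds the tracker state. Here is a small self-contained sketch of that replay idea using invented toy types (a `BTreeMap` changeset and a `Vec`-backed log), not the real bdk_chain API.

```rust
// Minimal sketch of the replay guarantee described in the deleted Backend docs:
// a backend may keep every appended changeset in a list, as long as replaying
// them in order reproduces the final state. All types here are invented.
use std::collections::BTreeMap;

type ChangeSet = BTreeMap<String, u32>; // keychain name -> last revealed index

#[derive(Default)]
struct VecBackend {
    log: Vec<ChangeSet>,
}

impl VecBackend {
    /// Store the changeset as-is (the "append_changeset" half of the contract).
    fn append_changeset(&mut self, changeset: &ChangeSet) {
        self.log.push(changeset.clone());
    }

    /// Replay every changeset in order; later entries win (the "load" half).
    fn load(&self) -> ChangeSet {
        let mut state = ChangeSet::new();
        for changeset in &self.log {
            state.extend(changeset.clone());
        }
        state
    }
}

fn main() {
    let mut backend = VecBackend::default();
    let mut first = ChangeSet::new();
    first.insert("external".to_string(), 3);
    let mut second = ChangeSet::new();
    second.insert("external".to_string(), 7);
    backend.append_changeset(&first);
    backend.append_changeset(&second);
    assert_eq!(backend.load().get("external"), Some(&7));
}
```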

@@ -39,6 +39,7 @@
 use crate::collections::BTreeMap;
 use crate::collections::HashSet;
 use alloc::{boxed::Box, rc::Rc, string::String, vec::Vec};
+use bdk_chain::ConfirmationTime;
 use core::cell::RefCell;
 use core::marker::PhantomData;

@@ -525,7 +526,7 @@ impl<'a, D, Cs: CoinSelectionAlgorithm, Ctx: TxBuilderContext> TxBuilder<'a, D,
     /// [`BIP174`]: https://github.com/bitcoin/bips/blob/master/bip-0174.mediawiki
     pub fn finish(self) -> Result<(Psbt, TransactionDetails), Error>
     where
-        D: persist::Backend,
+        D: persist::PersistBackend<KeychainKind, ConfirmationTime>,
     {
         self.wallet
             .borrow_mut()

@@ -2,6 +2,7 @@
 name = "bdk_chain"
 version = "0.3.1"
 edition = "2021"
+rust-version = "1.57"
 homepage = "https://bitcoindevkit.org"
 repository = "https://github.com/bitcoindevkit/bdk"
 documentation = "https://docs.rs/bdk_chain"

@@ -14,8 +15,10 @@ readme = "../README.md"
 [dependencies]
 bitcoin = { version = "0.29" }
 serde_crate = { package = "serde", version = "1", optional = true, features = ["derive"] }
 # Use hashbrown as a feature flag to have HashSet and HashMap from it.
-hashbrown = { version = "0.13.2", optional = true }
+# note version 0.13 breaks outs MSRV.
+hashbrown = { version = "0.12", optional = true, features = ["serde"] }
 miniscript = { version = "9.0.0", optional = true }
 
 [dev-dependencies]

@@ -294,7 +294,7 @@ where
         &'a self,
         tx: &'a Transaction,
     ) -> impl Iterator<Item = (&'a P, Txid)> + 'a {
-        self.graph.walk_conflicts(tx, |_, conflict_txid| {
+        self.graph.walk_conflicts(tx, move |_, conflict_txid| {
             self.chain
                 .tx_position(conflict_txid)
                 .map(|conflict_pos| (conflict_pos, conflict_txid))

@@ -309,39 +309,42 @@ where
         &self,
         changeset: &mut ChangeSet<P, T>,
     ) -> Result<(), UnresolvableConflict<P>> {
-        let chain_conflicts = changeset
-            .chain
-            .txids
-            .iter()
-            // we want to find new txid additions by the changeset (all txid entries in the
-            // changeset with Some(position_change))
-            .filter_map(|(&txid, pos_change)| pos_change.as_ref().map(|pos| (txid, pos)))
-            // we don't care about txids that move, only newly added txids
-            .filter(|&(txid, _)| self.chain.tx_position(txid).is_none())
-            // full tx should exist (either in graph, or additions)
-            .filter_map(|(txid, pos)| {
-                let full_tx = self
-                    .graph
-                    .get_tx(txid)
-                    .or_else(|| {
-                        changeset
-                            .graph
-                            .tx
-                            .iter()
-                            .find(|tx| tx.as_tx().txid() == txid)
-                    })
-                    .map(|tx| (txid, tx, pos));
-                debug_assert!(full_tx.is_some(), "should have full tx at this point");
-                full_tx
-            })
-            .flat_map(|(new_txid, new_tx, new_pos)| {
-                self.tx_conflicts_in_chain(new_tx.as_tx()).map(
-                    move |(conflict_pos, conflict_txid)| {
-                        (new_pos.clone(), new_txid, conflict_pos, conflict_txid)
-                    },
-                )
-            })
-            .collect::<Vec<_>>();
+        let mut chain_conflicts = vec![];
+
+        for (&txid, pos_change) in &changeset.chain.txids {
+            let pos = match pos_change {
+                Some(pos) => {
+                    // Ignore txs that are still in the chain -- we only care about new ones
+                    if self.chain.tx_position(txid).is_some() {
+                        continue;
+                    }
+                    pos
+                }
+                // Ignore txids that are being delted by the change (they can't conflict)
+                None => continue,
+            };
+
+            let mut full_tx = self.graph.get_tx(txid);
+
+            if full_tx.is_none() {
+                full_tx = changeset
+                    .graph
+                    .tx
+                    .iter()
+                    .find(|tx| tx.as_tx().txid() == txid)
+            }
+
+            debug_assert!(full_tx.is_some(), "should have full tx at this point");
+
+            let full_tx = match full_tx {
+                Some(full_tx) => full_tx,
+                None => continue,
+            };
+
+            for (conflict_pos, conflict_txid) in self.tx_conflicts_in_chain(full_tx.as_tx()) {
+                chain_conflicts.push((pos.clone(), txid, conflict_pos, conflict_txid))
+            }
+        }
 
         for (update_pos, update_txid, conflicting_pos, conflicting_txid) in chain_conflicts {
             // We have found a tx that conflicts with our update txid. Only allow this when the

@@ -411,7 +414,7 @@ where
     pub fn transactions_in_chain(&self) -> impl DoubleEndedIterator<Item = (&P, &T)> {
         self.chain
             .txids()
-            .map(|(pos, txid)| (pos, self.graph.get_tx(*txid).expect("must exist")))
+            .map(move |(pos, txid)| (pos, self.graph.get_tx(*txid).expect("must exist")))
     }
 
     /// Finds the transaction in the chain that spends `outpoint` given the input/output
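The large hunk above swaps a closure-heavy `filter_map`/`flat_map` chain for an explicit `for` loop that pushes into a `Vec`, which, per the commit message, is friendlier to the older borrow checker. A standalone sketch of that refactor shape on invented types follows; both forms behave identically here, the point is only the structural rewrite.

```rust
// Hypothetical sketch of the refactor shape used above: a closure-heavy
// iterator chain becomes an explicit loop that pushes into a Vec. Types and
// method names are invented for illustration.
struct Chain {
    confirmed: Vec<u32>,
}

impl Chain {
    fn position(&self, txid: u32) -> Option<usize> {
        self.confirmed.iter().position(|&t| t == txid)
    }

    // Chained style: each closure borrows `self`.
    fn conflicts_chained(&self, candidates: &[u32]) -> Vec<(u32, usize)> {
        candidates
            .iter()
            .filter_map(|&txid| self.position(txid).map(|pos| (txid, pos)))
            .collect()
    }

    // Loop style: one plain borrow of `self` per iteration, no closure captures.
    fn conflicts_loop(&self, candidates: &[u32]) -> Vec<(u32, usize)> {
        let mut out = vec![];
        for &txid in candidates {
            let pos = match self.position(txid) {
                Some(pos) => pos,
                None => continue,
            };
            out.push((txid, pos));
        }
        out
    }
}

fn main() {
    let chain = Chain { confirmed: vec![10, 20, 30] };
    assert_eq!(
        chain.conflicts_chained(&[20, 99]),
        chain.conflicts_loop(&[20, 99])
    );
}
```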

@@ -23,7 +23,7 @@ pub struct Persist<K, P, B> {
     stage: keychain::KeychainChangeSet<K, P>,
 }
 
-impl<K, P, B: PersistBackend<K, P>> Persist<K, P, B> {
+impl<K, P, B> Persist<K, P, B> {
     /// Create a new `Persist` from a [`PersistBackend`].
     pub fn new(backend: B) -> Self {
         Self {

@@ -51,7 +51,10 @@ impl<K, P, B: PersistBackend<K, P>> Persist<K, P, B> {
     /// Commit the staged changes to the underlying persistence backend.
     ///
     /// Retuns a backend defined error if this fails
-    pub fn commit(&mut self) -> Result<(), B::WriteError> {
+    pub fn commit(&mut self) -> Result<(), B::WriteError>
+    where
+        B: PersistBackend<K, P>,
+    {
         self.backend.append_changeset(&self.stage)?;
         self.stage = Default::default();
         Ok(())
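The hunk above relaxes the bounds on `Persist`: the `PersistBackend<K, P>` requirement moves off the `impl` header and onto `commit`, the only method that actually touches the backend. Here is a generic sketch of that pattern with an invented `Backend` trait standing in for the real one.

```rust
// Sketch of the bound-relaxation pattern from the hunk above: the trait bound
// lives only on the method that needs it, so `new` and `stage` work for any B.
// `Backend`, `Persist`, and `Logger` here are invented stand-ins.
trait Backend {
    type WriteError: std::fmt::Debug;
    fn append(&mut self, data: &str) -> Result<(), Self::WriteError>;
}

struct Persist<B> {
    backend: B,
    stage: String,
}

impl<B> Persist<B> {
    // No `B: Backend` bound needed to construct or stage...
    fn new(backend: B) -> Self {
        Self {
            backend,
            stage: String::new(),
        }
    }

    fn stage(&mut self, data: &str) {
        self.stage.push_str(data);
    }

    // ...the bound appears only where the backend is used.
    fn commit(&mut self) -> Result<(), B::WriteError>
    where
        B: Backend,
    {
        self.backend.append(&self.stage)?;
        self.stage.clear();
        Ok(())
    }
}

// Toy backend that records every committed payload in memory.
struct Logger(Vec<String>);

impl Backend for Logger {
    type WriteError = ();
    fn append(&mut self, data: &str) -> Result<(), ()> {
        self.0.push(data.to_string());
        Ok(())
    }
}

fn main() {
    let mut persist = Persist::new(Logger(Vec::new()));
    persist.stage("changeset-1");
    persist.commit().expect("in-memory append cannot fail");
    assert_eq!(persist.backend.0, vec!["changeset-1".to_string()]);
}
```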

@@ -125,7 +125,7 @@ where
     pub fn full_txouts(&self) -> impl Iterator<Item = (&(K, u32), FullTxOut<P>)> + '_ {
         self.txout_index
             .txouts()
-            .filter_map(|(spk_i, op, _)| Some((spk_i, self.chain_graph.full_txout(op)?)))
+            .filter_map(move |(spk_i, op, _)| Some((spk_i, self.chain_graph.full_txout(op)?)))
     }
 
     /// Iterates through [`FullTxOut`]s that are unspent outputs.

@@ -423,7 +423,7 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
                     Cow::Owned(descriptor.clone()),
                     next_reveal_index..index + 1,
                 ),
-                DerivationAdditions([(keychain.clone(), index)].into()),
+                DerivationAdditions(core::iter::once((keychain.clone(), index)).collect()),
             )
         }
         None => (

@@ -575,11 +575,17 @@ where
         .take_while(move |&index| has_wildcard || index == 0)
         // we can only iterate over non-hardened indices
         .take_while(|&index| index <= BIP32_MAX_INDEX)
-        // take until failure
-        .map_while(move |index| {
-            descriptor
-                .derived_descriptor(&secp, index)
-                .map(|desc| (index, desc.script_pubkey()))
-                .ok()
-        })
+        .map(
+            move |index| -> Result<_, miniscript::descriptor::ConversionError> {
+                Ok((
+                    index,
+                    descriptor
+                        .at_derivation_index(index)
+                        .derived_descriptor(&secp)?
+                        .script_pubkey(),
+                ))
+            },
+        )
+        .take_while(Result::is_ok)
+        .map(Result::unwrap)
 }
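The second hunk above replaces `map_while` with a `map` to `Result` followed by `take_while(Result::is_ok)` and `map(Result::unwrap)`, so the iterator still stops at the first derivation failure. Below is a standalone sketch of that shape with an invented fallible `derive` function standing in for descriptor derivation.

```rust
// Standalone sketch of the "stop at the first failure" shape used above:
// map each index to a Result, then take_while(is_ok) + unwrap.
// The `derive` function here is invented for illustration.
fn derive(index: u32) -> Result<String, String> {
    if index < 3 {
        Ok(format!("spk-{}", index))
    } else {
        Err("cannot derive past index 2".to_string())
    }
}

fn derived_spks() -> impl Iterator<Item = (u32, String)> {
    (0u32..)
        .map(|index| -> Result<_, String> { Ok((index, derive(index)?)) })
        .take_while(Result::is_ok)
        .map(Result::unwrap)
}

fn main() {
    let spks: Vec<_> = derived_spks().collect();
    assert_eq!(spks.len(), 3);
    assert_eq!(spks[0], (0, "spk-0".to_string()));
}
```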

@@ -949,7 +949,7 @@ impl<P: ChainPosition> SparseChain<P> {
         changeset
             .txids
             .iter()
-            .filter(|(&txid, pos)| {
+            .filter(move |(&txid, pos)| {
                 pos.is_some() /*it was not a deletion*/ &&
                 self.tx_position(txid).is_none() /* we don't have the txid already */
             })

@@ -79,13 +79,12 @@ impl<I: Clone + Ord> SpkTxOutIndex<I> {
     /// See [`ForEachTxout`] for the types that support this.
     ///
     /// [`ForEachTxout`]: crate::ForEachTxOut
-    pub fn scan(&mut self, txouts: &impl ForEachTxOut) -> BTreeSet<&I> {
-        // let scanner = &mut SpkTxOutScanner::new(self);
+    pub fn scan(&mut self, txouts: &impl ForEachTxOut) -> BTreeSet<I> {
         let mut scanned_indices = BTreeSet::new();
         txouts.for_each_txout(|(op, txout)| {
             if let Some(spk_i) = scan_txout!(self, op, txout) {
-                scanned_indices.insert(spk_i);
+                scanned_indices.insert(spk_i.clone());
             }
         });

@@ -207,7 +206,7 @@ impl<I: Clone + Ord> SpkTxOutIndex<I> {
     {
         self.unused
             .range(range)
-            .map(|index| (index, self.spk_at_index(index).expect("must exist")))
+            .map(move |index| (index, self.spk_at_index(index).expect("must exist")))
     }
 
     /// Returns whether the script pubkey at `index` has been used or not.
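The `scan` change above returns owned indices (`BTreeSet<I>`, cloning each index) instead of `BTreeSet<&I>`, so the caller is not left holding a borrow of the index after a `&mut self` call. A minimal sketch of that owned-return pattern on invented types:

```rust
use std::collections::BTreeSet;

// Sketch of the owned-return pattern from the scan() change above: cloning the
// keys into the returned set means the caller holds no borrow of the index
// afterwards. `Index` and its fields are invented for illustration.
#[derive(Default)]
struct Index {
    used: BTreeSet<String>,
}

impl Index {
    // Returns owned keys, so `self` can be mutated again immediately after.
    fn scan(&mut self, incoming: &[&str]) -> BTreeSet<String> {
        let mut scanned = BTreeSet::new();
        for key in incoming {
            let key = key.to_string();
            self.used.insert(key.clone());
            scanned.insert(key);
        }
        scanned
    }
}

fn main() {
    let mut index = Index::default();
    let scanned = index.scan(&["a", "b"]);
    // No outstanding borrow of `index`: we can keep using it while holding `scanned`.
    index.scan(&["c"]);
    assert_eq!(scanned.len(), 2);
}
```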

@@ -419,7 +419,7 @@ impl<T> TxGraph<T> {
         tx.input
             .iter()
             .enumerate()
-            .filter_map(|(vin, txin)| self.spends.get(&txin.previous_output).zip(Some(vin)))
+            .filter_map(move |(vin, txin)| self.spends.get(&txin.previous_output).zip(Some(vin)))
             .flat_map(|(spends, vin)| core::iter::repeat(vin).zip(spends.iter().cloned()))
             .filter(move |(_, conflicting_txid)| *conflicting_txid != txid)
     }

@@ -474,7 +474,7 @@ impl<T> Additions<T> {
                 .output
                 .iter()
                 .enumerate()
-                .map(|(vout, txout)| (OutPoint::new(tx.as_tx().txid(), vout as _), txout))
+                .map(move |(vout, txout)| (OutPoint::new(tx.as_tx().txid(), vout as _), txout))
             })
             .chain(self.txout.iter().map(|(op, txout)| (*op, txout)))
     }

@@ -253,7 +253,7 @@ fn test_wildcard_derivations() {
     (0..=15)
         .into_iter()
-        .chain([17, 20, 23].into_iter())
+        .chain(vec![17, 20, 23].into_iter())
         .for_each(|index| assert!(txout_index.mark_used(&TestKeychain::External, index)));
 
     assert_eq!(txout_index.next_index(&TestKeychain::External), (26, true));

@@ -62,7 +62,7 @@ fn main() -> anyhow::Result<()> {
     let client = electrum_client::Client::from_config(electrum_url, config)?;
 
-    let electrum_cmd = match args.command {
+    let electrum_cmd = match args.command.clone() {
         cli::Commands::ChainSpecific(electrum_cmd) => electrum_cmd,
         general_command => {
             return cli::handle_commands(

@@ -10,7 +10,7 @@ bdk_file_store = { path = "../../crates/file_store" }
 bdk_tmp_plan = { path = "../../nursery/tmp_plan" }
 bdk_coin_select = { path = "../../nursery/coin_select" }
-clap = { version = "4", features = ["derive", "env"] }
+clap = { version = "3.2.23", features = ["derive", "env"] }
 anyhow = "1"
 serde = { version = "1", features = ["derive"] }
 serde_json = { version = "^1.0" }

@@ -675,7 +675,7 @@ pub fn planned_utxos<'a, AK: bdk_tmp_plan::CanDerive + Clone, P: ChainPosition>(
 ) -> impl Iterator<Item = (bdk_tmp_plan::Plan<AK>, FullTxOut<P>)> + 'a {
     tracker
         .full_utxos()
-        .filter_map(|((keychain, derivation_index), full_txout)| {
+        .filter_map(move |((keychain, derivation_index), full_txout)| {
             Some((
                 bdk_tmp_plan::plan_satisfaction(
                     &tracker