From e69fccb15fb17ba6eb86e24efad4b9bd96e3bf72 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E5=BF=97=E5=AE=87?= Date: Tue, 9 May 2023 21:49:33 +0800 Subject: [PATCH 01/17] [wallet_redesign] Update `Wallet` with redesigned structures --- crates/bdk/src/wallet/export.rs | 8 +- crates/bdk/src/wallet/mod.rs | 646 +++++++++++------- crates/bdk/src/wallet/tx_builder.rs | 6 +- crates/bdk/tests/common.rs | 1 + crates/bdk/tests/wallet.rs | 109 ++- crates/chain/src/chain_data.rs | 12 + example-crates/wallet_electrum/src/main.rs | 175 ++--- example-crates/wallet_esplora/src/main.rs | 163 ++--- .../wallet_esplora_async/src/main.rs | 165 ++--- 9 files changed, 757 insertions(+), 528 deletions(-) diff --git a/crates/bdk/src/wallet/export.rs b/crates/bdk/src/wallet/export.rs index 90563844..36b75ea2 100644 --- a/crates/bdk/src/wallet/export.rs +++ b/crates/bdk/src/wallet/export.rs @@ -56,7 +56,6 @@ use core::str::FromStr; use alloc::string::{String, ToString}; -use bdk_chain::sparse_chain::ChainPosition; use serde::{Deserialize, Serialize}; use miniscript::descriptor::{ShInner, WshInner}; @@ -130,8 +129,10 @@ impl FullyNodedExport { wallet .transactions() .next() - .and_then(|(pos, _)| pos.height().into()) - .unwrap_or(0) + .map_or(0, |canonical_tx| match canonical_tx.observed_as { + bdk_chain::ObservedAs::Confirmed(a) => a.confirmation_height, + bdk_chain::ObservedAs::Unconfirmed(_) => 0, + }) } else { 0 }; @@ -246,6 +247,7 @@ mod test { height: 5000, time: 0, }, + None, ) .unwrap(); wallet diff --git a/crates/bdk/src/wallet/mod.rs b/crates/bdk/src/wallet/mod.rs index 95197545..c894d6ba 100644 --- a/crates/bdk/src/wallet/mod.rs +++ b/crates/bdk/src/wallet/mod.rs @@ -21,9 +21,12 @@ use alloc::{ }; pub use bdk_chain::keychain::Balance; use bdk_chain::{ - chain_graph, - keychain::{persist, KeychainChangeSet, KeychainScan, KeychainTracker}, - sparse_chain, BlockId, ConfirmationTime, + indexed_tx_graph::{IndexedAdditions, IndexedTxGraph}, + keychain::{DerivationAdditions, 
KeychainTxOutIndex}, + local_chain::{self, LocalChain, UpdateNotConnectedError}, + tx_graph::{CanonicalTx, TxGraph}, + Anchor, Append, BlockId, ConfirmationTime, ConfirmationTimeAnchor, FullTxOut, ObservedAs, + Persist, PersistBackend, }; use bitcoin::consensus::encode::serialize; use bitcoin::secp256k1::Secp256k1; @@ -83,19 +86,83 @@ const COINBASE_MATURITY: u32 = 100; pub struct Wallet { signers: Arc, change_signers: Arc, - keychain_tracker: KeychainTracker, - persist: persist::Persist, + chain: LocalChain, + indexed_graph: IndexedTxGraph>, + persist: Persist, // [TODO] Use a different `ChangeSet` network: Network, secp: SecpCtx, } /// The update to a [`Wallet`] used in [`Wallet::apply_update`]. This is usually returned from blockchain data sources. /// The type parameter `T` indicates the kind of transaction contained in the update. It's usually a [`bitcoin::Transaction`]. -pub type Update = KeychainScan; -/// Error indicating that something was wrong with an [`Update`]. -pub type UpdateError = chain_graph::UpdateError; +#[derive(Debug, Default, PartialEq)] +pub struct Update { + keychain: BTreeMap, + graph: TxGraph, + chain: LocalChain, +} + /// The changeset produced internally by applying an update -pub(crate) type ChangeSet = KeychainChangeSet; +#[derive(Debug, PartialEq, serde::Deserialize, serde::Serialize)] +#[serde(bound( + deserialize = "A: Ord + serde::Deserialize<'de>, K: Ord + serde::Deserialize<'de>", + serialize = "A: Ord + serde::Serialize, K: Ord + serde::Serialize" +))] +// #[cfg_attr(predicate, attr)] +pub struct ChangeSet { + pub chain_changeset: local_chain::ChangeSet, + pub indexed_additions: IndexedAdditions>, +} + +impl Default for ChangeSet { + fn default() -> Self { + Self { + chain_changeset: Default::default(), + indexed_additions: Default::default(), + } + } +} + +impl Append for ChangeSet { + fn append(&mut self, other: Self) { + Append::append(&mut self.chain_changeset, other.chain_changeset); + Append::append(&mut 
self.indexed_additions, other.indexed_additions); + } + + fn is_empty(&self) -> bool { + self.chain_changeset.is_empty() && self.indexed_additions.is_empty() + } +} + +impl From>> for ChangeSet { + fn from(indexed_additions: IndexedAdditions>) -> Self { + Self { + indexed_additions, + ..Default::default() + } + } +} + +impl From> for ChangeSet { + fn from(index_additions: DerivationAdditions) -> Self { + Self { + indexed_additions: IndexedAdditions { + index_additions, + ..Default::default() + }, + ..Default::default() + } + } +} + +impl From for ChangeSet { + fn from(chain_changeset: local_chain::ChangeSet) -> Self { + Self { + chain_changeset, + ..Default::default() + } + } +} /// The address index selection strategy to use to derived an address from the wallet's external /// descriptor. See [`Wallet::get_address`]. If you're unsure which one to use use `WalletIndex::New`. @@ -182,6 +249,14 @@ where } } +#[derive(Debug)] +pub enum InsertTxError { + ConfirmationHeightCannotBeGreaterThanTip { + tip_height: Option, + tx_height: u32, + }, +} + #[cfg(feature = "std")] impl std::error::Error for NewError

{} @@ -195,15 +270,17 @@ impl Wallet { network: Network, ) -> Result> where - D: persist::PersistBackend, + D: PersistBackend, { let secp = Secp256k1::new(); + let mut chain = LocalChain::default(); + let mut indexed_graph = + IndexedTxGraph::>::default(); - let mut keychain_tracker = KeychainTracker::default(); let (descriptor, keymap) = into_wallet_descriptor_checked(descriptor, &secp, network) .map_err(NewError::Descriptor)?; - keychain_tracker - .txout_index + indexed_graph + .index .add_keychain(KeychainKind::External, descriptor.clone()); let signers = Arc::new(SignersContainer::build(keymap, &descriptor, &secp)); let change_signers = match change_descriptor { @@ -218,8 +295,8 @@ impl Wallet { &secp, )); - keychain_tracker - .txout_index + indexed_graph + .index .add_keychain(KeychainKind::Internal, change_descriptor); change_signers @@ -227,18 +304,20 @@ impl Wallet { None => Arc::new(SignersContainer::new()), }; - db.load_into_keychain_tracker(&mut keychain_tracker) - .map_err(NewError::Persist)?; + let changeset = db.load_from_persistence().map_err(NewError::Persist)?; + chain.apply_changeset(changeset.chain_changeset); + indexed_graph.apply_additions(changeset.indexed_additions); - let persist = persist::Persist::new(db); + let persist = Persist::new(db); Ok(Wallet { signers, change_signers, network, + chain, + indexed_graph, persist, secp, - keychain_tracker, }) } @@ -249,7 +328,7 @@ impl Wallet { /// Iterator over all keychains in this wallet pub fn keychains(&self) -> &BTreeMap { - self.keychain_tracker.txout_index.keychains() + self.indexed_graph.index.keychains() } /// Return a derived address using the external descriptor, see [`AddressIndex`] for @@ -257,7 +336,7 @@ impl Wallet { /// (i.e. does not end with /*) then the same address will always be returned for any [`AddressIndex`]. 
pub fn get_address(&mut self, address_index: AddressIndex) -> AddressInfo where - D: persist::PersistBackend, + D: PersistBackend, { self._get_address(address_index, KeychainKind::External) } @@ -271,17 +350,17 @@ impl Wallet { /// be returned for any [`AddressIndex`]. pub fn get_internal_address(&mut self, address_index: AddressIndex) -> AddressInfo where - D: persist::PersistBackend, + D: PersistBackend, { self._get_address(address_index, KeychainKind::Internal) } fn _get_address(&mut self, address_index: AddressIndex, keychain: KeychainKind) -> AddressInfo where - D: persist::PersistBackend, + D: PersistBackend, { let keychain = self.map_keychain(keychain); - let txout_index = &mut self.keychain_tracker.txout_index; + let txout_index = &mut self.indexed_graph.index; let (index, spk) = match address_index { AddressIndex::New => { let ((index, spk), changeset) = txout_index.reveal_next_spk(&keychain); @@ -320,42 +399,36 @@ impl Wallet { /// Return whether or not a `script` is part of this wallet (either internal or external) pub fn is_mine(&self, script: &Script) -> bool { - self.keychain_tracker - .txout_index - .index_of_spk(script) - .is_some() + self.indexed_graph.index.index_of_spk(script).is_some() } /// Finds how the wallet derived the script pubkey `spk`. /// /// Will only return `Some(_)` if the wallet has given out the spk. 
pub fn derivation_of_spk(&self, spk: &Script) -> Option<(KeychainKind, u32)> { - self.keychain_tracker.txout_index.index_of_spk(spk).copied() + self.indexed_graph.index.index_of_spk(spk).copied() } /// Return the list of unspent outputs of this wallet - pub fn list_unspent(&self) -> Vec { - self.keychain_tracker - .full_utxos() - .map(|(&(keychain, derivation_index), utxo)| LocalUtxo { - outpoint: utxo.outpoint, - txout: utxo.txout, - keychain, - is_spent: false, - derivation_index, - confirmation_time: utxo.chain_position, - }) - .collect() + pub fn list_unspent(&self) -> impl Iterator + '_ { + self.indexed_graph + .graph() + .filter_chain_unspents( + &self.chain, + self.chain.tip().unwrap_or_default(), + self.indexed_graph.index.outpoints().iter().cloned(), + ) + .map(|((k, i), full_txo)| new_local_utxo(k, i, full_txo)) } /// Get all the checkpoints the wallet is currently storing indexed by height. pub fn checkpoints(&self) -> &BTreeMap { - self.keychain_tracker.chain().checkpoints() + self.chain.blocks() } /// Returns the latest checkpoint. pub fn latest_checkpoint(&self) -> Option { - self.keychain_tracker.chain().latest_checkpoint() + self.chain.tip() } /// Returns a iterators of all the script pubkeys for the `Internal` and External` variants in `KeychainKind`. @@ -369,7 +442,7 @@ impl Wallet { pub fn spks_of_all_keychains( &self, ) -> BTreeMap + Clone> { - self.keychain_tracker.txout_index.spks_of_all_keychains() + self.indexed_graph.index.spks_of_all_keychains() } /// Gets an iterator over all the script pubkeys in a single keychain. @@ -381,30 +454,22 @@ impl Wallet { &self, keychain: KeychainKind, ) -> impl Iterator + Clone { - self.keychain_tracker - .txout_index - .spks_of_keychain(&keychain) + self.indexed_graph.index.spks_of_keychain(&keychain) } /// Returns the utxo owned by this wallet corresponding to `outpoint` if it exists in the /// wallet's database. 
pub fn get_utxo(&self, op: OutPoint) -> Option { - self.keychain_tracker - .full_utxos() - .find_map(|(&(keychain, derivation_index), txo)| { - if op == txo.outpoint { - Some(LocalUtxo { - outpoint: txo.outpoint, - txout: txo.txout, - keychain, - is_spent: txo.spent_by.is_none(), - derivation_index, - confirmation_time: txo.chain_position, - }) - } else { - None - } - }) + let (&spk_i, _) = self.indexed_graph.index.txout(op)?; + self.indexed_graph + .graph() + .filter_chain_unspents( + &self.chain, + self.chain.tip().unwrap_or_default(), + core::iter::once((spk_i, op)), + ) + .map(|((k, i), full_txo)| new_local_utxo(k, i, full_txo)) + .next() } /// Return a single transactions made and received by the wallet @@ -412,54 +477,22 @@ impl Wallet { /// Optionally fill the [`TransactionDetails::transaction`] field with the raw transaction if /// `include_raw` is `true`. pub fn get_tx(&self, txid: Txid, include_raw: bool) -> Option { - let (&confirmation_time, tx) = self.keychain_tracker.chain_graph().get_tx_in_chain(txid)?; - let graph = self.keychain_tracker.graph(); - let txout_index = &self.keychain_tracker.txout_index; + let graph = self.indexed_graph.graph(); - let received = tx - .output - .iter() - .map(|txout| { - if txout_index.index_of_spk(&txout.script_pubkey).is_some() { - txout.value - } else { - 0 - } - }) - .sum(); + let canonical_tx = CanonicalTx { + observed_as: graph.get_chain_position( + &self.chain, + self.chain.tip().unwrap_or_default(), + txid, + )?, + node: graph.get_tx_node(txid)?, + }; - let sent = tx - .input - .iter() - .map(|txin| { - if let Some((_, txout)) = txout_index.txout(txin.previous_output) { - txout.value - } else { - 0 - } - }) - .sum(); - - let inputs = tx - .input - .iter() - .map(|txin| { - graph - .get_txout(txin.previous_output) - .map(|txout| txout.value) - }) - .sum::>(); - let outputs = tx.output.iter().map(|txout| txout.value).sum(); - let fee = inputs.map(|inputs| inputs.saturating_sub(outputs)); - - 
Some(TransactionDetails { - transaction: if include_raw { Some(tx.clone()) } else { None }, - txid, - received, - sent, - fee, - confirmation_time, - }) + Some(new_tx_details( + &self.indexed_graph, + canonical_tx, + include_raw, + )) } /// Add a new checkpoint to the wallet's internal view of the chain. @@ -472,10 +505,15 @@ impl Wallet { pub fn insert_checkpoint( &mut self, block_id: BlockId, - ) -> Result { - let changeset = self.keychain_tracker.insert_checkpoint(block_id)?; - let changed = changeset.is_empty(); - self.persist.stage(changeset); + ) -> Result + where + D: PersistBackend, + { + let changeset = self.chain.insert_block(block_id)?; + let changed = !changeset.is_empty(); + if changed { + self.persist.stage(changeset.into()); + } Ok(changed) } @@ -497,41 +535,80 @@ impl Wallet { &mut self, tx: Transaction, position: ConfirmationTime, - ) -> Result> { - let changeset = self.keychain_tracker.insert_tx(tx, position)?; - let changed = changeset.is_empty(); - self.persist.stage(changeset); + seen_at: Option, + ) -> Result + where + D: PersistBackend, + { + let tip = self.chain.tip(); + + if let ConfirmationTime::Confirmed { height, .. 
} = position { + let tip_height = tip.map(|b| b.height); + if Some(height) > tip_height { + return Err(InsertTxError::ConfirmationHeightCannotBeGreaterThanTip { + tip_height, + tx_height: height, + }); + } + } + + let anchor = match position { + ConfirmationTime::Confirmed { height, time } => { + let tip_height = tip.map(|b| b.height); + if Some(height) > tip_height { + return Err(InsertTxError::ConfirmationHeightCannotBeGreaterThanTip { + tip_height, + tx_height: height, + }); + } + Some(ConfirmationTimeAnchor { + anchor_block: tip.expect("already checked if tip_height > height"), + confirmation_height: height, + confirmation_time: time, + }) + } + ConfirmationTime::Unconfirmed => None, + }; + + let changeset: ChangeSet = self.indexed_graph.insert_tx(&tx, anchor, seen_at).into(); + let changed = !changeset.is_empty(); + if changed { + self.persist.stage(changeset); + } Ok(changed) } #[deprecated(note = "use Wallet::transactions instead")] /// Deprecated. use `Wallet::transactions` instead. - pub fn list_transactions(&self, include_raw: bool) -> Vec { - self.keychain_tracker - .chain() - .txids() - .map(|&(_, txid)| self.get_tx(txid, include_raw).expect("must exist")) - .collect() + pub fn list_transactions( + &self, + include_raw: bool, + ) -> impl Iterator + '_ { + self.indexed_graph + .graph() + .list_chain_txs(&self.chain, self.chain.tip().unwrap_or_default()) + .map(move |canonical_tx| new_tx_details(&self.indexed_graph, canonical_tx, include_raw)) } /// Iterate over the transactions in the wallet in order of ascending confirmation time with /// unconfirmed transactions last. 
pub fn transactions( &self, - ) -> impl DoubleEndedIterator + '_ { - self.keychain_tracker - .chain_graph() - .transactions_in_chain() - .map(|(pos, tx)| (*pos, tx)) + ) -> impl Iterator> + '_ { + self.indexed_graph + .graph() + .list_chain_txs(&self.chain, self.chain.tip().unwrap_or_default()) } /// Return the balance, separated into available, trusted-pending, untrusted-pending and immature /// values. pub fn get_balance(&self) -> Balance { - self.keychain_tracker.balance(|keychain| match keychain { - KeychainKind::External => false, - KeychainKind::Internal => true, - }) + self.indexed_graph.graph().balance( + &self.chain, + self.chain.tip().unwrap_or_default(), + self.indexed_graph.index.outpoints().iter().cloned(), + |&(k, _), _| k == KeychainKind::Internal, + ) } /// Add an external signer @@ -613,17 +690,17 @@ impl Wallet { params: TxParams, ) -> Result<(psbt::PartiallySignedTransaction, TransactionDetails), Error> where - D: persist::PersistBackend, + D: PersistBackend, { let external_descriptor = self - .keychain_tracker - .txout_index + .indexed_graph + .index .keychains() .get(&KeychainKind::External) .expect("must exist"); let internal_descriptor = self - .keychain_tracker - .txout_index + .indexed_graph + .index .keychains() .get(&KeychainKind::Internal); @@ -700,9 +777,8 @@ impl Wallet { let current_height = match params.current_height { // If they didn't tell us the current height, we assume it's the latest sync height. 
None => self - .keychain_tracker - .chain() - .latest_checkpoint() + .chain + .tip() .and_then(|cp| cp.height.into()) .map(|height| LockTime::from_height(height).expect("Invalid height")), h => h, @@ -874,14 +950,10 @@ impl Wallet { Some(ref drain_recipient) => drain_recipient.clone(), None => { let change_keychain = self.map_keychain(KeychainKind::Internal); - let ((index, spk), changeset) = self - .keychain_tracker - .txout_index - .next_unused_spk(&change_keychain); + let ((index, spk), changeset) = + self.indexed_graph.index.next_unused_spk(&change_keychain); let spk = spk.clone(); - self.keychain_tracker - .txout_index - .mark_used(&change_keychain, index); + self.indexed_graph.index.mark_used(&change_keychain, index); self.persist.stage(changeset.into()); self.persist.commit().expect("TODO"); spk @@ -1019,16 +1091,21 @@ impl Wallet { &mut self, txid: Txid, ) -> Result, Error> { - let graph = self.keychain_tracker.graph(); - let txout_index = &self.keychain_tracker.txout_index; - let tx_and_height = self.keychain_tracker.chain_graph().get_tx_in_chain(txid); - let mut tx = match tx_and_height { - None => return Err(Error::TransactionNotFound), - Some((ConfirmationTime::Confirmed { .. }, _tx)) => { - return Err(Error::TransactionConfirmed) - } - Some((_, tx)) => tx.clone(), - }; + let graph = self.indexed_graph.graph(); + let txout_index = &self.indexed_graph.index; + let chain_tip = self.chain.tip().unwrap_or_default(); + + let mut tx = graph + .get_tx(txid) + .ok_or(Error::TransactionNotFound)? 
+ .clone(); + + let pos = graph + .get_chain_position(&self.chain, chain_tip, txid) + .ok_or(Error::TransactionNotFound)?; + if let ObservedAs::Confirmed(_) = pos { + return Err(Error::TransactionConfirmed); + } if !tx .input @@ -1051,13 +1128,17 @@ impl Wallet { let original_utxos = original_txin .iter() .map(|txin| -> Result<_, Error> { - let (&confirmation_time, prev_tx) = self - .keychain_tracker - .chain_graph() - .get_tx_in_chain(txin.previous_output.txid) + let prev_tx = graph + .get_tx(txin.previous_output.txid) .ok_or(Error::UnknownUtxo)?; let txout = &prev_tx.output[txin.previous_output.vout as usize]; + let confirmation_time: ConfirmationTime = graph + .get_chain_position(&self.chain, chain_tip, txin.previous_output.txid) + .ok_or(Error::UnknownUtxo)? + .cloned() + .into(); + let weighted_utxo = match txout_index.index_of_spk(&txout.script_pubkey) { Some(&(keychain, derivation_index)) => { let satisfaction_weight = self @@ -1231,7 +1312,7 @@ impl Wallet { /// /// This can be used to build a watch-only version of a wallet pub fn public_descriptor(&self, keychain: KeychainKind) -> Option<&ExtendedDescriptor> { - self.keychain_tracker.txout_index.keychains().get(&keychain) + self.indexed_graph.index.keychains().get(&keychain) } /// Finalize a PSBT, i.e., for each input determine if sufficient data is available to pass @@ -1247,6 +1328,8 @@ impl Wallet { psbt: &mut psbt::PartiallySignedTransaction, sign_options: SignOptions, ) -> Result { + let chain_tip = self.chain.tip().unwrap_or_default(); + let tx = &psbt.unsigned_tx; let mut finished = true; @@ -1259,19 +1342,16 @@ impl Wallet { continue; } let confirmation_height = self - .keychain_tracker - .chain() - .tx_position(input.previous_output.txid) - .map(|conftime| match conftime { - &ConfirmationTime::Confirmed { height, .. 
} => height, - ConfirmationTime::Unconfirmed => u32::MAX, + .indexed_graph + .graph() + .get_chain_position(&self.chain, chain_tip, input.previous_output.txid) + .map(|observed_as| match observed_as { + ObservedAs::Confirmed(a) => a.confirmation_height, + ObservedAs::Unconfirmed(_) => u32::MAX, }); - let last_sync_height = self - .keychain_tracker - .chain() - .latest_checkpoint() - .map(|block_id| block_id.height); - let current_height = sign_options.assume_height.or(last_sync_height); + let current_height = sign_options + .assume_height + .or(self.chain.tip().map(|b| b.height)); debug!( "Input #{} - {}, using `confirmation_height` = {:?}, `current_height` = {:?}", @@ -1288,8 +1368,8 @@ impl Wallet { .get_utxo_for(n) .and_then(|txout| self.get_descriptor_for_txout(&txout)) .or_else(|| { - self.keychain_tracker - .txout_index + self.indexed_graph + .index .keychains() .iter() .find_map(|(_, desc)| { @@ -1347,14 +1427,12 @@ impl Wallet { /// The derivation index of this wallet. It will return `None` if it has not derived any addresses. /// Otherwise, it will return the index of the highest address it has derived. pub fn derivation_index(&self, keychain: KeychainKind) -> Option { - self.keychain_tracker - .txout_index - .last_revealed_index(&keychain) + self.indexed_graph.index.last_revealed_index(&keychain) } /// The index of the next address that you would get if you were to ask the wallet for a new address pub fn next_derivation_index(&self, keychain: KeychainKind) -> u32 { - self.keychain_tracker.txout_index.next_index(&keychain).0 + self.indexed_graph.index.next_index(&keychain).0 } /// Informs the wallet that you no longer intend to broadcast a tx that was built from it. @@ -1362,7 +1440,7 @@ impl Wallet { /// This frees up the change address used when creating the tx for use in future transactions. 
// TODO: Make this free up reserved utxos when that's implemented pub fn cancel_tx(&mut self, tx: &Transaction) { - let txout_index = &mut self.keychain_tracker.txout_index; + let txout_index = &mut self.indexed_graph.index; for txout in &tx.output { if let Some(&(keychain, index)) = txout_index.index_of_spk(&txout.script_pubkey) { // NOTE: unmark_used will **not** make something unused if it has actually been used @@ -1384,8 +1462,8 @@ impl Wallet { fn get_descriptor_for_txout(&self, txout: &TxOut) -> Option { let &(keychain, child) = self - .keychain_tracker - .txout_index + .indexed_graph + .index .index_of_spk(&txout.script_pubkey)?; let descriptor = self.get_descriptor_for_keychain(keychain); Some(descriptor.at_derivation_index(child)) @@ -1393,7 +1471,6 @@ impl Wallet { fn get_available_utxos(&self) -> Vec<(LocalUtxo, usize)> { self.list_unspent() - .into_iter() .map(|utxo| { let keychain = utxo.keychain; ( @@ -1419,6 +1496,7 @@ impl Wallet { must_only_use_confirmed_tx: bool, current_height: Option, ) -> (Vec, Vec) { + let chain_tip = self.chain.tip().unwrap_or_default(); // must_spend <- manually selected utxos // may_spend <- all other available utxos let mut may_spend = self.get_available_utxos(); @@ -1438,39 +1516,43 @@ impl Wallet { let satisfies_confirmed = may_spend .iter() - .map(|u| { + .map(|u| -> bool { let txid = u.0.outpoint.txid; - let tx = self.keychain_tracker.chain_graph().get_tx_in_chain(txid); - match tx { - // We don't have the tx in the db for some reason, - // so we can't know for sure if it's mature or not. - // We prefer not to spend it. 
- None => false, - Some((confirmation_time, tx)) => { - // Whether the UTXO is mature and, if needed, confirmed - let mut spendable = true; - if must_only_use_confirmed_tx && !confirmation_time.is_confirmed() { - return false; - } - if tx.is_coin_base() { - debug_assert!( - confirmation_time.is_confirmed(), - "coinbase must always be confirmed" - ); - if let Some(current_height) = current_height { - match confirmation_time { - ConfirmationTime::Confirmed { height, .. } => { - // https://github.com/bitcoin/bitcoin/blob/c5e67be03bb06a5d7885c55db1f016fbf2333fe3/src/validation.cpp#L373-L375 - spendable &= (current_height.saturating_sub(*height)) - >= COINBASE_MATURITY; - } - ConfirmationTime::Unconfirmed => spendable = false, - } + let tx = match self.indexed_graph.graph().get_tx(txid) { + Some(tx) => tx, + None => return false, + }; + let confirmation_time: ConfirmationTime = match self + .indexed_graph + .graph() + .get_chain_position(&self.chain, chain_tip, txid) + { + Some(observed_as) => observed_as.cloned().into(), + None => return false, + }; + + // Whether the UTXO is mature and, if needed, confirmed + let mut spendable = true; + if must_only_use_confirmed_tx && !confirmation_time.is_confirmed() { + return false; + } + if tx.is_coin_base() { + debug_assert!( + confirmation_time.is_confirmed(), + "coinbase must always be confirmed" + ); + if let Some(current_height) = current_height { + match confirmation_time { + ConfirmationTime::Confirmed { height, .. 
} => { + // https://github.com/bitcoin/bitcoin/blob/c5e67be03bb06a5d7885c55db1f016fbf2333fe3/src/validation.cpp#L373-L375 + spendable &= + (current_height.saturating_sub(height)) >= COINBASE_MATURITY; } + ConfirmationTime::Unconfirmed => spendable = false, } - spendable } } + spendable }) .collect::>(); @@ -1590,8 +1672,8 @@ impl Wallet { // Try to find the prev_script in our db to figure out if this is internal or external, // and the derivation index let &(keychain, child) = self - .keychain_tracker - .txout_index + .indexed_graph + .index .index_of_spk(&utxo.txout.script_pubkey) .ok_or(Error::UnknownUtxo)?; @@ -1608,7 +1690,7 @@ impl Wallet { .map_err(MiniscriptPsbtError::Conversion)?; let prev_output = utxo.outpoint; - if let Some(prev_tx) = self.keychain_tracker.graph().get_tx(prev_output.txid) { + if let Some(prev_tx) = self.indexed_graph.graph().get_tx(prev_output.txid) { if desc.is_witness() || desc.is_taproot() { psbt_input.witness_utxo = Some(prev_tx.output[prev_output.vout as usize].clone()); } @@ -1641,10 +1723,8 @@ impl Wallet { // Try to figure out the keychain and derivation for every input and output for (is_input, index, out) in utxos.into_iter() { - if let Some(&(keychain, child)) = self - .keychain_tracker - .txout_index - .index_of_spk(&out.script_pubkey) + if let Some(&(keychain, child)) = + self.indexed_graph.index.index_of_spk(&out.script_pubkey) { debug!( "Found descriptor for input #{} {:?}/{}", @@ -1685,52 +1765,62 @@ impl Wallet { /// transactions related to your wallet into it. 
/// /// [`commit`]: Self::commit - pub fn apply_update(&mut self, update: Update) -> Result<(), UpdateError> + pub fn apply_update(&mut self, update: Update) -> Result where - D: persist::PersistBackend, + D: PersistBackend, { - let changeset = self.keychain_tracker.apply_update(update)?; - self.persist.stage(changeset); - Ok(()) + let mut changeset: ChangeSet = self.chain.apply_update(update.chain)?.into(); + let (_, derivation_additions) = self + .indexed_graph + .index + .reveal_to_target_multi(&update.keychain); + changeset.append(derivation_additions.into()); + changeset.append(self.indexed_graph.apply_update(update.graph).into()); + + let changed = !changeset.is_empty(); + if changed { + self.persist.stage(changeset); + } + Ok(changed) } /// Commits all curently [`staged`] changed to the persistence backend returning and error when this fails. /// /// [`staged`]: Self::staged - pub fn commit(&mut self) -> Result<(), D::WriteError> + pub fn commit(&mut self) -> Result where - D: persist::PersistBackend, + D: PersistBackend, { - self.persist.commit() + self.persist.commit().map(|c| c.is_some()) } /// Returns the changes that will be staged with the next call to [`commit`]. /// /// [`commit`]: Self::commit - pub fn staged(&self) -> &ChangeSet { + pub fn staged(&self) -> &ChangeSet + where + D: PersistBackend, + { self.persist.staged() } /// Get a reference to the inner [`TxGraph`](bdk_chain::tx_graph::TxGraph). - pub fn as_graph(&self) -> &bdk_chain::tx_graph::TxGraph { - self.keychain_tracker.graph() + pub fn as_graph(&self) -> &TxGraph { + self.indexed_graph.graph() } - /// Get a reference to the inner [`ChainGraph`](bdk_chain::chain_graph::ChainGraph). 
- pub fn as_chain_graph(&self) -> &bdk_chain::chain_graph::ChainGraph { - self.keychain_tracker.chain_graph() + pub fn as_index(&self) -> &KeychainTxOutIndex { + &self.indexed_graph.index + } + + pub fn as_chain(&self) -> &LocalChain { + &self.chain } } -impl AsRef for Wallet { - fn as_ref(&self) -> &bdk_chain::tx_graph::TxGraph { - self.keychain_tracker.graph() - } -} - -impl AsRef> for Wallet { - fn as_ref(&self) -> &bdk_chain::chain_graph::ChainGraph { - self.keychain_tracker.chain_graph() +impl AsRef> for Wallet { + fn as_ref(&self) -> &bdk_chain::tx_graph::TxGraph { + self.indexed_graph.graph() } } @@ -1765,6 +1855,76 @@ where Ok(wallet_name) } +fn new_local_utxo( + keychain: KeychainKind, + derivation_index: u32, + full_txo: FullTxOut>, +) -> LocalUtxo { + LocalUtxo { + outpoint: full_txo.outpoint, + txout: full_txo.txout, + is_spent: full_txo.spent_by.is_some(), + confirmation_time: full_txo.chain_position.into(), + keychain, + derivation_index, + } +} + +fn new_tx_details( + indexed_graph: &IndexedTxGraph>, + canonical_tx: CanonicalTx<'_, Transaction, ConfirmationTimeAnchor>, + include_raw: bool, +) -> TransactionDetails { + let graph = indexed_graph.graph(); + let index = &indexed_graph.index; + let tx = canonical_tx.node.tx; + + let received = tx + .output + .iter() + .map(|txout| { + if index.index_of_spk(&txout.script_pubkey).is_some() { + txout.value + } else { + 0 + } + }) + .sum(); + + let sent = tx + .input + .iter() + .map(|txin| { + if let Some((_, txout)) = index.txout(txin.previous_output) { + txout.value + } else { + 0 + } + }) + .sum(); + + let inputs = tx + .input + .iter() + .map(|txin| { + graph + .get_txout(txin.previous_output) + .map(|txout| txout.value) + }) + .sum::>(); + let outputs = tx.output.iter().map(|txout| txout.value).sum(); + let fee = inputs.map(|inputs| inputs.saturating_sub(outputs)); + + TransactionDetails { + transaction: if include_raw { Some(tx.clone()) } else { None }, + txid: canonical_tx.node.txid, + received, + 
sent, + fee, + confirmation_time: canonical_tx.observed_as.cloned().into(), + } +} + #[macro_export] #[doc(hidden)] /// Macro for getting a wallet for use in a doctest @@ -1796,7 +1956,7 @@ macro_rules! doctest_wallet { let _ = wallet.insert_tx(tx.clone(), ConfirmationTime::Confirmed { height: 500, time: 50_000 - }); + }, None); wallet }} diff --git a/crates/bdk/src/wallet/tx_builder.rs b/crates/bdk/src/wallet/tx_builder.rs index 6e812e59..5d106260 100644 --- a/crates/bdk/src/wallet/tx_builder.rs +++ b/crates/bdk/src/wallet/tx_builder.rs @@ -39,7 +39,7 @@ use crate::collections::BTreeMap; use crate::collections::HashSet; use alloc::{boxed::Box, rc::Rc, string::String, vec::Vec}; -use bdk_chain::ConfirmationTime; +use bdk_chain::PersistBackend; use core::cell::RefCell; use core::marker::PhantomData; @@ -47,7 +47,7 @@ use bitcoin::util::psbt::{self, PartiallySignedTransaction as Psbt}; use bitcoin::{LockTime, OutPoint, Script, Sequence, Transaction}; use super::coin_selection::{CoinSelectionAlgorithm, DefaultCoinSelectionAlgorithm}; -use super::persist; +use super::ChangeSet; use crate::{ types::{FeeRate, KeychainKind, LocalUtxo, WeightedUtxo}, TransactionDetails, @@ -529,7 +529,7 @@ impl<'a, D, Cs: CoinSelectionAlgorithm, Ctx: TxBuilderContext> TxBuilder<'a, D, /// [`BIP174`]: https://github.com/bitcoin/bips/blob/master/bip-0174.mediawiki pub fn finish(self) -> Result<(Psbt, TransactionDetails), Error> where - D: persist::PersistBackend, + D: PersistBackend, { self.wallet .borrow_mut() diff --git a/crates/bdk/tests/common.rs b/crates/bdk/tests/common.rs index de946703..cbf74f24 100644 --- a/crates/bdk/tests/common.rs +++ b/crates/bdk/tests/common.rs @@ -35,6 +35,7 @@ pub fn get_funded_wallet_with_change( height: 1_000, time: 100, }, + None, ) .unwrap(); diff --git a/crates/bdk/tests/wallet.rs b/crates/bdk/tests/wallet.rs index 94c5ad1e..6291df1d 100644 --- a/crates/bdk/tests/wallet.rs +++ b/crates/bdk/tests/wallet.rs @@ -44,6 +44,7 @@ fn receive_output(wallet: &mut 
Wallet, value: u64, height: TxHeight) -> OutPoint }, TxHeight::Unconfirmed => ConfirmationTime::Unconfirmed, }, + None, ) .unwrap(); @@ -811,7 +812,7 @@ fn test_create_tx_add_utxo() { lock_time: PackedLockTime(0), }; wallet - .insert_tx(small_output_tx.clone(), ConfirmationTime::Unconfirmed) + .insert_tx(small_output_tx.clone(), ConfirmationTime::Unconfirmed, None) .unwrap(); let addr = Address::from_str("2N1Ffz3WaNzbeLFBb51xyFMHYSEUXcbiSoX").unwrap(); @@ -848,7 +849,7 @@ fn test_create_tx_manually_selected_insufficient() { }; wallet - .insert_tx(small_output_tx.clone(), ConfirmationTime::Unconfirmed) + .insert_tx(small_output_tx.clone(), ConfirmationTime::Unconfirmed, None) .unwrap(); let addr = Address::from_str("2N1Ffz3WaNzbeLFBb51xyFMHYSEUXcbiSoX").unwrap(); @@ -889,7 +890,9 @@ fn test_create_tx_policy_path_no_csv() { script_pubkey: wallet.get_address(New).script_pubkey(), }], }; - wallet.insert_tx(tx, ConfirmationTime::Unconfirmed).unwrap(); + wallet + .insert_tx(tx, ConfirmationTime::Unconfirmed, None) + .unwrap(); let external_policy = wallet.policies(KeychainKind::External).unwrap().unwrap(); let root_id = external_policy.id; @@ -972,7 +975,7 @@ fn test_add_foreign_utxo() { get_funded_wallet("wpkh(cVbZ8ovhye9AoAHFsqobCf7LxbXDAECy9Kb8TZdfsDYMZGBUyCnm)"); let addr = Address::from_str("2N1Ffz3WaNzbeLFBb51xyFMHYSEUXcbiSoX").unwrap(); - let utxo = wallet2.list_unspent().remove(0); + let utxo = wallet2.list_unspent().next().expect("must take!"); let foreign_utxo_satisfaction = wallet2 .get_descriptor_for_keychain(KeychainKind::External) .max_satisfaction_weight() @@ -1036,7 +1039,7 @@ fn test_add_foreign_utxo() { #[should_panic(expected = "Generic(\"Foreign utxo missing witness_utxo or non_witness_utxo\")")] fn test_add_foreign_utxo_invalid_psbt_input() { let (mut wallet, _) = get_funded_wallet(get_test_wpkh()); - let outpoint = wallet.list_unspent()[0].outpoint; + let outpoint = wallet.list_unspent().next().expect("must exist").outpoint; let 
foreign_utxo_satisfaction = wallet .get_descriptor_for_keychain(KeychainKind::External) .max_satisfaction_weight() @@ -1054,7 +1057,7 @@ fn test_add_foreign_utxo_where_outpoint_doesnt_match_psbt_input() { let (wallet2, txid2) = get_funded_wallet("wpkh(cVbZ8ovhye9AoAHFsqobCf7LxbXDAECy9Kb8TZdfsDYMZGBUyCnm)"); - let utxo2 = wallet2.list_unspent().remove(0); + let utxo2 = wallet2.list_unspent().next().unwrap(); let tx1 = wallet1.get_tx(txid1, true).unwrap().transaction.unwrap(); let tx2 = wallet2.get_tx(txid2, true).unwrap().transaction.unwrap(); @@ -1098,7 +1101,7 @@ fn test_add_foreign_utxo_only_witness_utxo() { let (wallet2, txid2) = get_funded_wallet("wpkh(cVbZ8ovhye9AoAHFsqobCf7LxbXDAECy9Kb8TZdfsDYMZGBUyCnm)"); let addr = Address::from_str("2N1Ffz3WaNzbeLFBb51xyFMHYSEUXcbiSoX").unwrap(); - let utxo2 = wallet2.list_unspent().remove(0); + let utxo2 = wallet2.list_unspent().next().unwrap(); let satisfaction_weight = wallet2 .get_descriptor_for_keychain(KeychainKind::External) @@ -1214,7 +1217,9 @@ fn test_bump_fee_irreplaceable_tx() { let tx = psbt.extract_tx(); let txid = tx.txid(); - wallet.insert_tx(tx, ConfirmationTime::Unconfirmed).unwrap(); + wallet + .insert_tx(tx, ConfirmationTime::Unconfirmed, None) + .unwrap(); wallet.build_fee_bump(txid).unwrap().finish().unwrap(); } @@ -1237,6 +1242,7 @@ fn test_bump_fee_confirmed_tx() { height: 42, time: 42_000, }, + None, ) .unwrap(); @@ -1257,7 +1263,9 @@ fn test_bump_fee_low_fee_rate() { let tx = psbt.extract_tx(); let txid = tx.txid(); - wallet.insert_tx(tx, ConfirmationTime::Unconfirmed).unwrap(); + wallet + .insert_tx(tx, ConfirmationTime::Unconfirmed, None) + .unwrap(); let mut builder = wallet.build_fee_bump(txid).unwrap(); builder.fee_rate(FeeRate::from_sat_per_vb(1.0)); @@ -1278,7 +1286,9 @@ fn test_bump_fee_low_abs() { let tx = psbt.extract_tx(); let txid = tx.txid(); - wallet.insert_tx(tx, ConfirmationTime::Unconfirmed).unwrap(); + wallet + .insert_tx(tx, ConfirmationTime::Unconfirmed, None) + .unwrap(); let 
mut builder = wallet.build_fee_bump(txid).unwrap(); builder.fee_absolute(10); @@ -1298,7 +1308,9 @@ fn test_bump_fee_zero_abs() { let tx = psbt.extract_tx(); let txid = tx.txid(); - wallet.insert_tx(tx, ConfirmationTime::Unconfirmed).unwrap(); + wallet + .insert_tx(tx, ConfirmationTime::Unconfirmed, None) + .unwrap(); let mut builder = wallet.build_fee_bump(txid).unwrap(); builder.fee_absolute(0); @@ -1316,7 +1328,9 @@ fn test_bump_fee_reduce_change() { let (psbt, original_details) = builder.finish().unwrap(); let tx = psbt.extract_tx(); let txid = tx.txid(); - wallet.insert_tx(tx, ConfirmationTime::Unconfirmed).unwrap(); + wallet + .insert_tx(tx, ConfirmationTime::Unconfirmed, None) + .unwrap(); let mut builder = wallet.build_fee_bump(txid).unwrap(); builder.fee_rate(FeeRate::from_sat_per_vb(2.5)).enable_rbf(); @@ -1401,7 +1415,9 @@ fn test_bump_fee_reduce_single_recipient() { let (psbt, original_details) = builder.finish().unwrap(); let tx = psbt.extract_tx(); let txid = tx.txid(); - wallet.insert_tx(tx, ConfirmationTime::Unconfirmed).unwrap(); + wallet + .insert_tx(tx, ConfirmationTime::Unconfirmed, None) + .unwrap(); let mut builder = wallet.build_fee_bump(txid).unwrap(); builder @@ -1432,7 +1448,9 @@ fn test_bump_fee_absolute_reduce_single_recipient() { let (psbt, original_details) = builder.finish().unwrap(); let tx = psbt.extract_tx(); let txid = tx.txid(); - wallet.insert_tx(tx, ConfirmationTime::Unconfirmed).unwrap(); + wallet + .insert_tx(tx, ConfirmationTime::Unconfirmed, None) + .unwrap(); let mut builder = wallet.build_fee_bump(txid).unwrap(); builder @@ -1471,6 +1489,7 @@ fn test_bump_fee_drain_wallet() { height: wallet.latest_checkpoint().unwrap().height, time: 42_000, }, + None, ) .unwrap(); let addr = Address::from_str("2N1Ffz3WaNzbeLFBb51xyFMHYSEUXcbiSoX").unwrap(); @@ -1488,7 +1507,9 @@ fn test_bump_fee_drain_wallet() { let (psbt, original_details) = builder.finish().unwrap(); let tx = psbt.extract_tx(); let txid = tx.txid(); - 
wallet.insert_tx(tx, ConfirmationTime::Unconfirmed).unwrap(); + wallet + .insert_tx(tx, ConfirmationTime::Unconfirmed, None) + .unwrap(); assert_eq!(original_details.sent, 25_000); // for the new feerate, it should be enough to reduce the output, but since we specify @@ -1523,7 +1544,17 @@ fn test_bump_fee_remove_output_manually_selected_only() { }], }; wallet - .insert_tx(init_tx.clone(), wallet.transactions().last().unwrap().0) + .insert_tx( + init_tx.clone(), + wallet + .transactions() + .last() + .unwrap() + .observed_as + .cloned() + .into(), + None, + ) .unwrap(); let outpoint = OutPoint { txid: init_tx.txid(), @@ -1540,7 +1571,9 @@ fn test_bump_fee_remove_output_manually_selected_only() { let (psbt, original_details) = builder.finish().unwrap(); let tx = psbt.extract_tx(); let txid = tx.txid(); - wallet.insert_tx(tx, ConfirmationTime::Unconfirmed).unwrap(); + wallet + .insert_tx(tx, ConfirmationTime::Unconfirmed, None) + .unwrap(); assert_eq!(original_details.sent, 25_000); let mut builder = wallet.build_fee_bump(txid).unwrap(); @@ -1562,9 +1595,14 @@ fn test_bump_fee_add_input() { value: 25_000, }], }; - wallet - .insert_tx(init_tx, wallet.transactions().last().unwrap().0) - .unwrap(); + let pos = wallet + .transactions() + .last() + .unwrap() + .observed_as + .cloned() + .into(); + wallet.insert_tx(init_tx, pos, None).unwrap(); let addr = Address::from_str("2N1Ffz3WaNzbeLFBb51xyFMHYSEUXcbiSoX").unwrap(); let mut builder = wallet.build_tx().coin_selection(LargestFirstCoinSelection); @@ -1574,7 +1612,9 @@ fn test_bump_fee_add_input() { let (psbt, original_details) = builder.finish().unwrap(); let tx = psbt.extract_tx(); let txid = tx.txid(); - wallet.insert_tx(tx, ConfirmationTime::Unconfirmed).unwrap(); + wallet + .insert_tx(tx, ConfirmationTime::Unconfirmed, None) + .unwrap(); let mut builder = wallet.build_fee_bump(txid).unwrap(); builder.fee_rate(FeeRate::from_sat_per_vb(50.0)); @@ -1618,7 +1658,9 @@ fn test_bump_fee_absolute_add_input() { let (psbt, 
original_details) = builder.finish().unwrap(); let tx = psbt.extract_tx(); let txid = tx.txid(); - wallet.insert_tx(tx, ConfirmationTime::Unconfirmed).unwrap(); + wallet + .insert_tx(tx, ConfirmationTime::Unconfirmed, None) + .unwrap(); let mut builder = wallet.build_fee_bump(txid).unwrap(); builder.fee_absolute(6_000); @@ -1668,7 +1710,9 @@ fn test_bump_fee_no_change_add_input_and_change() { let tx = psbt.extract_tx(); let txid = tx.txid(); - wallet.insert_tx(tx, ConfirmationTime::Unconfirmed).unwrap(); + wallet + .insert_tx(tx, ConfirmationTime::Unconfirmed, None) + .unwrap(); // now bump the fees without using `allow_shrinking`. the wallet should add an // extra input and a change output, and leave the original output untouched @@ -1724,7 +1768,9 @@ fn test_bump_fee_add_input_change_dust() { assert_eq!(tx.input.len(), 1); assert_eq!(tx.output.len(), 2); let txid = tx.txid(); - wallet.insert_tx(tx, ConfirmationTime::Unconfirmed).unwrap(); + wallet + .insert_tx(tx, ConfirmationTime::Unconfirmed, None) + .unwrap(); let mut builder = wallet.build_fee_bump(txid).unwrap(); // We set a fee high enough that during rbf we are forced to add @@ -1784,7 +1830,7 @@ fn test_bump_fee_force_add_input() { txin.witness.push([0x00; P2WPKH_FAKE_WITNESS_SIZE]); // fake signature } wallet - .insert_tx(tx.clone(), ConfirmationTime::Unconfirmed) + .insert_tx(tx.clone(), ConfirmationTime::Unconfirmed, None) .unwrap(); // the new fee_rate is low enough that just reducing the change would be fine, but we force // the addition of an extra input with `add_utxo()` @@ -1839,7 +1885,7 @@ fn test_bump_fee_absolute_force_add_input() { txin.witness.push([0x00; P2WPKH_FAKE_WITNESS_SIZE]); // fake signature } wallet - .insert_tx(tx.clone(), ConfirmationTime::Unconfirmed) + .insert_tx(tx.clone(), ConfirmationTime::Unconfirmed, None) .unwrap(); // the new fee_rate is low enough that just reducing the change would be fine, but we force @@ -1899,7 +1945,9 @@ fn test_bump_fee_unconfirmed_inputs_only() { 
for txin in &mut tx.input { txin.witness.push([0x00; P2WPKH_FAKE_WITNESS_SIZE]); // fake signature } - wallet.insert_tx(tx, ConfirmationTime::Unconfirmed).unwrap(); + wallet + .insert_tx(tx, ConfirmationTime::Unconfirmed, None) + .unwrap(); let mut builder = wallet.build_fee_bump(txid).unwrap(); builder.fee_rate(FeeRate::from_sat_per_vb(25.0)); builder.finish().unwrap(); @@ -1928,7 +1976,9 @@ fn test_bump_fee_unconfirmed_input() { for txin in &mut tx.input { txin.witness.push([0x00; P2WPKH_FAKE_WITNESS_SIZE]); // fake signature } - wallet.insert_tx(tx, ConfirmationTime::Unconfirmed).unwrap(); + wallet + .insert_tx(tx, ConfirmationTime::Unconfirmed, None) + .unwrap(); let mut builder = wallet.build_fee_bump(txid).unwrap(); builder @@ -2660,7 +2710,7 @@ fn test_taproot_foreign_utxo() { let (wallet2, _) = get_funded_wallet(get_test_tr_single_sig()); let addr = Address::from_str("2N1Ffz3WaNzbeLFBb51xyFMHYSEUXcbiSoX").unwrap(); - let utxo = wallet2.list_unspent().remove(0); + let utxo = wallet2.list_unspent().next().unwrap(); let psbt_input = wallet2.get_psbt_input(utxo.clone(), None, false).unwrap(); let foreign_utxo_satisfaction = wallet2 .get_descriptor_for_keychain(KeychainKind::External) @@ -3022,6 +3072,7 @@ fn test_spend_coinbase() { height: confirmation_height, time: 30_000, }, + None, ) .unwrap(); diff --git a/crates/chain/src/chain_data.rs b/crates/chain/src/chain_data.rs index 3252febc..9baa643e 100644 --- a/crates/chain/src/chain_data.rs +++ b/crates/chain/src/chain_data.rs @@ -137,6 +137,18 @@ impl ConfirmationTime { } } +impl From> for ConfirmationTime { + fn from(observed_as: ObservedAs) -> Self { + match observed_as { + ObservedAs::Confirmed(a) => Self::Confirmed { + height: a.confirmation_height, + time: a.confirmation_time, + }, + ObservedAs::Unconfirmed(_) => Self::Unconfirmed, + } + } +} + /// A reference to a block in the canonical chain. 
#[derive(Debug, Clone, PartialEq, Eq, Copy, PartialOrd, Ord, core::hash::Hash)] #[cfg_attr( diff --git a/example-crates/wallet_electrum/src/main.rs b/example-crates/wallet_electrum/src/main.rs index 5145d593..7eb88264 100644 --- a/example-crates/wallet_electrum/src/main.rs +++ b/example-crates/wallet_electrum/src/main.rs @@ -1,104 +1,105 @@ -use std::{io::Write, str::FromStr}; +// use std::{io::Write, str::FromStr}; -use bdk::{ - bitcoin::{Address, Network}, - SignOptions, Wallet, -}; -use bdk_electrum::{ - electrum_client::{self, ElectrumApi}, - ElectrumExt, -}; -use bdk_file_store::KeychainStore; +// use bdk::{ +// bitcoin::{Address, Network}, +// SignOptions, Wallet, +// }; +// use bdk_electrum::{ +// electrum_client::{self, ElectrumApi}, +// ElectrumExt, +// }; +// use bdk_file_store::KeychainStore; -const SEND_AMOUNT: u64 = 5000; -const STOP_GAP: usize = 50; -const BATCH_SIZE: usize = 5; +// const SEND_AMOUNT: u64 = 5000; +// const STOP_GAP: usize = 50; +// const BATCH_SIZE: usize = 5; fn main() -> Result<(), Box> { - println!("Hello, world!"); + todo!("update this example!"); + // println!("Hello, world!"); - let db_path = std::env::temp_dir().join("bdk-electrum-example"); - let db = KeychainStore::new_from_path(db_path)?; - let external_descriptor = "wpkh(tprv8ZgxMBicQKsPdy6LMhUtFHAgpocR8GC6QmwMSFpZs7h6Eziw3SpThFfczTDh5rW2krkqffa11UpX3XkeTTB2FvzZKWXqPY54Y6Rq4AQ5R8L/84'/0'/0'/0/*)"; - let internal_descriptor = "wpkh(tprv8ZgxMBicQKsPdy6LMhUtFHAgpocR8GC6QmwMSFpZs7h6Eziw3SpThFfczTDh5rW2krkqffa11UpX3XkeTTB2FvzZKWXqPY54Y6Rq4AQ5R8L/84'/0'/0'/1/*)"; + // let db_path = std::env::temp_dir().join("bdk-electrum-example"); + // let db = KeychainStore::new_from_path(db_path)?; + // let external_descriptor = "wpkh(tprv8ZgxMBicQKsPdy6LMhUtFHAgpocR8GC6QmwMSFpZs7h6Eziw3SpThFfczTDh5rW2krkqffa11UpX3XkeTTB2FvzZKWXqPY54Y6Rq4AQ5R8L/84'/0'/0'/0/*)"; + // let internal_descriptor = 
"wpkh(tprv8ZgxMBicQKsPdy6LMhUtFHAgpocR8GC6QmwMSFpZs7h6Eziw3SpThFfczTDh5rW2krkqffa11UpX3XkeTTB2FvzZKWXqPY54Y6Rq4AQ5R8L/84'/0'/0'/1/*)"; - let mut wallet = Wallet::new( - external_descriptor, - Some(internal_descriptor), - db, - Network::Testnet, - )?; + // let mut wallet = Wallet::new( + // external_descriptor, + // Some(internal_descriptor), + // db, + // Network::Testnet, + // )?; - let address = wallet.get_address(bdk::wallet::AddressIndex::New); - println!("Generated Address: {}", address); + // let address = wallet.get_address(bdk::wallet::AddressIndex::New); + // println!("Generated Address: {}", address); - let balance = wallet.get_balance(); - println!("Wallet balance before syncing: {} sats", balance.total()); + // let balance = wallet.get_balance(); + // println!("Wallet balance before syncing: {} sats", balance.total()); - print!("Syncing..."); - // Scanning the chain... - let electrum_url = "ssl://electrum.blockstream.info:60002"; - let client = electrum_client::Client::new(electrum_url)?; - let local_chain = wallet.checkpoints(); - let spks = wallet - .spks_of_all_keychains() - .into_iter() - .map(|(k, spks)| { - let mut first = true; - ( - k, - spks.inspect(move |(spk_i, _)| { - if first { - first = false; - print!("\nScanning keychain [{:?}]:", k); - } - print!(" {}", spk_i); - let _ = std::io::stdout().flush(); - }), - ) - }) - .collect(); - let electrum_update = client - .scan( - local_chain, - spks, - core::iter::empty(), - core::iter::empty(), - STOP_GAP, - BATCH_SIZE, - )? - .into_confirmation_time_update(&client)?; - println!(); - let new_txs = client.batch_transaction_get(electrum_update.missing_full_txs(&wallet))?; - let update = electrum_update.into_keychain_scan(new_txs, &wallet)?; - wallet.apply_update(update)?; - wallet.commit()?; + // print!("Syncing..."); + // // Scanning the chain... 
+ // let electrum_url = "ssl://electrum.blockstream.info:60002"; + // let client = electrum_client::Client::new(electrum_url)?; + // let local_chain = wallet.checkpoints(); + // let spks = wallet + // .spks_of_all_keychains() + // .into_iter() + // .map(|(k, spks)| { + // let mut first = true; + // ( + // k, + // spks.inspect(move |(spk_i, _)| { + // if first { + // first = false; + // print!("\nScanning keychain [{:?}]:", k); + // } + // print!(" {}", spk_i); + // let _ = std::io::stdout().flush(); + // }), + // ) + // }) + // .collect(); + // let electrum_update = client + // .scan( + // local_chain, + // spks, + // core::iter::empty(), + // core::iter::empty(), + // STOP_GAP, + // BATCH_SIZE, + // )? + // .into_confirmation_time_update(&client)?; + // println!(); + // let new_txs = client.batch_transaction_get(electrum_update.missing_full_txs(&wallet))?; + // let update = electrum_update.into_keychain_scan(new_txs, &wallet)?; + // wallet.apply_update(update)?; + // wallet.commit()?; - let balance = wallet.get_balance(); - println!("Wallet balance after syncing: {} sats", balance.total()); + // let balance = wallet.get_balance(); + // println!("Wallet balance after syncing: {} sats", balance.total()); - if balance.total() < SEND_AMOUNT { - println!( - "Please send at least {} sats to the receiving address", - SEND_AMOUNT - ); - std::process::exit(0); - } + // if balance.total() < SEND_AMOUNT { + // println!( + // "Please send at least {} sats to the receiving address", + // SEND_AMOUNT + // ); + // std::process::exit(0); + // } - let faucet_address = Address::from_str("mkHS9ne12qx9pS9VojpwU5xtRd4T7X7ZUt")?; + // let faucet_address = Address::from_str("mkHS9ne12qx9pS9VojpwU5xtRd4T7X7ZUt")?; - let mut tx_builder = wallet.build_tx(); - tx_builder - .add_recipient(faucet_address.script_pubkey(), SEND_AMOUNT) - .enable_rbf(); + // let mut tx_builder = wallet.build_tx(); + // tx_builder + // .add_recipient(faucet_address.script_pubkey(), SEND_AMOUNT) + // 
.enable_rbf(); - let (mut psbt, _) = tx_builder.finish()?; - let finalized = wallet.sign(&mut psbt, SignOptions::default())?; - assert!(finalized); + // let (mut psbt, _) = tx_builder.finish()?; + // let finalized = wallet.sign(&mut psbt, SignOptions::default())?; + // assert!(finalized); - let tx = psbt.extract_tx(); - client.transaction_broadcast(&tx)?; - println!("Tx broadcasted! Txid: {}", tx.txid()); + // let tx = psbt.extract_tx(); + // client.transaction_broadcast(&tx)?; + // println!("Tx broadcasted! Txid: {}", tx.txid()); - Ok(()) + // Ok(()) } diff --git a/example-crates/wallet_esplora/src/main.rs b/example-crates/wallet_esplora/src/main.rs index d8eda32a..d9d07c7a 100644 --- a/example-crates/wallet_esplora/src/main.rs +++ b/example-crates/wallet_esplora/src/main.rs @@ -1,96 +1,97 @@ -use bdk::{ - bitcoin::{Address, Network}, - wallet::AddressIndex, - SignOptions, Wallet, -}; -use bdk_esplora::esplora_client; -use bdk_esplora::EsploraExt; -use bdk_file_store::KeychainStore; -use std::{io::Write, str::FromStr}; +// use bdk::{ +// bitcoin::{Address, Network}, +// wallet::AddressIndex, +// SignOptions, Wallet, +// }; +// use bdk_esplora::esplora_client; +// use bdk_esplora::EsploraExt; +// use bdk_file_store::KeychainStore; +// use std::{io::Write, str::FromStr}; -const SEND_AMOUNT: u64 = 5000; -const STOP_GAP: usize = 50; -const PARALLEL_REQUESTS: usize = 5; +// const SEND_AMOUNT: u64 = 5000; +// const STOP_GAP: usize = 50; +// const PARALLEL_REQUESTS: usize = 5; fn main() -> Result<(), Box> { - let db_path = std::env::temp_dir().join("bdk-esplora-example"); - let db = KeychainStore::new_from_path(db_path)?; - let external_descriptor = "wpkh(tprv8ZgxMBicQKsPdy6LMhUtFHAgpocR8GC6QmwMSFpZs7h6Eziw3SpThFfczTDh5rW2krkqffa11UpX3XkeTTB2FvzZKWXqPY54Y6Rq4AQ5R8L/84'/0'/0'/0/*)"; - let internal_descriptor = "wpkh(tprv8ZgxMBicQKsPdy6LMhUtFHAgpocR8GC6QmwMSFpZs7h6Eziw3SpThFfczTDh5rW2krkqffa11UpX3XkeTTB2FvzZKWXqPY54Y6Rq4AQ5R8L/84'/0'/0'/1/*)"; + todo!("update this 
exampe!"); + // let db_path = std::env::temp_dir().join("bdk-esplora-example"); + // let db = KeychainStore::new_from_path(db_path)?; + // let external_descriptor = "wpkh(tprv8ZgxMBicQKsPdy6LMhUtFHAgpocR8GC6QmwMSFpZs7h6Eziw3SpThFfczTDh5rW2krkqffa11UpX3XkeTTB2FvzZKWXqPY54Y6Rq4AQ5R8L/84'/0'/0'/0/*)"; + // let internal_descriptor = "wpkh(tprv8ZgxMBicQKsPdy6LMhUtFHAgpocR8GC6QmwMSFpZs7h6Eziw3SpThFfczTDh5rW2krkqffa11UpX3XkeTTB2FvzZKWXqPY54Y6Rq4AQ5R8L/84'/0'/0'/1/*)"; - let mut wallet = Wallet::new( - external_descriptor, - Some(internal_descriptor), - db, - Network::Testnet, - )?; + // let mut wallet = Wallet::new( + // external_descriptor, + // Some(internal_descriptor), + // db, + // Network::Testnet, + // )?; - let address = wallet.get_address(AddressIndex::New); - println!("Generated Address: {}", address); + // let address = wallet.get_address(AddressIndex::New); + // println!("Generated Address: {}", address); - let balance = wallet.get_balance(); - println!("Wallet balance before syncing: {} sats", balance.total()); + // let balance = wallet.get_balance(); + // println!("Wallet balance before syncing: {} sats", balance.total()); - print!("Syncing..."); - // Scanning the chain... - let esplora_url = "https://mempool.space/testnet/api"; - let client = esplora_client::Builder::new(esplora_url).build_blocking()?; - let checkpoints = wallet.checkpoints(); - let spks = wallet - .spks_of_all_keychains() - .into_iter() - .map(|(k, spks)| { - let mut first = true; - ( - k, - spks.inspect(move |(spk_i, _)| { - if first { - first = false; - print!("\nScanning keychain [{:?}]:", k); - } - print!(" {}", spk_i); - let _ = std::io::stdout().flush(); - }), - ) - }) - .collect(); - let update = client.scan( - checkpoints, - spks, - core::iter::empty(), - core::iter::empty(), - STOP_GAP, - PARALLEL_REQUESTS, - )?; - println!(); - wallet.apply_update(update)?; - wallet.commit()?; + // print!("Syncing..."); + // // Scanning the chain... 
+ // let esplora_url = "https://mempool.space/testnet/api"; + // let client = esplora_client::Builder::new(esplora_url).build_blocking()?; + // let checkpoints = wallet.checkpoints(); + // let spks = wallet + // .spks_of_all_keychains() + // .into_iter() + // .map(|(k, spks)| { + // let mut first = true; + // ( + // k, + // spks.inspect(move |(spk_i, _)| { + // if first { + // first = false; + // print!("\nScanning keychain [{:?}]:", k); + // } + // print!(" {}", spk_i); + // let _ = std::io::stdout().flush(); + // }), + // ) + // }) + // .collect(); + // let update = client.scan( + // checkpoints, + // spks, + // core::iter::empty(), + // core::iter::empty(), + // STOP_GAP, + // PARALLEL_REQUESTS, + // )?; + // println!(); + // wallet.apply_update(update)?; + // wallet.commit()?; - let balance = wallet.get_balance(); - println!("Wallet balance after syncing: {} sats", balance.total()); + // let balance = wallet.get_balance(); + // println!("Wallet balance after syncing: {} sats", balance.total()); - if balance.total() < SEND_AMOUNT { - println!( - "Please send at least {} sats to the receiving address", - SEND_AMOUNT - ); - std::process::exit(0); - } + // if balance.total() < SEND_AMOUNT { + // println!( + // "Please send at least {} sats to the receiving address", + // SEND_AMOUNT + // ); + // std::process::exit(0); + // } - let faucet_address = Address::from_str("mkHS9ne12qx9pS9VojpwU5xtRd4T7X7ZUt")?; + // let faucet_address = Address::from_str("mkHS9ne12qx9pS9VojpwU5xtRd4T7X7ZUt")?; - let mut tx_builder = wallet.build_tx(); - tx_builder - .add_recipient(faucet_address.script_pubkey(), SEND_AMOUNT) - .enable_rbf(); + // let mut tx_builder = wallet.build_tx(); + // tx_builder + // .add_recipient(faucet_address.script_pubkey(), SEND_AMOUNT) + // .enable_rbf(); - let (mut psbt, _) = tx_builder.finish()?; - let finalized = wallet.sign(&mut psbt, SignOptions::default())?; - assert!(finalized); + // let (mut psbt, _) = tx_builder.finish()?; + // let finalized = 
wallet.sign(&mut psbt, SignOptions::default())?; + // assert!(finalized); - let tx = psbt.extract_tx(); - client.broadcast(&tx)?; - println!("Tx broadcasted! Txid: {}", tx.txid()); + // let tx = psbt.extract_tx(); + // client.broadcast(&tx)?; + // println!("Tx broadcasted! Txid: {}", tx.txid()); - Ok(()) + // Ok(()) } diff --git a/example-crates/wallet_esplora_async/src/main.rs b/example-crates/wallet_esplora_async/src/main.rs index b78b09df..66cdf689 100644 --- a/example-crates/wallet_esplora_async/src/main.rs +++ b/example-crates/wallet_esplora_async/src/main.rs @@ -1,99 +1,100 @@ -use std::{io::Write, str::FromStr}; +// use std::{io::Write, str::FromStr}; -use bdk::{ - bitcoin::{Address, Network}, - wallet::AddressIndex, - SignOptions, Wallet, -}; -use bdk_esplora::{esplora_client, EsploraAsyncExt}; -use bdk_file_store::KeychainStore; +// use bdk::{ +// bitcoin::{Address, Network}, +// wallet::AddressIndex, +// SignOptions, Wallet, +// }; +// use bdk_esplora::{esplora_client, EsploraAsyncExt}; +// use bdk_file_store::KeychainStore; -const SEND_AMOUNT: u64 = 5000; -const STOP_GAP: usize = 50; -const PARALLEL_REQUESTS: usize = 5; +// const SEND_AMOUNT: u64 = 5000; +// const STOP_GAP: usize = 50; +// const PARALLEL_REQUESTS: usize = 5; #[tokio::main] async fn main() -> Result<(), Box> { - let db_path = std::env::temp_dir().join("bdk-esplora-example"); - let db = KeychainStore::new_from_path(db_path)?; - let external_descriptor = "wpkh(tprv8ZgxMBicQKsPdy6LMhUtFHAgpocR8GC6QmwMSFpZs7h6Eziw3SpThFfczTDh5rW2krkqffa11UpX3XkeTTB2FvzZKWXqPY54Y6Rq4AQ5R8L/84'/0'/0'/0/*)"; - let internal_descriptor = "wpkh(tprv8ZgxMBicQKsPdy6LMhUtFHAgpocR8GC6QmwMSFpZs7h6Eziw3SpThFfczTDh5rW2krkqffa11UpX3XkeTTB2FvzZKWXqPY54Y6Rq4AQ5R8L/84'/0'/0'/1/*)"; + todo!("update this example!"); + // let db_path = std::env::temp_dir().join("bdk-esplora-example"); + // let db = KeychainStore::new_from_path(db_path)?; + // let external_descriptor = 
"wpkh(tprv8ZgxMBicQKsPdy6LMhUtFHAgpocR8GC6QmwMSFpZs7h6Eziw3SpThFfczTDh5rW2krkqffa11UpX3XkeTTB2FvzZKWXqPY54Y6Rq4AQ5R8L/84'/0'/0'/0/*)"; + // let internal_descriptor = "wpkh(tprv8ZgxMBicQKsPdy6LMhUtFHAgpocR8GC6QmwMSFpZs7h6Eziw3SpThFfczTDh5rW2krkqffa11UpX3XkeTTB2FvzZKWXqPY54Y6Rq4AQ5R8L/84'/0'/0'/1/*)"; - let mut wallet = Wallet::new( - external_descriptor, - Some(internal_descriptor), - db, - Network::Testnet, - )?; + // let mut wallet = Wallet::new( + // external_descriptor, + // Some(internal_descriptor), + // db, + // Network::Testnet, + // )?; - let address = wallet.get_address(AddressIndex::New); - println!("Generated Address: {}", address); + // let address = wallet.get_address(AddressIndex::New); + // println!("Generated Address: {}", address); - let balance = wallet.get_balance(); - println!("Wallet balance before syncing: {} sats", balance.total()); + // let balance = wallet.get_balance(); + // println!("Wallet balance before syncing: {} sats", balance.total()); - print!("Syncing..."); - // Scanning the blockchain - let esplora_url = "https://mempool.space/testnet/api"; - let client = esplora_client::Builder::new(esplora_url).build_async()?; - let checkpoints = wallet.checkpoints(); - let spks = wallet - .spks_of_all_keychains() - .into_iter() - .map(|(k, spks)| { - let mut first = true; - ( - k, - spks.inspect(move |(spk_i, _)| { - if first { - first = false; - print!("\nScanning keychain [{:?}]:", k); - } - print!(" {}", spk_i); - let _ = std::io::stdout().flush(); - }), - ) - }) - .collect(); - let update = client - .scan( - checkpoints, - spks, - std::iter::empty(), - std::iter::empty(), - STOP_GAP, - PARALLEL_REQUESTS, - ) - .await?; - println!(); - wallet.apply_update(update)?; - wallet.commit()?; + // print!("Syncing..."); + // // Scanning the blockchain + // let esplora_url = "https://mempool.space/testnet/api"; + // let client = esplora_client::Builder::new(esplora_url).build_async()?; + // let checkpoints = wallet.checkpoints(); + // let spks = 
wallet + // .spks_of_all_keychains() + // .into_iter() + // .map(|(k, spks)| { + // let mut first = true; + // ( + // k, + // spks.inspect(move |(spk_i, _)| { + // if first { + // first = false; + // print!("\nScanning keychain [{:?}]:", k); + // } + // print!(" {}", spk_i); + // let _ = std::io::stdout().flush(); + // }), + // ) + // }) + // .collect(); + // let update = client + // .scan( + // checkpoints, + // spks, + // std::iter::empty(), + // std::iter::empty(), + // STOP_GAP, + // PARALLEL_REQUESTS, + // ) + // .await?; + // println!(); + // wallet.apply_update(update)?; + // wallet.commit()?; - let balance = wallet.get_balance(); - println!("Wallet balance after syncing: {} sats", balance.total()); + // let balance = wallet.get_balance(); + // println!("Wallet balance after syncing: {} sats", balance.total()); - if balance.total() < SEND_AMOUNT { - println!( - "Please send at least {} sats to the receiving address", - SEND_AMOUNT - ); - std::process::exit(0); - } + // if balance.total() < SEND_AMOUNT { + // println!( + // "Please send at least {} sats to the receiving address", + // SEND_AMOUNT + // ); + // std::process::exit(0); + // } - let faucet_address = Address::from_str("mkHS9ne12qx9pS9VojpwU5xtRd4T7X7ZUt")?; + // let faucet_address = Address::from_str("mkHS9ne12qx9pS9VojpwU5xtRd4T7X7ZUt")?; - let mut tx_builder = wallet.build_tx(); - tx_builder - .add_recipient(faucet_address.script_pubkey(), SEND_AMOUNT) - .enable_rbf(); + // let mut tx_builder = wallet.build_tx(); + // tx_builder + // .add_recipient(faucet_address.script_pubkey(), SEND_AMOUNT) + // .enable_rbf(); - let (mut psbt, _) = tx_builder.finish()?; - let finalized = wallet.sign(&mut psbt, SignOptions::default())?; - assert!(finalized); + // let (mut psbt, _) = tx_builder.finish()?; + // let finalized = wallet.sign(&mut psbt, SignOptions::default())?; + // assert!(finalized); - let tx = psbt.extract_tx(); - client.broadcast(&tx).await?; - println!("Tx broadcasted! 
Txid: {}", tx.txid()); + // let tx = psbt.extract_tx(); + // client.broadcast(&tx).await?; + // println!("Tx broadcasted! Txid: {}", tx.txid()); - Ok(()) + // Ok(()) } From aba88130d91b329d8637c450c84fd10af508bdac Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E5=BF=97=E5=AE=87?= Date: Thu, 11 May 2023 18:46:41 +0800 Subject: [PATCH 02/17] [wallet_redesign] Move the majority of `Update` to `bdk_chain` This is to make it easier for chain source crates to formulate updates. --- crates/bdk/src/wallet/mod.rs | 13 +++---------- crates/chain/src/keychain.rs | 23 +++++++++++++++++++++++ 2 files changed, 26 insertions(+), 10 deletions(-) diff --git a/crates/bdk/src/wallet/mod.rs b/crates/bdk/src/wallet/mod.rs index c894d6ba..2d2a70f9 100644 --- a/crates/bdk/src/wallet/mod.rs +++ b/crates/bdk/src/wallet/mod.rs @@ -22,7 +22,7 @@ use alloc::{ pub use bdk_chain::keychain::Balance; use bdk_chain::{ indexed_tx_graph::{IndexedAdditions, IndexedTxGraph}, - keychain::{DerivationAdditions, KeychainTxOutIndex}, + keychain::{DerivationAdditions, KeychainTxOutIndex, LocalUpdate}, local_chain::{self, LocalChain, UpdateNotConnectedError}, tx_graph::{CanonicalTx, TxGraph}, Anchor, Append, BlockId, ConfirmationTime, ConfirmationTimeAnchor, FullTxOut, ObservedAs, @@ -94,21 +94,14 @@ pub struct Wallet { } /// The update to a [`Wallet`] used in [`Wallet::apply_update`]. This is usually returned from blockchain data sources. -/// The type parameter `T` indicates the kind of transaction contained in the update. It's usually a [`bitcoin::Transaction`]. -#[derive(Debug, Default, PartialEq)] -pub struct Update { - keychain: BTreeMap, - graph: TxGraph, - chain: LocalChain, -} +pub type Update = LocalUpdate; -/// The changeset produced internally by applying an update +/// The changeset produced internally by applying an update. 
#[derive(Debug, PartialEq, serde::Deserialize, serde::Serialize)] #[serde(bound( deserialize = "A: Ord + serde::Deserialize<'de>, K: Ord + serde::Deserialize<'de>", serialize = "A: Ord + serde::Serialize, K: Ord + serde::Serialize" ))] -// #[cfg_attr(predicate, attr)] pub struct ChangeSet { pub chain_changeset: local_chain::ChangeSet, pub indexed_additions: IndexedAdditions>, diff --git a/crates/chain/src/keychain.rs b/crates/chain/src/keychain.rs index f4d398ab..0f108b2d 100644 --- a/crates/chain/src/keychain.rs +++ b/crates/chain/src/keychain.rs @@ -18,6 +18,7 @@ use crate::{ chain_graph::{self, ChainGraph}, collections::BTreeMap, + local_chain::LocalChain, sparse_chain::ChainPosition, tx_graph::TxGraph, Append, ForEachTxOut, @@ -102,6 +103,28 @@ impl AsRef> for DerivationAdditions { } } +/// A structure to update [`KeychainTxOutIndex`], [`TxGraph`] and [`LocalChain`] +/// atomically. +#[derive(Debug, Clone, PartialEq)] +pub struct LocalUpdate { + /// Last active derivation index per keychain (`K`). + pub keychain: BTreeMap, + /// Update for the [`TxGraph`]. + pub graph: TxGraph, + /// Update for the [`LocalChain`]. + pub chain: LocalChain, +} + +impl Default for LocalUpdate { + fn default() -> Self { + Self { + keychain: Default::default(), + graph: Default::default(), + chain: Default::default(), + } + } +} + #[derive(Clone, Debug, PartialEq)] /// An update that includes the last active indexes of each keychain. pub struct KeychainScan { From 7261669c097791ee2ff8c7da6754868732d02eb3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E5=BF=97=E5=AE=87?= Date: Thu, 11 May 2023 22:56:26 +0800 Subject: [PATCH 03/17] Add `last_seen` to the `ConfirmationTime::Unconfirmed` variant This allows us to skip adding an extra input to `Wallet::insert_tx`. Also remove redundant logic.
--- crates/bdk/src/wallet/coin_selection.rs | 20 ++++--- crates/bdk/src/wallet/export.rs | 1 - crates/bdk/src/wallet/mod.rs | 58 ++++++++------------- crates/bdk/src/wallet/tx_builder.rs | 2 +- crates/bdk/tests/common.rs | 1 - crates/bdk/tests/wallet.rs | 55 +++++++++---------- crates/chain/src/chain_data.rs | 10 ++-- crates/chain/tests/test_keychain_tracker.rs | 4 +- crates/electrum/src/lib.rs | 2 +- crates/esplora/src/lib.rs | 2 +- 10 files changed, 75 insertions(+), 80 deletions(-) diff --git a/crates/bdk/src/wallet/coin_selection.rs b/crates/bdk/src/wallet/coin_selection.rs index 373dbdc3..e7927cab 100644 --- a/crates/bdk/src/wallet/coin_selection.rs +++ b/crates/bdk/src/wallet/coin_selection.rs @@ -722,9 +722,13 @@ mod test { fn get_test_utxos() -> Vec { vec![ - utxo(100_000, 0, ConfirmationTime::Unconfirmed), - utxo(FEE_AMOUNT - 40, 1, ConfirmationTime::Unconfirmed), - utxo(200_000, 2, ConfirmationTime::Unconfirmed), + utxo(100_000, 0, ConfirmationTime::Unconfirmed { last_seen: 0 }), + utxo( + FEE_AMOUNT - 40, + 1, + ConfirmationTime::Unconfirmed { last_seen: 0 }, + ), + utxo(200_000, 2, ConfirmationTime::Unconfirmed { last_seen: 0 }), ] } @@ -780,7 +784,7 @@ mod test { time: rng.next_u64(), } } else { - ConfirmationTime::Unconfirmed + ConfirmationTime::Unconfirmed { last_seen: 0 } }, }), }); @@ -803,7 +807,7 @@ mod test { keychain: KeychainKind::External, is_spent: false, derivation_index: 42, - confirmation_time: ConfirmationTime::Unconfirmed, + confirmation_time: ConfirmationTime::Unconfirmed { last_seen: 0 }, }), }; vec![utxo; utxos_number] @@ -1091,7 +1095,11 @@ mod test { let required = vec![utxos[0].clone()]; let mut optional = utxos[1..].to_vec(); - optional.push(utxo(500_000, 3, ConfirmationTime::Unconfirmed)); + optional.push(utxo( + 500_000, + 3, + ConfirmationTime::Unconfirmed { last_seen: 0 }, + )); // Defensive assertions, for sanity and in case someone changes the test utxos vector. 
let amount: u64 = required.iter().map(|u| u.utxo.txout().value).sum(); diff --git a/crates/bdk/src/wallet/export.rs b/crates/bdk/src/wallet/export.rs index 36b75ea2..fe87fedd 100644 --- a/crates/bdk/src/wallet/export.rs +++ b/crates/bdk/src/wallet/export.rs @@ -247,7 +247,6 @@ mod test { height: 5000, time: 0, }, - None, ) .unwrap(); wallet diff --git a/crates/bdk/src/wallet/mod.rs b/crates/bdk/src/wallet/mod.rs index 2d2a70f9..5fabc6d1 100644 --- a/crates/bdk/src/wallet/mod.rs +++ b/crates/bdk/src/wallet/mod.rs @@ -504,9 +504,7 @@ impl Wallet { { let changeset = self.chain.insert_block(block_id)?; let changed = !changeset.is_empty(); - if changed { - self.persist.stage(changeset.into()); - } + self.persist.stage(changeset.into()); Ok(changed) } @@ -528,24 +526,13 @@ impl Wallet { &mut self, tx: Transaction, position: ConfirmationTime, - seen_at: Option, ) -> Result where D: PersistBackend, { let tip = self.chain.tip(); - if let ConfirmationTime::Confirmed { height, .. } = position { - let tip_height = tip.map(|b| b.height); - if Some(height) > tip_height { - return Err(InsertTxError::ConfirmationHeightCannotBeGreaterThanTip { - tip_height, - tx_height: height, - }); - } - } - - let anchor = match position { + let (anchor, last_seen) = match position { ConfirmationTime::Confirmed { height, time } => { let tip_height = tip.map(|b| b.height); if Some(height) > tip_height { @@ -554,20 +541,21 @@ impl Wallet { tx_height: height, }); } - Some(ConfirmationTimeAnchor { - anchor_block: tip.expect("already checked if tip_height > height"), - confirmation_height: height, - confirmation_time: time, - }) + ( + Some(ConfirmationTimeAnchor { + anchor_block: tip.expect("already checked if tip_height > height"), + confirmation_height: height, + confirmation_time: time, + }), + None, + ) } - ConfirmationTime::Unconfirmed => None, + ConfirmationTime::Unconfirmed { last_seen } => (None, Some(last_seen)), }; - let changeset: ChangeSet = self.indexed_graph.insert_tx(&tx, anchor, 
seen_at).into(); + let changeset: ChangeSet = self.indexed_graph.insert_tx(&tx, anchor, last_seen).into(); let changed = !changeset.is_empty(); - if changed { - self.persist.stage(changeset); - } + self.persist.stage(changeset); Ok(changed) } @@ -1032,7 +1020,7 @@ impl Wallet { let transaction_details = TransactionDetails { transaction: None, txid, - confirmation_time: ConfirmationTime::Unconfirmed, + confirmation_time: ConfirmationTime::Unconfirmed { last_seen: 0 }, received, sent, fee: Some(fee_amount), @@ -1541,7 +1529,7 @@ impl Wallet { spendable &= (current_height.saturating_sub(height)) >= COINBASE_MATURITY; } - ConfirmationTime::Unconfirmed => spendable = false, + ConfirmationTime::Unconfirmed { .. } => spendable = false, } } } @@ -1771,9 +1759,7 @@ impl Wallet { changeset.append(self.indexed_graph.apply_update(update.graph).into()); let changed = !changeset.is_empty(); - if changed { - self.persist.stage(changeset); - } + self.persist.stage(changeset); Ok(changed) } @@ -1797,16 +1783,18 @@ impl Wallet { self.persist.staged() } - /// Get a reference to the inner [`TxGraph`](bdk_chain::tx_graph::TxGraph). - pub fn as_graph(&self) -> &TxGraph { + /// Get a reference to the inner [`TxGraph`]. + pub fn tx_graph(&self) -> &TxGraph { self.indexed_graph.graph() } - pub fn as_index(&self) -> &KeychainTxOutIndex { + /// Get a reference to the inner [`KeychainTxOutIndex`]. + pub fn spk_index(&self) -> &KeychainTxOutIndex { &self.indexed_graph.index } - pub fn as_chain(&self) -> &LocalChain { + /// Get a reference to the inner [`LocalChain`]. + pub fn local_chain(&self) -> &LocalChain { &self.chain } } @@ -1949,7 +1937,7 @@ macro_rules! 
doctest_wallet { let _ = wallet.insert_tx(tx.clone(), ConfirmationTime::Confirmed { height: 500, time: 50_000 - }, None); + }); wallet }} diff --git a/crates/bdk/src/wallet/tx_builder.rs b/crates/bdk/src/wallet/tx_builder.rs index 5d106260..165f01f2 100644 --- a/crates/bdk/src/wallet/tx_builder.rs +++ b/crates/bdk/src/wallet/tx_builder.rs @@ -884,7 +884,7 @@ mod test { txout: Default::default(), keychain: KeychainKind::External, is_spent: false, - confirmation_time: ConfirmationTime::Unconfirmed, + confirmation_time: ConfirmationTime::Unconfirmed { last_seen: 0 }, derivation_index: 0, }, LocalUtxo { diff --git a/crates/bdk/tests/common.rs b/crates/bdk/tests/common.rs index cbf74f24..de946703 100644 --- a/crates/bdk/tests/common.rs +++ b/crates/bdk/tests/common.rs @@ -35,7 +35,6 @@ pub fn get_funded_wallet_with_change( height: 1_000, time: 100, }, - None, ) .unwrap(); diff --git a/crates/bdk/tests/wallet.rs b/crates/bdk/tests/wallet.rs index 6291df1d..c5bf8e16 100644 --- a/crates/bdk/tests/wallet.rs +++ b/crates/bdk/tests/wallet.rs @@ -42,9 +42,8 @@ fn receive_output(wallet: &mut Wallet, value: u64, height: TxHeight) -> OutPoint height, time: 42_000, }, - TxHeight::Unconfirmed => ConfirmationTime::Unconfirmed, + TxHeight::Unconfirmed => ConfirmationTime::Unconfirmed { last_seen: 0 }, }, - None, ) .unwrap(); @@ -812,7 +811,10 @@ fn test_create_tx_add_utxo() { lock_time: PackedLockTime(0), }; wallet - .insert_tx(small_output_tx.clone(), ConfirmationTime::Unconfirmed, None) + .insert_tx( + small_output_tx.clone(), + ConfirmationTime::Unconfirmed { last_seen: 0 }, + ) .unwrap(); let addr = Address::from_str("2N1Ffz3WaNzbeLFBb51xyFMHYSEUXcbiSoX").unwrap(); @@ -849,7 +851,10 @@ fn test_create_tx_manually_selected_insufficient() { }; wallet - .insert_tx(small_output_tx.clone(), ConfirmationTime::Unconfirmed, None) + .insert_tx( + small_output_tx.clone(), + ConfirmationTime::Unconfirmed { last_seen: 0 }, + ) .unwrap(); let addr = 
Address::from_str("2N1Ffz3WaNzbeLFBb51xyFMHYSEUXcbiSoX").unwrap(); @@ -891,7 +896,7 @@ fn test_create_tx_policy_path_no_csv() { }], }; wallet - .insert_tx(tx, ConfirmationTime::Unconfirmed, None) + .insert_tx(tx, ConfirmationTime::Unconfirmed { last_seen: 0 }) .unwrap(); let external_policy = wallet.policies(KeychainKind::External).unwrap().unwrap(); @@ -1218,7 +1223,7 @@ fn test_bump_fee_irreplaceable_tx() { let tx = psbt.extract_tx(); let txid = tx.txid(); wallet - .insert_tx(tx, ConfirmationTime::Unconfirmed, None) + .insert_tx(tx, ConfirmationTime::Unconfirmed { last_seen: 0 }) .unwrap(); wallet.build_fee_bump(txid).unwrap().finish().unwrap(); } @@ -1242,7 +1247,6 @@ fn test_bump_fee_confirmed_tx() { height: 42, time: 42_000, }, - None, ) .unwrap(); @@ -1264,7 +1268,7 @@ fn test_bump_fee_low_fee_rate() { let txid = tx.txid(); wallet - .insert_tx(tx, ConfirmationTime::Unconfirmed, None) + .insert_tx(tx, ConfirmationTime::Unconfirmed { last_seen: 0 }) .unwrap(); let mut builder = wallet.build_fee_bump(txid).unwrap(); @@ -1287,7 +1291,7 @@ fn test_bump_fee_low_abs() { let txid = tx.txid(); wallet - .insert_tx(tx, ConfirmationTime::Unconfirmed, None) + .insert_tx(tx, ConfirmationTime::Unconfirmed { last_seen: 0 }) .unwrap(); let mut builder = wallet.build_fee_bump(txid).unwrap(); @@ -1309,7 +1313,7 @@ fn test_bump_fee_zero_abs() { let tx = psbt.extract_tx(); let txid = tx.txid(); wallet - .insert_tx(tx, ConfirmationTime::Unconfirmed, None) + .insert_tx(tx, ConfirmationTime::Unconfirmed { last_seen: 0 }) .unwrap(); let mut builder = wallet.build_fee_bump(txid).unwrap(); @@ -1329,7 +1333,7 @@ fn test_bump_fee_reduce_change() { let tx = psbt.extract_tx(); let txid = tx.txid(); wallet - .insert_tx(tx, ConfirmationTime::Unconfirmed, None) + .insert_tx(tx, ConfirmationTime::Unconfirmed { last_seen: 0 }) .unwrap(); let mut builder = wallet.build_fee_bump(txid).unwrap(); @@ -1416,7 +1420,7 @@ fn test_bump_fee_reduce_single_recipient() { let tx = psbt.extract_tx(); let txid 
= tx.txid(); wallet - .insert_tx(tx, ConfirmationTime::Unconfirmed, None) + .insert_tx(tx, ConfirmationTime::Unconfirmed { last_seen: 0 }) .unwrap(); let mut builder = wallet.build_fee_bump(txid).unwrap(); @@ -1449,7 +1453,7 @@ fn test_bump_fee_absolute_reduce_single_recipient() { let tx = psbt.extract_tx(); let txid = tx.txid(); wallet - .insert_tx(tx, ConfirmationTime::Unconfirmed, None) + .insert_tx(tx, ConfirmationTime::Unconfirmed { last_seen: 0 }) .unwrap(); let mut builder = wallet.build_fee_bump(txid).unwrap(); @@ -1489,7 +1493,6 @@ fn test_bump_fee_drain_wallet() { height: wallet.latest_checkpoint().unwrap().height, time: 42_000, }, - None, ) .unwrap(); let addr = Address::from_str("2N1Ffz3WaNzbeLFBb51xyFMHYSEUXcbiSoX").unwrap(); @@ -1508,7 +1511,7 @@ fn test_bump_fee_drain_wallet() { let tx = psbt.extract_tx(); let txid = tx.txid(); wallet - .insert_tx(tx, ConfirmationTime::Unconfirmed, None) + .insert_tx(tx, ConfirmationTime::Unconfirmed { last_seen: 0 }) .unwrap(); assert_eq!(original_details.sent, 25_000); @@ -1553,7 +1556,6 @@ fn test_bump_fee_remove_output_manually_selected_only() { .observed_as .cloned() .into(), - None, ) .unwrap(); let outpoint = OutPoint { @@ -1572,7 +1574,7 @@ fn test_bump_fee_remove_output_manually_selected_only() { let tx = psbt.extract_tx(); let txid = tx.txid(); wallet - .insert_tx(tx, ConfirmationTime::Unconfirmed, None) + .insert_tx(tx, ConfirmationTime::Unconfirmed { last_seen: 0 }) .unwrap(); assert_eq!(original_details.sent, 25_000); @@ -1602,7 +1604,7 @@ fn test_bump_fee_add_input() { .observed_as .cloned() .into(); - wallet.insert_tx(init_tx, pos, None).unwrap(); + wallet.insert_tx(init_tx, pos).unwrap(); let addr = Address::from_str("2N1Ffz3WaNzbeLFBb51xyFMHYSEUXcbiSoX").unwrap(); let mut builder = wallet.build_tx().coin_selection(LargestFirstCoinSelection); @@ -1613,7 +1615,7 @@ fn test_bump_fee_add_input() { let tx = psbt.extract_tx(); let txid = tx.txid(); wallet - .insert_tx(tx, ConfirmationTime::Unconfirmed, 
None) + .insert_tx(tx, ConfirmationTime::Unconfirmed { last_seen: 0 }) .unwrap(); let mut builder = wallet.build_fee_bump(txid).unwrap(); @@ -1659,7 +1661,7 @@ fn test_bump_fee_absolute_add_input() { let tx = psbt.extract_tx(); let txid = tx.txid(); wallet - .insert_tx(tx, ConfirmationTime::Unconfirmed, None) + .insert_tx(tx, ConfirmationTime::Unconfirmed { last_seen: 0 }) .unwrap(); let mut builder = wallet.build_fee_bump(txid).unwrap(); @@ -1711,7 +1713,7 @@ fn test_bump_fee_no_change_add_input_and_change() { let tx = psbt.extract_tx(); let txid = tx.txid(); wallet - .insert_tx(tx, ConfirmationTime::Unconfirmed, None) + .insert_tx(tx, ConfirmationTime::Unconfirmed { last_seen: 0 }) .unwrap(); // now bump the fees without using `allow_shrinking`. the wallet should add an @@ -1769,7 +1771,7 @@ fn test_bump_fee_add_input_change_dust() { assert_eq!(tx.output.len(), 2); let txid = tx.txid(); wallet - .insert_tx(tx, ConfirmationTime::Unconfirmed, None) + .insert_tx(tx, ConfirmationTime::Unconfirmed { last_seen: 0 }) .unwrap(); let mut builder = wallet.build_fee_bump(txid).unwrap(); @@ -1830,7 +1832,7 @@ fn test_bump_fee_force_add_input() { txin.witness.push([0x00; P2WPKH_FAKE_WITNESS_SIZE]); // fake signature } wallet - .insert_tx(tx.clone(), ConfirmationTime::Unconfirmed, None) + .insert_tx(tx.clone(), ConfirmationTime::Unconfirmed { last_seen: 0 }) .unwrap(); // the new fee_rate is low enough that just reducing the change would be fine, but we force // the addition of an extra input with `add_utxo()` @@ -1885,7 +1887,7 @@ fn test_bump_fee_absolute_force_add_input() { txin.witness.push([0x00; P2WPKH_FAKE_WITNESS_SIZE]); // fake signature } wallet - .insert_tx(tx.clone(), ConfirmationTime::Unconfirmed, None) + .insert_tx(tx.clone(), ConfirmationTime::Unconfirmed { last_seen: 0 }) .unwrap(); // the new fee_rate is low enough that just reducing the change would be fine, but we force @@ -1946,7 +1948,7 @@ fn test_bump_fee_unconfirmed_inputs_only() { 
txin.witness.push([0x00; P2WPKH_FAKE_WITNESS_SIZE]); // fake signature } wallet - .insert_tx(tx, ConfirmationTime::Unconfirmed, None) + .insert_tx(tx, ConfirmationTime::Unconfirmed { last_seen: 0 }) .unwrap(); let mut builder = wallet.build_fee_bump(txid).unwrap(); builder.fee_rate(FeeRate::from_sat_per_vb(25.0)); @@ -1977,7 +1979,7 @@ fn test_bump_fee_unconfirmed_input() { txin.witness.push([0x00; P2WPKH_FAKE_WITNESS_SIZE]); // fake signature } wallet - .insert_tx(tx, ConfirmationTime::Unconfirmed, None) + .insert_tx(tx, ConfirmationTime::Unconfirmed { last_seen: 0 }) .unwrap(); let mut builder = wallet.build_fee_bump(txid).unwrap(); @@ -3072,7 +3074,6 @@ fn test_spend_coinbase() { height: confirmation_height, time: 30_000, }, - None, ) .unwrap(); diff --git a/crates/chain/src/chain_data.rs b/crates/chain/src/chain_data.rs index 9baa643e..022e1299 100644 --- a/crates/chain/src/chain_data.rs +++ b/crates/chain/src/chain_data.rs @@ -99,14 +99,14 @@ impl TxHeight { )] pub enum ConfirmationTime { Confirmed { height: u32, time: u64 }, - Unconfirmed, + Unconfirmed { last_seen: u64 }, } impl sparse_chain::ChainPosition for ConfirmationTime { fn height(&self) -> TxHeight { match self { ConfirmationTime::Confirmed { height, .. } => TxHeight::Confirmed(*height), - ConfirmationTime::Unconfirmed => TxHeight::Unconfirmed, + ConfirmationTime::Unconfirmed { .. 
} => TxHeight::Unconfirmed, } } @@ -116,7 +116,7 @@ impl sparse_chain::ChainPosition for ConfirmationTime { height, time: u64::MAX, }, - TxHeight::Unconfirmed => Self::Unconfirmed, + TxHeight::Unconfirmed => Self::Unconfirmed { last_seen: 0 }, } } @@ -126,7 +126,7 @@ impl sparse_chain::ChainPosition for ConfirmationTime { height, time: u64::MIN, }, - TxHeight::Unconfirmed => Self::Unconfirmed, + TxHeight::Unconfirmed => Self::Unconfirmed { last_seen: 0 }, } } } @@ -144,7 +144,7 @@ impl From> for ConfirmationTime { height: a.confirmation_height, time: a.confirmation_time, }, - ObservedAs::Unconfirmed(_) => Self::Unconfirmed, + ObservedAs::Unconfirmed(_) => Self::Unconfirmed { last_seen: 0 }, } } } diff --git a/crates/chain/tests/test_keychain_tracker.rs b/crates/chain/tests/test_keychain_tracker.rs index bd8c6e03..fe725ea4 100644 --- a/crates/chain/tests/test_keychain_tracker.rs +++ b/crates/chain/tests/test_keychain_tracker.rs @@ -33,7 +33,7 @@ fn test_insert_tx() { let _ = tracker.txout_index.reveal_to_target(&(), 5); let changeset = tracker - .insert_tx_preview(tx.clone(), ConfirmationTime::Unconfirmed) + .insert_tx_preview(tx.clone(), ConfirmationTime::Unconfirmed { last_seen: 0 }) .unwrap(); tracker.apply_changeset(changeset); assert_eq!( @@ -41,7 +41,7 @@ fn test_insert_tx() { .chain_graph() .transactions_in_chain() .collect::>(), - vec![(&ConfirmationTime::Unconfirmed, &tx,)] + vec![(&ConfirmationTime::Unconfirmed { last_seen: 0 }, &tx,)] ); assert_eq!( diff --git a/crates/electrum/src/lib.rs b/crates/electrum/src/lib.rs index bddbd8f2..6d352ca1 100644 --- a/crates/electrum/src/lib.rs +++ b/crates/electrum/src/lib.rs @@ -296,7 +296,7 @@ impl ElectrumUpdate { height, time: height_to_time[&height], }, - TxHeight::Unconfirmed => ConfirmationTime::Unconfirmed, + TxHeight::Unconfirmed => ConfirmationTime::Unconfirmed { last_seen: 0 }, }; let _ = new_update.insert_tx(txid, conf_time).expect("must insert"); } diff --git a/crates/esplora/src/lib.rs 
b/crates/esplora/src/lib.rs index 8398fcb3..a6af0fad 100644 --- a/crates/esplora/src/lib.rs +++ b/crates/esplora/src/lib.rs @@ -22,6 +22,6 @@ pub(crate) fn map_confirmation_time( (Some(time), Some(height)) if height <= height_at_start => { ConfirmationTime::Confirmed { height, time } } - _ => ConfirmationTime::Unconfirmed, + _ => ConfirmationTime::Unconfirmed { last_seen: 0 }, } } From 2e3cee4bd0568073e42e5670476febddd85a7b36 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E5=BF=97=E5=AE=87?= Date: Fri, 12 May 2023 00:08:16 +0800 Subject: [PATCH 04/17] [electrum_redesign] Introduce redesigned `ElectrumExt` There are a number of improvements that can be done, but it is in a decent state to be usable. Possible improvements: * Remove requirement to retry obtaining ALL data after reorg is detected. Transactions can be anchored to a lower block (not block tip), and an `assume_final_depth` value can be used. * The logic to finalize an update with confirmation time can be improved during reorgs to not require returning an error. --- crates/electrum/src/lib.rs | 15 +- crates/electrum/src/v2.rs | 507 +++++++++++++++++++++++++++++++++++++ 2 files changed, 515 insertions(+), 7 deletions(-) create mode 100644 crates/electrum/src/v2.rs diff --git a/crates/electrum/src/lib.rs b/crates/electrum/src/lib.rs index 6d352ca1..051b6375 100644 --- a/crates/electrum/src/lib.rs +++ b/crates/electrum/src/lib.rs @@ -20,12 +20,6 @@ //! [`batch_transaction_get`]: ElectrumApi::batch_transaction_get //! 
[`bdk_electrum_example`]: https://github.com/LLFourn/bdk_core_staging/tree/master/bdk_electrum_example -use std::{ - collections::{BTreeMap, HashMap}, - fmt::Debug, -}; - -pub use bdk_chain; use bdk_chain::{ bitcoin::{hashes::hex::FromHex, BlockHash, OutPoint, Script, Transaction, Txid}, chain_graph::{self, ChainGraph}, @@ -34,8 +28,15 @@ use bdk_chain::{ tx_graph::TxGraph, BlockId, ConfirmationTime, TxHeight, }; -pub use electrum_client; use electrum_client::{Client, ElectrumApi, Error}; +use std::{ + collections::{BTreeMap, HashMap}, + fmt::Debug, +}; + +pub mod v2; +pub use bdk_chain; +pub use electrum_client; /// Trait to extend [`electrum_client::Client`] functionality. /// diff --git a/crates/electrum/src/v2.rs b/crates/electrum/src/v2.rs new file mode 100644 index 00000000..6a942a1f --- /dev/null +++ b/crates/electrum/src/v2.rs @@ -0,0 +1,507 @@ +use bdk_chain::{ + bitcoin::{hashes::hex::FromHex, BlockHash, OutPoint, Script, Transaction, Txid}, + keychain::LocalUpdate, + local_chain::LocalChain, + tx_graph::{self, TxGraph}, + Anchor, BlockId, ConfirmationHeightAnchor, ConfirmationTimeAnchor, +}; +use electrum_client::{Client, ElectrumApi, Error}; +use std::{ + collections::{BTreeMap, BTreeSet, HashMap, HashSet}, + fmt::Debug, +}; + +use crate::InternalError; + +#[derive(Debug, Clone)] +pub struct ElectrumUpdate { + pub graph_update: HashMap>, + pub chain_update: LocalChain, + pub keychain_update: BTreeMap, +} + +impl Default for ElectrumUpdate { + fn default() -> Self { + Self { + graph_update: Default::default(), + chain_update: Default::default(), + keychain_update: Default::default(), + } + } +} + +impl<'a, K, A: Anchor> ElectrumUpdate { + pub fn missing_full_txs( + &'a self, + graph: &'a TxGraph, + ) -> impl Iterator + 'a { + self.graph_update + .keys() + .filter(move |&&txid| graph.as_ref().get_tx(txid).is_none()) + } + + pub fn finalize(self, seen_at: Option, new_txs: T) -> LocalUpdate + where + T: IntoIterator, + { + let mut graph_update = 
TxGraph::::new(new_txs); + for (txid, anchors) in self.graph_update { + if let Some(seen_at) = seen_at { + let _ = graph_update.insert_seen_at(txid, seen_at); + } + for anchor in anchors { + let _ = graph_update.insert_anchor(txid, anchor); + } + } + dbg!(graph_update.full_txs().count()); + LocalUpdate { + keychain: self.keychain_update, + graph: graph_update, + chain: self.chain_update, + } + } +} + +impl ElectrumUpdate { + pub fn finalize_as_confirmation_time( + self, + client: &Client, + seen_at: Option, + new_txs: T, + ) -> Result, Error> + where + T: IntoIterator, + { + let update = self.finalize(seen_at, new_txs); + let update_tip = update.chain.tip().expect("must have tip"); + + let relevant_heights = { + let mut visited_heights = HashSet::new(); + update + .graph + .all_anchors() + .iter() + .map(|(a, _)| a.confirmation_height_upper_bound()) + .filter(move |&h| visited_heights.insert(h)) + .collect::>() + }; + + let height_to_time = relevant_heights + .clone() + .into_iter() + .zip( + client + .batch_block_header(relevant_heights)? + .into_iter() + .map(|bh| bh.time as u64), + ) + .collect::>(); + + if update_tip.hash != client.block_header(update_tip.height as _)?.block_hash() { + // [TODO] We should alter the logic so we won't have to return an error. This is to + // [TODO] ensure obtained block times are "anchored" to our tip. If we exclude this, it + // [TODO] should be "safe" as well. Tx confirmation times would just slightly vary. 
+ return Err(Error::Message(format!( + "tip changed during update: update_tip={:?}", + update_tip + ))); + } + + let graph_additions = { + let old_additions = TxGraph::default().determine_additions(&update.graph); + tx_graph::Additions { + tx: old_additions.tx, + txout: old_additions.txout, + last_seen: old_additions.last_seen, + anchors: old_additions + .anchors + .into_iter() + .map(|(height_anchor, txid)| { + let confirmation_height = dbg!(height_anchor.confirmation_height); + let confirmation_time = height_to_time[&confirmation_height]; + let time_anchor = ConfirmationTimeAnchor { + anchor_block: height_anchor.anchor_block, + confirmation_height, + confirmation_time, + }; + (time_anchor, txid) + }) + .collect(), + } + }; + + Ok(LocalUpdate { + keychain: update.keychain, + graph: { + let mut graph = TxGraph::default(); + graph.apply_additions(graph_additions); + graph + }, + chain: update.chain, + }) + } +} + +pub trait ElectrumExt { + fn get_tip(&self) -> Result<(u32, BlockHash), Error>; + + fn scan( + &self, + local_chain: &BTreeMap, + keychain_spks: BTreeMap>, + txids: impl IntoIterator, + outpoints: impl IntoIterator, + stop_gap: usize, + batch_size: usize, + ) -> Result, Error>; + + fn scan_without_keychain( + &self, + local_chain: &BTreeMap, + misc_spks: impl IntoIterator, + txids: impl IntoIterator, + outpoints: impl IntoIterator, + batch_size: usize, + ) -> Result, Error> { + let spk_iter = misc_spks + .into_iter() + .enumerate() + .map(|(i, spk)| (i as u32, spk)); + + self.scan( + local_chain, + [((), spk_iter)].into(), + txids, + outpoints, + usize::MAX, + batch_size, + ) + } +} + +impl ElectrumExt for Client { + fn get_tip(&self) -> Result<(u32, BlockHash), Error> { + // TODO: unsubscribe when added to the client, or is there a better call to use here? 
+ self.block_headers_subscribe() + .map(|data| (data.height as u32, data.header.block_hash())) + } + + fn scan( + &self, + local_chain: &BTreeMap, + keychain_spks: BTreeMap>, + txids: impl IntoIterator, + outpoints: impl IntoIterator, + stop_gap: usize, + batch_size: usize, + ) -> Result, Error> { + let mut request_spks = keychain_spks + .into_iter() + .map(|(k, s)| (k, s.into_iter())) + .collect::>(); + let mut scanned_spks = BTreeMap::<(K, u32), (Script, bool)>::new(); + + let txids = txids.into_iter().collect::>(); + let outpoints = outpoints.into_iter().collect::>(); + + let update = loop { + let mut update = ElectrumUpdate:: { + chain_update: prepare_chain_update(self, local_chain)?, + ..Default::default() + }; + let anchor_block = update + .chain_update + .tip() + .expect("must have atleast one block"); + + if !request_spks.is_empty() { + if !scanned_spks.is_empty() { + let mut scanned_spk_iter = scanned_spks + .iter() + .map(|(i, (spk, _))| (i.clone(), spk.clone())); + match populate_with_spks( + self, + anchor_block, + &mut update, + &mut scanned_spk_iter, + stop_gap, + batch_size, + ) { + Err(InternalError::Reorg) => continue, + Err(InternalError::ElectrumError(e)) => return Err(e), + Ok(mut spks) => scanned_spks.append(&mut spks), + }; + } + for (keychain, keychain_spks) in &mut request_spks { + match populate_with_spks( + self, + anchor_block, + &mut update, + keychain_spks, + stop_gap, + batch_size, + ) { + Err(InternalError::Reorg) => continue, + Err(InternalError::ElectrumError(e)) => return Err(e), + Ok(spks) => scanned_spks.extend( + spks.into_iter() + .map(|(spk_i, spk)| ((keychain.clone(), spk_i), spk)), + ), + }; + } + } + + match populate_with_txids(self, anchor_block, &mut update, &mut txids.iter().cloned()) { + Err(InternalError::Reorg) => continue, + Err(InternalError::ElectrumError(e)) => return Err(e), + Ok(_) => {} + } + + match populate_with_outpoints( + self, + anchor_block, + &mut update, + &mut outpoints.iter().cloned(), + ) { + 
Err(InternalError::Reorg) => continue, + Err(InternalError::ElectrumError(e)) => return Err(e), + Ok(_txs) => { /* [TODO] cache full txs to reduce bandwidth */ } + } + + // check for reorgs during scan process + let server_blockhash = self + .block_header(anchor_block.height as usize)? + .block_hash(); + if anchor_block.hash != server_blockhash { + continue; // reorg + } + + update.keychain_update = request_spks + .into_keys() + .filter_map(|k| { + scanned_spks + .range((k.clone(), u32::MIN)..=(k.clone(), u32::MAX)) + .rev() + .find(|(_, (_, active))| *active) + .map(|((_, i), _)| (k, *i)) + }) + .collect::>(); + break update; + }; + + Ok(update) + } +} + +/// Prepare an update "template" based on the checkpoints of the `local_chain`. +fn prepare_chain_update( + client: &Client, + local_chain: &BTreeMap, +) -> Result { + let mut update = LocalChain::default(); + + // Find the local chain block that is still there so our update can connect to the local chain. + for (&existing_height, &existing_hash) in local_chain.iter().rev() { + // TODO: a batch request may be safer, as a reorg that happens when we are obtaining + // `block_header`s will result in inconsistencies + let current_hash = client.block_header(existing_height as usize)?.block_hash(); + let _ = update + .insert_block(BlockId { + height: existing_height, + hash: current_hash, + }) + .expect("This never errors because we are working with a fresh chain"); + + if current_hash == existing_hash { + break; + } + } + + // Insert the new tip so new transactions will be accepted into the sparsechain. + let tip = { + let (height, hash) = crate::get_tip(client)?; + BlockId { height, hash } + }; + if update.insert_block(tip).is_err() { + // There has been a re-org before we even begin scanning addresses. + // Just recursively call (this should never happen). 
+ return prepare_chain_update(client, local_chain); + } + + Ok(update) +} + +fn determine_tx_anchor( + anchor_block: BlockId, + raw_height: i32, + txid: Txid, +) -> Option { + if txid + == Txid::from_hex("4a5e1e4baab89f3a32518a88c31bc87f618f76673e2cc77ab2127b7afdeda33b") + .expect("must deserialize genesis coinbase txid") + { + return Some(ConfirmationHeightAnchor { + anchor_block, + confirmation_height: 0, + }); + } + match raw_height { + h if h <= 0 => { + debug_assert!(h == 0 || h == -1, "unexpected height ({}) from electrum", h); + None + } + h => { + let h = h as u32; + if h > anchor_block.height { + None + } else { + Some(ConfirmationHeightAnchor { + anchor_block, + confirmation_height: h, + }) + } + } + } +} + +fn populate_with_outpoints( + client: &Client, + anchor_block: BlockId, + update: &mut ElectrumUpdate, + outpoints: &mut impl Iterator, +) -> Result, InternalError> { + let mut full_txs = HashMap::new(); + for outpoint in outpoints { + let txid = outpoint.txid; + let tx = client.transaction_get(&txid)?; + debug_assert_eq!(tx.txid(), txid); + let txout = match tx.output.get(outpoint.vout as usize) { + Some(txout) => txout, + None => continue, + }; + // attempt to find the following transactions (alongside their chain positions), and + // add to our sparsechain `update`: + let mut has_residing = false; // tx in which the outpoint resides + let mut has_spending = false; // tx that spends the outpoint + for res in client.script_get_history(&txout.script_pubkey)? 
{ + if has_residing && has_spending { + break; + } + + if res.tx_hash == txid { + if has_residing { + continue; + } + has_residing = true; + full_txs.insert(res.tx_hash, tx.clone()); + } else { + if has_spending { + continue; + } + let res_tx = match full_txs.get(&res.tx_hash) { + Some(tx) => tx, + None => { + let res_tx = client.transaction_get(&res.tx_hash)?; + full_txs.insert(res.tx_hash, res_tx); + full_txs.get(&res.tx_hash).expect("just inserted") + } + }; + has_spending = res_tx + .input + .iter() + .any(|txin| txin.previous_output == outpoint); + if !has_spending { + continue; + } + }; + + let anchor = determine_tx_anchor(anchor_block, res.height, res.tx_hash); + + let tx_entry = update.graph_update.entry(res.tx_hash).or_default(); + if let Some(anchor) = anchor { + tx_entry.insert(anchor); + } + } + } + Ok(full_txs) +} + +fn populate_with_txids( + client: &Client, + anchor_block: BlockId, + update: &mut ElectrumUpdate, + txids: &mut impl Iterator, +) -> Result<(), InternalError> { + for txid in txids { + let tx = match client.transaction_get(&txid) { + Ok(tx) => tx, + Err(electrum_client::Error::Protocol(_)) => continue, + Err(other_err) => return Err(other_err.into()), + }; + + let spk = tx + .output + .get(0) + .map(|txo| &txo.script_pubkey) + .expect("tx must have an output"); + + let anchor = match client + .script_get_history(spk)? 
+ .into_iter() + .find(|r| r.tx_hash == txid) + { + Some(r) => determine_tx_anchor(anchor_block, r.height, txid), + None => continue, + }; + + let tx_entry = update.graph_update.entry(txid).or_default(); + if let Some(anchor) = anchor { + tx_entry.insert(anchor); + } + } + Ok(()) +} + +fn populate_with_spks( + client: &Client, + anchor_block: BlockId, + update: &mut ElectrumUpdate, + spks: &mut impl Iterator, + stop_gap: usize, + batch_size: usize, +) -> Result, InternalError> { + let mut unused_spk_count = 0_usize; + let mut scanned_spks = BTreeMap::new(); + + loop { + let spks = (0..batch_size) + .map_while(|_| spks.next()) + .collect::>(); + if spks.is_empty() { + return Ok(scanned_spks); + } + + let spk_histories = client.batch_script_get_history(spks.iter().map(|(_, s)| s))?; + + for ((spk_index, spk), spk_history) in spks.into_iter().zip(spk_histories) { + if spk_history.is_empty() { + scanned_spks.insert(spk_index, (spk, false)); + unused_spk_count += 1; + if unused_spk_count > stop_gap { + return Ok(scanned_spks); + } + continue; + } else { + scanned_spks.insert(spk_index, (spk, true)); + unused_spk_count = 0; + } + + for tx in spk_history { + let tx_entry = update.graph_update.entry(tx.tx_hash).or_default(); + if let Some(anchor) = determine_tx_anchor(anchor_block, tx.height, tx.tx_hash) { + tx_entry.insert(anchor); + } + } + } + } +} From f55974a64bd0f5f2ef9e95831c2fb5d4f92f8282 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E5=BF=97=E5=AE=87?= Date: Fri, 12 May 2023 16:17:17 +0800 Subject: [PATCH 05/17] [examples_redesign] Introduce `example_cli` package This is the equivalent of `keychain_tracker_example_cli` that works with the redesigned structures. 
--- Cargo.toml | 1 + crates/chain/src/chain_data.rs | 7 + crates/chain/src/chain_oracle.rs | 3 + crates/chain/src/indexed_tx_graph.rs | 9 + crates/chain/src/local_chain.rs | 4 + crates/electrum/src/lib.rs | 7 +- example-crates/example_cli/Cargo.toml | 17 + example-crates/example_cli/src/lib.rs | 775 ++++++++++++++++++++++++++ 8 files changed, 819 insertions(+), 4 deletions(-) create mode 100644 example-crates/example_cli/Cargo.toml create mode 100644 example-crates/example_cli/src/lib.rs diff --git a/Cargo.toml b/Cargo.toml index 2104196b..4d0f4f4d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -4,6 +4,7 @@ members = [ "crates/chain", "crates/file_store", "crates/electrum", + "example-crates/example_cli", "example-crates/keychain_tracker_electrum", "example-crates/keychain_tracker_esplora", "example-crates/keychain_tracker_example_cli", diff --git a/crates/chain/src/chain_data.rs b/crates/chain/src/chain_data.rs index 022e1299..d1234298 100644 --- a/crates/chain/src/chain_data.rs +++ b/crates/chain/src/chain_data.rs @@ -16,6 +16,13 @@ pub enum ObservedAs { Unconfirmed(u64), } +impl ObservedAs { + /// Returns whether [`ObservedAs`] is confirmed or not. + pub fn is_confirmed(&self) -> bool { + matches!(self, Self::Confirmed(_)) + } +} + impl ObservedAs<&A> { pub fn cloned(self) -> ObservedAs { match self { diff --git a/crates/chain/src/chain_oracle.rs b/crates/chain/src/chain_oracle.rs index 58fbf6c1..e736be03 100644 --- a/crates/chain/src/chain_oracle.rs +++ b/crates/chain/src/chain_oracle.rs @@ -19,4 +19,7 @@ pub trait ChainOracle { block: BlockId, chain_tip: BlockId, ) -> Result, Self::Error>; + + /// Get the best chain's chain tip. 
+ fn get_chain_tip(&self) -> Result, Self::Error>; } diff --git a/crates/chain/src/indexed_tx_graph.rs b/crates/chain/src/indexed_tx_graph.rs index f69b227a..24a1884c 100644 --- a/crates/chain/src/indexed_tx_graph.rs +++ b/crates/chain/src/indexed_tx_graph.rs @@ -203,6 +203,15 @@ impl Append for IndexedAdditions { } } +impl From> for IndexedAdditions { + fn from(graph_additions: Additions) -> Self { + Self { + graph_additions, + ..Default::default() + } + } +} + /// Represents a structure that can index transaction data. pub trait Indexer { /// The resultant "additions" when new transaction data is indexed. diff --git a/crates/chain/src/local_chain.rs b/crates/chain/src/local_chain.rs index a32a615c..7623b294 100644 --- a/crates/chain/src/local_chain.rs +++ b/crates/chain/src/local_chain.rs @@ -34,6 +34,10 @@ impl ChainOracle for LocalChain { }, ) } + + fn get_chain_tip(&self) -> Result, Self::Error> { + Ok(self.tip()) + } } impl AsRef> for LocalChain { diff --git a/crates/electrum/src/lib.rs b/crates/electrum/src/lib.rs index 051b6375..df5e1d74 100644 --- a/crates/electrum/src/lib.rs +++ b/crates/electrum/src/lib.rs @@ -130,7 +130,7 @@ impl ElectrumExt for Client { let mut scanned_spk_iter = scanned_spks .iter() .map(|(i, (spk, _))| (i.clone(), spk.clone())); - match populate_with_spks::( + match populate_with_spks::<_, _>( self, &mut update, &mut scanned_spk_iter, @@ -143,7 +143,7 @@ impl ElectrumExt for Client { }; } for (keychain, keychain_spks) in &mut request_spks { - match populate_with_spks::( + match populate_with_spks::( self, &mut update, keychain_spks, @@ -529,7 +529,7 @@ fn populate_with_txids( /// Populate an update [`SparseChain`] with transactions (and associated block positions) from /// the transaction history of the provided `spk`s. 
-fn populate_with_spks( +fn populate_with_spks( client: &Client, update: &mut SparseChain, spks: &mut S, @@ -537,7 +537,6 @@ fn populate_with_spks( batch_size: usize, ) -> Result, InternalError> where - K: Ord + Clone, I: Ord + Clone, S: Iterator, { diff --git a/example-crates/example_cli/Cargo.toml b/example-crates/example_cli/Cargo.toml new file mode 100644 index 00000000..ffad2f91 --- /dev/null +++ b/example-crates/example_cli/Cargo.toml @@ -0,0 +1,17 @@ +[package] +name = "example_cli" +version = "0.1.0" +edition = "2021" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +bdk_chain = { path = "../../crates/chain", features = ["serde", "miniscript"]} +bdk_file_store = { path = "../../crates/file_store" } +bdk_tmp_plan = { path = "../../nursery/tmp_plan" } +bdk_coin_select = { path = "../../nursery/coin_select" } + +clap = { version = "3.2.23", features = ["derive", "env"] } +anyhow = "1" +serde = { version = "1", features = ["derive"] } +serde_json = { version = "^1.0" } diff --git a/example-crates/example_cli/src/lib.rs b/example-crates/example_cli/src/lib.rs new file mode 100644 index 00000000..30be503f --- /dev/null +++ b/example-crates/example_cli/src/lib.rs @@ -0,0 +1,775 @@ +pub use anyhow; +use anyhow::Context; +use bdk_coin_select::{coin_select_bnb, CoinSelector, CoinSelectorOpt, WeightedValue}; +use bdk_file_store::Store; +use serde::{de::DeserializeOwned, Serialize}; +use std::{cmp::Reverse, collections::HashMap, path::PathBuf, sync::Mutex, time::Duration}; + +use bdk_chain::{ + bitcoin::{ + psbt::Prevouts, + secp256k1::{self, Secp256k1}, + util::sighash::SighashCache, + Address, LockTime, Network, Script, Sequence, Transaction, TxIn, TxOut, + }, + indexed_tx_graph::{IndexedAdditions, IndexedTxGraph}, + keychain::{DerivationAdditions, KeychainTxOutIndex}, + miniscript::{ + descriptor::{DescriptorSecretKey, KeyMap}, + Descriptor, DescriptorPublicKey, + }, + Anchor, Append, ChainOracle, 
DescriptorExt, FullTxOut, ObservedAs, Persist, PersistBackend, +}; +pub use bdk_file_store; +pub use clap; + +use clap::{Parser, Subcommand}; + +pub type KeychainTxGraph = IndexedTxGraph>; +pub type Database<'m, A, X> = Persist>, ChangeSet>; + +#[derive(Debug, Clone, PartialEq, serde::Deserialize, serde::Serialize)] +#[serde(bound( + deserialize = "A: Ord + serde::Deserialize<'de>, X: serde::Deserialize<'de>", + serialize = "A: Ord + serde::Serialize, X: serde::Serialize", +))] +pub struct ChangeSet { + pub indexed_additions: IndexedAdditions>, + pub extension: X, +} + +impl Default for ChangeSet { + fn default() -> Self { + Self { + indexed_additions: Default::default(), + extension: Default::default(), + } + } +} + +impl Append for ChangeSet { + fn append(&mut self, other: Self) { + Append::append(&mut self.indexed_additions, other.indexed_additions); + Append::append(&mut self.extension, other.extension) + } + + fn is_empty(&self) -> bool { + todo!() + } +} + +#[derive(Parser)] +#[clap(author, version, about, long_about = None)] +#[clap(propagate_version = true)] +pub struct Args { + #[clap(env = "DESCRIPTOR")] + pub descriptor: String, + #[clap(env = "CHANGE_DESCRIPTOR")] + pub change_descriptor: Option, + + #[clap(env = "BITCOIN_NETWORK", long, default_value = "signet")] + pub network: Network, + + #[clap(env = "BDK_DB_PATH", long, default_value = ".bdk_example_db")] + pub db_path: PathBuf, + + #[clap(env = "BDK_CP_LIMIT", long, default_value = "20")] + pub cp_limit: usize, + + #[clap(subcommand)] + pub command: Commands, +} + +#[allow(clippy::almost_swapped)] +#[derive(Subcommand, Debug, Clone)] +pub enum Commands { + #[clap(flatten)] + ChainSpecific(C), + /// Address generation and inspection. + Address { + #[clap(subcommand)] + addr_cmd: AddressCmd, + }, + /// Get the wallet balance. + Balance, + /// TxOut related commands. + #[clap(name = "txout")] + TxOut { + #[clap(subcommand)] + txout_cmd: TxOutCmd, + }, + /// Send coins to an address. 
+ Send { + value: u64, + address: Address, + #[clap(short, default_value = "largest-first")] + coin_select: CoinSelectionAlgo, + }, +} + +#[derive(Clone, Debug)] +pub enum CoinSelectionAlgo { + LargestFirst, + SmallestFirst, + OldestFirst, + NewestFirst, + BranchAndBound, +} + +impl Default for CoinSelectionAlgo { + fn default() -> Self { + Self::LargestFirst + } +} + +impl core::str::FromStr for CoinSelectionAlgo { + type Err = anyhow::Error; + + fn from_str(s: &str) -> Result { + use CoinSelectionAlgo::*; + Ok(match s { + "largest-first" => LargestFirst, + "smallest-first" => SmallestFirst, + "oldest-first" => OldestFirst, + "newest-first" => NewestFirst, + "bnb" => BranchAndBound, + unknown => { + return Err(anyhow::anyhow!( + "unknown coin selection algorithm '{}'", + unknown + )) + } + }) + } +} + +impl core::fmt::Display for CoinSelectionAlgo { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + use CoinSelectionAlgo::*; + write!( + f, + "{}", + match self { + LargestFirst => "largest-first", + SmallestFirst => "smallest-first", + OldestFirst => "oldest-first", + NewestFirst => "newest-first", + BranchAndBound => "bnb", + } + ) + } +} + +#[allow(clippy::almost_swapped)] +#[derive(Subcommand, Debug, Clone)] +pub enum AddressCmd { + /// Get the next unused address. + Next, + /// Get a new address regardless of the existing unused addresses. + New, + /// List all addresses + List { + #[clap(long)] + change: bool, + }, + Index, +} + +#[derive(Subcommand, Debug, Clone)] +pub enum TxOutCmd { + List { + /// Return only spent outputs. + #[clap(short, long)] + spent: bool, + /// Return only unspent outputs. + #[clap(short, long)] + unspent: bool, + /// Return only confirmed outputs. + #[clap(long)] + confirmed: bool, + /// Return only unconfirmed outputs. 
+ #[clap(long)] + unconfirmed: bool, + }, +} + +#[derive( + Debug, Clone, Copy, PartialOrd, Ord, PartialEq, Eq, serde::Deserialize, serde::Serialize, +)] +pub enum Keychain { + External, + Internal, +} + +impl core::fmt::Display for Keychain { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Keychain::External => write!(f, "external"), + Keychain::Internal => write!(f, "internal"), + } + } +} + +pub fn run_address_cmd( + graph: &mut KeychainTxGraph, + db: &Mutex>, + network: Network, + cmd: AddressCmd, +) -> anyhow::Result<()> +where + ChangeSet: Default + Append + DeserializeOwned + Serialize, +{ + let process_spk = |spk_i: u32, spk: &Script, index_additions: DerivationAdditions| { + if !index_additions.is_empty() { + let db = &mut *db.lock().unwrap(); + db.stage(ChangeSet { + indexed_additions: IndexedAdditions { + index_additions, + ..Default::default() + }, + ..Default::default() + }); + db.commit()?; + } + let addr = Address::from_script(spk, network).context("failed to derive address")?; + println!("[address @ {}] {}", spk_i, addr); + Ok(()) + }; + + let index = &mut graph.index; + + match cmd { + AddressCmd::Next => { + let ((spk_i, spk), index_additions) = index.next_unused_spk(&Keychain::External); + process_spk(spk_i, spk, index_additions) + } + AddressCmd::New => { + let ((spk_i, spk), index_additions) = index.reveal_next_spk(&Keychain::External); + process_spk(spk_i, spk, index_additions) + } + AddressCmd::Index => { + for (keychain, derivation_index) in index.last_revealed_indices() { + println!("{:?}: {}", keychain, derivation_index); + } + Ok(()) + } + AddressCmd::List { change } => { + let target_keychain = match change { + true => Keychain::Internal, + false => Keychain::External, + }; + for (spk_i, spk) in index.revealed_spks_of_keychain(&target_keychain) { + let address = Address::from_script(spk, network) + .expect("should always be able to derive address"); + println!( + "{:?} {} used:{}", + spk_i, + 
address, + index.is_used(&(target_keychain, spk_i)) + ); + } + Ok(()) + } + } +} + +pub fn run_balance_cmd( + graph: &KeychainTxGraph, + chain: &O, +) -> Result<(), O::Error> { + let balance = graph.graph().try_balance( + chain, + chain.get_chain_tip()?.unwrap_or_default(), + graph.index.outpoints().iter().cloned(), + |(k, _), _| k == &Keychain::Internal, + )?; + + let confirmed_total = balance.confirmed + balance.immature; + let unconfirmed_total = balance.untrusted_pending + balance.trusted_pending; + + println!("[confirmed]"); + println!(" total = {}sats", confirmed_total); + println!(" spendable = {}sats", balance.confirmed); + println!(" immature = {}sats", balance.immature); + + println!("[unconfirmed]"); + println!(" total = {}sats", unconfirmed_total,); + println!(" trusted = {}sats", balance.trusted_pending); + println!(" untrusted = {}sats", balance.untrusted_pending); + + Ok(()) +} + +pub fn run_txo_cmd( + graph: &KeychainTxGraph, + chain: &O, + network: Network, + cmd: TxOutCmd, +) -> anyhow::Result<()> +where + O::Error: std::error::Error + Send + Sync + 'static, +{ + let chain_tip = chain.get_chain_tip()?.unwrap_or_default(); + let outpoints = graph.index.outpoints().iter().cloned(); + + match cmd { + TxOutCmd::List { + spent, + unspent, + confirmed, + unconfirmed, + } => { + let txouts = graph + .graph() + .try_filter_chain_txouts(chain, chain_tip, outpoints) + .filter(|r| match r { + Ok((_, full_txo)) => match (spent, unspent) { + (true, false) => full_txo.spent_by.is_some(), + (false, true) => full_txo.spent_by.is_none(), + _ => true, + }, + // always keep errored items + Err(_) => true, + }) + .filter(|r| match r { + Ok((_, full_txo)) => match (confirmed, unconfirmed) { + (true, false) => full_txo.chain_position.is_confirmed(), + (false, true) => !full_txo.chain_position.is_confirmed(), + _ => true, + }, + // always keep errored items + Err(_) => true, + }) + .collect::, _>>()?; + + for (spk_i, full_txo) in txouts { + let addr = 
Address::from_script(&full_txo.txout.script_pubkey, network)?; + println!( + "{:?} {} {} {} spent:{:?}", + spk_i, full_txo.txout.value, full_txo.outpoint, addr, full_txo.spent_by + ) + } + Ok(()) + } + } +} + +#[allow(clippy::too_many_arguments)] +pub fn run_send_cmd( + graph: &Mutex>, + db: &Mutex>, + chain: &O, + keymap: &HashMap, + cs_algorithm: CoinSelectionAlgo, + address: Address, + value: u64, + broadcast: impl FnOnce(&Transaction) -> anyhow::Result<()>, +) -> anyhow::Result<()> +where + O::Error: std::error::Error + Send + Sync + 'static, + ChangeSet: Default + Append + DeserializeOwned + Serialize, +{ + let (transaction, change_index) = { + let graph = &mut *graph.lock().unwrap(); + // take mutable ref to construct tx -- it is only open for a short time while building it. + let (tx, change_info) = create_tx(graph, chain, keymap, cs_algorithm, address, value)?; + + if let Some((index_additions, (change_keychain, index))) = change_info { + // We must first persist to disk the fact that we've got a new address from the + // change keychain so future scans will find the tx we're about to broadcast. + // If we're unable to persist this, then we don't want to broadcast. + db.lock().unwrap().stage(ChangeSet { + indexed_additions: IndexedAdditions { + index_additions, + ..Default::default() + }, + ..Default::default() + }); + + // We don't want other callers/threads to use this address while we're using it + // but we also don't want to scan the tx we just created because it's not + // technically in the blockchain yet. + graph.index.mark_used(&change_keychain, index); + (tx, Some((change_keychain, index))) + } else { + (tx, None) + } + }; + + match (broadcast)(&transaction) { + Ok(_) => { + println!("Broadcasted Tx : {}", transaction.txid()); + + let indexed_additions = graph.lock().unwrap().insert_tx(&transaction, None, None); + + // We know the tx is at least unconfirmed now. 
 Note if persisting here fails, + it's not a big deal since we can always find it again from + blockchain. + db.lock().unwrap().stage(ChangeSet { + indexed_additions, + ..Default::default() + }); + Ok(()) + } + Err(e) => { + if let Some((keychain, index)) = change_index { + // We failed to broadcast, so allow our change address to be used in the future + graph.lock().unwrap().index.unmark_used(&keychain, index); + } + Err(e) + } + } +} + +#[allow(clippy::type_complexity)] +pub fn create_tx( + graph: &mut KeychainTxGraph, + chain: &O, + keymap: &HashMap, + cs_algorithm: CoinSelectionAlgo, + address: Address, + value: u64, +) -> anyhow::Result<( + Transaction, + Option<(DerivationAdditions, (Keychain, u32))>, +)> +where + O::Error: std::error::Error + Send + Sync + 'static, +{ + let mut additions = DerivationAdditions::default(); + + let assets = bdk_tmp_plan::Assets { + keys: keymap.iter().map(|(pk, _)| pk.clone()).collect(), + ..Default::default() + }; + + // TODO use planning module + let mut candidates = planned_utxos(graph, chain, &assets)?; + + // apply coin selection algorithm + match cs_algorithm { + CoinSelectionAlgo::LargestFirst => { + candidates.sort_by_key(|(_, utxo)| Reverse(utxo.txout.value)) + } + CoinSelectionAlgo::SmallestFirst => candidates.sort_by_key(|(_, utxo)| utxo.txout.value), + CoinSelectionAlgo::OldestFirst => { + candidates.sort_by_key(|(_, utxo)| utxo.chain_position.clone()) + } + CoinSelectionAlgo::NewestFirst => { + candidates.sort_by_key(|(_, utxo)| Reverse(utxo.chain_position.clone())) + } + CoinSelectionAlgo::BranchAndBound => {} + } + + // turn the txos we chose into weight and value + let wv_candidates = candidates + .iter() + .map(|(plan, utxo)| { + WeightedValue::new( + utxo.txout.value, + plan.expected_weight() as _, + plan.witness_version().is_some(), + ) + }) + .collect(); + + let mut outputs = vec![TxOut { + value, + script_pubkey: address.script_pubkey(), + }]; + + let internal_keychain = if 
graph.index.keychains().get(&Keychain::Internal).is_some() { + Keychain::Internal + } else { + Keychain::External + }; + + let ((change_index, change_script), change_additions) = + graph.index.next_unused_spk(&internal_keychain); + additions.append(change_additions); + + // Clone to drop the immutable reference. + let change_script = change_script.clone(); + + let change_plan = bdk_tmp_plan::plan_satisfaction( + &graph + .index + .keychains() + .get(&internal_keychain) + .expect("must exist") + .at_derivation_index(change_index), + &assets, + ) + .expect("failed to obtain change plan"); + + let mut change_output = TxOut { + value: 0, + script_pubkey: change_script, + }; + + let cs_opts = CoinSelectorOpt { + target_feerate: 0.5, + min_drain_value: graph + .index + .keychains() + .get(&internal_keychain) + .expect("must exist") + .dust_value(), + ..CoinSelectorOpt::fund_outputs( + &outputs, + &change_output, + change_plan.expected_weight() as u32, + ) + }; + + // TODO: How can we make it easy to shuffle in order of inputs and outputs here? + // apply coin selection by saying we need to fund these outputs + let mut coin_selector = CoinSelector::new(&wv_candidates, &cs_opts); + + // just select coins in the order provided until we have enough + // only use the first result (least waste) + let selection = match cs_algorithm { + CoinSelectionAlgo::BranchAndBound => { + coin_select_bnb(Duration::from_secs(10), coin_selector.clone()) + .map_or_else(|| coin_selector.select_until_finished(), |cs| cs.finish())? 
+ } + _ => coin_selector.select_until_finished()?, + }; + let (_, selection_meta) = selection.best_strategy(); + + // get the selected utxos + let selected_txos = selection.apply_selection(&candidates).collect::>(); + + if let Some(drain_value) = selection_meta.drain_value { + change_output.value = drain_value; + // if the selection tells us to use change and the change value is sufficient, we add it as an output + outputs.push(change_output) + } + + let mut transaction = Transaction { + version: 0x02, + lock_time: chain + .get_chain_tip()? + .and_then(|block_id| LockTime::from_height(block_id.height).ok()) + .unwrap_or(LockTime::ZERO) + .into(), + input: selected_txos + .iter() + .map(|(_, utxo)| TxIn { + previous_output: utxo.outpoint, + sequence: Sequence::ENABLE_RBF_NO_LOCKTIME, + ..Default::default() + }) + .collect(), + output: outputs, + }; + + let prevouts = selected_txos + .iter() + .map(|(_, utxo)| utxo.txout.clone()) + .collect::>(); + let sighash_prevouts = Prevouts::All(&prevouts); + + // first, set tx values for the plan so that we don't change them while signing + for (i, (plan, _)) in selected_txos.iter().enumerate() { + if let Some(sequence) = plan.required_sequence() { + transaction.input[i].sequence = sequence + } + } + + // create a short lived transaction + let _sighash_tx = transaction.clone(); + let mut sighash_cache = SighashCache::new(&_sighash_tx); + + for (i, (plan, _)) in selected_txos.iter().enumerate() { + let requirements = plan.requirements(); + let mut auth_data = bdk_tmp_plan::SatisfactionMaterial::default(); + assert!( + !requirements.requires_hash_preimages(), + "can't have hash pre-images since we didn't provide any." + ); + assert!( + requirements.signatures.sign_with_keymap( + i, + keymap, + &sighash_prevouts, + None, + None, + &mut sighash_cache, + &mut auth_data, + &Secp256k1::default(), + )?, + "we should have signed with this input." 
+ ); + + match plan.try_complete(&auth_data) { + bdk_tmp_plan::PlanState::Complete { + final_script_sig, + final_script_witness, + } => { + if let Some(witness) = final_script_witness { + transaction.input[i].witness = witness; + } + + if let Some(script_sig) = final_script_sig { + transaction.input[i].script_sig = script_sig; + } + } + bdk_tmp_plan::PlanState::Incomplete(_) => { + return Err(anyhow::anyhow!( + "we weren't able to complete the plan with our keys." + )); + } + } + } + + let change_info = if selection_meta.drain_value.is_some() { + Some((additions, (internal_keychain, change_index))) + } else { + None + }; + + Ok((transaction, change_info)) +} + +#[allow(clippy::type_complexity)] +pub fn planned_utxos( + graph: &KeychainTxGraph, + chain: &O, + assets: &bdk_tmp_plan::Assets, +) -> Result, FullTxOut>)>, O::Error> { + let chain_tip = chain.get_chain_tip()?.unwrap_or_default(); + let outpoints = graph.index.outpoints().iter().cloned(); + graph + .graph() + .try_filter_chain_unspents(chain, chain_tip, outpoints) + .filter_map( + #[allow(clippy::type_complexity)] + |r| -> Option, FullTxOut>), _>> { + let (k, i, full_txo) = match r { + Err(err) => return Some(Err(err)), + Ok(((k, i), full_txo)) => (k, i, full_txo), + }; + let desc = graph + .index + .keychains() + .get(&k) + .expect("keychain must exist") + .at_derivation_index(i); + let plan = bdk_tmp_plan::plan_satisfaction(&desc, assets)?; + Some(Ok((plan, full_txo))) + }, + ) + .collect() +} + +pub fn handle_commands( + graph: &Mutex>, + db: &Mutex>, + chain: &O, + keymap: &HashMap, + network: Network, + broadcast: impl FnOnce(&Transaction) -> anyhow::Result<()>, + cmd: Commands, +) -> anyhow::Result<()> +where + O::Error: std::error::Error + Send + Sync + 'static, + ChangeSet: Default + Append + DeserializeOwned + Serialize, +{ + match cmd { + Commands::ChainSpecific(_) => unreachable!("example code should handle this!"), + Commands::Address { addr_cmd } => { + let graph = &mut *graph.lock().unwrap(); 
+ run_address_cmd(graph, db, network, addr_cmd) + } + Commands::Balance => { + let graph = &*graph.lock().unwrap(); + run_balance_cmd(graph, chain).map_err(anyhow::Error::from) + } + Commands::TxOut { txout_cmd } => { + let graph = &*graph.lock().unwrap(); + run_txo_cmd(graph, chain, network, txout_cmd) + } + Commands::Send { + value, + address, + coin_select, + } => run_send_cmd( + graph, + db, + chain, + keymap, + coin_select, + address, + value, + broadcast, + ), + } +} + +pub fn prepare_index( + args: &Args, + secp: &Secp256k1, +) -> anyhow::Result<(KeychainTxOutIndex, KeyMap)> { + let mut index = KeychainTxOutIndex::::default(); + + let (descriptor, mut keymap) = + Descriptor::::parse_descriptor(secp, &args.descriptor)?; + index.add_keychain(Keychain::External, descriptor); + + if let Some((internal_descriptor, internal_keymap)) = args + .change_descriptor + .as_ref() + .map(|desc_str| Descriptor::::parse_descriptor(secp, desc_str)) + .transpose()? + { + keymap.extend(internal_keymap); + index.add_keychain(Keychain::Internal, internal_descriptor); + } + + Ok((index, keymap)) +} + +#[allow(clippy::type_complexity)] +pub fn init<'m, S: clap::Subcommand, A: Anchor, X>( + db_magic: &'m [u8], + db_default_path: &str, +) -> anyhow::Result<( + Args, + KeyMap, + Mutex>, + Mutex>, + X, +)> +where + ChangeSet: Default + Append + Serialize + DeserializeOwned, +{ + if std::env::var("BDK_DB_PATH").is_err() { + std::env::set_var("BDK_DB_PATH", db_default_path); + } + let args = Args::::parse(); + let secp = Secp256k1::default(); + let (index, keymap) = prepare_index(&args, &secp)?; + + let mut indexed_graph = IndexedTxGraph::>::new(index); + + let mut db_backend = + match Store::<'m, ChangeSet>::new_from_path(db_magic, args.db_path.as_path()) { + Ok(db_backend) => db_backend, + Err(err) => return Err(anyhow::anyhow!("failed to init db backend: {:?}", err)), + }; + + let ChangeSet { + indexed_additions, + extension, + } = db_backend.load_from_persistence()?; + 
indexed_graph.apply_additions(indexed_additions); + + Ok(( + args, + keymap, + Mutex::new(indexed_graph), + Mutex::new(Database::new(db_backend)), + extension, + )) +} From 6a1ac7f80a7f97cd3c6264fb54f2d1e3b1f95130 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E5=BF=97=E5=AE=87?= Date: Fri, 12 May 2023 17:43:05 +0800 Subject: [PATCH 06/17] [examples_redesign] Implemented `example_electrum` This is a version of `keychain_tracker_electrum` that uses the redesigned structures instead. --- Cargo.toml | 1 + example-crates/example_cli/src/lib.rs | 29 +- example-crates/example_electrum/Cargo.toml | 11 + example-crates/example_electrum/src/main.rs | 315 ++++++++++++++++++++ 4 files changed, 344 insertions(+), 12 deletions(-) create mode 100644 example-crates/example_electrum/Cargo.toml create mode 100644 example-crates/example_electrum/src/main.rs diff --git a/Cargo.toml b/Cargo.toml index 4d0f4f4d..48ecaa88 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -5,6 +5,7 @@ members = [ "crates/file_store", "crates/electrum", "example-crates/example_cli", + "example-crates/example_electrum", "example-crates/keychain_tracker_electrum", "example-crates/keychain_tracker_esplora", "example-crates/keychain_tracker_example_cli", diff --git a/example-crates/example_cli/src/lib.rs b/example-crates/example_cli/src/lib.rs index 30be503f..6ac40455 100644 --- a/example-crates/example_cli/src/lib.rs +++ b/example-crates/example_cli/src/lib.rs @@ -54,7 +54,7 @@ impl Append for ChangeSet { } fn is_empty(&self) -> bool { - todo!() + self.indexed_additions.is_empty() && self.extension.is_empty() } } @@ -666,7 +666,7 @@ pub fn planned_utxos( graph: &Mutex>, db: &Mutex>, - chain: &O, + chain: &Mutex, keymap: &HashMap, network: Network, broadcast: impl FnOnce(&Transaction) -> anyhow::Result<()>, @@ -684,26 +684,31 @@ where } Commands::Balance => { let graph = &*graph.lock().unwrap(); + let chain = &*chain.lock().unwrap(); run_balance_cmd(graph, chain).map_err(anyhow::Error::from) } Commands::TxOut { 
txout_cmd } => { let graph = &*graph.lock().unwrap(); + let chain = &*chain.lock().unwrap(); run_txo_cmd(graph, chain, network, txout_cmd) } Commands::Send { value, address, coin_select, - } => run_send_cmd( - graph, - db, - chain, - keymap, - coin_select, - address, - value, - broadcast, - ), + } => { + let chain = &*chain.lock().unwrap(); + run_send_cmd( + graph, + db, + chain, + keymap, + coin_select, + address, + value, + broadcast, + ) + } } } diff --git a/example-crates/example_electrum/Cargo.toml b/example-crates/example_electrum/Cargo.toml new file mode 100644 index 00000000..49d158e9 --- /dev/null +++ b/example-crates/example_electrum/Cargo.toml @@ -0,0 +1,11 @@ +[package] +name = "example_electrum" +version = "0.1.0" +edition = "2021" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +bdk_chain = { path = "../../crates/chain", features = ["serde"] } +bdk_electrum = { path = "../../crates/electrum" } +example_cli = { path = "../example_cli" } diff --git a/example-crates/example_electrum/src/main.rs b/example-crates/example_electrum/src/main.rs new file mode 100644 index 00000000..6b67e8a7 --- /dev/null +++ b/example-crates/example_electrum/src/main.rs @@ -0,0 +1,315 @@ +use std::{ + collections::BTreeMap, + io::{self, Write}, + sync::Mutex, +}; + +use bdk_chain::{ + bitcoin::{Address, BlockHash, Network, OutPoint, Txid}, + indexed_tx_graph::IndexedAdditions, + local_chain::{self, LocalChain}, + Append, ConfirmationHeightAnchor, +}; +use bdk_electrum::{ + electrum_client::{self, ElectrumApi}, + v2::{ElectrumExt, ElectrumUpdate}, +}; +use example_cli::{ + anyhow::{self, Context}, + clap::{self, Parser, Subcommand}, +}; + +const DB_MAGIC: &[u8] = b"bdk_example_electrum"; +const DB_PATH: &str = ".bdk_electrum_example.db"; +const ASSUME_FINAL_DEPTH: usize = 10; + +#[derive(Subcommand, Debug, Clone)] +enum ElectrumCommands { + /// Scans the addresses in the wallet using the esplora API. 
+ Scan { + /// When a gap this large has been found for a keychain, it will stop. + #[clap(long, default_value = "5")] + stop_gap: usize, + #[clap(flatten)] + scan_options: ScanOptions, + }, + /// Scans particular addresses using the esplora API. + Sync { + /// Scan all the unused addresses. + #[clap(long)] + unused_spks: bool, + /// Scan every address that you have derived. + #[clap(long)] + all_spks: bool, + /// Scan unspent outpoints for spends or changes to confirmation status of residing tx. + #[clap(long)] + utxos: bool, + /// Scan unconfirmed transactions for updates. + #[clap(long)] + unconfirmed: bool, + #[clap(flatten)] + scan_options: ScanOptions, + }, +} + +#[derive(Parser, Debug, Clone, PartialEq)] +pub struct ScanOptions { + /// Set batch size for each script_history call to electrum client. + #[clap(long, default_value = "25")] + pub batch_size: usize, +} + +fn main() -> anyhow::Result<()> { + let (args, keymap, graph, db, chain_changeset) = + example_cli::init::( + DB_MAGIC, DB_PATH, + )?; + + let chain = Mutex::new({ + let mut chain = LocalChain::default(); + chain.apply_changeset(chain_changeset); + chain + }); + + let electrum_url = match args.network { + Network::Bitcoin => "ssl://electrum.blockstream.info:50002", + Network::Testnet => "ssl://electrum.blockstream.info:60002", + Network::Regtest => "tcp://localhost:60401", + Network::Signet => "tcp://signet-electrumx.wakiyamap.dev:50001", + }; + let config = electrum_client::Config::builder() + .validate_domain(matches!(args.network, Network::Bitcoin)) + .build(); + + let client = electrum_client::Client::from_config(electrum_url, config)?; + + let electrum_cmd = match &args.command { + example_cli::Commands::ChainSpecific(electrum_cmd) => electrum_cmd, + general_cmd => { + let res = example_cli::handle_commands( + &graph, + &db, + &chain, + &keymap, + args.network, + |tx| { + client + .transaction_broadcast(tx) + .map(|_| ()) + .map_err(anyhow::Error::from) + }, + general_cmd.clone(), + ); + + 
db.lock().unwrap().commit()?; + return res; + } + }; + + let response = match electrum_cmd.clone() { + ElectrumCommands::Scan { + stop_gap, + scan_options, + } => { + let (keychain_spks, c) = { + let graph = &*graph.lock().unwrap(); + let chain = &*chain.lock().unwrap(); + + let keychain_spks = graph + .index + .spks_of_all_keychains() + .into_iter() + .map(|(keychain, iter)| { + let mut first = true; + let spk_iter = iter.inspect(move |(i, _)| { + if first { + eprint!("\nscanning {}: ", keychain); + first = false; + } + + eprint!("{} ", i); + let _ = io::stdout().flush(); + }); + (keychain, spk_iter) + }) + .collect::>(); + + let c = chain + .blocks() + .iter() + .rev() + .take(ASSUME_FINAL_DEPTH) + .map(|(k, v)| (*k, *v)) + .collect::>(); + + (keychain_spks, c) + }; + + client + .scan( + &c, + keychain_spks, + core::iter::empty(), + core::iter::empty(), + stop_gap, + scan_options.batch_size, + ) + .context("scanning the blockchain")? + } + ElectrumCommands::Sync { + mut unused_spks, + all_spks, + mut utxos, + mut unconfirmed, + scan_options, + } => { + // Get a short lock on the tracker to get the spks we're interested in + let graph = graph.lock().unwrap(); + let chain = chain.lock().unwrap(); + let chain_tip = chain.tip().unwrap_or_default(); + + if !(all_spks || unused_spks || utxos || unconfirmed) { + unused_spks = true; + unconfirmed = true; + utxos = true; + } else if all_spks { + unused_spks = false; + } + + let mut spks: Box> = + Box::new(core::iter::empty()); + if all_spks { + let all_spks = graph + .index + .all_spks() + .iter() + .map(|(k, v)| (*k, v.clone())) + .collect::>(); + spks = Box::new(spks.chain(all_spks.into_iter().map(|(index, script)| { + eprintln!("scanning {:?}", index); + script + }))); + } + if unused_spks { + let unused_spks = graph + .index + .unused_spks(..) 
+ .map(|(k, v)| (*k, v.clone())) + .collect::>(); + spks = Box::new(spks.chain(unused_spks.into_iter().map(|(index, script)| { + eprintln!( + "Checking if address {} {:?} has been used", + Address::from_script(&script, args.network).unwrap(), + index + ); + + script + }))); + } + + let mut outpoints: Box> = Box::new(core::iter::empty()); + + if utxos { + let init_outpoints = graph.index.outpoints().iter().cloned(); + + let utxos = graph + .graph() + .filter_chain_unspents(&*chain, chain_tip, init_outpoints) + .map(|(_, utxo)| utxo) + .collect::>(); + + outpoints = Box::new( + utxos + .into_iter() + .inspect(|utxo| { + eprintln!( + "Checking if outpoint {} (value: {}) has been spent", + utxo.outpoint, utxo.txout.value + ); + }) + .map(|utxo| utxo.outpoint), + ); + }; + + let mut txids: Box> = Box::new(core::iter::empty()); + + if unconfirmed { + let unconfirmed_txids = graph + .graph() + .list_chain_txs(&*chain, chain_tip) + .filter(|canonical_tx| !canonical_tx.observed_as.is_confirmed()) + .map(|canonical_tx| canonical_tx.node.txid) + .collect::>(); + + txids = Box::new(unconfirmed_txids.into_iter().inspect(|txid| { + eprintln!("Checking if {} is confirmed yet", txid); + })); + } + + let c = chain + .blocks() + .iter() + .rev() + .take(ASSUME_FINAL_DEPTH) + .map(|(k, v)| (*k, *v)) + .collect::>(); + + // drop lock on graph and chain + drop((graph, chain)); + + let update = client + .scan_without_keychain(&c, spks, txids, outpoints, scan_options.batch_size) + .context("scanning the blockchain")?; + ElectrumUpdate { + graph_update: update.graph_update, + chain_update: update.chain_update, + keychain_update: BTreeMap::new(), + } + } + }; + + let missing_txids = { + let graph = &*graph.lock().unwrap(); + response + .missing_full_txs(graph.graph()) + .cloned() + .collect::>() + }; + + let new_txs = client + .batch_transaction_get(&missing_txids) + .context("fetching full transactions")?; + let now = std::time::UNIX_EPOCH + .elapsed() + .expect("must get time") + 
.as_secs(); + let final_update = response.finalize(Some(now), new_txs); + + let db_changeset = { + let mut chain = chain.lock().unwrap(); + let mut graph = graph.lock().unwrap(); + + let chain_changeset = chain.apply_update(final_update.chain)?; + + let indexed_additions = { + let mut additions = IndexedAdditions::::default(); + let (_, index_additions) = graph.index.reveal_to_target_multi(&final_update.keychain); + additions.append(IndexedAdditions { + index_additions, + ..Default::default() + }); + additions.append(graph.apply_update(final_update.graph)); + additions + }; + + example_cli::ChangeSet { + indexed_additions, + extension: chain_changeset, + } + }; + + let mut db = db.lock().unwrap(); + db.stage(db_changeset); + db.commit()?; + Ok(()) +} From a78967e51ba1fa94f00a0f7a580dfc009428a947 Mon Sep 17 00:00:00 2001 From: LLFourn Date: Sat, 13 May 2023 11:03:03 +0800 Subject: [PATCH 07/17] [example-cli] simplify new address logic --- example-crates/example_cli/src/lib.rs | 34 ++++++++++++--------------- 1 file changed, 15 insertions(+), 19 deletions(-) diff --git a/example-crates/example_cli/src/lib.rs b/example-crates/example_cli/src/lib.rs index 6ac40455..029ccbd4 100644 --- a/example-crates/example_cli/src/lib.rs +++ b/example-crates/example_cli/src/lib.rs @@ -10,7 +10,7 @@ use bdk_chain::{ psbt::Prevouts, secp256k1::{self, Secp256k1}, util::sighash::SighashCache, - Address, LockTime, Network, Script, Sequence, Transaction, TxIn, TxOut, + Address, LockTime, Network, Sequence, Transaction, TxIn, TxOut, }, indexed_tx_graph::{IndexedAdditions, IndexedTxGraph}, keychain::{DerivationAdditions, KeychainTxOutIndex}, @@ -219,8 +219,17 @@ pub fn run_address_cmd( where ChangeSet: Default + Append + DeserializeOwned + Serialize, { - let process_spk = |spk_i: u32, spk: &Script, index_additions: DerivationAdditions| { - if !index_additions.is_empty() { + let index = &mut graph.index; + + match cmd { + AddressCmd::Next | AddressCmd::New => { + let spk_chooser = match cmd 
{ + AddressCmd::Next => KeychainTxOutIndex::next_unused_spk, + AddressCmd::New => KeychainTxOutIndex::reveal_next_spk, + _ => unreachable!("only these two variants exist in match arm"), + }; + + let ((spk_i, spk), index_additions) = spk_chooser(index, &Keychain::External); let db = &mut *db.lock().unwrap(); db.stage(ChangeSet { indexed_additions: IndexedAdditions { @@ -230,22 +239,9 @@ where ..Default::default() }); db.commit()?; - } - let addr = Address::from_script(spk, network).context("failed to derive address")?; - println!("[address @ {}] {}", spk_i, addr); - Ok(()) - }; - - let index = &mut graph.index; - - match cmd { - AddressCmd::Next => { - let ((spk_i, spk), index_additions) = index.next_unused_spk(&Keychain::External); - process_spk(spk_i, spk, index_additions) - } - AddressCmd::New => { - let ((spk_i, spk), index_additions) = index.reveal_next_spk(&Keychain::External); - process_spk(spk_i, spk, index_additions) + let addr = Address::from_script(spk, network).context("failed to derive address")?; + println!("[address @ {}] {}", spk_i, addr); + Ok(()) } AddressCmd::Index => { for (keychain, derivation_index) in index.last_revealed_indices() { From 50425e979bdbe81621fcd54463cdc7c7aeed90f0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E5=BF=97=E5=AE=87?= Date: Sat, 13 May 2023 23:28:03 +0800 Subject: [PATCH 08/17] Introduce `keychain::LocalChangeSet` This corresponds to `keychain::KeychainChangeSet` but for the redesigned structures with `LocalChain`. This structure is now used in `Wallet` as well as the examples. 
--- crates/bdk/src/wallet/mod.rs | 83 ++----------- crates/chain/src/indexed_tx_graph.rs | 10 ++ crates/chain/src/keychain.rs | 66 ++++++++++- example-crates/example_cli/src/lib.rs | 124 ++++++-------------- example-crates/example_electrum/src/main.rs | 26 ++-- 5 files changed, 142 insertions(+), 167 deletions(-) diff --git a/crates/bdk/src/wallet/mod.rs b/crates/bdk/src/wallet/mod.rs index 5fabc6d1..c9e50a9e 100644 --- a/crates/bdk/src/wallet/mod.rs +++ b/crates/bdk/src/wallet/mod.rs @@ -22,11 +22,11 @@ use alloc::{ pub use bdk_chain::keychain::Balance; use bdk_chain::{ indexed_tx_graph::{IndexedAdditions, IndexedTxGraph}, - keychain::{DerivationAdditions, KeychainTxOutIndex, LocalUpdate}, + keychain::{KeychainTxOutIndex, LocalChangeSet, LocalUpdate}, local_chain::{self, LocalChain, UpdateNotConnectedError}, tx_graph::{CanonicalTx, TxGraph}, - Anchor, Append, BlockId, ConfirmationTime, ConfirmationTimeAnchor, FullTxOut, ObservedAs, - Persist, PersistBackend, + Append, BlockId, ConfirmationTime, ConfirmationTimeAnchor, FullTxOut, ObservedAs, Persist, + PersistBackend, }; use bitcoin::consensus::encode::serialize; use bitcoin::secp256k1::Secp256k1; @@ -96,67 +96,8 @@ pub struct Wallet { /// The update to a [`Wallet`] used in [`Wallet::apply_update`]. This is usually returned from blockchain data sources. pub type Update = LocalUpdate; -/// The changeset produced internally by applying an update. 
-#[derive(Debug, PartialEq, serde::Deserialize, serde::Serialize)] -#[serde(bound( - deserialize = "A: Ord + serde::Deserialize<'de>, K: Ord + serde::Deserialize<'de>", - serialize = "A: Ord + serde::Serialize, K: Ord + serde::Serialize" -))] -pub struct ChangeSet { - pub chain_changeset: local_chain::ChangeSet, - pub indexed_additions: IndexedAdditions>, -} - -impl Default for ChangeSet { - fn default() -> Self { - Self { - chain_changeset: Default::default(), - indexed_additions: Default::default(), - } - } -} - -impl Append for ChangeSet { - fn append(&mut self, other: Self) { - Append::append(&mut self.chain_changeset, other.chain_changeset); - Append::append(&mut self.indexed_additions, other.indexed_additions); - } - - fn is_empty(&self) -> bool { - self.chain_changeset.is_empty() && self.indexed_additions.is_empty() - } -} - -impl From>> for ChangeSet { - fn from(indexed_additions: IndexedAdditions>) -> Self { - Self { - indexed_additions, - ..Default::default() - } - } -} - -impl From> for ChangeSet { - fn from(index_additions: DerivationAdditions) -> Self { - Self { - indexed_additions: IndexedAdditions { - index_additions, - ..Default::default() - }, - ..Default::default() - } - } -} - -impl From for ChangeSet { - fn from(chain_changeset: local_chain::ChangeSet) -> Self { - Self { - chain_changeset, - ..Default::default() - } - } -} - +// /// The changeset produced internally by applying an update. +pub(crate) type ChangeSet = LocalChangeSet; /// The address index selection strategy to use to derived an address from the wallet's external /// descriptor. See [`Wallet::get_address`]. If you're unsure which one to use use `WalletIndex::New`. 
#[derive(Debug)] @@ -356,10 +297,11 @@ impl Wallet { let txout_index = &mut self.indexed_graph.index; let (index, spk) = match address_index { AddressIndex::New => { - let ((index, spk), changeset) = txout_index.reveal_next_spk(&keychain); + let ((index, spk), index_additions) = txout_index.reveal_next_spk(&keychain); let spk = spk.clone(); - self.persist.stage(changeset.into()); + self.persist + .stage(ChangeSet::from(IndexedAdditions::from(index_additions))); self.persist.commit().expect("TODO"); (index, spk) } @@ -931,11 +873,12 @@ impl Wallet { Some(ref drain_recipient) => drain_recipient.clone(), None => { let change_keychain = self.map_keychain(KeychainKind::Internal); - let ((index, spk), changeset) = + let ((index, spk), index_additions) = self.indexed_graph.index.next_unused_spk(&change_keychain); let spk = spk.clone(); self.indexed_graph.index.mark_used(&change_keychain, index); - self.persist.stage(changeset.into()); + self.persist + .stage(ChangeSet::from(IndexedAdditions::from(index_additions))); self.persist.commit().expect("TODO"); spk } @@ -1751,11 +1694,11 @@ impl Wallet { D: PersistBackend, { let mut changeset: ChangeSet = self.chain.apply_update(update.chain)?.into(); - let (_, derivation_additions) = self + let (_, index_additions) = self .indexed_graph .index .reveal_to_target_multi(&update.keychain); - changeset.append(derivation_additions.into()); + changeset.append(ChangeSet::from(IndexedAdditions::from(index_additions))); changeset.append(self.indexed_graph.apply_update(update.graph).into()); let changed = !changeset.is_empty(); diff --git a/crates/chain/src/indexed_tx_graph.rs b/crates/chain/src/indexed_tx_graph.rs index 24a1884c..371ba295 100644 --- a/crates/chain/src/indexed_tx_graph.rs +++ b/crates/chain/src/indexed_tx_graph.rs @@ -2,6 +2,7 @@ use alloc::vec::Vec; use bitcoin::{OutPoint, Transaction, TxOut}; use crate::{ + keychain::DerivationAdditions, tx_graph::{Additions, TxGraph}, Anchor, Append, }; @@ -212,6 +213,15 @@ impl From> 
for IndexedAdditions { } } +impl From> for IndexedAdditions> { + fn from(index_additions: DerivationAdditions) -> Self { + Self { + graph_additions: Default::default(), + index_additions, + } + } +} + /// Represents a structure that can index transaction data. pub trait Indexer { /// The resultant "additions" when new transaction data is indexed. diff --git a/crates/chain/src/keychain.rs b/crates/chain/src/keychain.rs index 0f108b2d..1a8b0cc4 100644 --- a/crates/chain/src/keychain.rs +++ b/crates/chain/src/keychain.rs @@ -18,10 +18,11 @@ use crate::{ chain_graph::{self, ChainGraph}, collections::BTreeMap, - local_chain::LocalChain, + indexed_tx_graph::IndexedAdditions, + local_chain::{self, LocalChain}, sparse_chain::ChainPosition, tx_graph::TxGraph, - Append, ForEachTxOut, + Anchor, Append, ForEachTxOut, }; #[cfg(feature = "miniscript")] @@ -125,6 +126,67 @@ impl Default for LocalUpdate { } } +/// A structure that records the corresponding changes as result of applying an [`LocalUpdate`]. +#[derive(Debug, Clone, PartialEq)] +#[cfg_attr( + feature = "serde", + derive(serde::Deserialize, serde::Serialize), + serde( + crate = "serde_crate", + bound( + deserialize = "K: Ord + serde::Deserialize<'de>, A: Ord + serde::Deserialize<'de>", + serialize = "K: Ord + serde::Serialize, A: Ord + serde::Serialize", + ) + ) +)] +pub struct LocalChangeSet { + /// Changes to the [`LocalChain`]. + pub chain_changeset: local_chain::ChangeSet, + + /// Additions to [`IndexedTxGraph`]. 
+ /// + /// [`IndexedTxGraph`]: crate::indexed_tx_graph::IndexedTxGraph + pub indexed_additions: IndexedAdditions>, +} + +impl Default for LocalChangeSet { + fn default() -> Self { + Self { + chain_changeset: Default::default(), + indexed_additions: Default::default(), + } + } +} + +impl Append for LocalChangeSet { + fn append(&mut self, other: Self) { + Append::append(&mut self.chain_changeset, other.chain_changeset); + Append::append(&mut self.indexed_additions, other.indexed_additions); + } + + fn is_empty(&self) -> bool { + self.chain_changeset.is_empty() && self.indexed_additions.is_empty() + } +} + +impl From for LocalChangeSet { + fn from(chain_changeset: local_chain::ChangeSet) -> Self { + Self { + chain_changeset, + ..Default::default() + } + } +} + +impl From>> for LocalChangeSet { + fn from(indexed_additions: IndexedAdditions>) -> Self { + Self { + indexed_additions, + ..Default::default() + } + } +} + #[derive(Clone, Debug, PartialEq)] /// An update that includes the last active indexes of each keychain. 
pub struct KeychainScan { diff --git a/example-crates/example_cli/src/lib.rs b/example-crates/example_cli/src/lib.rs index 029ccbd4..abb35343 100644 --- a/example-crates/example_cli/src/lib.rs +++ b/example-crates/example_cli/src/lib.rs @@ -26,42 +26,13 @@ pub use clap; use clap::{Parser, Subcommand}; pub type KeychainTxGraph = IndexedTxGraph>; -pub type Database<'m, A, X> = Persist>, ChangeSet>; - -#[derive(Debug, Clone, PartialEq, serde::Deserialize, serde::Serialize)] -#[serde(bound( - deserialize = "A: Ord + serde::Deserialize<'de>, X: serde::Deserialize<'de>", - serialize = "A: Ord + serde::Serialize, X: serde::Serialize", -))] -pub struct ChangeSet { - pub indexed_additions: IndexedAdditions>, - pub extension: X, -} - -impl Default for ChangeSet { - fn default() -> Self { - Self { - indexed_additions: Default::default(), - extension: Default::default(), - } - } -} - -impl Append for ChangeSet { - fn append(&mut self, other: Self) { - Append::append(&mut self.indexed_additions, other.indexed_additions); - Append::append(&mut self.extension, other.extension) - } - - fn is_empty(&self) -> bool { - self.indexed_additions.is_empty() && self.extension.is_empty() - } -} +pub type KeychainAdditions = IndexedAdditions>; +pub type Database<'m, C> = Persist, C>; #[derive(Parser)] #[clap(author, version, about, long_about = None)] #[clap(propagate_version = true)] -pub struct Args { +pub struct Args { #[clap(env = "DESCRIPTOR")] pub descriptor: String, #[clap(env = "CHANGE_DESCRIPTOR")] @@ -77,14 +48,14 @@ pub struct Args { pub cp_limit: usize, #[clap(subcommand)] - pub command: Commands, + pub command: Commands, } #[allow(clippy::almost_swapped)] #[derive(Subcommand, Debug, Clone)] -pub enum Commands { +pub enum Commands { #[clap(flatten)] - ChainSpecific(C), + ChainSpecific(S), /// Address generation and inspection. 
Address { #[clap(subcommand)] @@ -210,14 +181,14 @@ impl core::fmt::Display for Keychain { } } -pub fn run_address_cmd( +pub fn run_address_cmd( graph: &mut KeychainTxGraph, - db: &Mutex>, + db: &Mutex>, network: Network, cmd: AddressCmd, ) -> anyhow::Result<()> where - ChangeSet: Default + Append + DeserializeOwned + Serialize, + C: Default + Append + DeserializeOwned + Serialize + From>, { let index = &mut graph.index; @@ -231,13 +202,7 @@ where let ((spk_i, spk), index_additions) = spk_chooser(index, &Keychain::External); let db = &mut *db.lock().unwrap(); - db.stage(ChangeSet { - indexed_additions: IndexedAdditions { - index_additions, - ..Default::default() - }, - ..Default::default() - }); + db.stage(C::from(KeychainAdditions::from(index_additions))); db.commit()?; let addr = Address::from_script(spk, network).context("failed to derive address")?; println!("[address @ {}] {}", spk_i, addr); @@ -351,9 +316,9 @@ where } #[allow(clippy::too_many_arguments)] -pub fn run_send_cmd( +pub fn run_send_cmd( graph: &Mutex>, - db: &Mutex>, + db: &Mutex>, chain: &O, keymap: &HashMap, cs_algorithm: CoinSelectionAlgo, @@ -363,7 +328,7 @@ pub fn run_send_cmd( ) -> anyhow::Result<()> where O::Error: std::error::Error + Send + Sync + 'static, - ChangeSet: Default + Append + DeserializeOwned + Serialize, + C: Default + Append + DeserializeOwned + Serialize + From>, { let (transaction, change_index) = { let graph = &mut *graph.lock().unwrap(); @@ -374,13 +339,9 @@ where // We must first persist to disk the fact that we've got a new address from the // change keychain so future scans will find the tx we're about to broadcast. // If we're unable to persist this, then we don't want to broadcast. 
- db.lock().unwrap().stage(ChangeSet { - indexed_additions: IndexedAdditions { - index_additions, - ..Default::default() - }, - ..Default::default() - }); + db.lock() + .unwrap() + .stage(C::from(KeychainAdditions::from(index_additions))); // We don't want other callers/threads to use this address while we're using it // but we also don't want to scan the tx we just created because it's not @@ -396,15 +357,12 @@ where Ok(_) => { println!("Broadcasted Tx : {}", transaction.txid()); - let indexed_additions = graph.lock().unwrap().insert_tx(&transaction, None, None); + let keychain_additions = graph.lock().unwrap().insert_tx(&transaction, None, None); // We know the tx is at least unconfirmed now. Note if persisting here fails, // it's not a big deal since we can always find it again form // blockchain. - db.lock().unwrap().stage(ChangeSet { - indexed_additions, - ..Default::default() - }); + db.lock().unwrap().stage(C::from(keychain_additions)); Ok(()) } Err(e) => { @@ -659,18 +617,18 @@ pub fn planned_utxos( +pub fn handle_commands( graph: &Mutex>, - db: &Mutex>, + db: &Mutex>, chain: &Mutex, keymap: &HashMap, network: Network, broadcast: impl FnOnce(&Transaction) -> anyhow::Result<()>, - cmd: Commands, + cmd: Commands, ) -> anyhow::Result<()> where O::Error: std::error::Error + Send + Sync + 'static, - ChangeSet: Default + Append + DeserializeOwned + Serialize, + C: Default + Append + DeserializeOwned + Serialize + From>, { match cmd { Commands::ChainSpecific(_) => unreachable!("example code should handle this!"), @@ -708,9 +666,9 @@ where } } -pub fn prepare_index( - args: &Args, - secp: &Secp256k1, +pub fn prepare_index( + args: &Args, + secp: &Secp256k1, ) -> anyhow::Result<(KeychainTxOutIndex, KeyMap)> { let mut index = KeychainTxOutIndex::::default(); @@ -732,18 +690,18 @@ pub fn prepare_index( } #[allow(clippy::type_complexity)] -pub fn init<'m, S: clap::Subcommand, A: Anchor, X>( +pub fn init<'m, S: clap::Subcommand, C>( db_magic: &'m [u8], db_default_path: 
&str, ) -> anyhow::Result<( Args, KeyMap, - Mutex>, - Mutex>, - X, + KeychainTxOutIndex, + Mutex>, + C, )> where - ChangeSet: Default + Append + Serialize + DeserializeOwned, + C: Default + Append + Serialize + DeserializeOwned, { if std::env::var("BDK_DB_PATH").is_err() { std::env::set_var("BDK_DB_PATH", db_default_path); @@ -752,25 +710,19 @@ where let secp = Secp256k1::default(); let (index, keymap) = prepare_index(&args, &secp)?; - let mut indexed_graph = IndexedTxGraph::>::new(index); + let mut db_backend = match Store::<'m, C>::new_from_path(db_magic, &args.db_path) { + Ok(db_backend) => db_backend, + // we cannot return `err` directly as it has lifetime `'m` + Err(err) => return Err(anyhow::anyhow!("failed to init db backend: {:?}", err)), + }; - let mut db_backend = - match Store::<'m, ChangeSet>::new_from_path(db_magic, args.db_path.as_path()) { - Ok(db_backend) => db_backend, - Err(err) => return Err(anyhow::anyhow!("failed to init db backend: {:?}", err)), - }; - - let ChangeSet { - indexed_additions, - extension, - } = db_backend.load_from_persistence()?; - indexed_graph.apply_additions(indexed_additions); + let init_changeset = db_backend.load_from_persistence()?; Ok(( args, keymap, - Mutex::new(indexed_graph), + index, Mutex::new(Database::new(db_backend)), - extension, + init_changeset, )) } diff --git a/example-crates/example_electrum/src/main.rs b/example-crates/example_electrum/src/main.rs index 6b67e8a7..42dc7471 100644 --- a/example-crates/example_electrum/src/main.rs +++ b/example-crates/example_electrum/src/main.rs @@ -6,8 +6,9 @@ use std::{ use bdk_chain::{ bitcoin::{Address, BlockHash, Network, OutPoint, Txid}, - indexed_tx_graph::IndexedAdditions, - local_chain::{self, LocalChain}, + indexed_tx_graph::{IndexedAdditions, IndexedTxGraph}, + keychain::LocalChangeSet, + local_chain::LocalChain, Append, ConfirmationHeightAnchor, }; use bdk_electrum::{ @@ -17,6 +18,7 @@ use bdk_electrum::{ use example_cli::{ anyhow::{self, Context}, clap::{self, 
Parser, Subcommand}, + Keychain, }; const DB_MAGIC: &[u8] = b"bdk_example_electrum"; @@ -59,15 +61,21 @@ pub struct ScanOptions { pub batch_size: usize, } +type ChangeSet = LocalChangeSet; + fn main() -> anyhow::Result<()> { - let (args, keymap, graph, db, chain_changeset) = - example_cli::init::( - DB_MAGIC, DB_PATH, - )?; + let (args, keymap, index, db, init_changeset) = + example_cli::init::(DB_MAGIC, DB_PATH)?; + + let graph = Mutex::new({ + let mut graph = IndexedTxGraph::new(index); + graph.apply_additions(init_changeset.indexed_additions); + graph + }); let chain = Mutex::new({ let mut chain = LocalChain::default(); - chain.apply_changeset(chain_changeset); + chain.apply_changeset(init_changeset.chain_changeset); chain }); @@ -302,9 +310,9 @@ fn main() -> anyhow::Result<()> { additions }; - example_cli::ChangeSet { + ChangeSet { indexed_additions, - extension: chain_changeset, + chain_changeset, } }; From 92709d03ce8ed979cda127c2c30811bb1c8c5f58 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E5=BF=97=E5=AE=87?= Date: Wed, 17 May 2023 11:48:35 +0800 Subject: [PATCH 09/17] Various tweaks to code arrangement and documentation As per reviews by @danielabrozzoni and @LLFourn --- crates/bdk/src/wallet/mod.rs | 22 ++--- crates/electrum/src/v2.rs | 22 +++-- example-crates/example_cli/src/lib.rs | 90 +++++++++++---------- example-crates/example_electrum/src/main.rs | 8 +- 4 files changed, 70 insertions(+), 72 deletions(-) diff --git a/crates/bdk/src/wallet/mod.rs b/crates/bdk/src/wallet/mod.rs index c9e50a9e..f627f969 100644 --- a/crates/bdk/src/wallet/mod.rs +++ b/crates/bdk/src/wallet/mod.rs @@ -501,20 +501,7 @@ impl Wallet { Ok(changed) } - #[deprecated(note = "use Wallet::transactions instead")] - /// Deprecated. use `Wallet::transactions` instead. 
- pub fn list_transactions( - &self, - include_raw: bool, - ) -> impl Iterator + '_ { - self.indexed_graph - .graph() - .list_chain_txs(&self.chain, self.chain.tip().unwrap_or_default()) - .map(move |canonical_tx| new_tx_details(&self.indexed_graph, canonical_tx, include_raw)) - } - - /// Iterate over the transactions in the wallet in order of ascending confirmation time with - /// unconfirmed transactions last. + /// Iterate over the transactions in the wallet. pub fn transactions( &self, ) -> impl Iterator> + '_ { @@ -1685,6 +1672,8 @@ impl Wallet { /// Applies an update to the wallet and stages the changes (but does not [`commit`] them). /// + /// This returns whether the `update` resulted in any changes. + /// /// Usually you create an `update` by interacting with some blockchain data source and inserting /// transactions related to your wallet into it. /// @@ -1706,7 +1695,10 @@ impl Wallet { Ok(changed) } - /// Commits all curently [`staged`] changed to the persistence backend returning and error when this fails. + /// Commits all curently [`staged`] changed to the persistence backend returning and error when + /// this fails. + /// + /// This returns whether the `update` resulted in any changes. /// /// [`staged`]: Self::staged pub fn commit(&mut self) -> Result diff --git a/crates/electrum/src/v2.rs b/crates/electrum/src/v2.rs index 6a942a1f..3ddb0102 100644 --- a/crates/electrum/src/v2.rs +++ b/crates/electrum/src/v2.rs @@ -53,7 +53,6 @@ impl<'a, K, A: Anchor> ElectrumUpdate { let _ = graph_update.insert_anchor(txid, anchor); } } - dbg!(graph_update.full_txs().count()); LocalUpdate { keychain: self.keychain_update, graph: graph_update, @@ -63,6 +62,12 @@ impl<'a, K, A: Anchor> ElectrumUpdate { } impl ElectrumUpdate { + /// Finalizes the [`ElectrumUpdate`] with `new_txs` and anchors of type + /// [`ConfirmationTimeAnchor`]. + /// + /// **Note:** The confirmation time might not be precisely correct if there has been a reorg. 
+ /// Electrum's API intends that we use the merkle proof API, we should change `bdk_electrum` to + /// use it. pub fn finalize_as_confirmation_time( self, client: &Client, @@ -73,7 +78,6 @@ impl ElectrumUpdate { T: IntoIterator, { let update = self.finalize(seen_at, new_txs); - let update_tip = update.chain.tip().expect("must have tip"); let relevant_heights = { let mut visited_heights = HashSet::new(); @@ -97,16 +101,6 @@ impl ElectrumUpdate { ) .collect::>(); - if update_tip.hash != client.block_header(update_tip.height as _)?.block_hash() { - // [TODO] We should alter the logic so we won't have to return an error. This is to - // [TODO] ensure obtained block times are "anchored" to our tip. If we exclude this, it - // [TODO] should be "safe" as well. Tx confirmation times would just slightly vary. - return Err(Error::Message(format!( - "tip changed during update: update_tip={:?}", - update_tip - ))); - } - let graph_additions = { let old_additions = TxGraph::default().determine_additions(&update.graph); tx_graph::Additions { @@ -336,6 +330,10 @@ fn determine_tx_anchor( raw_height: i32, txid: Txid, ) -> Option { + // The electrum API has a weird quirk where an unconfirmed transaction is presented with a + // height of 0. To avoid invalid representation in our data structures, we manually set + // transactions residing in the genesis block to have height 0, then interpret a height of 0 as + // unconfirmed for all other transactions. 
if txid == Txid::from_hex("4a5e1e4baab89f3a32518a88c31bc87f618f76673e2cc77ab2127b7afdeda33b") .expect("must deserialize genesis coinbase txid") diff --git a/example-crates/example_cli/src/lib.rs b/example-crates/example_cli/src/lib.rs index abb35343..ea362a5e 100644 --- a/example-crates/example_cli/src/lib.rs +++ b/example-crates/example_cli/src/lib.rs @@ -7,10 +7,8 @@ use std::{cmp::Reverse, collections::HashMap, path::PathBuf, sync::Mutex, time:: use bdk_chain::{ bitcoin::{ - psbt::Prevouts, - secp256k1::{self, Secp256k1}, - util::sighash::SighashCache, - Address, LockTime, Network, Sequence, Transaction, TxIn, TxOut, + psbt::Prevouts, secp256k1::Secp256k1, util::sighash::SighashCache, Address, LockTime, + Network, Sequence, Transaction, TxIn, TxOut, }, indexed_tx_graph::{IndexedAdditions, IndexedTxGraph}, keychain::{DerivationAdditions, KeychainTxOutIndex}, @@ -73,7 +71,7 @@ pub enum Commands { Send { value: u64, address: Address, - #[clap(short, default_value = "largest-first")] + #[clap(short, default_value = "bnb")] coin_select: CoinSelectionAlgo, }, } @@ -238,6 +236,13 @@ pub fn run_balance_cmd( graph: &KeychainTxGraph, chain: &O, ) -> Result<(), O::Error> { + fn print_balances<'a>(title_str: &'a str, items: impl IntoIterator) { + println!("{}:", title_str); + for (name, amount) in items.into_iter() { + println!(" {:<10} {:>12} sats", name, amount) + } + } + let balance = graph.graph().try_balance( chain, chain.get_chain_tip()?.unwrap_or_default(), @@ -248,15 +253,22 @@ pub fn run_balance_cmd( let confirmed_total = balance.confirmed + balance.immature; let unconfirmed_total = balance.untrusted_pending + balance.trusted_pending; - println!("[confirmed]"); - println!(" total = {}sats", confirmed_total); - println!(" spendable = {}sats", balance.confirmed); - println!(" immature = {}sats", balance.immature); - - println!("[unconfirmed]"); - println!(" total = {}sats", unconfirmed_total,); - println!(" trusted = {}sats", balance.trusted_pending); - println!(" 
untrusted = {}sats", balance.untrusted_pending); + print_balances( + "confirmed", + [ + ("total", confirmed_total), + ("spendable", balance.confirmed), + ("immature", balance.immature), + ], + ); + print_balances( + "unconfirmed", + [ + ("total", unconfirmed_total), + ("trusted", balance.trusted_pending), + ("untrusted", balance.untrusted_pending), + ], + ); Ok(()) } @@ -339,9 +351,11 @@ where // We must first persist to disk the fact that we've got a new address from the // change keychain so future scans will find the tx we're about to broadcast. // If we're unable to persist this, then we don't want to broadcast. - db.lock() - .unwrap() - .stage(C::from(KeychainAdditions::from(index_additions))); + { + let db = &mut *db.lock().unwrap(); + db.stage(C::from(KeychainAdditions::from(index_additions))); + db.commit()?; + } // We don't want other callers/threads to use this address while we're using it // but we also don't want to scan the tx we just created because it's not @@ -502,6 +516,8 @@ where let mut transaction = Transaction { version: 0x02, + // because the temporary planning module does not support timelocks, we can use the chain + // tip as the `lock_time` for anti-fee-sniping purposes lock_time: chain .get_chain_tip()? .and_then(|block_id| LockTime::from_height(block_id.height).ok()) @@ -666,29 +682,6 @@ where } } -pub fn prepare_index( - args: &Args, - secp: &Secp256k1, -) -> anyhow::Result<(KeychainTxOutIndex, KeyMap)> { - let mut index = KeychainTxOutIndex::::default(); - - let (descriptor, mut keymap) = - Descriptor::::parse_descriptor(secp, &args.descriptor)?; - index.add_keychain(Keychain::External, descriptor); - - if let Some((internal_descriptor, internal_keymap)) = args - .change_descriptor - .as_ref() - .map(|desc_str| Descriptor::::parse_descriptor(secp, desc_str)) - .transpose()? 
- { - keymap.extend(internal_keymap); - index.add_keychain(Keychain::Internal, internal_descriptor); - } - - Ok((index, keymap)) -} - #[allow(clippy::type_complexity)] pub fn init<'m, S: clap::Subcommand, C>( db_magic: &'m [u8], @@ -708,7 +701,22 @@ where } let args = Args::::parse(); let secp = Secp256k1::default(); - let (index, keymap) = prepare_index(&args, &secp)?; + + let mut index = KeychainTxOutIndex::::default(); + + let (descriptor, mut keymap) = + Descriptor::::parse_descriptor(&secp, &args.descriptor)?; + index.add_keychain(Keychain::External, descriptor); + + if let Some((internal_descriptor, internal_keymap)) = args + .change_descriptor + .as_ref() + .map(|desc_str| Descriptor::::parse_descriptor(&secp, desc_str)) + .transpose()? + { + keymap.extend(internal_keymap); + index.add_keychain(Keychain::Internal, internal_descriptor); + } let mut db_backend = match Store::<'m, C>::new_from_path(db_magic, &args.db_path) { Ok(db_backend) => db_backend, diff --git a/example-crates/example_electrum/src/main.rs b/example-crates/example_electrum/src/main.rs index 42dc7471..f8d2e6af 100644 --- a/example-crates/example_electrum/src/main.rs +++ b/example-crates/example_electrum/src/main.rs @@ -27,7 +27,7 @@ const ASSUME_FINAL_DEPTH: usize = 10; #[derive(Subcommand, Debug, Clone)] enum ElectrumCommands { - /// Scans the addresses in the wallet using the esplora API. + /// Scans the addresses in the wallet using the electrum API. Scan { /// When a gap this large has been found for a keychain, it will stop. #[clap(long, default_value = "5")] @@ -35,7 +35,7 @@ enum ElectrumCommands { #[clap(flatten)] scan_options: ScanOptions, }, - /// Scans particular addresses using the esplora API. + /// Scans particular addresses using the electrum API. Sync { /// Scan all the unused addresses. 
#[clap(long)] @@ -119,7 +119,7 @@ fn main() -> anyhow::Result<()> { stop_gap, scan_options, } => { - let (keychain_spks, c) = { + let (keychain_spks, local_chain) = { let graph = &*graph.lock().unwrap(); let chain = &*chain.lock().unwrap(); @@ -155,7 +155,7 @@ fn main() -> anyhow::Result<()> { client .scan( - &c, + &local_chain, keychain_spks, core::iter::empty(), core::iter::empty(), From 78a7920ba378bb57f0b61d93faf29ec813889a75 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E5=BF=97=E5=AE=87?= Date: Thu, 18 May 2023 10:02:23 +0800 Subject: [PATCH 10/17] `bdk_electrum` API improvements and simplifications * `ElectrumUpdate::missing_full_txs` now returns a `Vec` so we don't keep a reference to the passed-in `graph`. * `ElectrumUpdate::finalize*` methods now takes in `missing` txids instead of `full_txs`. `Client::batch_transaction_get` is called within the methods. Other changes: * `wallet::ChangeSet` is now made public externally. This is required as a wallet db should implement `PersistBackend`. --- crates/bdk/src/wallet/mod.rs | 3 +- crates/electrum/src/v2.rs | 37 ++++++++++----------- example-crates/example_electrum/src/main.rs | 11 ++---- 3 files changed, 23 insertions(+), 28 deletions(-) diff --git a/crates/bdk/src/wallet/mod.rs b/crates/bdk/src/wallet/mod.rs index f627f969..ec6a9dfd 100644 --- a/crates/bdk/src/wallet/mod.rs +++ b/crates/bdk/src/wallet/mod.rs @@ -97,7 +97,8 @@ pub struct Wallet { pub type Update = LocalUpdate; // /// The changeset produced internally by applying an update. -pub(crate) type ChangeSet = LocalChangeSet; +pub type ChangeSet = LocalChangeSet; + /// The address index selection strategy to use to derived an address from the wallet's external /// descriptor. See [`Wallet::get_address`]. If you're unsure which one to use use `WalletIndex::New`. 
#[derive(Debug)] diff --git a/crates/electrum/src/v2.rs b/crates/electrum/src/v2.rs index 3ddb0102..bedfdfac 100644 --- a/crates/electrum/src/v2.rs +++ b/crates/electrum/src/v2.rs @@ -30,20 +30,22 @@ impl Default for ElectrumUpdate { } } -impl<'a, K, A: Anchor> ElectrumUpdate { - pub fn missing_full_txs( - &'a self, - graph: &'a TxGraph, - ) -> impl Iterator + 'a { +impl ElectrumUpdate { + pub fn missing_full_txs(&self, graph: &TxGraph) -> Vec { self.graph_update .keys() .filter(move |&&txid| graph.as_ref().get_tx(txid).is_none()) + .cloned() + .collect() } - pub fn finalize(self, seen_at: Option, new_txs: T) -> LocalUpdate - where - T: IntoIterator, - { + pub fn finalize( + self, + client: &Client, + seen_at: Option, + missing: Vec, + ) -> Result, Error> { + let new_txs = client.batch_transaction_get(&missing)?; let mut graph_update = TxGraph::::new(new_txs); for (txid, anchors) in self.graph_update { if let Some(seen_at) = seen_at { @@ -53,11 +55,11 @@ impl<'a, K, A: Anchor> ElectrumUpdate { let _ = graph_update.insert_anchor(txid, anchor); } } - LocalUpdate { + Ok(LocalUpdate { keychain: self.keychain_update, graph: graph_update, chain: self.chain_update, - } + }) } } @@ -68,16 +70,13 @@ impl ElectrumUpdate { /// **Note:** The confirmation time might not be precisely correct if there has been a reorg. /// Electrum's API intends that we use the merkle proof API, we should change `bdk_electrum` to /// use it. 
- pub fn finalize_as_confirmation_time( + pub fn finalize_as_confirmation_time( self, client: &Client, seen_at: Option, - new_txs: T, - ) -> Result, Error> - where - T: IntoIterator, - { - let update = self.finalize(seen_at, new_txs); + missing: Vec, + ) -> Result, Error> { + let update = self.finalize(client, seen_at, missing)?; let relevant_heights = { let mut visited_heights = HashSet::new(); @@ -111,7 +110,7 @@ impl ElectrumUpdate { .anchors .into_iter() .map(|(height_anchor, txid)| { - let confirmation_height = dbg!(height_anchor.confirmation_height); + let confirmation_height = height_anchor.confirmation_height; let confirmation_time = height_to_time[&confirmation_height]; let time_anchor = ConfirmationTimeAnchor { anchor_block: height_anchor.anchor_block, diff --git a/example-crates/example_electrum/src/main.rs b/example-crates/example_electrum/src/main.rs index f8d2e6af..cfd06c30 100644 --- a/example-crates/example_electrum/src/main.rs +++ b/example-crates/example_electrum/src/main.rs @@ -278,20 +278,15 @@ fn main() -> anyhow::Result<()> { let missing_txids = { let graph = &*graph.lock().unwrap(); - response - .missing_full_txs(graph.graph()) - .cloned() - .collect::>() + response.missing_full_txs(graph.graph()) }; - let new_txs = client - .batch_transaction_get(&missing_txids) - .context("fetching full transactions")?; let now = std::time::UNIX_EPOCH .elapsed() .expect("must get time") .as_secs(); - let final_update = response.finalize(Some(now), new_txs); + + let final_update = response.finalize(&client, Some(now), missing_txids)?; let db_changeset = { let mut chain = chain.lock().unwrap(); From 2952341e5245acef14623b482095526d55b64bd6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E5=BF=97=E5=AE=87?= Date: Thu, 18 May 2023 10:14:47 +0800 Subject: [PATCH 11/17] Update the `wallet_electrum` example --- example-crates/wallet_electrum/src/main.rs | 164 ++++++++++----------- 1 file changed, 76 insertions(+), 88 deletions(-) diff --git 
a/example-crates/wallet_electrum/src/main.rs b/example-crates/wallet_electrum/src/main.rs index 7eb88264..7f352bc1 100644 --- a/example-crates/wallet_electrum/src/main.rs +++ b/example-crates/wallet_electrum/src/main.rs @@ -1,105 +1,93 @@ -// use std::{io::Write, str::FromStr}; +const DB_MAGIC: &str = "bdk_wallet_electrum_example"; +const SEND_AMOUNT: u64 = 5000; +const STOP_GAP: usize = 50; +const BATCH_SIZE: usize = 5; -// use bdk::{ -// bitcoin::{Address, Network}, -// SignOptions, Wallet, -// }; -// use bdk_electrum::{ -// electrum_client::{self, ElectrumApi}, -// ElectrumExt, -// }; -// use bdk_file_store::KeychainStore; +use std::io::Write; +use std::str::FromStr; -// const SEND_AMOUNT: u64 = 5000; -// const STOP_GAP: usize = 50; -// const BATCH_SIZE: usize = 5; +use bdk::bitcoin::Address; +use bdk::SignOptions; +use bdk::{bitcoin::Network, Wallet}; +use bdk_electrum::electrum_client::{self, ElectrumApi}; +use bdk_electrum::v2::ElectrumExt; +use bdk_file_store::Store; fn main() -> Result<(), Box> { - todo!("update this example!"); - // println!("Hello, world!"); + let db_path = std::env::temp_dir().join("bdk-electrum-example"); + let db = Store::::new_from_path(DB_MAGIC.as_bytes(), db_path)?; + let external_descriptor = "wpkh(tprv8ZgxMBicQKsPdy6LMhUtFHAgpocR8GC6QmwMSFpZs7h6Eziw3SpThFfczTDh5rW2krkqffa11UpX3XkeTTB2FvzZKWXqPY54Y6Rq4AQ5R8L/84'/0'/0'/0/*)"; + let internal_descriptor = "wpkh(tprv8ZgxMBicQKsPdy6LMhUtFHAgpocR8GC6QmwMSFpZs7h6Eziw3SpThFfczTDh5rW2krkqffa11UpX3XkeTTB2FvzZKWXqPY54Y6Rq4AQ5R8L/84'/0'/0'/1/*)"; - // let db_path = std::env::temp_dir().join("bdk-electrum-example"); - // let db = KeychainStore::new_from_path(db_path)?; - // let external_descriptor = "wpkh(tprv8ZgxMBicQKsPdy6LMhUtFHAgpocR8GC6QmwMSFpZs7h6Eziw3SpThFfczTDh5rW2krkqffa11UpX3XkeTTB2FvzZKWXqPY54Y6Rq4AQ5R8L/84'/0'/0'/0/*)"; - // let internal_descriptor = "wpkh(tprv8ZgxMBicQKsPdy6LMhUtFHAgpocR8GC6QmwMSFpZs7h6Eziw3SpThFfczTDh5rW2krkqffa11UpX3XkeTTB2FvzZKWXqPY54Y6Rq4AQ5R8L/84'/0'/0'/1/*)"; 
+ let mut wallet = Wallet::new( + external_descriptor, + Some(internal_descriptor), + db, + Network::Testnet, + )?; - // let mut wallet = Wallet::new( - // external_descriptor, - // Some(internal_descriptor), - // db, - // Network::Testnet, - // )?; + let address = wallet.get_address(bdk::wallet::AddressIndex::New); + println!("Generated Address: {}", address); - // let address = wallet.get_address(bdk::wallet::AddressIndex::New); - // println!("Generated Address: {}", address); + let balance = wallet.get_balance(); + println!("Wallet balance before syncing: {} sats", balance.total()); - // let balance = wallet.get_balance(); - // println!("Wallet balance before syncing: {} sats", balance.total()); + print!("Syncing..."); + let client = electrum_client::Client::new("ssl://electrum.blockstream.info:60002")?; - // print!("Syncing..."); - // // Scanning the chain... - // let electrum_url = "ssl://electrum.blockstream.info:60002"; - // let client = electrum_client::Client::new(electrum_url)?; - // let local_chain = wallet.checkpoints(); - // let spks = wallet - // .spks_of_all_keychains() - // .into_iter() - // .map(|(k, spks)| { - // let mut first = true; - // ( - // k, - // spks.inspect(move |(spk_i, _)| { - // if first { - // first = false; - // print!("\nScanning keychain [{:?}]:", k); - // } - // print!(" {}", spk_i); - // let _ = std::io::stdout().flush(); - // }), - // ) - // }) - // .collect(); - // let electrum_update = client - // .scan( - // local_chain, - // spks, - // core::iter::empty(), - // core::iter::empty(), - // STOP_GAP, - // BATCH_SIZE, - // )? 
- // .into_confirmation_time_update(&client)?; - // println!(); - // let new_txs = client.batch_transaction_get(electrum_update.missing_full_txs(&wallet))?; - // let update = electrum_update.into_keychain_scan(new_txs, &wallet)?; - // wallet.apply_update(update)?; - // wallet.commit()?; + let local_chain = wallet.checkpoints(); + let keychain_spks = wallet + .spks_of_all_keychains() + .into_iter() + .map(|(k, k_spks)| { + let mut once = Some(()); + let mut stdout = std::io::stdout(); + let k_spks = k_spks + .inspect(move |(spk_i, _)| match once.take() { + Some(_) => print!("\nScanning keychain [{:?}]", k), + None => print!(" {:<3}", spk_i), + }) + .inspect(move |_| stdout.flush().expect("must flush")); + (k, k_spks) + }) + .collect(); - // let balance = wallet.get_balance(); - // println!("Wallet balance after syncing: {} sats", balance.total()); + let electrum_update = + client.scan(local_chain, keychain_spks, None, None, STOP_GAP, BATCH_SIZE)?; - // if balance.total() < SEND_AMOUNT { - // println!( - // "Please send at least {} sats to the receiving address", - // SEND_AMOUNT - // ); - // std::process::exit(0); - // } + println!(); - // let faucet_address = Address::from_str("mkHS9ne12qx9pS9VojpwU5xtRd4T7X7ZUt")?; + let missing = electrum_update.missing_full_txs(wallet.as_ref()); + let update = electrum_update.finalize_as_confirmation_time(&client, None, missing)?; - // let mut tx_builder = wallet.build_tx(); - // tx_builder - // .add_recipient(faucet_address.script_pubkey(), SEND_AMOUNT) - // .enable_rbf(); + wallet.apply_update(update)?; + wallet.commit()?; - // let (mut psbt, _) = tx_builder.finish()?; - // let finalized = wallet.sign(&mut psbt, SignOptions::default())?; - // assert!(finalized); + let balance = wallet.get_balance(); + println!("Wallet balance after syncing: {} sats", balance.total()); - // let tx = psbt.extract_tx(); - // client.transaction_broadcast(&tx)?; - // println!("Tx broadcasted! 
Txid: {}", tx.txid()); + if balance.total() < SEND_AMOUNT { + println!( + "Please send at least {} sats to the receiving address", + SEND_AMOUNT + ); + std::process::exit(0); + } - // Ok(()) + let faucet_address = Address::from_str("mkHS9ne12qx9pS9VojpwU5xtRd4T7X7ZUt")?; + + let mut tx_builder = wallet.build_tx(); + tx_builder + .add_recipient(faucet_address.script_pubkey(), SEND_AMOUNT) + .enable_rbf(); + + let (mut psbt, _) = tx_builder.finish()?; + let finalized = wallet.sign(&mut psbt, SignOptions::default())?; + assert!(finalized); + + let tx = psbt.extract_tx(); + client.transaction_broadcast(&tx)?; + println!("Tx broadcasted! Txid: {}", tx.txid()); + + Ok(()) } From 5860704b2dfab5d3883fc89960ce4a69b92b65ef Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E5=BF=97=E5=AE=87?= Date: Thu, 18 May 2023 14:04:48 +0800 Subject: [PATCH 12/17] Implement redesigned versions of `EsploraExt` and `EsploraAsyncExt` All associated examples are also updated. --- crates/esplora/src/async_ext.rs | 9 +- crates/esplora/src/blocking_ext.rs | 7 +- crates/esplora/src/lib.rs | 17 +- crates/esplora/src/v2/async_ext.rs | 266 ++++++++++++++++++ crates/esplora/src/v2/blocking_ext.rs | 247 ++++++++++++++++ crates/esplora/src/v2/mod.rs | 9 + example-crates/wallet_esplora/Cargo.toml | 2 +- example-crates/wallet_esplora/src/main.rs | 161 ++++++----- .../wallet_esplora_async/src/main.rs | 163 ++++++----- 9 files changed, 704 insertions(+), 177 deletions(-) create mode 100644 crates/esplora/src/v2/async_ext.rs create mode 100644 crates/esplora/src/v2/blocking_ext.rs create mode 100644 crates/esplora/src/v2/mod.rs diff --git a/crates/esplora/src/async_ext.rs b/crates/esplora/src/async_ext.rs index 266fd30b..475f4443 100644 --- a/crates/esplora/src/async_ext.rs +++ b/crates/esplora/src/async_ext.rs @@ -19,7 +19,6 @@ use crate::map_confirmation_time; /// /// [`EsploraExt`]: crate::EsploraExt /// [crate-level documentation]: crate -#[cfg(feature = "async")] #[cfg_attr(target_arch = "wasm32", 
async_trait(?Send))] #[cfg_attr(not(target_arch = "wasm32"), async_trait)] pub trait EsploraAsyncExt { @@ -84,7 +83,6 @@ pub trait EsploraAsyncExt { } } -#[cfg(feature = "async")] #[cfg_attr(target_arch = "wasm32", async_trait(?Send))] #[cfg_attr(not(target_arch = "wasm32"), async_trait)] impl EsploraAsyncExt for esplora_client::AsyncClient { @@ -103,7 +101,7 @@ impl EsploraAsyncExt for esplora_client::AsyncClient { ) -> Result, Error> { let txids = txids.into_iter(); let outpoints = outpoints.into_iter(); - let parallel_requests = parallel_requests.max(1); + let parallel_requests = Ord::max(parallel_requests, 1); let mut scan = KeychainScan::default(); let update = &mut scan.update; let last_active_indices = &mut scan.last_active_indices; @@ -285,7 +283,7 @@ impl EsploraAsyncExt for esplora_client::AsyncClient { } let reorg_occurred = { - if let Some(checkpoint) = update.chain().latest_checkpoint() { + if let Some(checkpoint) = ChainGraph::chain(update).latest_checkpoint() { self.get_block_hash(checkpoint.height).await? != checkpoint.hash } else { false @@ -295,8 +293,7 @@ impl EsploraAsyncExt for esplora_client::AsyncClient { if reorg_occurred { // A reorg occurred, so let's find out where all the txids we found are in the chain now. 
// XXX: collect required because of weird type naming issues - let txids_found = update - .chain() + let txids_found = ChainGraph::chain(update) .txids() .map(|(_, txid)| *txid) .collect::>(); diff --git a/crates/esplora/src/blocking_ext.rs b/crates/esplora/src/blocking_ext.rs index c22668a5..092c6069 100644 --- a/crates/esplora/src/blocking_ext.rs +++ b/crates/esplora/src/blocking_ext.rs @@ -82,7 +82,7 @@ impl EsploraExt for esplora_client::BlockingClient { stop_gap: usize, parallel_requests: usize, ) -> Result, Error> { - let parallel_requests = parallel_requests.max(1); + let parallel_requests = Ord::max(parallel_requests, 1); let mut scan = KeychainScan::default(); let update = &mut scan.update; let last_active_indices = &mut scan.last_active_indices; @@ -260,7 +260,7 @@ impl EsploraExt for esplora_client::BlockingClient { } let reorg_occurred = { - if let Some(checkpoint) = update.chain().latest_checkpoint() { + if let Some(checkpoint) = ChainGraph::chain(update).latest_checkpoint() { self.get_block_hash(checkpoint.height)? != checkpoint.hash } else { false @@ -270,8 +270,7 @@ impl EsploraExt for esplora_client::BlockingClient { if reorg_occurred { // A reorg occurred, so let's find out where all the txids we found are now in the chain. 
// XXX: collect required because of weird type naming issues - let txids_found = update - .chain() + let txids_found = ChainGraph::chain(update) .txids() .map(|(_, txid)| *txid) .collect::>(); diff --git a/crates/esplora/src/lib.rs b/crates/esplora/src/lib.rs index a6af0fad..315d050d 100644 --- a/crates/esplora/src/lib.rs +++ b/crates/esplora/src/lib.rs @@ -1,8 +1,9 @@ #![doc = include_str!("../README.md")] -use bdk_chain::ConfirmationTime; +use bdk_chain::{BlockId, ConfirmationTime, ConfirmationTimeAnchor}; use esplora_client::TxStatus; pub use esplora_client; +pub mod v2; #[cfg(feature = "blocking")] mod blocking_ext; @@ -25,3 +26,17 @@ pub(crate) fn map_confirmation_time( _ => ConfirmationTime::Unconfirmed { last_seen: 0 }, } } + +pub(crate) fn map_confirmation_time_anchor( + tx_status: &TxStatus, + tip_at_start: BlockId, +) -> Option { + match (tx_status.block_time, tx_status.block_height) { + (Some(confirmation_time), Some(confirmation_height)) => Some(ConfirmationTimeAnchor { + anchor_block: tip_at_start, + confirmation_height, + confirmation_time, + }), + _ => None, + } +} diff --git a/crates/esplora/src/v2/async_ext.rs b/crates/esplora/src/v2/async_ext.rs new file mode 100644 index 00000000..ac6eed91 --- /dev/null +++ b/crates/esplora/src/v2/async_ext.rs @@ -0,0 +1,266 @@ +use async_trait::async_trait; +use bdk_chain::{ + bitcoin::{BlockHash, OutPoint, Script, Txid}, + collections::BTreeMap, + keychain::LocalUpdate, + BlockId, ConfirmationTimeAnchor, +}; +use esplora_client::{Error, OutputStatus}; +use futures::{stream::FuturesOrdered, TryStreamExt}; + +use crate::map_confirmation_time_anchor; + +/// Trait to extend [`esplora_client::AsyncClient`] functionality. +/// +/// This is the async version of [`EsploraExt`]. Refer to +/// [crate-level documentation] for more. 
+/// +/// [`EsploraExt`]: crate::EsploraExt +/// [crate-level documentation]: crate +#[cfg_attr(target_arch = "wasm32", async_trait(?Send))] +#[cfg_attr(not(target_arch = "wasm32"), async_trait)] +pub trait EsploraAsyncExt { + /// Scan the blockchain (via esplora) for the data specified and returns a + /// [`LocalUpdate`]. + /// + /// - `local_chain`: the most recent block hashes present locally + /// - `keychain_spks`: keychains that we want to scan transactions for + /// - `txids`: transactions for which we want updated [`ChainPosition`]s + /// - `outpoints`: transactions associated with these outpoints (residing, spending) that we + /// want to included in the update + /// + /// The scan for each keychain stops after a gap of `stop_gap` script pubkeys with no associated + /// transactions. `parallel_requests` specifies the max number of HTTP requests to make in + /// parallel. + /// + /// [`ChainPosition`]: bdk_chain::sparse_chain::ChainPosition + #[allow(clippy::result_large_err)] // FIXME + async fn scan( + &self, + local_chain: &BTreeMap, + keychain_spks: BTreeMap< + K, + impl IntoIterator + Send> + Send, + >, + txids: impl IntoIterator + Send> + Send, + outpoints: impl IntoIterator + Send> + Send, + stop_gap: usize, + parallel_requests: usize, + ) -> Result, Error>; + + /// Convenience method to call [`scan`] without requiring a keychain. 
+ /// + /// [`scan`]: EsploraAsyncExt::scan + #[allow(clippy::result_large_err)] // FIXME + async fn scan_without_keychain( + &self, + local_chain: &BTreeMap, + misc_spks: impl IntoIterator + Send> + Send, + txids: impl IntoIterator + Send> + Send, + outpoints: impl IntoIterator + Send> + Send, + parallel_requests: usize, + ) -> Result, Error> { + self.scan( + local_chain, + [( + (), + misc_spks + .into_iter() + .enumerate() + .map(|(i, spk)| (i as u32, spk)), + )] + .into(), + txids, + outpoints, + usize::MAX, + parallel_requests, + ) + .await + } +} + +#[cfg_attr(target_arch = "wasm32", async_trait(?Send))] +#[cfg_attr(not(target_arch = "wasm32"), async_trait)] +impl EsploraAsyncExt for esplora_client::AsyncClient { + #[allow(clippy::result_large_err)] // FIXME + async fn scan( + &self, + local_chain: &BTreeMap, + keychain_spks: BTreeMap< + K, + impl IntoIterator + Send> + Send, + >, + txids: impl IntoIterator + Send> + Send, + outpoints: impl IntoIterator + Send> + Send, + stop_gap: usize, + parallel_requests: usize, + ) -> Result, Error> { + let parallel_requests = Ord::max(parallel_requests, 1); + + let (mut update, tip_at_start) = loop { + let mut update = LocalUpdate::::default(); + + for (&height, &original_hash) in local_chain.iter().rev() { + let update_block_id = BlockId { + height, + hash: self.get_block_hash(height).await?, + }; + let _ = update + .chain + .insert_block(update_block_id) + .expect("cannot repeat height here"); + if update_block_id.hash == original_hash { + break; + } + } + + let tip_at_start = BlockId { + height: self.get_height().await?, + hash: self.get_tip_hash().await?, + }; + + if update.chain.insert_block(tip_at_start).is_ok() { + break (update, tip_at_start); + } + }; + + for (keychain, spks) in keychain_spks { + let mut spks = spks.into_iter(); + let mut last_active_index = None; + let mut empty_scripts = 0; + type IndexWithTxs = (u32, Vec); + + loop { + let futures = (0..parallel_requests) + .filter_map(|_| { + let (index, 
script) = spks.next()?; + let client = self.clone(); + Some(async move { + let mut related_txs = client.scripthash_txs(&script, None).await?; + + let n_confirmed = + related_txs.iter().filter(|tx| tx.status.confirmed).count(); + // esplora pages on 25 confirmed transactions. If there are 25 or more we + // keep requesting to see if there's more. + if n_confirmed >= 25 { + loop { + let new_related_txs = client + .scripthash_txs( + &script, + Some(related_txs.last().unwrap().txid), + ) + .await?; + let n = new_related_txs.len(); + related_txs.extend(new_related_txs); + // we've reached the end + if n < 25 { + break; + } + } + } + + Result::<_, esplora_client::Error>::Ok((index, related_txs)) + }) + }) + .collect::>(); + + let n_futures = futures.len(); + + for (index, related_txs) in futures.try_collect::>().await? { + if related_txs.is_empty() { + empty_scripts += 1; + } else { + last_active_index = Some(index); + empty_scripts = 0; + } + for tx in related_txs { + let anchor = map_confirmation_time_anchor(&tx.status, tip_at_start); + + let _ = update.graph.insert_tx(tx.to_tx()); + if let Some(anchor) = anchor { + let _ = update.graph.insert_anchor(tx.txid, anchor); + } + } + } + + if n_futures == 0 || empty_scripts >= stop_gap { + break; + } + } + + if let Some(last_active_index) = last_active_index { + update.keychain.insert(keychain, last_active_index); + } + } + + for txid in txids.into_iter() { + if update.graph.get_tx(txid).is_none() { + match self.get_tx(&txid).await? { + Some(tx) => { + let _ = update.graph.insert_tx(tx); + } + None => continue, + } + } + match self.get_tx_status(&txid).await? 
{ + Some(tx_status) => { + if let Some(anchor) = map_confirmation_time_anchor(&tx_status, tip_at_start) { + let _ = update.graph.insert_anchor(txid, anchor); + } + } + None => continue, + } + } + + for op in outpoints.into_iter() { + let mut op_txs = Vec::with_capacity(2); + if let (Some(tx), Some(tx_status)) = ( + self.get_tx(&op.txid).await?, + self.get_tx_status(&op.txid).await?, + ) { + op_txs.push((tx, tx_status)); + if let Some(OutputStatus { + txid: Some(txid), + status: Some(spend_status), + .. + }) = self.get_output_status(&op.txid, op.vout as _).await? + { + if let Some(spend_tx) = self.get_tx(&txid).await? { + op_txs.push((spend_tx, spend_status)); + } + } + } + + for (tx, status) in op_txs { + let txid = tx.txid(); + let anchor = map_confirmation_time_anchor(&status, tip_at_start); + + let _ = update.graph.insert_tx(tx); + if let Some(anchor) = anchor { + let _ = update.graph.insert_anchor(txid, anchor); + } + } + } + + if tip_at_start.hash != self.get_block_hash(tip_at_start.height).await? { + // A reorg occurred, so let's find out where all the txids we found are now in the chain + let txids_found = update + .graph + .full_txs() + .map(|tx_node| tx_node.txid) + .collect::>(); + update.chain = EsploraAsyncExt::scan_without_keychain( + self, + local_chain, + [], + txids_found, + [], + parallel_requests, + ) + .await? + .chain; + } + + Ok(update) + } +} diff --git a/crates/esplora/src/v2/blocking_ext.rs b/crates/esplora/src/v2/blocking_ext.rs new file mode 100644 index 00000000..63e4c923 --- /dev/null +++ b/crates/esplora/src/v2/blocking_ext.rs @@ -0,0 +1,247 @@ +use bdk_chain::bitcoin::{BlockHash, OutPoint, Script, Txid}; +use bdk_chain::collections::BTreeMap; +use bdk_chain::BlockId; +use bdk_chain::{keychain::LocalUpdate, ConfirmationTimeAnchor}; +use esplora_client::{Error, OutputStatus}; + +use crate::map_confirmation_time_anchor; + +/// Trait to extend [`esplora_client::BlockingClient`] functionality. 
+/// +/// Refer to [crate-level documentation] for more. +/// +/// [crate-level documentation]: crate +pub trait EsploraExt { + /// Scan the blockchain (via esplora) for the data specified and returns a + /// [`LocalUpdate`]. + /// + /// - `local_chain`: the most recent block hashes present locally + /// - `keychain_spks`: keychains that we want to scan transactions for + /// - `txids`: transactions for which we want updated [`ChainPosition`]s + /// - `outpoints`: transactions associated with these outpoints (residing, spending) that we + /// want to included in the update + /// + /// The scan for each keychain stops after a gap of `stop_gap` script pubkeys with no associated + /// transactions. `parallel_requests` specifies the max number of HTTP requests to make in + /// parallel. + /// + /// [`ChainPosition`]: bdk_chain::sparse_chain::ChainPosition + #[allow(clippy::result_large_err)] // FIXME + fn scan( + &self, + local_chain: &BTreeMap, + keychain_spks: BTreeMap>, + txids: impl IntoIterator, + outpoints: impl IntoIterator, + stop_gap: usize, + parallel_requests: usize, + ) -> Result, Error>; + + /// Convenience method to call [`scan`] without requiring a keychain. 
+ /// + /// [`scan`]: EsploraExt::scan + #[allow(clippy::result_large_err)] // FIXME + fn scan_without_keychain( + &self, + local_chain: &BTreeMap, + misc_spks: impl IntoIterator, + txids: impl IntoIterator, + outpoints: impl IntoIterator, + parallel_requests: usize, + ) -> Result, Error> { + self.scan( + local_chain, + [( + (), + misc_spks + .into_iter() + .enumerate() + .map(|(i, spk)| (i as u32, spk)), + )] + .into(), + txids, + outpoints, + usize::MAX, + parallel_requests, + ) + } +} + +impl EsploraExt for esplora_client::BlockingClient { + fn scan( + &self, + local_chain: &BTreeMap, + keychain_spks: BTreeMap>, + txids: impl IntoIterator, + outpoints: impl IntoIterator, + stop_gap: usize, + parallel_requests: usize, + ) -> Result, Error> { + let parallel_requests = Ord::max(parallel_requests, 1); + + let (mut update, tip_at_start) = loop { + let mut update = LocalUpdate::::default(); + + for (&height, &original_hash) in local_chain.iter().rev() { + let update_block_id = BlockId { + height, + hash: self.get_block_hash(height)?, + }; + let _ = update + .chain + .insert_block(update_block_id) + .expect("cannot repeat height here"); + if update_block_id.hash == original_hash { + break; + } + } + + let tip_at_start = BlockId { + height: self.get_height()?, + hash: self.get_tip_hash()?, + }; + + if update.chain.insert_block(tip_at_start).is_ok() { + break (update, tip_at_start); + } + }; + + for (keychain, spks) in keychain_spks { + let mut spks = spks.into_iter(); + let mut last_active_index = None; + let mut empty_scripts = 0; + type IndexWithTxs = (u32, Vec); + + loop { + let handles = (0..parallel_requests) + .filter_map( + |_| -> Option>> { + let (index, script) = spks.next()?; + let client = self.clone(); + Some(std::thread::spawn(move || { + let mut related_txs = client.scripthash_txs(&script, None)?; + + let n_confirmed = + related_txs.iter().filter(|tx| tx.status.confirmed).count(); + // esplora pages on 25 confirmed transactions. 
If there are 25 or more we + // keep requesting to see if there's more. + if n_confirmed >= 25 { + loop { + let new_related_txs = client.scripthash_txs( + &script, + Some(related_txs.last().unwrap().txid), + )?; + let n = new_related_txs.len(); + related_txs.extend(new_related_txs); + // we've reached the end + if n < 25 { + break; + } + } + } + + Result::<_, esplora_client::Error>::Ok((index, related_txs)) + })) + }, + ) + .collect::>(); + + let n_handles = handles.len(); + + for handle in handles { + let (index, related_txs) = handle.join().unwrap()?; // TODO: don't unwrap + if related_txs.is_empty() { + empty_scripts += 1; + } else { + last_active_index = Some(index); + empty_scripts = 0; + } + for tx in related_txs { + let anchor = map_confirmation_time_anchor(&tx.status, tip_at_start); + + let _ = update.graph.insert_tx(tx.to_tx()); + if let Some(anchor) = anchor { + let _ = update.graph.insert_anchor(tx.txid, anchor); + } + } + } + + if n_handles == 0 || empty_scripts >= stop_gap { + break; + } + } + + if let Some(last_active_index) = last_active_index { + update.keychain.insert(keychain, last_active_index); + } + } + + for txid in txids.into_iter() { + if update.graph.get_tx(txid).is_none() { + match self.get_tx(&txid)? { + Some(tx) => { + let _ = update.graph.insert_tx(tx); + } + None => continue, + } + } + match self.get_tx_status(&txid)? { + Some(tx_status) => { + if let Some(anchor) = map_confirmation_time_anchor(&tx_status, tip_at_start) { + let _ = update.graph.insert_anchor(txid, anchor); + } + } + None => continue, + } + } + + for op in outpoints.into_iter() { + let mut op_txs = Vec::with_capacity(2); + if let (Some(tx), Some(tx_status)) = + (self.get_tx(&op.txid)?, self.get_tx_status(&op.txid)?) + { + op_txs.push((tx, tx_status)); + if let Some(OutputStatus { + txid: Some(txid), + status: Some(spend_status), + .. + }) = self.get_output_status(&op.txid, op.vout as _)? + { + if let Some(spend_tx) = self.get_tx(&txid)? 
{ + op_txs.push((spend_tx, spend_status)); + } + } + } + + for (tx, status) in op_txs { + let txid = tx.txid(); + let anchor = map_confirmation_time_anchor(&status, tip_at_start); + + let _ = update.graph.insert_tx(tx); + if let Some(anchor) = anchor { + let _ = update.graph.insert_anchor(txid, anchor); + } + } + } + + if tip_at_start.hash != self.get_block_hash(tip_at_start.height)? { + // A reorg occurred, so let's find out where all the txids we found are now in the chain + let txids_found = update + .graph + .full_txs() + .map(|tx_node| tx_node.txid) + .collect::>(); + update.chain = EsploraExt::scan_without_keychain( + self, + local_chain, + [], + txids_found, + [], + parallel_requests, + )? + .chain; + } + + Ok(update) + } +} diff --git a/crates/esplora/src/v2/mod.rs b/crates/esplora/src/v2/mod.rs new file mode 100644 index 00000000..5720e2dd --- /dev/null +++ b/crates/esplora/src/v2/mod.rs @@ -0,0 +1,9 @@ +#[cfg(feature = "blocking")] +mod blocking_ext; +#[cfg(feature = "blocking")] +pub use blocking_ext::*; + +#[cfg(feature = "async")] +mod async_ext; +#[cfg(feature = "async")] +pub use async_ext::*; diff --git a/example-crates/wallet_esplora/Cargo.toml b/example-crates/wallet_esplora/Cargo.toml index 8e19cb7b..e58a4c2b 100644 --- a/example-crates/wallet_esplora/Cargo.toml +++ b/example-crates/wallet_esplora/Cargo.toml @@ -1,5 +1,5 @@ [package] -name = "bdk-esplora-wallet-example" +name = "wallet_esplora" version = "0.1.0" edition = "2021" publish = false diff --git a/example-crates/wallet_esplora/src/main.rs b/example-crates/wallet_esplora/src/main.rs index d9d07c7a..f71033fb 100644 --- a/example-crates/wallet_esplora/src/main.rs +++ b/example-crates/wallet_esplora/src/main.rs @@ -1,97 +1,94 @@ -// use bdk::{ -// bitcoin::{Address, Network}, -// wallet::AddressIndex, -// SignOptions, Wallet, -// }; -// use bdk_esplora::esplora_client; -// use bdk_esplora::EsploraExt; -// use bdk_file_store::KeychainStore; -// use std::{io::Write, str::FromStr}; +const 
DB_MAGIC: &str = "bdk_wallet_esplora_example"; +const SEND_AMOUNT: u64 = 5000; +const STOP_GAP: usize = 50; +const PARALLEL_REQUESTS: usize = 5; -// const SEND_AMOUNT: u64 = 5000; -// const STOP_GAP: usize = 50; -// const PARALLEL_REQUESTS: usize = 5; +use std::{io::Write, str::FromStr}; + +use bdk::{ + bitcoin::{Address, Network}, + wallet::AddressIndex, + SignOptions, Wallet, +}; +use bdk_esplora::{esplora_client, v2::EsploraExt}; +use bdk_file_store::Store; fn main() -> Result<(), Box> { - todo!("update this exampe!"); - // let db_path = std::env::temp_dir().join("bdk-esplora-example"); - // let db = KeychainStore::new_from_path(db_path)?; - // let external_descriptor = "wpkh(tprv8ZgxMBicQKsPdy6LMhUtFHAgpocR8GC6QmwMSFpZs7h6Eziw3SpThFfczTDh5rW2krkqffa11UpX3XkeTTB2FvzZKWXqPY54Y6Rq4AQ5R8L/84'/0'/0'/0/*)"; - // let internal_descriptor = "wpkh(tprv8ZgxMBicQKsPdy6LMhUtFHAgpocR8GC6QmwMSFpZs7h6Eziw3SpThFfczTDh5rW2krkqffa11UpX3XkeTTB2FvzZKWXqPY54Y6Rq4AQ5R8L/84'/0'/0'/1/*)"; + let db_path = std::env::temp_dir().join("bdk-esplora-example"); + let db = Store::::new_from_path(DB_MAGIC.as_bytes(), db_path)?; + let external_descriptor = "wpkh(tprv8ZgxMBicQKsPdy6LMhUtFHAgpocR8GC6QmwMSFpZs7h6Eziw3SpThFfczTDh5rW2krkqffa11UpX3XkeTTB2FvzZKWXqPY54Y6Rq4AQ5R8L/84'/0'/0'/0/*)"; + let internal_descriptor = "wpkh(tprv8ZgxMBicQKsPdy6LMhUtFHAgpocR8GC6QmwMSFpZs7h6Eziw3SpThFfczTDh5rW2krkqffa11UpX3XkeTTB2FvzZKWXqPY54Y6Rq4AQ5R8L/84'/0'/0'/1/*)"; - // let mut wallet = Wallet::new( - // external_descriptor, - // Some(internal_descriptor), - // db, - // Network::Testnet, - // )?; + let mut wallet = Wallet::new( + external_descriptor, + Some(internal_descriptor), + db, + Network::Testnet, + )?; - // let address = wallet.get_address(AddressIndex::New); - // println!("Generated Address: {}", address); + let address = wallet.get_address(AddressIndex::New); + println!("Generated Address: {}", address); - // let balance = wallet.get_balance(); - // println!("Wallet balance before syncing: {} sats", 
balance.total()); + let balance = wallet.get_balance(); + println!("Wallet balance before syncing: {} sats", balance.total()); - // print!("Syncing..."); - // // Scanning the chain... - // let esplora_url = "https://mempool.space/testnet/api"; - // let client = esplora_client::Builder::new(esplora_url).build_blocking()?; - // let checkpoints = wallet.checkpoints(); - // let spks = wallet - // .spks_of_all_keychains() - // .into_iter() - // .map(|(k, spks)| { - // let mut first = true; - // ( - // k, - // spks.inspect(move |(spk_i, _)| { - // if first { - // first = false; - // print!("\nScanning keychain [{:?}]:", k); - // } - // print!(" {}", spk_i); - // let _ = std::io::stdout().flush(); - // }), - // ) - // }) - // .collect(); - // let update = client.scan( - // checkpoints, - // spks, - // core::iter::empty(), - // core::iter::empty(), - // STOP_GAP, - // PARALLEL_REQUESTS, - // )?; - // println!(); - // wallet.apply_update(update)?; - // wallet.commit()?; + print!("Syncing..."); + let client = + esplora_client::Builder::new("https://blockstream.info/testnet/api").build_blocking()?; - // let balance = wallet.get_balance(); - // println!("Wallet balance after syncing: {} sats", balance.total()); + let local_chain = wallet.checkpoints(); + let keychain_spks = wallet + .spks_of_all_keychains() + .into_iter() + .map(|(k, k_spks)| { + let mut once = Some(()); + let mut stdout = std::io::stdout(); + let k_spks = k_spks + .inspect(move |(spk_i, _)| match once.take() { + Some(_) => print!("\nScanning keychain [{:?}]", k), + None => print!(" {:<3}", spk_i), + }) + .inspect(move |_| stdout.flush().expect("must flush")); + (k, k_spks) + }) + .collect(); + let update = client.scan( + local_chain, + keychain_spks, + None, + None, + STOP_GAP, + PARALLEL_REQUESTS, + )?; + println!(); + wallet.apply_update(update)?; + wallet.commit()?; - // if balance.total() < SEND_AMOUNT { - // println!( - // "Please send at least {} sats to the receiving address", - // SEND_AMOUNT - // ); 
- // std::process::exit(0); - // } + let balance = wallet.get_balance(); + println!("Wallet balance after syncing: {} sats", balance.total()); - // let faucet_address = Address::from_str("mkHS9ne12qx9pS9VojpwU5xtRd4T7X7ZUt")?; + if balance.total() < SEND_AMOUNT { + println!( + "Please send at least {} sats to the receiving address", + SEND_AMOUNT + ); + std::process::exit(0); + } - // let mut tx_builder = wallet.build_tx(); - // tx_builder - // .add_recipient(faucet_address.script_pubkey(), SEND_AMOUNT) - // .enable_rbf(); + let faucet_address = Address::from_str("mkHS9ne12qx9pS9VojpwU5xtRd4T7X7ZUt")?; - // let (mut psbt, _) = tx_builder.finish()?; - // let finalized = wallet.sign(&mut psbt, SignOptions::default())?; - // assert!(finalized); + let mut tx_builder = wallet.build_tx(); + tx_builder + .add_recipient(faucet_address.script_pubkey(), SEND_AMOUNT) + .enable_rbf(); - // let tx = psbt.extract_tx(); - // client.broadcast(&tx)?; - // println!("Tx broadcasted! Txid: {}", tx.txid()); + let (mut psbt, _) = tx_builder.finish()?; + let finalized = wallet.sign(&mut psbt, SignOptions::default())?; + assert!(finalized); - // Ok(()) + let tx = psbt.extract_tx(); + client.broadcast(&tx)?; + println!("Tx broadcasted! 
Txid: {}", tx.txid()); + + Ok(()) } diff --git a/example-crates/wallet_esplora_async/src/main.rs b/example-crates/wallet_esplora_async/src/main.rs index 66cdf689..02271c04 100644 --- a/example-crates/wallet_esplora_async/src/main.rs +++ b/example-crates/wallet_esplora_async/src/main.rs @@ -1,100 +1,97 @@ -// use std::{io::Write, str::FromStr}; +use std::{io::Write, str::FromStr}; -// use bdk::{ -// bitcoin::{Address, Network}, -// wallet::AddressIndex, -// SignOptions, Wallet, -// }; -// use bdk_esplora::{esplora_client, EsploraAsyncExt}; -// use bdk_file_store::KeychainStore; +use bdk::{ + bitcoin::{Address, Network}, + wallet::AddressIndex, + SignOptions, Wallet, +}; +use bdk_esplora::{esplora_client, v2::EsploraAsyncExt}; +use bdk_file_store::Store; -// const SEND_AMOUNT: u64 = 5000; -// const STOP_GAP: usize = 50; -// const PARALLEL_REQUESTS: usize = 5; +const DB_MAGIC: &str = "bdk_wallet_esplora_async_example"; +const SEND_AMOUNT: u64 = 5000; +const STOP_GAP: usize = 50; +const PARALLEL_REQUESTS: usize = 5; #[tokio::main] async fn main() -> Result<(), Box> { - todo!("update this example!"); - // let db_path = std::env::temp_dir().join("bdk-esplora-example"); - // let db = KeychainStore::new_from_path(db_path)?; - // let external_descriptor = "wpkh(tprv8ZgxMBicQKsPdy6LMhUtFHAgpocR8GC6QmwMSFpZs7h6Eziw3SpThFfczTDh5rW2krkqffa11UpX3XkeTTB2FvzZKWXqPY54Y6Rq4AQ5R8L/84'/0'/0'/0/*)"; - // let internal_descriptor = "wpkh(tprv8ZgxMBicQKsPdy6LMhUtFHAgpocR8GC6QmwMSFpZs7h6Eziw3SpThFfczTDh5rW2krkqffa11UpX3XkeTTB2FvzZKWXqPY54Y6Rq4AQ5R8L/84'/0'/0'/1/*)"; + let db_path = std::env::temp_dir().join("bdk-esplora-async-example"); + let db = Store::::new_from_path(DB_MAGIC.as_bytes(), db_path)?; + let external_descriptor = "wpkh(tprv8ZgxMBicQKsPdy6LMhUtFHAgpocR8GC6QmwMSFpZs7h6Eziw3SpThFfczTDh5rW2krkqffa11UpX3XkeTTB2FvzZKWXqPY54Y6Rq4AQ5R8L/84'/0'/0'/0/*)"; + let internal_descriptor = 
"wpkh(tprv8ZgxMBicQKsPdy6LMhUtFHAgpocR8GC6QmwMSFpZs7h6Eziw3SpThFfczTDh5rW2krkqffa11UpX3XkeTTB2FvzZKWXqPY54Y6Rq4AQ5R8L/84'/0'/0'/1/*)"; - // let mut wallet = Wallet::new( - // external_descriptor, - // Some(internal_descriptor), - // db, - // Network::Testnet, - // )?; + let mut wallet = Wallet::new( + external_descriptor, + Some(internal_descriptor), + db, + Network::Testnet, + )?; - // let address = wallet.get_address(AddressIndex::New); - // println!("Generated Address: {}", address); + let address = wallet.get_address(AddressIndex::New); + println!("Generated Address: {}", address); - // let balance = wallet.get_balance(); - // println!("Wallet balance before syncing: {} sats", balance.total()); + let balance = wallet.get_balance(); + println!("Wallet balance before syncing: {} sats", balance.total()); - // print!("Syncing..."); - // // Scanning the blockchain - // let esplora_url = "https://mempool.space/testnet/api"; - // let client = esplora_client::Builder::new(esplora_url).build_async()?; - // let checkpoints = wallet.checkpoints(); - // let spks = wallet - // .spks_of_all_keychains() - // .into_iter() - // .map(|(k, spks)| { - // let mut first = true; - // ( - // k, - // spks.inspect(move |(spk_i, _)| { - // if first { - // first = false; - // print!("\nScanning keychain [{:?}]:", k); - // } - // print!(" {}", spk_i); - // let _ = std::io::stdout().flush(); - // }), - // ) - // }) - // .collect(); - // let update = client - // .scan( - // checkpoints, - // spks, - // std::iter::empty(), - // std::iter::empty(), - // STOP_GAP, - // PARALLEL_REQUESTS, - // ) - // .await?; - // println!(); - // wallet.apply_update(update)?; - // wallet.commit()?; + print!("Syncing..."); + let client = + esplora_client::Builder::new("https://blockstream.info/testnet/api").build_async()?; - // let balance = wallet.get_balance(); - // println!("Wallet balance after syncing: {} sats", balance.total()); + let local_chain = wallet.checkpoints(); + let keychain_spks = wallet + 
.spks_of_all_keychains() + .into_iter() + .map(|(k, k_spks)| { + let mut once = Some(()); + let mut stdout = std::io::stdout(); + let k_spks = k_spks + .inspect(move |(spk_i, _)| match once.take() { + Some(_) => print!("\nScanning keychain [{:?}]", k), + None => print!(" {:<3}", spk_i), + }) + .inspect(move |_| stdout.flush().expect("must flush")); + (k, k_spks) + }) + .collect(); + let update = client + .scan( + local_chain, + keychain_spks, + [], + [], + STOP_GAP, + PARALLEL_REQUESTS, + ) + .await?; + println!(); + wallet.apply_update(update)?; + wallet.commit()?; - // if balance.total() < SEND_AMOUNT { - // println!( - // "Please send at least {} sats to the receiving address", - // SEND_AMOUNT - // ); - // std::process::exit(0); - // } + let balance = wallet.get_balance(); + println!("Wallet balance after syncing: {} sats", balance.total()); - // let faucet_address = Address::from_str("mkHS9ne12qx9pS9VojpwU5xtRd4T7X7ZUt")?; + if balance.total() < SEND_AMOUNT { + println!( + "Please send at least {} sats to the receiving address", + SEND_AMOUNT + ); + std::process::exit(0); + } - // let mut tx_builder = wallet.build_tx(); - // tx_builder - // .add_recipient(faucet_address.script_pubkey(), SEND_AMOUNT) - // .enable_rbf(); + let faucet_address = Address::from_str("mkHS9ne12qx9pS9VojpwU5xtRd4T7X7ZUt")?; - // let (mut psbt, _) = tx_builder.finish()?; - // let finalized = wallet.sign(&mut psbt, SignOptions::default())?; - // assert!(finalized); + let mut tx_builder = wallet.build_tx(); + tx_builder + .add_recipient(faucet_address.script_pubkey(), SEND_AMOUNT) + .enable_rbf(); - // let tx = psbt.extract_tx(); - // client.broadcast(&tx).await?; - // println!("Tx broadcasted! Txid: {}", tx.txid()); + let (mut psbt, _) = tx_builder.finish()?; + let finalized = wallet.sign(&mut psbt, SignOptions::default())?; + assert!(finalized); - // Ok(()) + let tx = psbt.extract_tx(); + client.broadcast(&tx).await?; + println!("Tx broadcasted! 
Txid: {}", tx.txid()); + + Ok(()) } From 1c3cbefa4df7a4f93bc95203534da8ea0186fc5a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E5=BF=97=E5=AE=87?= Date: Wed, 24 May 2023 11:37:26 +0800 Subject: [PATCH 13/17] [chain_redesign] Remove old structures Other changes: * The `async-https` feature of `bdk_esplora` is no longer default. * Rename `ObservedAs` to `ChainPosition`. * Set temporary MSRV to 1.60.0 to compile all workspace members will all features. --- Cargo.toml | 4 +- crates/bdk/src/wallet/export.rs | 4 +- crates/bdk/src/wallet/mod.rs | 10 +- crates/bdk/tests/wallet.rs | 30 +- crates/chain/src/chain_data.rs | 191 +-- crates/chain/src/chain_graph.rs | 639 ---------- crates/chain/src/keychain.rs | 149 +-- crates/chain/src/keychain/persist.rs | 108 -- crates/chain/src/keychain/tracker.rs | 308 ----- crates/chain/src/keychain/txout_index.rs | 5 +- crates/chain/src/lib.rs | 2 - crates/chain/src/sparse_chain.rs | 1102 ----------------- crates/chain/src/spk_txout_index.rs | 4 +- crates/chain/src/tx_graph.rs | 30 +- crates/chain/tests/test_chain_graph.rs | 655 ---------- crates/chain/tests/test_indexed_tx_graph.rs | 10 +- crates/chain/tests/test_keychain_tracker.rs | 240 ---- crates/chain/tests/test_sparse_chain.rs | 773 ------------ crates/chain/tests/test_tx_graph.rs | 20 +- .../electrum/src/{v2.rs => electrum_ext.rs} | 67 +- crates/electrum/src/lib.rs | 559 +-------- crates/esplora/Cargo.toml | 4 +- crates/esplora/README.md | 2 +- crates/esplora/src/async_ext.rs | 236 ++-- crates/esplora/src/blocking_ext.rs | 194 ++- crates/esplora/src/lib.rs | 15 +- crates/esplora/src/v2/async_ext.rs | 266 ---- crates/esplora/src/v2/blocking_ext.rs | 247 ---- crates/esplora/src/v2/mod.rs | 9 - crates/file_store/README.md | 4 +- crates/file_store/src/keychain_store.rs | 313 ----- crates/file_store/src/lib.rs | 31 - example-crates/example_cli/src/lib.rs | 6 +- example-crates/example_electrum/src/main.rs | 2 +- .../keychain_tracker_electrum/.gitignore | 1 - 
.../keychain_tracker_electrum/Cargo.toml | 9 - .../keychain_tracker_electrum/README.md | 6 - .../keychain_tracker_electrum/src/main.rs | 245 ---- .../keychain_tracker_esplora/.gitignore | 3 - .../keychain_tracker_esplora/Cargo.toml | 11 - .../keychain_tracker_esplora/src/main.rs | 241 ---- .../keychain_tracker_example_cli/.gitignore | 1 - .../keychain_tracker_example_cli/Cargo.toml | 16 - .../keychain_tracker_example_cli/README.md | 1 - .../keychain_tracker_example_cli/src/lib.rs | 692 ----------- example-crates/wallet_electrum/src/main.rs | 2 +- example-crates/wallet_esplora/src/main.rs | 2 +- .../wallet_esplora_async/src/main.rs | 2 +- 48 files changed, 310 insertions(+), 7161 deletions(-) delete mode 100644 crates/chain/src/chain_graph.rs delete mode 100644 crates/chain/src/keychain/persist.rs delete mode 100644 crates/chain/src/keychain/tracker.rs delete mode 100644 crates/chain/src/sparse_chain.rs delete mode 100644 crates/chain/tests/test_chain_graph.rs delete mode 100644 crates/chain/tests/test_keychain_tracker.rs delete mode 100644 crates/chain/tests/test_sparse_chain.rs rename crates/electrum/src/{v2.rs => electrum_ext.rs} (88%) delete mode 100644 crates/esplora/src/v2/async_ext.rs delete mode 100644 crates/esplora/src/v2/blocking_ext.rs delete mode 100644 crates/esplora/src/v2/mod.rs delete mode 100644 crates/file_store/src/keychain_store.rs delete mode 100644 example-crates/keychain_tracker_electrum/.gitignore delete mode 100644 example-crates/keychain_tracker_electrum/Cargo.toml delete mode 100644 example-crates/keychain_tracker_electrum/README.md delete mode 100644 example-crates/keychain_tracker_electrum/src/main.rs delete mode 100644 example-crates/keychain_tracker_esplora/.gitignore delete mode 100644 example-crates/keychain_tracker_esplora/Cargo.toml delete mode 100644 example-crates/keychain_tracker_esplora/src/main.rs delete mode 100644 example-crates/keychain_tracker_example_cli/.gitignore delete mode 100644 
example-crates/keychain_tracker_example_cli/Cargo.toml delete mode 100644 example-crates/keychain_tracker_example_cli/README.md delete mode 100644 example-crates/keychain_tracker_example_cli/src/lib.rs diff --git a/Cargo.toml b/Cargo.toml index 48ecaa88..c5f2692d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -4,11 +4,9 @@ members = [ "crates/chain", "crates/file_store", "crates/electrum", + "crates/esplora", "example-crates/example_cli", "example-crates/example_electrum", - "example-crates/keychain_tracker_electrum", - "example-crates/keychain_tracker_esplora", - "example-crates/keychain_tracker_example_cli", "example-crates/wallet_electrum", "example-crates/wallet_esplora", "example-crates/wallet_esplora_async", diff --git a/crates/bdk/src/wallet/export.rs b/crates/bdk/src/wallet/export.rs index fe87fedd..a4d93976 100644 --- a/crates/bdk/src/wallet/export.rs +++ b/crates/bdk/src/wallet/export.rs @@ -130,8 +130,8 @@ impl FullyNodedExport { .transactions() .next() .map_or(0, |canonical_tx| match canonical_tx.observed_as { - bdk_chain::ObservedAs::Confirmed(a) => a.confirmation_height, - bdk_chain::ObservedAs::Unconfirmed(_) => 0, + bdk_chain::ChainPosition::Confirmed(a) => a.confirmation_height, + bdk_chain::ChainPosition::Unconfirmed(_) => 0, }) } else { 0 diff --git a/crates/bdk/src/wallet/mod.rs b/crates/bdk/src/wallet/mod.rs index ec6a9dfd..550bc780 100644 --- a/crates/bdk/src/wallet/mod.rs +++ b/crates/bdk/src/wallet/mod.rs @@ -25,7 +25,7 @@ use bdk_chain::{ keychain::{KeychainTxOutIndex, LocalChangeSet, LocalUpdate}, local_chain::{self, LocalChain, UpdateNotConnectedError}, tx_graph::{CanonicalTx, TxGraph}, - Append, BlockId, ConfirmationTime, ConfirmationTimeAnchor, FullTxOut, ObservedAs, Persist, + Append, BlockId, ChainPosition, ConfirmationTime, ConfirmationTimeAnchor, FullTxOut, Persist, PersistBackend, }; use bitcoin::consensus::encode::serialize; @@ -1015,7 +1015,7 @@ impl Wallet { let pos = graph .get_chain_position(&self.chain, chain_tip, txid) 
.ok_or(Error::TransactionNotFound)?; - if let ObservedAs::Confirmed(_) = pos { + if let ChainPosition::Confirmed(_) = pos { return Err(Error::TransactionConfirmed); } @@ -1258,8 +1258,8 @@ impl Wallet { .graph() .get_chain_position(&self.chain, chain_tip, input.previous_output.txid) .map(|observed_as| match observed_as { - ObservedAs::Confirmed(a) => a.confirmation_height, - ObservedAs::Unconfirmed(_) => u32::MAX, + ChainPosition::Confirmed(a) => a.confirmation_height, + ChainPosition::Unconfirmed(_) => u32::MAX, }); let current_height = sign_options .assume_height @@ -1775,7 +1775,7 @@ where fn new_local_utxo( keychain: KeychainKind, derivation_index: u32, - full_txo: FullTxOut>, + full_txo: FullTxOut, ) -> LocalUtxo { LocalUtxo { outpoint: full_txo.outpoint, diff --git a/crates/bdk/tests/wallet.rs b/crates/bdk/tests/wallet.rs index c5bf8e16..282a74fc 100644 --- a/crates/bdk/tests/wallet.rs +++ b/crates/bdk/tests/wallet.rs @@ -8,8 +8,8 @@ use bdk::Error; use bdk::FeeRate; use bdk::KeychainKind; use bdk_chain::BlockId; +use bdk_chain::ConfirmationTime; use bdk_chain::COINBASE_MATURITY; -use bdk_chain::{ConfirmationTime, TxHeight}; use bitcoin::hashes::Hash; use bitcoin::BlockHash; use bitcoin::Script; @@ -23,7 +23,7 @@ use core::str::FromStr; mod common; use common::*; -fn receive_output(wallet: &mut Wallet, value: u64, height: TxHeight) -> OutPoint { +fn receive_output(wallet: &mut Wallet, value: u64, height: ConfirmationTime) -> OutPoint { let tx = Transaction { version: 1, lock_time: PackedLockTime(0), @@ -34,18 +34,7 @@ fn receive_output(wallet: &mut Wallet, value: u64, height: TxHeight) -> OutPoint }], }; - wallet - .insert_tx( - tx.clone(), - match height { - TxHeight::Confirmed(height) => ConfirmationTime::Confirmed { - height, - time: 42_000, - }, - TxHeight::Unconfirmed => ConfirmationTime::Unconfirmed { last_seen: 0 }, - }, - ) - .unwrap(); + wallet.insert_tx(tx.clone(), height).unwrap(); OutPoint { txid: tx.txid(), @@ -54,7 +43,10 @@ fn 
receive_output(wallet: &mut Wallet, value: u64, height: TxHeight) -> OutPoint } fn receive_output_in_latest_block(wallet: &mut Wallet, value: u64) -> OutPoint { - let height = wallet.latest_checkpoint().map(|id| id.height).into(); + let height = match wallet.latest_checkpoint() { + Some(BlockId { height, .. }) => ConfirmationTime::Confirmed { height, time: 0 }, + None => ConfirmationTime::Unconfirmed { last_seen: 0 }, + }; receive_output(wallet, value, height) } @@ -1941,7 +1933,11 @@ fn test_bump_fee_unconfirmed_inputs_only() { let (psbt, __details) = builder.finish().unwrap(); // Now we receive one transaction with 0 confirmations. We won't be able to use that for // fee bumping, as it's still unconfirmed! - receive_output(&mut wallet, 25_000, TxHeight::Unconfirmed); + receive_output( + &mut wallet, + 25_000, + ConfirmationTime::Unconfirmed { last_seen: 0 }, + ); let mut tx = psbt.extract_tx(); let txid = tx.txid(); for txin in &mut tx.input { @@ -1966,7 +1962,7 @@ fn test_bump_fee_unconfirmed_input() { let addr = Address::from_str("2N1Ffz3WaNzbeLFBb51xyFMHYSEUXcbiSoX").unwrap(); // We receive a tx with 0 confirmations, which will be used as an input // in the drain tx. - receive_output(&mut wallet, 25_000, TxHeight::Unconfirmed); + receive_output(&mut wallet, 25_000, ConfirmationTime::unconfirmed(0)); let mut builder = wallet.build_tx(); builder .drain_wallet() diff --git a/crates/chain/src/chain_data.rs b/crates/chain/src/chain_data.rs index d1234298..6decfd07 100644 --- a/crates/chain/src/chain_data.rs +++ b/crates/chain/src/chain_data.rs @@ -1,102 +1,43 @@ use bitcoin::{hashes::Hash, BlockHash, OutPoint, TxOut, Txid}; -use crate::{ - sparse_chain::{self, ChainPosition}, - Anchor, COINBASE_MATURITY, -}; +use crate::{Anchor, COINBASE_MATURITY}; -/// Represents an observation of some chain data. +/// Represents the observed position of some chain data. /// /// The generic `A` should be a [`Anchor`] implementation. 
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, core::hash::Hash)] -pub enum ObservedAs { +pub enum ChainPosition { /// The chain data is seen as confirmed, and in anchored by `A`. Confirmed(A), /// The chain data is seen in mempool at this given timestamp. Unconfirmed(u64), } -impl ObservedAs { - /// Returns whether [`ObservedAs`] is confirmed or not. +impl ChainPosition { + /// Returns whether [`ChainPosition`] is confirmed or not. pub fn is_confirmed(&self) -> bool { matches!(self, Self::Confirmed(_)) } } -impl ObservedAs<&A> { - pub fn cloned(self) -> ObservedAs { +impl ChainPosition<&A> { + pub fn cloned(self) -> ChainPosition { match self { - ObservedAs::Confirmed(a) => ObservedAs::Confirmed(a.clone()), - ObservedAs::Unconfirmed(last_seen) => ObservedAs::Unconfirmed(last_seen), + ChainPosition::Confirmed(a) => ChainPosition::Confirmed(a.clone()), + ChainPosition::Unconfirmed(last_seen) => ChainPosition::Unconfirmed(last_seen), } } } -/// Represents the height at which a transaction is confirmed. 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] -#[cfg_attr( - feature = "serde", - derive(serde::Deserialize, serde::Serialize), - serde(crate = "serde_crate") -)] -pub enum TxHeight { - Confirmed(u32), - Unconfirmed, -} - -impl Default for TxHeight { - fn default() -> Self { - Self::Unconfirmed - } -} - -impl core::fmt::Display for TxHeight { - fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { +impl ChainPosition { + pub fn confirmation_height_upper_bound(&self) -> Option { match self { - Self::Confirmed(h) => core::write!(f, "confirmed_at({})", h), - Self::Unconfirmed => core::write!(f, "unconfirmed"), + ChainPosition::Confirmed(a) => Some(a.confirmation_height_upper_bound()), + ChainPosition::Unconfirmed(_) => None, } } } -impl From> for TxHeight { - fn from(opt: Option) -> Self { - match opt { - Some(h) => Self::Confirmed(h), - None => Self::Unconfirmed, - } - } -} - -impl From for Option { - fn from(height: TxHeight) -> Self { - match height { - TxHeight::Confirmed(h) => Some(h), - TxHeight::Unconfirmed => None, - } - } -} - -impl crate::sparse_chain::ChainPosition for TxHeight { - fn height(&self) -> TxHeight { - *self - } - - fn max_ord_of_height(height: TxHeight) -> Self { - height - } - - fn min_ord_of_height(height: TxHeight) -> Self { - height - } -} - -impl TxHeight { - pub fn is_confirmed(&self) -> bool { - matches!(self, Self::Confirmed(_)) - } -} - /// Block height and timestamp at which a transaction is confirmed. #[derive(Debug, Clone, PartialEq, Eq, Copy, PartialOrd, Ord, core::hash::Hash)] #[cfg_attr( @@ -109,49 +50,24 @@ pub enum ConfirmationTime { Unconfirmed { last_seen: u64 }, } -impl sparse_chain::ChainPosition for ConfirmationTime { - fn height(&self) -> TxHeight { - match self { - ConfirmationTime::Confirmed { height, .. } => TxHeight::Confirmed(*height), - ConfirmationTime::Unconfirmed { .. 
} => TxHeight::Unconfirmed, - } - } - - fn max_ord_of_height(height: TxHeight) -> Self { - match height { - TxHeight::Confirmed(height) => Self::Confirmed { - height, - time: u64::MAX, - }, - TxHeight::Unconfirmed => Self::Unconfirmed { last_seen: 0 }, - } - } - - fn min_ord_of_height(height: TxHeight) -> Self { - match height { - TxHeight::Confirmed(height) => Self::Confirmed { - height, - time: u64::MIN, - }, - TxHeight::Unconfirmed => Self::Unconfirmed { last_seen: 0 }, - } - } -} - impl ConfirmationTime { + pub fn unconfirmed(last_seen: u64) -> Self { + Self::Unconfirmed { last_seen } + } + pub fn is_confirmed(&self) -> bool { matches!(self, Self::Confirmed { .. }) } } -impl From> for ConfirmationTime { - fn from(observed_as: ObservedAs) -> Self { +impl From> for ConfirmationTime { + fn from(observed_as: ChainPosition) -> Self { match observed_as { - ObservedAs::Confirmed(a) => Self::Confirmed { + ChainPosition::Confirmed(a) => Self::Confirmed { height: a.confirmation_height, time: a.confirmation_time, }, - ObservedAs::Unconfirmed(_) => Self::Unconfirmed { last_seen: 0 }, + ChainPosition::Unconfirmed(_) => Self::Unconfirmed { last_seen: 0 }, } } } @@ -254,75 +170,32 @@ impl Anchor for ConfirmationTimeAnchor { } /// A `TxOut` with as much data as we can retrieve about it #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)] -pub struct FullTxOut

{ +pub struct FullTxOut { /// The location of the `TxOut`. pub outpoint: OutPoint, /// The `TxOut`. pub txout: TxOut, /// The position of the transaction in `outpoint` in the overall chain. - pub chain_position: P, + pub chain_position: ChainPosition, /// The txid and chain position of the transaction (if any) that has spent this output. - pub spent_by: Option<(P, Txid)>, + pub spent_by: Option<(ChainPosition, Txid)>, /// Whether this output is on a coinbase transaction. pub is_on_coinbase: bool, } -impl FullTxOut

{ - /// Whether the utxo is/was/will be spendable at `height`. - /// - /// It is spendable if it is not an immature coinbase output and no spending tx has been - /// confirmed by that height. - pub fn is_spendable_at(&self, height: u32) -> bool { - if !self.is_mature(height) { - return false; - } - - if self.chain_position.height() > TxHeight::Confirmed(height) { - return false; - } - - match &self.spent_by { - Some((spending_height, _)) => spending_height.height() > TxHeight::Confirmed(height), - None => true, - } - } - - pub fn is_mature(&self, height: u32) -> bool { - if self.is_on_coinbase { - let tx_height = match self.chain_position.height() { - TxHeight::Confirmed(tx_height) => tx_height, - TxHeight::Unconfirmed => { - debug_assert!(false, "coinbase tx can never be unconfirmed"); - return false; - } - }; - let age = height.saturating_sub(tx_height); - if age + 1 < COINBASE_MATURITY { - return false; - } - } - - true - } -} - -impl FullTxOut> { +impl FullTxOut { /// Whether the `txout` is considered mature. /// - /// This is the alternative version of [`is_mature`] which depends on `chain_position` being a - /// [`ObservedAs`] where `A` implements [`Anchor`]. - /// /// Depending on the implementation of [`confirmation_height_upper_bound`] in [`Anchor`], this /// method may return false-negatives. In other words, interpretted confirmation count may be /// less than the actual value. 
/// - /// [`is_mature`]: Self::is_mature /// [`confirmation_height_upper_bound`]: Anchor::confirmation_height_upper_bound pub fn is_mature(&self, tip: u32) -> bool { if self.is_on_coinbase { let tx_height = match &self.chain_position { - ObservedAs::Confirmed(anchor) => anchor.confirmation_height_upper_bound(), - ObservedAs::Unconfirmed(_) => { + ChainPosition::Confirmed(anchor) => anchor.confirmation_height_upper_bound(), + ChainPosition::Unconfirmed(_) => { debug_assert!(false, "coinbase tx can never be unconfirmed"); return false; } @@ -340,14 +213,10 @@ impl FullTxOut> { /// /// This method does not take into account the locktime. /// - /// This is the alternative version of [`is_spendable_at`] which depends on `chain_position` - /// being a [`ObservedAs`] where `A` implements [`Anchor`]. - /// /// Depending on the implementation of [`confirmation_height_upper_bound`] in [`Anchor`], this /// method may return false-negatives. In other words, interpretted confirmation count may be /// less than the actual value. 
/// - /// [`is_spendable_at`]: Self::is_spendable_at /// [`confirmation_height_upper_bound`]: Anchor::confirmation_height_upper_bound pub fn is_confirmed_and_spendable(&self, tip: u32) -> bool { if !self.is_mature(tip) { @@ -355,15 +224,15 @@ impl FullTxOut> { } let confirmation_height = match &self.chain_position { - ObservedAs::Confirmed(anchor) => anchor.confirmation_height_upper_bound(), - ObservedAs::Unconfirmed(_) => return false, + ChainPosition::Confirmed(anchor) => anchor.confirmation_height_upper_bound(), + ChainPosition::Unconfirmed(_) => return false, }; if confirmation_height > tip { return false; } // if the spending tx is confirmed within tip height, the txout is no longer spendable - if let Some((ObservedAs::Confirmed(spending_anchor), _)) = &self.spent_by { + if let Some((ChainPosition::Confirmed(spending_anchor), _)) = &self.spent_by { if spending_anchor.anchor_block().height <= tip { return false; } diff --git a/crates/chain/src/chain_graph.rs b/crates/chain/src/chain_graph.rs deleted file mode 100644 index 47845c5a..00000000 --- a/crates/chain/src/chain_graph.rs +++ /dev/null @@ -1,639 +0,0 @@ -//! Module for structures that combine the features of [`sparse_chain`] and [`tx_graph`]. -use crate::{ - collections::HashSet, - sparse_chain::{self, ChainPosition, SparseChain}, - tx_graph::{self, TxGraph}, - Append, BlockId, ForEachTxOut, FullTxOut, TxHeight, -}; -use alloc::{string::ToString, vec::Vec}; -use bitcoin::{OutPoint, Transaction, TxOut, Txid}; -use core::fmt::Debug; - -/// A consistent combination of a [`SparseChain

`] and a [`TxGraph`]. -/// -/// `SparseChain` only keeps track of transaction ids and their position in the chain, but you often -/// want to store the full transactions as well. Additionally, you want to make sure that everything -/// in the chain is consistent with the full transaction data. `ChainGraph` enforces these two -/// invariants: -/// -/// 1. Every transaction that is in the chain is also in the graph (you always have the full -/// transaction). -/// 2. No transactions in the chain conflict with each other, i.e., they don't double spend each -/// other or have ancestors that double spend each other. -/// -/// Note that the `ChainGraph` guarantees a 1:1 mapping between transactions in the `chain` and -/// `graph` but not the other way around. Transactions may fall out of the *chain* (via re-org or -/// mempool eviction) but will remain in the *graph*. -#[derive(Clone, Debug, PartialEq)] -pub struct ChainGraph

{ - chain: SparseChain

, - graph: TxGraph, -} - -impl

Default for ChainGraph

{ - fn default() -> Self { - Self { - chain: Default::default(), - graph: Default::default(), - } - } -} - -impl

AsRef> for ChainGraph

{ - fn as_ref(&self) -> &SparseChain

{ - &self.chain - } -} - -impl

AsRef for ChainGraph

{ - fn as_ref(&self) -> &TxGraph { - &self.graph - } -} - -impl

AsRef> for ChainGraph

{ - fn as_ref(&self) -> &ChainGraph

{ - self - } -} - -impl

ChainGraph

{ - /// Returns a reference to the internal [`SparseChain`]. - pub fn chain(&self) -> &SparseChain

{ - &self.chain - } - - /// Returns a reference to the internal [`TxGraph`]. - pub fn graph(&self) -> &TxGraph { - &self.graph - } -} - -impl

ChainGraph

-where - P: ChainPosition, -{ - /// Create a new chain graph from a `chain` and a `graph`. - /// - /// There are two reasons this can return an `Err`: - /// - /// 1. There is a transaction in the `chain` that does not have its corresponding full - /// transaction in `graph`. - /// 2. The `chain` has two transactions that are allegedly in it, but they conflict in the `graph` - /// (so could not possibly be in the same chain). - pub fn new(chain: SparseChain

, graph: TxGraph) -> Result> { - let mut missing = HashSet::default(); - for (pos, txid) in chain.txids() { - if let Some(tx) = graph.get_tx(*txid) { - let conflict = graph - .walk_conflicts(tx, |_, txid| Some((chain.tx_position(txid)?.clone(), txid))) - .next(); - if let Some((conflict_pos, conflict)) = conflict { - return Err(NewError::Conflict { - a: (pos.clone(), *txid), - b: (conflict_pos, conflict), - }); - } - } else { - missing.insert(*txid); - } - } - - if !missing.is_empty() { - return Err(NewError::Missing(missing)); - } - - Ok(Self { chain, graph }) - } - - /// Take an update in the form of a [`SparseChain

`][`SparseChain`] and attempt to turn it - /// into a chain graph by filling in full transactions from `self` and from `new_txs`. This - /// returns a `ChainGraph>` where the [`Cow<'a, T>`] will borrow the transaction if it - /// got it from `self`. - /// - /// This is useful when interacting with services like an electrum server which returns a list - /// of txids and heights when calling [`script_get_history`], which can easily be inserted into a - /// [`SparseChain`][`SparseChain`]. From there, you need to figure out which full - /// transactions you are missing in your chain graph and form `new_txs`. You then use - /// `inflate_update` to turn this into an update `ChainGraph>` and finally - /// use [`determine_changeset`] to generate the changeset from it. - /// - /// [`SparseChain`]: crate::sparse_chain::SparseChain - /// [`Cow<'a, T>`]: std::borrow::Cow - /// [`script_get_history`]: https://docs.rs/electrum-client/latest/electrum_client/trait.ElectrumApi.html#tymethod.script_get_history - /// [`determine_changeset`]: Self::determine_changeset - pub fn inflate_update( - &self, - update: SparseChain

, - new_txs: impl IntoIterator, - ) -> Result, NewError

> { - let mut inflated_chain = SparseChain::default(); - let mut inflated_graph = TxGraph::default(); - - for (height, hash) in update.checkpoints().clone().into_iter() { - let _ = inflated_chain - .insert_checkpoint(BlockId { height, hash }) - .expect("must insert"); - } - - // [TODO] @evanlinjin: These need better comments - // - copy transactions that have changed positions into the graph - // - add new transactions to an inflated chain - for (pos, txid) in update.txids() { - match self.chain.tx_position(*txid) { - Some(original_pos) => { - if original_pos != pos { - let tx = self - .graph - .get_tx(*txid) - .expect("tx must exist as it is referenced in sparsechain") - .clone(); - let _ = inflated_chain - .insert_tx(*txid, pos.clone()) - .expect("must insert since this was already in update"); - let _ = inflated_graph.insert_tx(tx); - } - } - None => { - let _ = inflated_chain - .insert_tx(*txid, pos.clone()) - .expect("must insert since this was already in update"); - } - } - } - - for tx in new_txs { - let _ = inflated_graph.insert_tx(tx); - } - - ChainGraph::new(inflated_chain, inflated_graph) - } - - /// Gets the checkpoint limit. - /// - /// Refer to [`SparseChain::checkpoint_limit`] for more. - pub fn checkpoint_limit(&self) -> Option { - self.chain.checkpoint_limit() - } - - /// Sets the checkpoint limit. - /// - /// Refer to [`SparseChain::set_checkpoint_limit`] for more. - pub fn set_checkpoint_limit(&mut self, limit: Option) { - self.chain.set_checkpoint_limit(limit) - } - - /// Determines the changes required to invalidate checkpoints `from_height` (inclusive) and - /// above. Displaced transactions will have their positions moved to [`TxHeight::Unconfirmed`]. - pub fn invalidate_checkpoints_preview(&self, from_height: u32) -> ChangeSet

{ - ChangeSet { - chain: self.chain.invalidate_checkpoints_preview(from_height), - ..Default::default() - } - } - - /// Invalidate checkpoints `from_height` (inclusive) and above. Displaced transactions will be - /// re-positioned to [`TxHeight::Unconfirmed`]. - /// - /// This is equivalent to calling [`Self::invalidate_checkpoints_preview`] and - /// [`Self::apply_changeset`] in sequence. - pub fn invalidate_checkpoints(&mut self, from_height: u32) -> ChangeSet

- where - ChangeSet

: Clone, - { - let changeset = self.invalidate_checkpoints_preview(from_height); - self.apply_changeset(changeset.clone()); - changeset - } - - /// Get a transaction currently in the underlying [`SparseChain`]. - /// - /// This does not necessarily mean that it is *confirmed* in the blockchain; it might just be in - /// the unconfirmed transaction list within the [`SparseChain`]. - pub fn get_tx_in_chain(&self, txid: Txid) -> Option<(&P, &Transaction)> { - let position = self.chain.tx_position(txid)?; - let full_tx = self.graph.get_tx(txid).expect("must exist"); - Some((position, full_tx)) - } - - /// Determines the changes required to insert a transaction into the inner [`ChainGraph`] and - /// [`SparseChain`] at the given `position`. - /// - /// If inserting it into the chain `position` will result in conflicts, the returned - /// [`ChangeSet`] should evict conflicting transactions. - pub fn insert_tx_preview( - &self, - tx: Transaction, - pos: P, - ) -> Result, InsertTxError

> { - let mut changeset = ChangeSet { - chain: self.chain.insert_tx_preview(tx.txid(), pos)?, - graph: self.graph.insert_tx_preview(tx), - }; - self.fix_conflicts(&mut changeset)?; - Ok(changeset) - } - - /// Inserts [`Transaction`] at the given chain position. - /// - /// This is equivalent to calling [`Self::insert_tx_preview`] and [`Self::apply_changeset`] in - /// sequence. - pub fn insert_tx(&mut self, tx: Transaction, pos: P) -> Result, InsertTxError

> { - let changeset = self.insert_tx_preview(tx, pos)?; - self.apply_changeset(changeset.clone()); - Ok(changeset) - } - - /// Determines the changes required to insert a [`TxOut`] into the internal [`TxGraph`]. - pub fn insert_txout_preview(&self, outpoint: OutPoint, txout: TxOut) -> ChangeSet

{ - ChangeSet { - chain: Default::default(), - graph: self.graph.insert_txout_preview(outpoint, txout), - } - } - - /// Inserts a [`TxOut`] into the internal [`TxGraph`]. - /// - /// This is equivalent to calling [`Self::insert_txout_preview`] and [`Self::apply_changeset`] - /// in sequence. - pub fn insert_txout(&mut self, outpoint: OutPoint, txout: TxOut) -> ChangeSet

{ - let changeset = self.insert_txout_preview(outpoint, txout); - self.apply_changeset(changeset.clone()); - changeset - } - - /// Determines the changes required to insert a `block_id` (a height and block hash) into the - /// chain. - /// - /// If a checkpoint with a different hash already exists at that height, this will return an error. - pub fn insert_checkpoint_preview( - &self, - block_id: BlockId, - ) -> Result, InsertCheckpointError> { - self.chain - .insert_checkpoint_preview(block_id) - .map(|chain_changeset| ChangeSet { - chain: chain_changeset, - ..Default::default() - }) - } - - /// Inserts checkpoint into [`Self`]. - /// - /// This is equivalent to calling [`Self::insert_checkpoint_preview`] and - /// [`Self::apply_changeset`] in sequence. - pub fn insert_checkpoint( - &mut self, - block_id: BlockId, - ) -> Result, InsertCheckpointError> { - let changeset = self.insert_checkpoint_preview(block_id)?; - self.apply_changeset(changeset.clone()); - Ok(changeset) - } - - /// Calculates the difference between self and `update` in the form of a [`ChangeSet`]. - pub fn determine_changeset( - &self, - update: &ChainGraph

, - ) -> Result, UpdateError

> { - let chain_changeset = self - .chain - .determine_changeset(&update.chain) - .map_err(UpdateError::Chain)?; - - let mut changeset = ChangeSet { - chain: chain_changeset, - graph: self.graph.determine_additions(&update.graph), - }; - - self.fix_conflicts(&mut changeset)?; - Ok(changeset) - } - - /// Given a transaction, return an iterator of `txid`s that conflict with it (spends at least - /// one of the same inputs). This iterator includes all descendants of conflicting transactions. - /// - /// This method only returns conflicts that exist in the [`SparseChain`] as transactions that - /// are not included in [`SparseChain`] are already considered as evicted. - pub fn tx_conflicts_in_chain<'a>( - &'a self, - tx: &'a Transaction, - ) -> impl Iterator + 'a { - self.graph.walk_conflicts(tx, move |_, conflict_txid| { - self.chain - .tx_position(conflict_txid) - .map(|conflict_pos| (conflict_pos, conflict_txid)) - }) - } - - /// Fix changeset conflicts. - /// - /// **WARNING:** If there are any missing full txs, conflict resolution will not be complete. In - /// debug mode, this will result in panic. - fn fix_conflicts(&self, changeset: &mut ChangeSet

) -> Result<(), UnresolvableConflict

> { - let mut chain_conflicts = vec![]; - - for (&txid, pos_change) in &changeset.chain.txids { - let pos = match pos_change { - Some(pos) => { - // Ignore txs that are still in the chain -- we only care about new ones - if self.chain.tx_position(txid).is_some() { - continue; - } - pos - } - // Ignore txids that are being deleted by the change (they can't conflict) - None => continue, - }; - - let mut full_tx = self.graph.get_tx(txid); - - if full_tx.is_none() { - full_tx = changeset.graph.tx.iter().find(|tx| tx.txid() == txid) - } - - debug_assert!(full_tx.is_some(), "should have full tx at this point"); - - let full_tx = match full_tx { - Some(full_tx) => full_tx, - None => continue, - }; - - for (conflict_pos, conflict_txid) in self.tx_conflicts_in_chain(full_tx) { - chain_conflicts.push((pos.clone(), txid, conflict_pos, conflict_txid)) - } - } - - for (update_pos, update_txid, conflicting_pos, conflicting_txid) in chain_conflicts { - // We have found a tx that conflicts with our update txid. Only allow this when the - // conflicting tx will be positioned as "unconfirmed" after the update is applied. - // If so, we will modify the changeset to evict the conflicting txid. 
- - // determine the position of the conflicting txid after the current changeset is applied - let conflicting_new_pos = changeset - .chain - .txids - .get(&conflicting_txid) - .map(Option::as_ref) - .unwrap_or(Some(conflicting_pos)); - - match conflicting_new_pos { - None => { - // conflicting txid will be deleted, can ignore - } - Some(existing_new_pos) => match existing_new_pos.height() { - TxHeight::Confirmed(_) => { - // the new position of the conflicting tx is "confirmed", therefore cannot be - // evicted, return error - return Err(UnresolvableConflict { - already_confirmed_tx: (conflicting_pos.clone(), conflicting_txid), - update_tx: (update_pos, update_txid), - }); - } - TxHeight::Unconfirmed => { - // the new position of the conflicting tx is "unconfirmed", therefore it can - // be evicted - changeset.chain.txids.insert(conflicting_txid, None); - } - }, - }; - } - - Ok(()) - } - - /// Applies `changeset` to `self`. - /// - /// **Warning** this method assumes that the changeset is correctly formed. If it is not, the - /// chain graph may behave incorrectly in the future and panic unexpectedly. - pub fn apply_changeset(&mut self, changeset: ChangeSet

) { - self.chain.apply_changeset(changeset.chain); - self.graph.apply_additions(changeset.graph); - } - - /// Applies the `update` chain graph. Note this is shorthand for calling - /// [`Self::determine_changeset()`] and [`Self::apply_changeset()`] in sequence. - pub fn apply_update(&mut self, update: ChainGraph

) -> Result, UpdateError

> { - let changeset = self.determine_changeset(&update)?; - self.apply_changeset(changeset.clone()); - Ok(changeset) - } - - /// Get the full transaction output at an outpoint if it exists in the chain and the graph. - pub fn full_txout(&self, outpoint: OutPoint) -> Option> { - self.chain.full_txout(&self.graph, outpoint) - } - - /// Iterate over the full transactions and their position in the chain ordered by their position - /// in ascending order. - pub fn transactions_in_chain(&self) -> impl DoubleEndedIterator { - self.chain - .txids() - .map(move |(pos, txid)| (pos, self.graph.get_tx(*txid).expect("must exist"))) - } - - /// Find the transaction in the chain that spends `outpoint`. - /// - /// This uses the input/output relationships in the internal `graph`. Note that the transaction - /// which includes `outpoint` does not need to be in the `graph` or the `chain` for this to - /// return `Some(_)`. - pub fn spent_by(&self, outpoint: OutPoint) -> Option<(&P, Txid)> { - self.chain.spent_by(&self.graph, outpoint) - } - - /// Whether the chain graph contains any data whatsoever. - pub fn is_empty(&self) -> bool { - self.chain.is_empty() && self.graph.is_empty() - } -} - -/// Represents changes to [`ChainGraph`]. -/// -/// This is essentially a combination of [`sparse_chain::ChangeSet`] and [`tx_graph::Additions`]. -#[derive(Debug, Clone, PartialEq)] -#[cfg_attr( - feature = "serde", - derive(serde::Deserialize, serde::Serialize), - serde( - crate = "serde_crate", - bound( - deserialize = "P: serde::Deserialize<'de>", - serialize = "P: serde::Serialize" - ) - ) -)] -#[must_use] -pub struct ChangeSet

{ - pub chain: sparse_chain::ChangeSet

, - pub graph: tx_graph::Additions, -} - -impl

ChangeSet

{ - /// Returns `true` if this [`ChangeSet`] records no changes. - pub fn is_empty(&self) -> bool { - self.chain.is_empty() && self.graph.is_empty() - } - - /// Returns `true` if this [`ChangeSet`] contains transaction evictions. - pub fn contains_eviction(&self) -> bool { - self.chain - .txids - .iter() - .any(|(_, new_pos)| new_pos.is_none()) - } - - /// Appends the changes in `other` into self such that applying `self` afterward has the same - /// effect as sequentially applying the original `self` and `other`. - pub fn append(&mut self, other: ChangeSet

) - where - P: ChainPosition, - { - self.chain.append(other.chain); - self.graph.append(other.graph); - } -} - -impl

Default for ChangeSet

{ - fn default() -> Self { - Self { - chain: Default::default(), - graph: Default::default(), - } - } -} - -impl

ForEachTxOut for ChainGraph

{ - fn for_each_txout(&self, f: impl FnMut((OutPoint, &TxOut))) { - self.graph.for_each_txout(f) - } -} - -impl

ForEachTxOut for ChangeSet

{ - fn for_each_txout(&self, f: impl FnMut((OutPoint, &TxOut))) { - self.graph.for_each_txout(f) - } -} - -/// Error that may occur when calling [`ChainGraph::new`]. -#[derive(Clone, Debug, PartialEq)] -pub enum NewError

{ - /// Two transactions within the sparse chain conflicted with each other - Conflict { a: (P, Txid), b: (P, Txid) }, - /// One or more transactions in the chain were not in the graph - Missing(HashSet), -} - -impl core::fmt::Display for NewError

{ - fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { - match self { - NewError::Conflict { a, b } => write!( - f, - "Unable to inflate sparse chain to chain graph since transactions {:?} and {:?}", - a, b - ), - NewError::Missing(missing) => write!( - f, - "missing full transactions for {}", - missing - .iter() - .map(|txid| txid.to_string()) - .collect::>() - .join(", ") - ), - } - } -} - -#[cfg(feature = "std")] -impl std::error::Error for NewError

{} - -/// Error that may occur when inserting a transaction. -/// -/// Refer to [`ChainGraph::insert_tx_preview`] and [`ChainGraph::insert_tx`]. -#[derive(Clone, Debug, PartialEq)] -pub enum InsertTxError

{ - Chain(sparse_chain::InsertTxError

), - UnresolvableConflict(UnresolvableConflict

), -} - -impl core::fmt::Display for InsertTxError

{ - fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { - match self { - InsertTxError::Chain(inner) => core::fmt::Display::fmt(inner, f), - InsertTxError::UnresolvableConflict(inner) => core::fmt::Display::fmt(inner, f), - } - } -} - -impl

From> for InsertTxError

{ - fn from(inner: sparse_chain::InsertTxError

) -> Self { - Self::Chain(inner) - } -} - -#[cfg(feature = "std")] -impl std::error::Error for InsertTxError

{} - -/// A nice alias of [`sparse_chain::InsertCheckpointError`]. -pub type InsertCheckpointError = sparse_chain::InsertCheckpointError; - -/// Represents an update failure. -#[derive(Clone, Debug, PartialEq)] -pub enum UpdateError

{ - /// The update chain was inconsistent with the existing chain - Chain(sparse_chain::UpdateError

), - /// A transaction in the update spent the same input as an already confirmed transaction - UnresolvableConflict(UnresolvableConflict

), -} - -impl core::fmt::Display for UpdateError

{ - fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { - match self { - UpdateError::Chain(inner) => core::fmt::Display::fmt(inner, f), - UpdateError::UnresolvableConflict(inner) => core::fmt::Display::fmt(inner, f), - } - } -} - -impl

From> for UpdateError

{ - fn from(inner: sparse_chain::UpdateError

) -> Self { - Self::Chain(inner) - } -} - -#[cfg(feature = "std")] -impl std::error::Error for UpdateError

{} - -/// Represents an unresolvable conflict between an update's transaction and an -/// already-confirmed transaction. -#[derive(Clone, Debug, PartialEq)] -pub struct UnresolvableConflict

{ - pub already_confirmed_tx: (P, Txid), - pub update_tx: (P, Txid), -} - -impl core::fmt::Display for UnresolvableConflict

{ - fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { - let Self { - already_confirmed_tx, - update_tx, - } = self; - write!(f, "update transaction {} at height {:?} conflicts with an already confirmed transaction {} at height {:?}", - update_tx.1, update_tx.0, already_confirmed_tx.1, already_confirmed_tx.0) - } -} - -impl

From> for UpdateError

{ - fn from(inner: UnresolvableConflict

) -> Self { - Self::UnresolvableConflict(inner) - } -} - -impl

From> for InsertTxError

{ - fn from(inner: UnresolvableConflict

) -> Self { - Self::UnresolvableConflict(inner) - } -} - -#[cfg(feature = "std")] -impl std::error::Error for UnresolvableConflict

{} diff --git a/crates/chain/src/keychain.rs b/crates/chain/src/keychain.rs index 1a8b0cc4..f9b2436f 100644 --- a/crates/chain/src/keychain.rs +++ b/crates/chain/src/keychain.rs @@ -8,31 +8,16 @@ //! has a `txout` containing an indexed script pubkey). Internally, this uses [`SpkTxOutIndex`], but //! also maintains "revealed" and "lookahead" index counts per keychain. //! -//! [`KeychainTracker`] combines [`ChainGraph`] and [`KeychainTxOutIndex`] and enforces atomic -//! changes between both these structures. [`KeychainScan`] is a structure used to update to -//! [`KeychainTracker`] and changes made on a [`KeychainTracker`] are reported by -//! [`KeychainChangeSet`]s. -//! //! [`SpkTxOutIndex`]: crate::SpkTxOutIndex use crate::{ - chain_graph::{self, ChainGraph}, collections::BTreeMap, indexed_tx_graph::IndexedAdditions, local_chain::{self, LocalChain}, - sparse_chain::ChainPosition, tx_graph::TxGraph, - Anchor, Append, ForEachTxOut, + Anchor, Append, }; -#[cfg(feature = "miniscript")] -pub mod persist; -#[cfg(feature = "miniscript")] -pub use persist::*; -#[cfg(feature = "miniscript")] -mod tracker; -#[cfg(feature = "miniscript")] -pub use tracker::*; #[cfg(feature = "miniscript")] mod txout_index; #[cfg(feature = "miniscript")] @@ -187,116 +172,6 @@ impl From>> for LocalChangeSet< } } -#[derive(Clone, Debug, PartialEq)] -/// An update that includes the last active indexes of each keychain. -pub struct KeychainScan { - /// The update data in the form of a chain that could be applied - pub update: ChainGraph

, - /// The last active indexes of each keychain - pub last_active_indices: BTreeMap, -} - -impl Default for KeychainScan { - fn default() -> Self { - Self { - update: Default::default(), - last_active_indices: Default::default(), - } - } -} - -impl From> for KeychainScan { - fn from(update: ChainGraph

) -> Self { - KeychainScan { - update, - last_active_indices: Default::default(), - } - } -} - -/// Represents changes to a [`KeychainTracker`]. -/// -/// This is essentially a combination of [`DerivationAdditions`] and [`chain_graph::ChangeSet`]. -#[derive(Clone, Debug)] -#[cfg_attr( - feature = "serde", - derive(serde::Deserialize, serde::Serialize), - serde( - crate = "serde_crate", - bound( - deserialize = "K: Ord + serde::Deserialize<'de>, P: serde::Deserialize<'de>", - serialize = "K: Ord + serde::Serialize, P: serde::Serialize" - ) - ) -)] -#[must_use] -pub struct KeychainChangeSet { - /// The changes in local keychain derivation indices - pub derivation_indices: DerivationAdditions, - /// The changes that have occurred in the blockchain - pub chain_graph: chain_graph::ChangeSet

, -} - -impl Default for KeychainChangeSet { - fn default() -> Self { - Self { - chain_graph: Default::default(), - derivation_indices: Default::default(), - } - } -} - -impl KeychainChangeSet { - /// Returns whether the [`KeychainChangeSet`] is empty (no changes recorded). - pub fn is_empty(&self) -> bool { - self.chain_graph.is_empty() && self.derivation_indices.is_empty() - } - - /// Appends the changes in `other` into `self` such that applying `self` afterward has the same - /// effect as sequentially applying the original `self` and `other`. - /// - /// Note the derivation indices cannot be decreased, so `other` will only change the derivation - /// index for a keychain, if it's value is higher than the one in `self`. - pub fn append(&mut self, other: KeychainChangeSet) - where - K: Ord, - P: ChainPosition, - { - self.derivation_indices.append(other.derivation_indices); - self.chain_graph.append(other.chain_graph); - } -} - -impl From> for KeychainChangeSet { - fn from(changeset: chain_graph::ChangeSet

) -> Self { - Self { - chain_graph: changeset, - ..Default::default() - } - } -} - -impl From> for KeychainChangeSet { - fn from(additions: DerivationAdditions) -> Self { - Self { - derivation_indices: additions, - ..Default::default() - } - } -} - -impl AsRef for KeychainScan { - fn as_ref(&self) -> &TxGraph { - self.update.graph() - } -} - -impl ForEachTxOut for KeychainChangeSet { - fn for_each_txout(&self, f: impl FnMut((bitcoin::OutPoint, &bitcoin::TxOut))) { - self.chain_graph.for_each_txout(f) - } -} - /// Balance, differentiated into various categories. #[derive(Debug, PartialEq, Eq, Clone, Default)] #[cfg_attr( @@ -355,9 +230,8 @@ impl core::ops::Add for Balance { #[cfg(test)] mod test { - use crate::TxHeight; - use super::*; + #[test] fn append_keychain_derivation_indices() { #[derive(Ord, PartialOrd, Eq, PartialEq, Clone, Debug)] @@ -375,25 +249,18 @@ mod test { rhs_di.insert(Keychain::Two, 5); lhs_di.insert(Keychain::Three, 3); rhs_di.insert(Keychain::Four, 4); - let mut lhs = KeychainChangeSet { - derivation_indices: DerivationAdditions(lhs_di), - chain_graph: chain_graph::ChangeSet::::default(), - }; - - let rhs = KeychainChangeSet { - derivation_indices: DerivationAdditions(rhs_di), - chain_graph: chain_graph::ChangeSet::::default(), - }; + let mut lhs = DerivationAdditions(lhs_di); + let rhs = DerivationAdditions(rhs_di); lhs.append(rhs); // Exiting index doesn't update if the new index in `other` is lower than `self`. - assert_eq!(lhs.derivation_indices.0.get(&Keychain::One), Some(&7)); + assert_eq!(lhs.0.get(&Keychain::One), Some(&7)); // Existing index updates if the new index in `other` is higher than `self`. - assert_eq!(lhs.derivation_indices.0.get(&Keychain::Two), Some(&5)); + assert_eq!(lhs.0.get(&Keychain::Two), Some(&5)); // Existing index is unchanged if keychain doesn't exist in `other`. 
- assert_eq!(lhs.derivation_indices.0.get(&Keychain::Three), Some(&3)); + assert_eq!(lhs.0.get(&Keychain::Three), Some(&3)); // New keychain gets added if the keychain is in `other` but not in `self`. - assert_eq!(lhs.derivation_indices.0.get(&Keychain::Four), Some(&4)); + assert_eq!(lhs.0.get(&Keychain::Four), Some(&4)); } } diff --git a/crates/chain/src/keychain/persist.rs b/crates/chain/src/keychain/persist.rs deleted file mode 100644 index 1a3ffab0..00000000 --- a/crates/chain/src/keychain/persist.rs +++ /dev/null @@ -1,108 +0,0 @@ -//! Persistence for changes made to a [`KeychainTracker`]. -//! -//! BDK's [`KeychainTracker`] needs somewhere to persist changes it makes during operation. -//! Operations like giving out a new address are crucial to persist so that next time the -//! application is loaded, it can find transactions related to that address. -//! -//! Note that the [`KeychainTracker`] does not read this persisted data during operation since it -//! always has a copy in memory. -//! -//! [`KeychainTracker`]: crate::keychain::KeychainTracker - -use crate::{keychain, sparse_chain::ChainPosition}; - -/// `Persist` wraps a [`PersistBackend`] to create a convenient staging area for changes before they -/// are persisted. Not all changes made to the [`KeychainTracker`] need to be written to disk right -/// away so you can use [`Persist::stage`] to *stage* it first and then [`Persist::commit`] to -/// finally, write it to disk. -/// -/// [`KeychainTracker`]: keychain::KeychainTracker -#[derive(Debug)] -pub struct Persist { - backend: B, - stage: keychain::KeychainChangeSet, -} - -impl Persist { - /// Create a new `Persist` from a [`PersistBackend`]. - pub fn new(backend: B) -> Self { - Self { - backend, - stage: Default::default(), - } - } - - /// Stage a `changeset` to later persistence with [`commit`]. 
- /// - /// [`commit`]: Self::commit - pub fn stage(&mut self, changeset: keychain::KeychainChangeSet) - where - K: Ord, - P: ChainPosition, - { - self.stage.append(changeset) - } - - /// Get the changes that haven't been committed yet - pub fn staged(&self) -> &keychain::KeychainChangeSet { - &self.stage - } - - /// Commit the staged changes to the underlying persistence backend. - /// - /// Returns a backend-defined error if this fails. - pub fn commit(&mut self) -> Result<(), B::WriteError> - where - B: PersistBackend, - { - self.backend.append_changeset(&self.stage)?; - self.stage = Default::default(); - Ok(()) - } -} - -/// A persistence backend for [`Persist`]. -pub trait PersistBackend { - /// The error the backend returns when it fails to write. - type WriteError: core::fmt::Debug; - - /// The error the backend returns when it fails to load. - type LoadError: core::fmt::Debug; - - /// Appends a new changeset to the persistent backend. - /// - /// It is up to the backend what it does with this. It could store every changeset in a list or - /// it inserts the actual changes into a more structured database. All it needs to guarantee is - /// that [`load_into_keychain_tracker`] restores a keychain tracker to what it should be if all - /// changesets had been applied sequentially. - /// - /// [`load_into_keychain_tracker`]: Self::load_into_keychain_tracker - fn append_changeset( - &mut self, - changeset: &keychain::KeychainChangeSet, - ) -> Result<(), Self::WriteError>; - - /// Applies all the changesets the backend has received to `tracker`. 
- fn load_into_keychain_tracker( - &mut self, - tracker: &mut keychain::KeychainTracker, - ) -> Result<(), Self::LoadError>; -} - -impl PersistBackend for () { - type WriteError = (); - type LoadError = (); - - fn append_changeset( - &mut self, - _changeset: &keychain::KeychainChangeSet, - ) -> Result<(), Self::WriteError> { - Ok(()) - } - fn load_into_keychain_tracker( - &mut self, - _tracker: &mut keychain::KeychainTracker, - ) -> Result<(), Self::LoadError> { - Ok(()) - } -} diff --git a/crates/chain/src/keychain/tracker.rs b/crates/chain/src/keychain/tracker.rs deleted file mode 100644 index fff5ee2b..00000000 --- a/crates/chain/src/keychain/tracker.rs +++ /dev/null @@ -1,308 +0,0 @@ -use bitcoin::Transaction; -use miniscript::{Descriptor, DescriptorPublicKey}; - -use crate::{ - chain_graph::{self, ChainGraph}, - collections::*, - keychain::{KeychainChangeSet, KeychainScan, KeychainTxOutIndex}, - sparse_chain::{self, SparseChain}, - tx_graph::TxGraph, - BlockId, FullTxOut, TxHeight, -}; - -use super::{Balance, DerivationAdditions}; - -/// A convenient combination of a [`KeychainTxOutIndex`] and a [`ChainGraph`]. -/// -/// The [`KeychainTracker`] atomically updates its [`KeychainTxOutIndex`] whenever new chain data is -/// incorporated into its internal [`ChainGraph`]. -#[derive(Clone, Debug)] -pub struct KeychainTracker { - /// Index between script pubkeys to transaction outputs - pub txout_index: KeychainTxOutIndex, - chain_graph: ChainGraph

, -} - -impl KeychainTracker -where - P: sparse_chain::ChainPosition, - K: Ord + Clone + core::fmt::Debug, -{ - /// Add a keychain to the tracker's `txout_index` with a descriptor to derive addresses. - /// This is just shorthand for calling [`KeychainTxOutIndex::add_keychain`] on the internal - /// `txout_index`. - /// - /// Adding a keychain means you will be able to derive new script pubkeys under that keychain - /// and the tracker will discover transaction outputs with those script pubkeys. - pub fn add_keychain(&mut self, keychain: K, descriptor: Descriptor) { - self.txout_index.add_keychain(keychain, descriptor) - } - - /// Get the internal map of keychains to their descriptors. This is just shorthand for calling - /// [`KeychainTxOutIndex::keychains`] on the internal `txout_index`. - pub fn keychains(&mut self) -> &BTreeMap> { - self.txout_index.keychains() - } - - /// Get the checkpoint limit of the internal [`SparseChain`]. - /// - /// Refer to [`SparseChain::checkpoint_limit`] for more. - pub fn checkpoint_limit(&self) -> Option { - self.chain_graph.checkpoint_limit() - } - - /// Set the checkpoint limit of the internal [`SparseChain`]. - /// - /// Refer to [`SparseChain::set_checkpoint_limit`] for more. - pub fn set_checkpoint_limit(&mut self, limit: Option) { - self.chain_graph.set_checkpoint_limit(limit) - } - - /// Determines the resultant [`KeychainChangeSet`] if the given [`KeychainScan`] is applied. - /// - /// Internally, we call [`ChainGraph::determine_changeset`] and also determine the additions of - /// [`KeychainTxOutIndex`]. - pub fn determine_changeset( - &self, - scan: &KeychainScan, - ) -> Result, chain_graph::UpdateError

> { - // TODO: `KeychainTxOutIndex::determine_additions` - let mut derivation_indices = scan.last_active_indices.clone(); - derivation_indices.retain(|keychain, index| { - match self.txout_index.last_revealed_index(keychain) { - Some(existing) => *index > existing, - None => true, - } - }); - - Ok(KeychainChangeSet { - derivation_indices: DerivationAdditions(derivation_indices), - chain_graph: self.chain_graph.determine_changeset(&scan.update)?, - }) - } - - /// Directly applies a [`KeychainScan`] on [`KeychainTracker`]. - /// - /// This is equivalent to calling [`determine_changeset`] and [`apply_changeset`] in sequence. - /// - /// [`determine_changeset`]: Self::determine_changeset - /// [`apply_changeset`]: Self::apply_changeset - pub fn apply_update( - &mut self, - scan: KeychainScan, - ) -> Result, chain_graph::UpdateError

> { - let changeset = self.determine_changeset(&scan)?; - self.apply_changeset(changeset.clone()); - Ok(changeset) - } - - /// Applies the changes in `changeset` to [`KeychainTracker`]. - /// - /// Internally, this calls [`KeychainTxOutIndex::apply_additions`] and - /// [`ChainGraph::apply_changeset`] in sequence. - pub fn apply_changeset(&mut self, changeset: KeychainChangeSet) { - let KeychainChangeSet { - derivation_indices, - chain_graph, - } = changeset; - self.txout_index.apply_additions(derivation_indices); - let _ = self.txout_index.scan(&chain_graph); - self.chain_graph.apply_changeset(chain_graph) - } - - /// Iterates through [`FullTxOut`]s that are considered to exist in our representation of the - /// blockchain/mempool. - /// - /// In other words, these are `txout`s of confirmed and in-mempool transactions, based on our - /// view of the blockchain/mempool. - pub fn full_txouts(&self) -> impl Iterator)> + '_ { - self.txout_index - .txouts() - .filter_map(move |(spk_i, op, _)| Some((spk_i, self.chain_graph.full_txout(op)?))) - } - - /// Iterates through [`FullTxOut`]s that are unspent outputs. - /// - /// Refer to [`full_txouts`] for more. - /// - /// [`full_txouts`]: Self::full_txouts - pub fn full_utxos(&self) -> impl Iterator)> + '_ { - self.full_txouts() - .filter(|(_, txout)| txout.spent_by.is_none()) - } - - /// Returns a reference to the internal [`ChainGraph`]. - pub fn chain_graph(&self) -> &ChainGraph

{ - &self.chain_graph - } - - /// Returns a reference to the internal [`TxGraph`] (which is part of the [`ChainGraph`]). - pub fn graph(&self) -> &TxGraph { - self.chain_graph().graph() - } - - /// Returns a reference to the internal [`SparseChain`] (which is part of the [`ChainGraph`]). - pub fn chain(&self) -> &SparseChain

{ - self.chain_graph().chain() - } - - /// Determines the changes as a result of inserting `block_id` (a height and block hash) into the - /// tracker. - /// - /// The caller is responsible for guaranteeing that a block exists at that height. If a - /// checkpoint already exists at that height with a different hash; this will return an error. - /// Otherwise it will return `Ok(true)` if the checkpoint didn't already exist or `Ok(false)` - /// if it did. - /// - /// **Warning**: This function modifies the internal state of the tracker. You are responsible - /// for persisting these changes to disk if you need to restore them. - pub fn insert_checkpoint_preview( - &self, - block_id: BlockId, - ) -> Result, chain_graph::InsertCheckpointError> { - Ok(KeychainChangeSet { - chain_graph: self.chain_graph.insert_checkpoint_preview(block_id)?, - ..Default::default() - }) - } - - /// Directly insert a `block_id` into the tracker. - /// - /// This is equivalent of calling [`insert_checkpoint_preview`] and [`apply_changeset`] in - /// sequence. - /// - /// [`insert_checkpoint_preview`]: Self::insert_checkpoint_preview - /// [`apply_changeset`]: Self::apply_changeset - pub fn insert_checkpoint( - &mut self, - block_id: BlockId, - ) -> Result, chain_graph::InsertCheckpointError> { - let changeset = self.insert_checkpoint_preview(block_id)?; - self.apply_changeset(changeset.clone()); - Ok(changeset) - } - - /// Determines the changes as a result of inserting a transaction into the inner [`ChainGraph`] - /// and optionally into the inner chain at `position`. - /// - /// **Warning**: This function modifies the internal state of the chain graph. You are - /// responsible for persisting these changes to disk if you need to restore them. - pub fn insert_tx_preview( - &self, - tx: Transaction, - pos: P, - ) -> Result, chain_graph::InsertTxError

> { - Ok(KeychainChangeSet { - chain_graph: self.chain_graph.insert_tx_preview(tx, pos)?, - ..Default::default() - }) - } - - /// Directly insert a transaction into the inner [`ChainGraph`] and optionally into the inner - /// chain at `position`. - /// - /// This is equivalent of calling [`insert_tx_preview`] and [`apply_changeset`] in sequence. - /// - /// [`insert_tx_preview`]: Self::insert_tx_preview - /// [`apply_changeset`]: Self::apply_changeset - pub fn insert_tx( - &mut self, - tx: Transaction, - pos: P, - ) -> Result, chain_graph::InsertTxError

> { - let changeset = self.insert_tx_preview(tx, pos)?; - self.apply_changeset(changeset.clone()); - Ok(changeset) - } - - /// Returns the *balance* of the keychain, i.e., the value of unspent transaction outputs tracked. - /// - /// The caller provides a `should_trust` predicate which must decide whether the value of - /// unconfirmed outputs on this keychain are guaranteed to be realized or not. For example: - /// - /// - For an *internal* (change) keychain, `should_trust` should generally be `true` since even if - /// you lose an internal output due to eviction, you will always gain back the value from whatever output the - /// unconfirmed transaction was spending (since that output is presumably from your wallet). - /// - For an *external* keychain, you might want `should_trust` to return `false` since someone may cancel (by double spending) - /// a payment made to addresses on that keychain. - /// - /// When in doubt set `should_trust` to return false. This doesn't do anything other than change - /// where the unconfirmed output's value is accounted for in `Balance`. 
- pub fn balance(&self, mut should_trust: impl FnMut(&K) -> bool) -> Balance { - let mut immature = 0; - let mut trusted_pending = 0; - let mut untrusted_pending = 0; - let mut confirmed = 0; - let last_sync_height = self.chain().latest_checkpoint().map(|latest| latest.height); - for ((keychain, _), utxo) in self.full_utxos() { - let chain_position = &utxo.chain_position; - - match chain_position.height() { - TxHeight::Confirmed(_) => { - if utxo.is_on_coinbase { - if utxo.is_mature( - last_sync_height - .expect("since it's confirmed we must have a checkpoint"), - ) { - confirmed += utxo.txout.value; - } else { - immature += utxo.txout.value; - } - } else { - confirmed += utxo.txout.value; - } - } - TxHeight::Unconfirmed => { - if should_trust(keychain) { - trusted_pending += utxo.txout.value; - } else { - untrusted_pending += utxo.txout.value; - } - } - } - } - - Balance { - immature, - trusted_pending, - untrusted_pending, - confirmed, - } - } - - /// Returns the balance of all spendable confirmed unspent outputs of this tracker at a - /// particular height. - pub fn balance_at(&self, height: u32) -> u64 { - self.full_txouts() - .filter(|(_, full_txout)| full_txout.is_spendable_at(height)) - .map(|(_, full_txout)| full_txout.txout.value) - .sum() - } -} - -impl Default for KeychainTracker { - fn default() -> Self { - Self { - txout_index: Default::default(), - chain_graph: Default::default(), - } - } -} - -impl AsRef> for KeychainTracker { - fn as_ref(&self) -> &SparseChain

{ - self.chain_graph.chain() - } -} - -impl AsRef for KeychainTracker { - fn as_ref(&self) -> &TxGraph { - self.chain_graph.graph() - } -} - -impl AsRef> for KeychainTracker { - fn as_ref(&self) -> &ChainGraph

{ - &self.chain_graph - } -} diff --git a/crates/chain/src/keychain/txout_index.rs b/crates/chain/src/keychain/txout_index.rs index 397c4338..8c16afc0 100644 --- a/crates/chain/src/keychain/txout_index.rs +++ b/crates/chain/src/keychain/txout_index.rs @@ -166,7 +166,10 @@ impl KeychainTxOutIndex { /// /// This will panic if a different `descriptor` is introduced to the same `keychain`. pub fn add_keychain(&mut self, keychain: K, descriptor: Descriptor) { - let old_descriptor = &*self.keychains.entry(keychain).or_insert(descriptor.clone()); + let old_descriptor = &*self + .keychains + .entry(keychain) + .or_insert_with(|| descriptor.clone()); assert_eq!( &descriptor, old_descriptor, "keychain already contains a different descriptor" diff --git a/crates/chain/src/lib.rs b/crates/chain/src/lib.rs index cbadf170..dc5261e2 100644 --- a/crates/chain/src/lib.rs +++ b/crates/chain/src/lib.rs @@ -19,7 +19,6 @@ //! [Bitcoin Dev Kit]: https://bitcoindevkit.org/ #![no_std] pub use bitcoin; -pub mod chain_graph; mod spk_txout_index; pub use spk_txout_index::*; mod chain_data; @@ -27,7 +26,6 @@ pub use chain_data::*; pub mod indexed_tx_graph; pub mod keychain; pub mod local_chain; -pub mod sparse_chain; mod tx_data_traits; pub mod tx_graph; pub use tx_data_traits::*; diff --git a/crates/chain/src/sparse_chain.rs b/crates/chain/src/sparse_chain.rs deleted file mode 100644 index b9c1e24b..00000000 --- a/crates/chain/src/sparse_chain.rs +++ /dev/null @@ -1,1102 +0,0 @@ -//! Module for structures that maintain sparse (purposely incomplete) snapshots of blockchain data. -//! -//! [`SparseChain`] stores [`Txid`]s ordered by an index that implements [`ChainPosition`] (this -//! represents the transaction's position in the blockchain; by default, [`TxHeight`] is used). -//! [`SparseChain`] also contains "checkpoints" which relate block height to block hash. Changes to -//! a [`SparseChain`] is reported by returning [`ChangeSet`]s. -//! -//! # Updating [`SparseChain`] -//! -//! 
A sparsechain can be thought of as a consistent snapshot of history. A [`SparseChain`] can be -//! updated by applying an update [`SparseChain`] on top, but only if they "connect" via their -//! checkpoints and don't result in unexpected movements of transactions. -//! -//! ``` -//! # use bdk_chain::{BlockId, TxHeight, sparse_chain::*, example_utils::*}; -//! # use bitcoin::BlockHash; -//! # let hash_a = new_hash::("a"); -//! # let hash_b = new_hash::("b"); -//! # let hash_c = new_hash::("c"); -//! # let hash_d = new_hash::("d"); -//! // create empty sparsechain -//! let mut chain = SparseChain::::default(); -//! -//! /* Updating an empty sparsechain will always succeed */ -//! -//! let update = SparseChain::from_checkpoints(vec![ -//! BlockId { -//! height: 1, -//! hash: hash_a, -//! }, -//! BlockId { -//! height: 2, -//! hash: hash_b, -//! }, -//! ]); -//! let _ = chain -//! .apply_update(update) -//! .expect("updating an empty sparsechain will always succeed"); -//! -//! /* To update a non-empty sparsechain, the update must connect */ -//! -//! let update = SparseChain::from_checkpoints(vec![ -//! BlockId { -//! height: 2, -//! hash: hash_b, -//! }, -//! BlockId { -//! height: 3, -//! hash: hash_c, -//! }, -//! ]); -//! let _ = chain -//! .apply_update(update) -//! .expect("we have connected at block height 2, so this must succeed"); -//! ``` -//! -//! ## Invalid updates -//! -//! As shown above, sparsechains can be "connected" by comparing their checkpoints. However, there -//! are situations where two sparsechains cannot connect in a way that guarantees consistency. -//! -//! ``` -//! # use bdk_chain::{BlockId, TxHeight, sparse_chain::*, example_utils::*}; -//! # use bitcoin::BlockHash; -//! # let hash_a = new_hash::("a"); -//! # let hash_b = new_hash::("b"); -//! # let hash_c = new_hash::("c"); -//! # let hash_d = new_hash::("d"); -//! // our sparsechain has two checkpoints -//! let chain = SparseChain::::from_checkpoints(vec![ -//! BlockId { -//! 
height: 1, -//! hash: hash_a, -//! }, -//! BlockId { -//! height: 2, -//! hash: hash_b, -//! }, -//! ]); -//! -//! /* Example of an ambiguous update that does not fully connect */ -//! -//! let ambiguous_update = SparseChain::from_checkpoints(vec![ -//! // the update sort of "connects" at checkpoint 1, but... -//! BlockId { -//! height: 1, -//! hash: hash_a, -//! }, -//! // we cannot determine whether checkpoint 3 connects with checkpoint 2 -//! BlockId { -//! height: 3, -//! hash: hash_c, -//! }, -//! ]); -//! let _ = chain -//! .determine_changeset(&ambiguous_update) -//! .expect_err("cannot apply ambiguous update"); -//! -//! /* Example of an update that completely misses the point */ -//! -//! let disconnected_update = SparseChain::from_checkpoints(vec![ -//! // the last checkpoint in the chain is 2, so 3 and 4 do not connect -//! BlockId { -//! height: 3, -//! hash: hash_c, -//! }, -//! BlockId { -//! height: 4, -//! hash: hash_d, -//! }, -//! ]); -//! let _ = chain -//! .determine_changeset(&disconnected_update) -//! .expect_err("cannot apply a totally-disconnected update"); -//! ``` -//! -//! ## Handling reorgs -//! -//! Updates can be formed to evict data from the original sparsechain. This is useful for handling -//! blockchain reorgs. -//! -//! ``` -//! # use bdk_chain::{BlockId, TxHeight, sparse_chain::*, example_utils::*}; -//! # use bitcoin::BlockHash; -//! # let hash_a = new_hash::("a"); -//! # let hash_b = new_hash::("b"); -//! # let hash_c = new_hash::("c"); -//! # let hash_d = new_hash::("d"); -//! // our chain has a single checkpoint at height 11. -//! let mut chain = SparseChain::::from_checkpoints(vec![BlockId { -//! height: 11, -//! hash: hash_a, -//! }]); -//! -//! // we detect a reorg at height 11, and we introduce a new checkpoint at height 12 -//! let update = SparseChain::from_checkpoints(vec![ -//! BlockId { -//! height: 11, -//! hash: hash_b, -//! }, -//! BlockId { -//! height: 12, -//! hash: hash_c, -//! }, -//! ]); -//! 
let _ = chain -//! .apply_update(update) -//! .expect("we can evict/replace checkpoint 11 since it is the only checkpoint"); -//! -//! // now our `chain` has two checkpoints (11:hash_b & 12:hash_c) -//! // we detect another reorg, this time at height 12. -//! let update = SparseChain::from_checkpoints(vec![ -//! // we connect at checkpoint 11 as this is our "point of agreement". -//! BlockId { -//! height: 11, -//! hash: hash_b, -//! }, -//! BlockId { -//! height: 12, -//! hash: hash_d, -//! }, -//! ]); -//! let _ = chain -//! .apply_update(update) -//! .expect("we have provided a valid point of agreement, so our reorg update will succeed"); -//! ``` -//! -//! ## Movement of transactions during update -//! -//! If the original sparsechain and update sparsechain contain the same transaction at different -//! [`ChainPosition`]s, the transaction is considered as "moved". There are various movements of a -//! transaction that are invalid and update will fail. -//! -//! Valid movements: -//! -//! * When the transaction moved from unconfirmed (in original) to confirmed (in update). In other -//! words, confirming transactions are allowed! -//! * If there has been a reorg at height x, an originally confirmed transaction at height x or -//! above, may move to another height (that is at x or above, including becoming unconfirmed). -//! -//! Invalid movements: -//! -//! * A confirmed transaction cannot move without a reorg. -//! * Even with a reorg, an originally confirmed transaction cannot be moved below the height of the -//! reorg. -//! -//! # Custom [`ChainPosition`] -//! -//! [`SparseChain`] maintains a list of txids ordered by [`ChainPosition`]. By default, [`TxHeight`] -//! is used; however, additional data can be incorporated into the implementation. -//! -//! For example, we can have "perfect ordering" of transactions if our positional index is a -//! combination of block height and transaction position in a block. -//! -//! ``` -//! 
# use bdk_chain::{BlockId, TxHeight, sparse_chain::*, example_utils::*}; -//! # use bitcoin::{BlockHash, Txid}; -//! # let hash_a = new_hash::("a"); -//! # let txid_1 = new_hash::("1"); -//! # let txid_2 = new_hash::("2"); -//! # let txid_3 = new_hash::("3"); -//! #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] -//! pub enum TxPosition { -//! Confirmed { -//! height: u32, // height of block -//! position: u32, // position of transaction in the block -//! }, -//! Unconfirmed, -//! } -//! -//! impl Default for TxPosition { -//! fn default() -> Self { -//! Self::Unconfirmed -//! } -//! } -//! -//! impl ChainPosition for TxPosition { -//! fn height(&self) -> TxHeight { -//! match self { -//! Self::Confirmed { height, .. } => TxHeight::Confirmed(*height), -//! Self::Unconfirmed => TxHeight::Unconfirmed, -//! } -//! } -//! -//! fn max_ord_of_height(height: TxHeight) -> Self { -//! match height { -//! TxHeight::Confirmed(height) => Self::Confirmed { -//! height, -//! position: u32::MAX, -//! }, -//! TxHeight::Unconfirmed => Self::Unconfirmed, -//! } -//! } -//! -//! fn min_ord_of_height(height: TxHeight) -> Self { -//! match height { -//! TxHeight::Confirmed(height) => Self::Confirmed { -//! height, -//! position: u32::MIN, -//! }, -//! TxHeight::Unconfirmed => Self::Unconfirmed, -//! } -//! } -//! } -//! -//! let mut chain = SparseChain::::default(); -//! let _ = chain -//! .insert_checkpoint(BlockId { -//! height: 10, -//! hash: hash_a, -//! }) -//! .unwrap(); -//! let _ = chain -//! .insert_tx( -//! txid_1, -//! TxPosition::Confirmed { -//! height: 9, -//! position: 4321, -//! }, -//! ) -//! .unwrap(); -//! let _ = chain -//! .insert_tx( -//! txid_2, -//! TxPosition::Confirmed { -//! height: 9, -//! position: 1234, -//! }, -//! ) -//! .unwrap(); -//! let _ = chain -//! .insert_tx( -//! txid_3, -//! TxPosition::Confirmed { -//! height: 10, -//! position: 321, -//! }, -//! ) -//! .unwrap(); -//! -//! // transactions are ordered correctly -//! 
assert_eq!( -//! chain.txids().collect::>(), -//! vec![ -//! &( -//! TxPosition::Confirmed { -//! height: 9, -//! position: 1234 -//! }, -//! txid_2 -//! ), -//! &( -//! TxPosition::Confirmed { -//! height: 9, -//! position: 4321 -//! }, -//! txid_1 -//! ), -//! &( -//! TxPosition::Confirmed { -//! height: 10, -//! position: 321 -//! }, -//! txid_3 -//! ), -//! ], -//! ); -//! ``` -use core::{ - fmt::Debug, - ops::{Bound, RangeBounds}, -}; - -use crate::{collections::*, tx_graph::TxGraph, BlockId, FullTxOut, TxHeight}; -use bitcoin::{hashes::Hash, BlockHash, OutPoint, Txid}; - -/// This is a non-monotone structure that tracks relevant [`Txid`]s that are ordered by chain -/// position `P`. -/// -/// We use [`BlockHash`]s alongside their chain height as "checkpoints" to enforce consistency. -/// -/// To "merge" two [`SparseChain`]s, the [`ChangeSet`] can be calculated by calling -/// [`determine_changeset`] and applying the [`ChangeSet`] via [`apply_changeset`]. For convenience, -/// [`apply_update`] does the above two steps in one call. -/// -/// Refer to [module-level documentation] for more. -/// -/// [`determine_changeset`]: Self::determine_changeset -/// [`apply_changeset`]: Self::apply_changeset -/// [`apply_update`]: Self::apply_update -/// [module-level documentation]: crate::sparse_chain -#[derive(Clone, Debug, PartialEq)] -pub struct SparseChain

{ - /// Block height to checkpoint data. - checkpoints: BTreeMap, - /// Txids ordered by the pos `P`. - ordered_txids: BTreeSet<(P, Txid)>, - /// Confirmation heights of txids. - txid_to_pos: HashMap, - /// Limit the number of checkpoints. - checkpoint_limit: Option, -} - -impl

AsRef> for SparseChain

{ - fn as_ref(&self) -> &SparseChain

{ - self - } -} - -impl

Default for SparseChain

{ - fn default() -> Self { - Self { - checkpoints: Default::default(), - ordered_txids: Default::default(), - txid_to_pos: Default::default(), - checkpoint_limit: Default::default(), - } - } -} - -/// Represents a failure when trying to insert a [`Txid`] into [`SparseChain`]. -#[derive(Clone, Debug, PartialEq)] -pub enum InsertTxError

{ - /// Occurs when the [`Txid`] is to be inserted at a height higher than the [`SparseChain`]'s tip. - TxTooHigh { - txid: Txid, - tx_height: u32, - tip_height: Option, - }, - /// Occurs when the [`Txid`] is already in the [`SparseChain`], and the insertion would result in - /// an unexpected move in [`ChainPosition`]. - TxMovedUnexpectedly { - txid: Txid, - original_pos: P, - update_pos: P, - }, -} - -impl core::fmt::Display for InsertTxError

{ - fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { - match self { - InsertTxError::TxTooHigh { - txid, - tx_height, - tip_height, - } => write!( - f, - "txid ({}) cannot be inserted at height ({}) greater than chain tip ({:?})", - txid, tx_height, tip_height - ), - InsertTxError::TxMovedUnexpectedly { - txid, - original_pos, - update_pos, - } => write!( - f, - "txid ({}) insertion resulted in an expected positional move from {:?} to {:?}", - txid, original_pos, update_pos - ), - } - } -} - -#[cfg(feature = "std")] -impl std::error::Error for InsertTxError

{} - -/// Represents a failure when trying to insert a checkpoint into [`SparseChain`]. -#[derive(Clone, Debug, PartialEq)] -pub enum InsertCheckpointError { - /// Occurs when a checkpoint of the same height already exists with a different [`BlockHash`]. - HashNotMatching { - height: u32, - original_hash: BlockHash, - update_hash: BlockHash, - }, -} - -impl core::fmt::Display for InsertCheckpointError { - fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { - write!(f, "{:?}", self) - } -} - -#[cfg(feature = "std")] -impl std::error::Error for InsertCheckpointError {} - -/// Represents an update failure of [`SparseChain`]. -#[derive(Clone, Debug, PartialEq)] -pub enum UpdateError

{ - /// The update cannot be applied to the chain because the chain suffix it represents did not - /// connect to the existing chain. This error case contains the checkpoint height to include so - /// that the chains can connect. - NotConnected(u32), - /// The update contains inconsistent tx states (e.g., it changed the transaction's height). This - /// error is usually the inconsistency found. - TxInconsistent { - txid: Txid, - original_pos: P, - update_pos: P, - }, -} - -impl core::fmt::Display for UpdateError

{ - fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { - match self { - Self::NotConnected(h) => - write!(f, "the checkpoints in the update could not be connected to the checkpoints in the chain, try include checkpoint of height {} to connect", - h), - Self::TxInconsistent { txid, original_pos, update_pos } => - write!(f, "tx ({}) had position ({:?}), but is ({:?}) in the update", - txid, original_pos, update_pos), - } - } -} - -#[cfg(feature = "std")] -impl std::error::Error for UpdateError

{} - -impl SparseChain

{ - /// Creates a new chain from a list of block hashes and heights. The caller must guarantee they - /// are in the same chain. - pub fn from_checkpoints(checkpoints: C) -> Self - where - C: IntoIterator, - { - Self { - checkpoints: checkpoints - .into_iter() - .map(|block_id| block_id.into()) - .collect(), - ..Default::default() - } - } - - /// Get the checkpoint for the last known tip. - pub fn latest_checkpoint(&self) -> Option { - self.checkpoints - .iter() - .last() - .map(|(&height, &hash)| BlockId { height, hash }) - } - - /// Get the checkpoint at the given height if it exists. - pub fn checkpoint_at(&self, height: u32) -> Option { - self.checkpoints - .get(&height) - .map(|&hash| BlockId { height, hash }) - } - - /// Return the [`ChainPosition`] of a `txid`. - /// - /// This returns [`None`] if the transaction does not exist. - pub fn tx_position(&self, txid: Txid) -> Option<&P> { - self.txid_to_pos.get(&txid) - } - - /// Return a [`BTreeMap`] of all checkpoints (block hashes by height). - pub fn checkpoints(&self) -> &BTreeMap { - &self.checkpoints - } - - /// Return an iterator over checkpoints in a height range, in ascending height order. - pub fn range_checkpoints( - &self, - range: impl RangeBounds, - ) -> impl DoubleEndedIterator + '_ { - self.checkpoints - .range(range) - .map(|(&height, &hash)| BlockId { height, hash }) - } - - /// Preview changes of updating [`Self`] with another chain that connects to it. - /// - /// If the `update` wishes to introduce confirmed transactions, it must contain a checkpoint - /// that is exactly the same height as one of `self`'s checkpoints. - /// - /// To invalidate from a given checkpoint, `update` must contain a checkpoint of the same height - /// but different hash. Invalidated checkpoints result in invalidated transactions becoming - /// "unconfirmed". - /// - /// An error will be returned if an update results in inconsistencies or if the update does - /// not correctly connect with `self`. 
- /// - /// Refer to [module-level documentation] for more. - /// - /// [module-level documentation]: crate::sparse_chain - pub fn determine_changeset(&self, update: &Self) -> Result, UpdateError

> { - let agreement_point = update - .checkpoints - .iter() - .rev() - .find(|&(height, hash)| self.checkpoints.get(height) == Some(hash)) - .map(|(&h, _)| h); - - let last_update_cp = update.checkpoints.iter().last().map(|(&h, _)| h); - - // the lower bound of the invalidation range - let invalid_lb = if last_update_cp.is_none() || last_update_cp == agreement_point { - // if the agreement point is the last update checkpoint, or there are no update checkpoints, - // no invalidation is required - u32::MAX - } else { - agreement_point.map(|h| h + 1).unwrap_or(0) - }; - - // the first checkpoint of the sparsechain to invalidate (if any) - let invalid_from = self.checkpoints.range(invalid_lb..).next().map(|(&h, _)| h); - - // the first checkpoint to invalidate (if any) should be represented in the update - if let Some(first_invalid) = invalid_from { - if !update.checkpoints.contains_key(&first_invalid) { - return Err(UpdateError::NotConnected(first_invalid)); - } - } - - for (&txid, update_pos) in &update.txid_to_pos { - // ensure all currently confirmed txs are still at the same height (unless they are - // within invalidation range, or to be confirmed) - if let Some(original_pos) = &self.txid_to_pos.get(&txid) { - if original_pos.height() < TxHeight::Confirmed(invalid_lb) - && original_pos != &update_pos - { - return Err(UpdateError::TxInconsistent { - txid, - original_pos: P::clone(original_pos), - update_pos: update_pos.clone(), - }); - } - } - } - - // create initial change-set based on checkpoints and txids that are to be "invalidated". 
- let mut changeset = invalid_from - .map(|from_height| self.invalidate_checkpoints_preview(from_height)) - .unwrap_or_default(); - - for (&height, &new_hash) in &update.checkpoints { - let original_hash = self.checkpoints.get(&height).cloned(); - - let update_hash = *changeset - .checkpoints - .entry(height) - .and_modify(|change| *change = Some(new_hash)) - .or_insert_with(|| Some(new_hash)); - - if original_hash == update_hash { - changeset.checkpoints.remove(&height); - } - } - - for (txid, new_pos) in &update.txid_to_pos { - let original_pos = self.txid_to_pos.get(txid).cloned(); - - let update_pos = changeset - .txids - .entry(*txid) - .and_modify(|change| *change = Some(new_pos.clone())) - .or_insert_with(|| Some(new_pos.clone())); - - if original_pos == *update_pos { - changeset.txids.remove(txid); - } - } - - Ok(changeset) - } - - /// Updates [`SparseChain`] with another chain that connects to it. - /// - /// This is equivilant to calling [`determine_changeset`] and [`apply_changeset`] in sequence. - /// - /// [`determine_changeset`]: Self::determine_changeset - /// [`apply_changeset`]: Self::apply_changeset - pub fn apply_update(&mut self, update: Self) -> Result, UpdateError

> { - let changeset = self.determine_changeset(&update)?; - self.apply_changeset(changeset.clone()); - Ok(changeset) - } - - pub fn apply_changeset(&mut self, changeset: ChangeSet

) { - for (height, update_hash) in changeset.checkpoints { - let _original_hash = match update_hash { - Some(update_hash) => self.checkpoints.insert(height, update_hash), - None => self.checkpoints.remove(&height), - }; - } - - for (txid, update_pos) in changeset.txids { - let original_pos = self.txid_to_pos.remove(&txid); - - if let Some(pos) = original_pos { - self.ordered_txids.remove(&(pos, txid)); - } - - if let Some(pos) = update_pos { - self.txid_to_pos.insert(txid, pos.clone()); - self.ordered_txids.insert((pos.clone(), txid)); - } - } - - self.prune_checkpoints(); - } - - /// Derives a [`ChangeSet`] that assumes that there are no preceding changesets. - /// - /// The changeset returned will record additions of all [`Txid`]s and checkpoints included in - /// [`Self`]. - pub fn initial_changeset(&self) -> ChangeSet

{ - ChangeSet { - checkpoints: self - .checkpoints - .iter() - .map(|(height, hash)| (*height, Some(*hash))) - .collect(), - txids: self - .ordered_txids - .iter() - .map(|(pos, txid)| (*txid, Some(pos.clone()))) - .collect(), - } - } - - /// Determines the [`ChangeSet`] when checkpoints `from_height` (inclusive) and above are - /// invalidated. Displaced [`Txid`]s will be repositioned to [`TxHeight::Unconfirmed`]. - pub fn invalidate_checkpoints_preview(&self, from_height: u32) -> ChangeSet

{ - ChangeSet::

{ - checkpoints: self - .checkpoints - .range(from_height..) - .map(|(height, _)| (*height, None)) - .collect(), - // invalidated transactions become unconfirmed - txids: self - .range_txids_by_height(TxHeight::Confirmed(from_height)..TxHeight::Unconfirmed) - .map(|(_, txid)| (*txid, Some(P::max_ord_of_height(TxHeight::Unconfirmed)))) - .collect(), - } - } - - /// Invalidate checkpoints `from_height` (inclusive) and above. - /// - /// This is equivalent to calling [`invalidate_checkpoints_preview`] and [`apply_changeset`] in - /// sequence. - /// - /// [`invalidate_checkpoints_preview`]: Self::invalidate_checkpoints_preview - /// [`apply_changeset`]: Self::apply_changeset - pub fn invalidate_checkpoints(&mut self, from_height: u32) -> ChangeSet

{ - let changeset = self.invalidate_checkpoints_preview(from_height); - self.apply_changeset(changeset.clone()); - changeset - } - - /// Determines the [`ChangeSet`] when all transactions of height [`TxHeight::Unconfirmed`] are - /// removed completely. - pub fn clear_mempool_preview(&self) -> ChangeSet

{ - let mempool_range = &( - P::min_ord_of_height(TxHeight::Unconfirmed), - Txid::all_zeros(), - )..; - - let txids = self - .ordered_txids - .range(mempool_range) - .map(|(_, txid)| (*txid, None)) - .collect(); - - ChangeSet::

{ - txids, - ..Default::default() - } - } - - /// Clears all transactions of height [`TxHeight::Unconfirmed`]. - /// - /// This is equivalent to calling [`clear_mempool_preview`] and [`apply_changeset`] in sequence. - /// - /// [`clear_mempool_preview`]: Self::clear_mempool_preview - /// [`apply_changeset`]: Self::apply_changeset - /// [`ChangeSet`]. - pub fn clear_mempool(&mut self) -> ChangeSet

{ - let changeset = self.clear_mempool_preview(); - self.apply_changeset(changeset.clone()); - changeset - } - - /// Determines the resultant [`ChangeSet`] if [`Txid`] was inserted at position `pos`. - /// - /// Changes to the [`Txid`]'s position are allowed (under the rules noted in - /// [module-level documentation]) and will be reflected in the [`ChangeSet`]. - /// - /// [module-level documentation]: crate::sparse_chain - pub fn insert_tx_preview(&self, txid: Txid, pos: P) -> Result, InsertTxError

> { - let mut update = Self::default(); - - if let Some(block_id) = self.latest_checkpoint() { - let _old_hash = update.checkpoints.insert(block_id.height, block_id.hash); - debug_assert!(_old_hash.is_none()); - } - - let tip_height = self.checkpoints.iter().last().map(|(h, _)| *h); - if let TxHeight::Confirmed(tx_height) = pos.height() { - if Some(tx_height) > tip_height { - return Err(InsertTxError::TxTooHigh { - txid, - tx_height, - tip_height, - }); - } - } - - let _old_pos = update.txid_to_pos.insert(txid, pos.clone()); - debug_assert!(_old_pos.is_none()); - - let _inserted = update.ordered_txids.insert((pos, txid)); - debug_assert!(_inserted, "must insert tx"); - - match self.determine_changeset(&update) { - Ok(changeset) => Ok(changeset), - Err(UpdateError::NotConnected(_)) => panic!("should always connect"), - Err(UpdateError::TxInconsistent { - txid: inconsistent_txid, - original_pos, - update_pos, - }) => Err(InsertTxError::TxMovedUnexpectedly { - txid: inconsistent_txid, - original_pos, - update_pos, - }), - } - } - - /// Inserts a given [`Txid`] at `pos`. - /// - /// This is equivilant to calling [`insert_tx_preview`] and [`apply_changeset`] in sequence. - /// - /// [`insert_tx_preview`]: Self::insert_tx_preview - /// [`apply_changeset`]: Self::apply_changeset - pub fn insert_tx(&mut self, txid: Txid, pos: P) -> Result, InsertTxError

> { - let changeset = self.insert_tx_preview(txid, pos)?; - self.apply_changeset(changeset.clone()); - Ok(changeset) - } - - /// Determines the resultant [`ChangeSet`] if [`BlockId`] was inserted. - /// - /// If the change would result in a change in block hash of a certain height, insertion would - /// fail. - pub fn insert_checkpoint_preview( - &self, - block_id: BlockId, - ) -> Result, InsertCheckpointError> { - let mut update = Self::default(); - - if let Some(block_id) = self.latest_checkpoint() { - let _old_hash = update.checkpoints.insert(block_id.height, block_id.hash); - debug_assert!(_old_hash.is_none()); - } - - if let Some(original_hash) = update.checkpoints.insert(block_id.height, block_id.hash) { - if original_hash != block_id.hash { - return Err(InsertCheckpointError::HashNotMatching { - height: block_id.height, - original_hash, - update_hash: block_id.hash, - }); - } - } - - match self.determine_changeset(&update) { - Ok(changeset) => Ok(changeset), - Err(UpdateError::NotConnected(_)) => panic!("error should have caught above"), - Err(UpdateError::TxInconsistent { .. }) => panic!("should never add txs"), - } - } - - /// Insert a checkpoint ([`BlockId`]). - /// - /// This is equivalent to calling [`insert_checkpoint_preview`] and [`apply_changeset`] in - /// sequence. - /// - /// [`insert_checkpoint_preview`]: Self::insert_checkpoint_preview - /// [`apply_changeset`]: Self::apply_changeset - pub fn insert_checkpoint( - &mut self, - block_id: BlockId, - ) -> Result, InsertCheckpointError> { - let changeset = self.insert_checkpoint_preview(block_id)?; - self.apply_changeset(changeset.clone()); - Ok(changeset) - } - - /// Iterate over all [`Txid`]s ordered by their [`ChainPosition`]. - pub fn txids(&self) -> impl DoubleEndedIterator + ExactSizeIterator + '_ { - self.ordered_txids.iter() - } - - /// Iterate over a sub-range of positioned [`Txid`]s. 
- pub fn range_txids(&self, range: R) -> impl DoubleEndedIterator + '_ - where - R: RangeBounds<(P, Txid)>, - { - let map_bound = |b: Bound<&(P, Txid)>| match b { - Bound::Included((pos, txid)) => Bound::Included((pos.clone(), *txid)), - Bound::Excluded((pos, txid)) => Bound::Excluded((pos.clone(), *txid)), - Bound::Unbounded => Bound::Unbounded, - }; - - self.ordered_txids - .range((map_bound(range.start_bound()), map_bound(range.end_bound()))) - } - - /// Iterate over a sub-range of positioned [`Txid`]s, where the range is defined by - /// [`ChainPosition`] only. - pub fn range_txids_by_position( - &self, - range: R, - ) -> impl DoubleEndedIterator + '_ - where - R: RangeBounds

, - { - let map_bound = |b: Bound<&P>, inc: Txid, exc: Txid| match b { - Bound::Included(pos) => Bound::Included((pos.clone(), inc)), - Bound::Excluded(pos) => Bound::Excluded((pos.clone(), exc)), - Bound::Unbounded => Bound::Unbounded, - }; - - self.ordered_txids.range(( - map_bound(range.start_bound(), min_txid(), max_txid()), - map_bound(range.end_bound(), max_txid(), min_txid()), - )) - } - - /// Iterate over a sub-range of positioned [`Txid`]s, where the range is defined by [`TxHeight`] - /// only. - pub fn range_txids_by_height( - &self, - range: R, - ) -> impl DoubleEndedIterator + '_ - where - R: RangeBounds, - { - let ord_it = |height, is_max| match is_max { - true => P::max_ord_of_height(height), - false => P::min_ord_of_height(height), - }; - - let map_bound = |b: Bound<&TxHeight>, inc: (bool, Txid), exc: (bool, Txid)| match b { - Bound::Included(&h) => Bound::Included((ord_it(h, inc.0), inc.1)), - Bound::Excluded(&h) => Bound::Excluded((ord_it(h, exc.0), exc.1)), - Bound::Unbounded => Bound::Unbounded, - }; - - self.ordered_txids.range(( - map_bound(range.start_bound(), (false, min_txid()), (true, max_txid())), - map_bound(range.end_bound(), (true, max_txid()), (false, min_txid())), - )) - } - - /// Attempt to retrieve a [`FullTxOut`] of the given `outpoint`. - /// - /// This will return `Some` only if the output's transaction is in both `self` and `graph`. - pub fn full_txout(&self, graph: &TxGraph, outpoint: OutPoint) -> Option> { - let chain_pos = self.tx_position(outpoint.txid)?; - - let tx = graph.get_tx(outpoint.txid)?; - let is_on_coinbase = tx.is_coin_base(); - let txout = tx.output.get(outpoint.vout as usize)?.clone(); - - let spent_by = self - .spent_by(graph, outpoint) - .map(|(pos, txid)| (pos.clone(), txid)); - - Some(FullTxOut { - outpoint, - txout, - chain_position: chain_pos.clone(), - spent_by, - is_on_coinbase, - }) - } - - /// Returns the value set as the checkpoint limit. - /// - /// Refer to [`set_checkpoint_limit`]. 
- /// - /// [`set_checkpoint_limit`]: Self::set_checkpoint_limit - pub fn checkpoint_limit(&self) -> Option { - self.checkpoint_limit - } - - /// Set the checkpoint limit. - /// - /// The checkpoint limit restricts the number of checkpoints that can be stored in [`Self`]. - /// Oldest checkpoints are pruned first. - pub fn set_checkpoint_limit(&mut self, limit: Option) { - self.checkpoint_limit = limit; - self.prune_checkpoints(); - } - - /// Return [`Txid`]s that would be added to the sparse chain if this `changeset` was applied. - pub fn changeset_additions<'a>( - &'a self, - changeset: &'a ChangeSet

, - ) -> impl Iterator + 'a { - changeset - .txids - .iter() - .filter(move |(&txid, pos)| { - pos.is_some() /*it was not a deletion*/ && - self.tx_position(txid).is_none() /* we don't have the txid already */ - }) - .map(|(&txid, _)| txid) - } - - fn prune_checkpoints(&mut self) -> Option> { - let limit = self.checkpoint_limit?; - - // find the last height to be pruned - let last_height = *self.checkpoints.keys().rev().nth(limit)?; - // first height to be kept - let keep_height = last_height + 1; - - let mut split = self.checkpoints.split_off(&keep_height); - core::mem::swap(&mut self.checkpoints, &mut split); - - Some(split) - } - - /// Finds the transaction in the chain that spends `outpoint`. - /// - /// [`TxGraph`] is used to provide the spend relationships. - /// - /// Note that the transaction including `outpoint` does not need to be in the `graph` or the - /// `chain` for this to return `Some`. - pub fn spent_by(&self, graph: &TxGraph, outpoint: OutPoint) -> Option<(&P, Txid)> { - graph - .outspends(outpoint) - .iter() - .find_map(|&txid| Some((self.tx_position(txid)?, txid))) - } - - /// Returns whether the sparse chain contains any checkpoints or transactions. - pub fn is_empty(&self) -> bool { - self.checkpoints.is_empty() && self.txid_to_pos.is_empty() - } -} - -/// The return value of [`determine_changeset`]. -/// -/// [`determine_changeset`]: SparseChain::determine_changeset. -#[derive(Debug, Clone, PartialEq)] -#[cfg_attr( - feature = "serde", - derive(serde::Deserialize, serde::Serialize), - serde(crate = "serde_crate") -)] -#[must_use] -pub struct ChangeSet

{ - pub checkpoints: BTreeMap>, - pub txids: BTreeMap>, -} - -impl Default for ChangeSet { - fn default() -> Self { - Self { - checkpoints: Default::default(), - txids: Default::default(), - } - } -} - -impl

ChangeSet

{ - /// Appends the changes of `other` into self such that applying `self` afterward has the same - /// effect as sequentially applying the original `self` and `other`. - pub fn append(&mut self, mut other: Self) - where - P: ChainPosition, - { - self.checkpoints.append(&mut other.checkpoints); - self.txids.append(&mut other.txids); - } - - /// Whether this changeset contains no changes. - pub fn is_empty(&self) -> bool { - self.checkpoints.is_empty() && self.txids.is_empty() - } -} - -fn min_txid() -> Txid { - Txid::from_inner([0x00; 32]) -} - -fn max_txid() -> Txid { - Txid::from_inner([0xff; 32]) -} - -/// Represents a position in which transactions are ordered in [`SparseChain`]. -/// -/// [`ChainPosition`] implementations must be [`Ord`] by [`TxHeight`] first. -pub trait ChainPosition: - core::fmt::Debug + Clone + Eq + PartialOrd + Ord + core::hash::Hash + Send + Sync + 'static -{ - /// Get the transaction height of the position. - fn height(&self) -> TxHeight; - - /// Get the position's upper bound of a given height. - fn max_ord_of_height(height: TxHeight) -> Self; - - /// Get the position's lower bound of a given height. - fn min_ord_of_height(height: TxHeight) -> Self; - - /// Get the unconfirmed position. 
- fn unconfirmed() -> Self { - Self::max_ord_of_height(TxHeight::Unconfirmed) - } -} - -#[cfg(test)] -pub mod verify_chain_position { - use crate::{sparse_chain::ChainPosition, ConfirmationTime, TxHeight}; - use alloc::vec::Vec; - - pub fn verify_chain_position(head_count: u32, tail_count: u32) { - let values = (0..head_count) - .chain(u32::MAX - tail_count..u32::MAX) - .flat_map(|i| { - [ - P::min_ord_of_height(TxHeight::Confirmed(i)), - P::max_ord_of_height(TxHeight::Confirmed(i)), - ] - }) - .chain([ - P::min_ord_of_height(TxHeight::Unconfirmed), - P::max_ord_of_height(TxHeight::Unconfirmed), - ]) - .collect::>(); - - for i in 0..values.len() { - for j in 0..values.len() { - if i == j { - assert_eq!(values[i], values[j]); - } - if i < j { - assert!(values[i] <= values[j]); - } - if i > j { - assert!(values[i] >= values[j]); - } - } - } - } - - #[test] - fn verify_tx_height() { - verify_chain_position::(1000, 1000); - } - - #[test] - fn verify_confirmation_time() { - verify_chain_position::(1000, 1000); - } -} diff --git a/crates/chain/src/spk_txout_index.rs b/crates/chain/src/spk_txout_index.rs index 0eaec4bb..31fd7883 100644 --- a/crates/chain/src/spk_txout_index.rs +++ b/crates/chain/src/spk_txout_index.rs @@ -20,13 +20,13 @@ use bitcoin::{self, OutPoint, Script, Transaction, TxOut, Txid}; /// Note there is no harm in scanning transactions that disappear from the blockchain or were never /// in there in the first place. `SpkTxOutIndex` is intentionally *monotone* -- you cannot delete or /// modify txouts that have been indexed. To find out which txouts from the index are actually in the -/// chain or unspent, you must use other sources of information like a [`SparseChain`]. +/// chain or unspent, you must use other sources of information like a [`TxGraph`]. 
/// /// [`TxOut`]: bitcoin::TxOut /// [`insert_spk`]: Self::insert_spk /// [`Ord`]: core::cmp::Ord /// [`scan`]: Self::scan -/// [`SparseChain`]: crate::sparse_chain::SparseChain +/// [`TxGraph`]: crate::tx_graph::TxGraph #[derive(Clone, Debug)] pub struct SpkTxOutIndex { /// script pubkeys ordered by index diff --git a/crates/chain/src/tx_graph.rs b/crates/chain/src/tx_graph.rs index 335a1919..56a55568 100644 --- a/crates/chain/src/tx_graph.rs +++ b/crates/chain/src/tx_graph.rs @@ -56,8 +56,8 @@ //! ``` use crate::{ - collections::*, keychain::Balance, Anchor, Append, BlockId, ChainOracle, ForEachTxOut, - FullTxOut, ObservedAs, + collections::*, keychain::Balance, Anchor, Append, BlockId, ChainOracle, ChainPosition, + ForEachTxOut, FullTxOut, }; use alloc::vec::Vec; use bitcoin::{OutPoint, Script, Transaction, TxOut, Txid}; @@ -135,7 +135,7 @@ impl Default for TxNodeInternal { #[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)] pub struct CanonicalTx<'a, T, A> { /// How the transaction is observed as (confirmed or unconfirmed). - pub observed_as: ObservedAs<&'a A>, + pub observed_as: ChainPosition<&'a A>, /// The transaction node (as part of the graph). pub node: TxNode<'a, T, A>, } @@ -614,7 +614,7 @@ impl TxGraph { chain: &C, chain_tip: BlockId, txid: Txid, - ) -> Result>, C::Error> { + ) -> Result>, C::Error> { let (tx_node, anchors, last_seen) = match self.txs.get(&txid) { Some(v) => v, None => return Ok(None), @@ -622,7 +622,7 @@ impl TxGraph { for anchor in anchors { match chain.is_block_in_chain(anchor.anchor_block(), chain_tip)? { - Some(true) => return Ok(Some(ObservedAs::Confirmed(anchor))), + Some(true) => return Ok(Some(ChainPosition::Confirmed(anchor))), _ => continue, } } @@ -651,7 +651,7 @@ impl TxGraph { } } - Ok(Some(ObservedAs::Unconfirmed(*last_seen))) + Ok(Some(ChainPosition::Unconfirmed(*last_seen))) } /// Get the position of the transaction in `chain` with tip `chain_tip`. 
@@ -664,7 +664,7 @@ impl TxGraph { chain: &C, chain_tip: BlockId, txid: Txid, - ) -> Option> { + ) -> Option> { self.try_get_chain_position(chain, chain_tip, txid) .expect("error is infallible") } @@ -686,7 +686,7 @@ impl TxGraph { chain: &C, chain_tip: BlockId, outpoint: OutPoint, - ) -> Result, Txid)>, C::Error> { + ) -> Result, Txid)>, C::Error> { if self .try_get_chain_position(chain, chain_tip, outpoint.txid)? .is_none() @@ -714,7 +714,7 @@ impl TxGraph { chain: &C, static_block: BlockId, outpoint: OutPoint, - ) -> Option<(ObservedAs<&A>, Txid)> { + ) -> Option<(ChainPosition<&A>, Txid)> { self.try_get_chain_spend(chain, static_block, outpoint) .expect("error is infallible") } @@ -786,7 +786,7 @@ impl TxGraph { chain: &'a C, chain_tip: BlockId, outpoints: impl IntoIterator + 'a, - ) -> impl Iterator>), C::Error>> + 'a { + ) -> impl Iterator), C::Error>> + 'a { outpoints .into_iter() .map( @@ -837,7 +837,7 @@ impl TxGraph { chain: &'a C, chain_tip: BlockId, outpoints: impl IntoIterator + 'a, - ) -> impl Iterator>)> + 'a { + ) -> impl Iterator)> + 'a { self.try_filter_chain_txouts(chain, chain_tip, outpoints) .map(|r| r.expect("oracle is infallible")) } @@ -865,7 +865,7 @@ impl TxGraph { chain: &'a C, chain_tip: BlockId, outpoints: impl IntoIterator + 'a, - ) -> impl Iterator>), C::Error>> + 'a { + ) -> impl Iterator), C::Error>> + 'a { self.try_filter_chain_txouts(chain, chain_tip, outpoints) .filter(|r| match r { // keep unspents, drop spents @@ -886,7 +886,7 @@ impl TxGraph { chain: &'a C, chain_tip: BlockId, txouts: impl IntoIterator + 'a, - ) -> impl Iterator>)> + 'a { + ) -> impl Iterator)> + 'a { self.try_filter_chain_unspents(chain, chain_tip, txouts) .map(|r| r.expect("oracle is infallible")) } @@ -919,14 +919,14 @@ impl TxGraph { let (spk_i, txout) = res?; match &txout.chain_position { - ObservedAs::Confirmed(_) => { + ChainPosition::Confirmed(_) => { if txout.is_confirmed_and_spendable(chain_tip.height) { confirmed += txout.txout.value; } else if 
!txout.is_mature(chain_tip.height) { immature += txout.txout.value; } } - ObservedAs::Unconfirmed(_) => { + ChainPosition::Unconfirmed(_) => { if trust_predicate(&spk_i, &txout.txout.script_pubkey) { trusted_pending += txout.txout.value; } else { diff --git a/crates/chain/tests/test_chain_graph.rs b/crates/chain/tests/test_chain_graph.rs deleted file mode 100644 index b5cbf5b9..00000000 --- a/crates/chain/tests/test_chain_graph.rs +++ /dev/null @@ -1,655 +0,0 @@ -#[macro_use] -mod common; - -use bdk_chain::{ - chain_graph::*, - collections::HashSet, - sparse_chain, - tx_graph::{self, TxGraph}, - BlockId, TxHeight, -}; -use bitcoin::{OutPoint, PackedLockTime, Script, Sequence, Transaction, TxIn, TxOut, Witness}; - -#[test] -fn test_spent_by() { - let tx1 = Transaction { - version: 0x01, - lock_time: PackedLockTime(0), - input: vec![], - output: vec![TxOut::default()], - }; - - let op = OutPoint { - txid: tx1.txid(), - vout: 0, - }; - - let tx2 = Transaction { - version: 0x01, - lock_time: PackedLockTime(0), - input: vec![TxIn { - previous_output: op, - ..Default::default() - }], - output: vec![], - }; - let tx3 = Transaction { - version: 0x01, - lock_time: PackedLockTime(42), - input: vec![TxIn { - previous_output: op, - ..Default::default() - }], - output: vec![], - }; - - let mut cg1 = ChainGraph::default(); - let _ = cg1 - .insert_tx(tx1, TxHeight::Unconfirmed) - .expect("should insert"); - let mut cg2 = cg1.clone(); - let _ = cg1 - .insert_tx(tx2.clone(), TxHeight::Unconfirmed) - .expect("should insert"); - let _ = cg2 - .insert_tx(tx3.clone(), TxHeight::Unconfirmed) - .expect("should insert"); - - assert_eq!(cg1.spent_by(op), Some((&TxHeight::Unconfirmed, tx2.txid()))); - assert_eq!(cg2.spent_by(op), Some((&TxHeight::Unconfirmed, tx3.txid()))); -} - -#[test] -fn update_evicts_conflicting_tx() { - let cp_a = BlockId { - height: 0, - hash: h!("A"), - }; - let cp_b = BlockId { - height: 1, - hash: h!("B"), - }; - let cp_b2 = BlockId { - height: 1, - hash: 
h!("B'"), - }; - - let tx_a = Transaction { - version: 0x01, - lock_time: PackedLockTime(0), - input: vec![], - output: vec![TxOut::default()], - }; - - let tx_b = Transaction { - version: 0x01, - lock_time: PackedLockTime(0), - input: vec![TxIn { - previous_output: OutPoint::new(tx_a.txid(), 0), - script_sig: Script::new(), - sequence: Sequence::default(), - witness: Witness::new(), - }], - output: vec![TxOut::default()], - }; - - let tx_b2 = Transaction { - version: 0x02, - lock_time: PackedLockTime(0), - input: vec![TxIn { - previous_output: OutPoint::new(tx_a.txid(), 0), - script_sig: Script::new(), - sequence: Sequence::default(), - witness: Witness::new(), - }], - output: vec![TxOut::default(), TxOut::default()], - }; - { - let mut cg1 = { - let mut cg = ChainGraph::default(); - let _ = cg.insert_checkpoint(cp_a).expect("should insert cp"); - let _ = cg - .insert_tx(tx_a.clone(), TxHeight::Confirmed(0)) - .expect("should insert tx"); - let _ = cg - .insert_tx(tx_b.clone(), TxHeight::Unconfirmed) - .expect("should insert tx"); - cg - }; - let cg2 = { - let mut cg = ChainGraph::default(); - let _ = cg - .insert_tx(tx_b2.clone(), TxHeight::Unconfirmed) - .expect("should insert tx"); - cg - }; - - let changeset = ChangeSet:: { - chain: sparse_chain::ChangeSet { - checkpoints: Default::default(), - txids: [ - (tx_b.txid(), None), - (tx_b2.txid(), Some(TxHeight::Unconfirmed)), - ] - .into(), - }, - graph: tx_graph::Additions { - tx: [tx_b2.clone()].into(), - txout: [].into(), - ..Default::default() - }, - }; - assert_eq!( - cg1.determine_changeset(&cg2), - Ok(changeset.clone()), - "tx should be evicted from mempool" - ); - - cg1.apply_changeset(changeset); - } - - { - let cg1 = { - let mut cg = ChainGraph::default(); - let _ = cg.insert_checkpoint(cp_a).expect("should insert cp"); - let _ = cg.insert_checkpoint(cp_b).expect("should insert cp"); - let _ = cg - .insert_tx(tx_a.clone(), TxHeight::Confirmed(0)) - .expect("should insert tx"); - let _ = cg - 
.insert_tx(tx_b.clone(), TxHeight::Confirmed(1)) - .expect("should insert tx"); - cg - }; - let cg2 = { - let mut cg = ChainGraph::default(); - let _ = cg - .insert_tx(tx_b2.clone(), TxHeight::Unconfirmed) - .expect("should insert tx"); - cg - }; - assert_eq!( - cg1.determine_changeset(&cg2), - Err(UpdateError::UnresolvableConflict(UnresolvableConflict { - already_confirmed_tx: (TxHeight::Confirmed(1), tx_b.txid()), - update_tx: (TxHeight::Unconfirmed, tx_b2.txid()), - })), - "fail if tx is evicted from valid block" - ); - } - - { - // Given 2 blocks `{A, B}`, and an update that invalidates block B with - // `{A, B'}`, we expect txs that exist in `B` that conflicts with txs - // introduced in the update to be successfully evicted. - let mut cg1 = { - let mut cg = ChainGraph::default(); - let _ = cg.insert_checkpoint(cp_a).expect("should insert cp"); - let _ = cg.insert_checkpoint(cp_b).expect("should insert cp"); - let _ = cg - .insert_tx(tx_a, TxHeight::Confirmed(0)) - .expect("should insert tx"); - let _ = cg - .insert_tx(tx_b.clone(), TxHeight::Confirmed(1)) - .expect("should insert tx"); - cg - }; - let cg2 = { - let mut cg = ChainGraph::default(); - let _ = cg.insert_checkpoint(cp_a).expect("should insert cp"); - let _ = cg.insert_checkpoint(cp_b2).expect("should insert cp"); - let _ = cg - .insert_tx(tx_b2.clone(), TxHeight::Unconfirmed) - .expect("should insert tx"); - cg - }; - - let changeset = ChangeSet:: { - chain: sparse_chain::ChangeSet { - checkpoints: [(1, Some(h!("B'")))].into(), - txids: [ - (tx_b.txid(), None), - (tx_b2.txid(), Some(TxHeight::Unconfirmed)), - ] - .into(), - }, - graph: tx_graph::Additions { - tx: [tx_b2].into(), - txout: [].into(), - ..Default::default() - }, - }; - assert_eq!( - cg1.determine_changeset(&cg2), - Ok(changeset.clone()), - "tx should be evicted from B", - ); - - cg1.apply_changeset(changeset); - } -} - -#[test] -fn chain_graph_new_missing() { - let tx_a = Transaction { - version: 0x01, - lock_time: PackedLockTime(0), 
- input: vec![], - output: vec![TxOut::default()], - }; - let tx_b = Transaction { - version: 0x02, - lock_time: PackedLockTime(0), - input: vec![], - output: vec![TxOut::default()], - }; - - let update = chain!( - index: TxHeight, - checkpoints: [[0, h!("A")]], - txids: [ - (tx_a.txid(), TxHeight::Confirmed(0)), - (tx_b.txid(), TxHeight::Confirmed(0)) - ] - ); - let mut graph = TxGraph::default(); - - let mut expected_missing = HashSet::new(); - expected_missing.insert(tx_a.txid()); - expected_missing.insert(tx_b.txid()); - - assert_eq!( - ChainGraph::new(update.clone(), graph.clone()), - Err(NewError::Missing(expected_missing.clone())) - ); - - let _ = graph.insert_tx(tx_b.clone()); - expected_missing.remove(&tx_b.txid()); - - assert_eq!( - ChainGraph::new(update.clone(), graph.clone()), - Err(NewError::Missing(expected_missing.clone())) - ); - - let _ = graph.insert_txout( - OutPoint { - txid: tx_a.txid(), - vout: 0, - }, - tx_a.output[0].clone(), - ); - - assert_eq!( - ChainGraph::new(update.clone(), graph.clone()), - Err(NewError::Missing(expected_missing)), - "inserting an output instead of full tx doesn't satisfy constraint" - ); - - let _ = graph.insert_tx(tx_a.clone()); - - let new_graph = ChainGraph::new(update.clone(), graph.clone()).unwrap(); - let expected_graph = { - let mut cg = ChainGraph::::default(); - let _ = cg - .insert_checkpoint(update.latest_checkpoint().unwrap()) - .unwrap(); - let _ = cg.insert_tx(tx_a, TxHeight::Confirmed(0)).unwrap(); - let _ = cg.insert_tx(tx_b, TxHeight::Confirmed(0)).unwrap(); - cg - }; - - assert_eq!(new_graph, expected_graph); -} - -#[test] -fn chain_graph_new_conflicts() { - let tx_a = Transaction { - version: 0x01, - lock_time: PackedLockTime(0), - input: vec![], - output: vec![TxOut::default()], - }; - - let tx_b = Transaction { - version: 0x01, - lock_time: PackedLockTime(0), - input: vec![TxIn { - previous_output: OutPoint::new(tx_a.txid(), 0), - script_sig: Script::new(), - sequence: Sequence::default(), - 
witness: Witness::new(), - }], - output: vec![TxOut::default()], - }; - - let tx_b2 = Transaction { - version: 0x02, - lock_time: PackedLockTime(0), - input: vec![TxIn { - previous_output: OutPoint::new(tx_a.txid(), 0), - script_sig: Script::new(), - sequence: Sequence::default(), - witness: Witness::new(), - }], - output: vec![TxOut::default(), TxOut::default()], - }; - - let chain = chain!( - index: TxHeight, - checkpoints: [[5, h!("A")]], - txids: [ - (tx_a.txid(), TxHeight::Confirmed(1)), - (tx_b.txid(), TxHeight::Confirmed(2)), - (tx_b2.txid(), TxHeight::Confirmed(3)) - ] - ); - - let graph = TxGraph::new([tx_a, tx_b, tx_b2]); - - assert!(matches!( - ChainGraph::new(chain, graph), - Err(NewError::Conflict { .. }) - )); -} - -#[test] -fn test_get_tx_in_chain() { - let mut cg = ChainGraph::default(); - let tx = Transaction { - version: 0x01, - lock_time: PackedLockTime(0), - input: vec![], - output: vec![TxOut::default()], - }; - - let _ = cg.insert_tx(tx.clone(), TxHeight::Unconfirmed).unwrap(); - assert_eq!( - cg.get_tx_in_chain(tx.txid()), - Some((&TxHeight::Unconfirmed, &tx,)) - ); -} - -#[test] -fn test_iterate_transactions() { - let mut cg = ChainGraph::default(); - let txs = (0..3) - .map(|i| Transaction { - version: i, - lock_time: PackedLockTime(0), - input: vec![], - output: vec![TxOut::default()], - }) - .collect::>(); - let _ = cg - .insert_checkpoint(BlockId { - height: 1, - hash: h!("A"), - }) - .unwrap(); - let _ = cg - .insert_tx(txs[0].clone(), TxHeight::Confirmed(1)) - .unwrap(); - let _ = cg.insert_tx(txs[1].clone(), TxHeight::Unconfirmed).unwrap(); - let _ = cg - .insert_tx(txs[2].clone(), TxHeight::Confirmed(0)) - .unwrap(); - - assert_eq!( - cg.transactions_in_chain().collect::>(), - vec![ - (&TxHeight::Confirmed(0), &txs[2],), - (&TxHeight::Confirmed(1), &txs[0],), - (&TxHeight::Unconfirmed, &txs[1],), - ] - ); -} - -/// Start with: block1, block2a, tx1, tx2a -/// Update 1: block2a -> block2b , tx2a -> tx2b -/// Update 2: block2b -> 
block2c , tx2b -> tx2a -#[test] -fn test_apply_changes_reintroduce_tx() { - let block1 = BlockId { - height: 1, - hash: h!("block 1"), - }; - let block2a = BlockId { - height: 2, - hash: h!("block 2a"), - }; - let block2b = BlockId { - height: 2, - hash: h!("block 2b"), - }; - let block2c = BlockId { - height: 2, - hash: h!("block 2c"), - }; - - let tx1 = Transaction { - version: 0, - lock_time: PackedLockTime(1), - input: Vec::new(), - output: [TxOut { - value: 1, - script_pubkey: Script::new(), - }] - .into(), - }; - - let tx2a = Transaction { - version: 0, - lock_time: PackedLockTime('a'.into()), - input: [TxIn { - previous_output: OutPoint::new(tx1.txid(), 0), - ..Default::default() - }] - .into(), - output: [TxOut { - value: 0, - ..Default::default() - }] - .into(), - }; - - let tx2b = Transaction { - lock_time: PackedLockTime('b'.into()), - ..tx2a.clone() - }; - - // block1, block2a, tx1, tx2a - let mut cg = { - let mut cg = ChainGraph::default(); - let _ = cg.insert_checkpoint(block1).unwrap(); - let _ = cg.insert_checkpoint(block2a).unwrap(); - let _ = cg.insert_tx(tx1, TxHeight::Confirmed(1)).unwrap(); - let _ = cg.insert_tx(tx2a.clone(), TxHeight::Confirmed(2)).unwrap(); - cg - }; - - // block2a -> block2b , tx2a -> tx2b - let update = { - let mut update = ChainGraph::default(); - let _ = update.insert_checkpoint(block1).unwrap(); - let _ = update.insert_checkpoint(block2b).unwrap(); - let _ = update - .insert_tx(tx2b.clone(), TxHeight::Confirmed(2)) - .unwrap(); - update - }; - assert_eq!( - cg.apply_update(update).expect("should update"), - ChangeSet { - chain: changeset! 
{ - checkpoints: [(2, Some(block2b.hash))], - txids: [(tx2a.txid(), None), (tx2b.txid(), Some(TxHeight::Confirmed(2)))] - }, - graph: tx_graph::Additions { - tx: [tx2b.clone()].into(), - ..Default::default() - }, - } - ); - - // block2b -> block2c , tx2b -> tx2a - let update = { - let mut update = ChainGraph::default(); - let _ = update.insert_checkpoint(block1).unwrap(); - let _ = update.insert_checkpoint(block2c).unwrap(); - let _ = update - .insert_tx(tx2a.clone(), TxHeight::Confirmed(2)) - .unwrap(); - update - }; - assert_eq!( - cg.apply_update(update).expect("should update"), - ChangeSet { - chain: changeset! { - checkpoints: [(2, Some(block2c.hash))], - txids: [(tx2b.txid(), None), (tx2a.txid(), Some(TxHeight::Confirmed(2)))] - }, - ..Default::default() - } - ); -} - -#[test] -fn test_evict_descendants() { - let block_1 = BlockId { - height: 1, - hash: h!("block 1"), - }; - - let block_2a = BlockId { - height: 2, - hash: h!("block 2 a"), - }; - - let block_2b = BlockId { - height: 2, - hash: h!("block 2 b"), - }; - - let tx_1 = Transaction { - input: vec![TxIn { - previous_output: OutPoint::new(h!("fake tx"), 0), - ..Default::default() - }], - output: vec![TxOut { - value: 10_000, - script_pubkey: Script::new(), - }], - ..common::new_tx(1) - }; - let tx_2 = Transaction { - input: vec![TxIn { - previous_output: OutPoint::new(tx_1.txid(), 0), - ..Default::default() - }], - output: vec![ - TxOut { - value: 20_000, - script_pubkey: Script::new(), - }, - TxOut { - value: 30_000, - script_pubkey: Script::new(), - }, - ], - ..common::new_tx(2) - }; - let tx_3 = Transaction { - input: vec![TxIn { - previous_output: OutPoint::new(tx_2.txid(), 0), - ..Default::default() - }], - output: vec![TxOut { - value: 40_000, - script_pubkey: Script::new(), - }], - ..common::new_tx(3) - }; - let tx_4 = Transaction { - input: vec![TxIn { - previous_output: OutPoint::new(tx_2.txid(), 1), - ..Default::default() - }], - output: vec![TxOut { - value: 40_000, - script_pubkey: 
Script::new(), - }], - ..common::new_tx(4) - }; - let tx_5 = Transaction { - input: vec![TxIn { - previous_output: OutPoint::new(tx_4.txid(), 0), - ..Default::default() - }], - output: vec![TxOut { - value: 40_000, - script_pubkey: Script::new(), - }], - ..common::new_tx(5) - }; - - let tx_conflict = Transaction { - input: vec![TxIn { - previous_output: OutPoint::new(tx_1.txid(), 0), - ..Default::default() - }], - output: vec![TxOut { - value: 12345, - script_pubkey: Script::new(), - }], - ..common::new_tx(6) - }; - - // 1 is spent by 2, 2 is spent by 3 and 4, 4 is spent by 5 - let _txid_1 = tx_1.txid(); - let txid_2 = tx_2.txid(); - let txid_3 = tx_3.txid(); - let txid_4 = tx_4.txid(); - let txid_5 = tx_5.txid(); - - // this tx conflicts with 2 - let txid_conflict = tx_conflict.txid(); - - let cg = { - let mut cg = ChainGraph::::default(); - let _ = cg.insert_checkpoint(block_1); - let _ = cg.insert_checkpoint(block_2a); - let _ = cg.insert_tx(tx_1, TxHeight::Confirmed(1)); - let _ = cg.insert_tx(tx_2, TxHeight::Confirmed(2)); - let _ = cg.insert_tx(tx_3, TxHeight::Confirmed(2)); - let _ = cg.insert_tx(tx_4, TxHeight::Confirmed(2)); - let _ = cg.insert_tx(tx_5, TxHeight::Confirmed(2)); - cg - }; - - let update = { - let mut cg = ChainGraph::::default(); - let _ = cg.insert_checkpoint(block_1); - let _ = cg.insert_checkpoint(block_2b); - let _ = cg.insert_tx(tx_conflict.clone(), TxHeight::Confirmed(2)); - cg - }; - - assert_eq!( - cg.determine_changeset(&update), - Ok(ChangeSet { - chain: changeset! 
{ - checkpoints: [(2, Some(block_2b.hash))], - txids: [(txid_2, None), (txid_3, None), (txid_4, None), (txid_5, None), (txid_conflict, Some(TxHeight::Confirmed(2)))] - }, - graph: tx_graph::Additions { - tx: [tx_conflict.clone()].into(), - ..Default::default() - } - }) - ); - - let err = cg - .insert_tx_preview(tx_conflict, TxHeight::Unconfirmed) - .expect_err("must fail due to conflicts"); - assert!(matches!(err, InsertTxError::UnresolvableConflict(_))); -} diff --git a/crates/chain/tests/test_indexed_tx_graph.rs b/crates/chain/tests/test_indexed_tx_graph.rs index f231f768..dde66ddc 100644 --- a/crates/chain/tests/test_indexed_tx_graph.rs +++ b/crates/chain/tests/test_indexed_tx_graph.rs @@ -8,7 +8,7 @@ use bdk_chain::{ keychain::{Balance, DerivationAdditions, KeychainTxOutIndex}, local_chain::LocalChain, tx_graph::Additions, - BlockId, ConfirmationHeightAnchor, ObservedAs, + BlockId, ChainPosition, ConfirmationHeightAnchor, }; use bitcoin::{secp256k1::Secp256k1, BlockHash, OutPoint, Script, Transaction, TxIn, TxOut}; use miniscript::Descriptor; @@ -266,7 +266,7 @@ fn test_list_owned_txouts() { let confirmed_txouts_txid = txouts .iter() .filter_map(|(_, full_txout)| { - if matches!(full_txout.chain_position, ObservedAs::Confirmed(_)) { + if matches!(full_txout.chain_position, ChainPosition::Confirmed(_)) { Some(full_txout.outpoint.txid) } else { None @@ -277,7 +277,7 @@ fn test_list_owned_txouts() { let unconfirmed_txouts_txid = txouts .iter() .filter_map(|(_, full_txout)| { - if matches!(full_txout.chain_position, ObservedAs::Unconfirmed(_)) { + if matches!(full_txout.chain_position, ChainPosition::Unconfirmed(_)) { Some(full_txout.outpoint.txid) } else { None @@ -288,7 +288,7 @@ fn test_list_owned_txouts() { let confirmed_utxos_txid = utxos .iter() .filter_map(|(_, full_txout)| { - if matches!(full_txout.chain_position, ObservedAs::Confirmed(_)) { + if matches!(full_txout.chain_position, ChainPosition::Confirmed(_)) { Some(full_txout.outpoint.txid) } else { None 
@@ -299,7 +299,7 @@ fn test_list_owned_txouts() { let unconfirmed_utxos_txid = utxos .iter() .filter_map(|(_, full_txout)| { - if matches!(full_txout.chain_position, ObservedAs::Unconfirmed(_)) { + if matches!(full_txout.chain_position, ChainPosition::Unconfirmed(_)) { Some(full_txout.outpoint.txid) } else { None diff --git a/crates/chain/tests/test_keychain_tracker.rs b/crates/chain/tests/test_keychain_tracker.rs deleted file mode 100644 index fe725ea4..00000000 --- a/crates/chain/tests/test_keychain_tracker.rs +++ /dev/null @@ -1,240 +0,0 @@ -#![cfg(feature = "miniscript")] -#[macro_use] -mod common; - -use bdk_chain::{ - keychain::{Balance, KeychainTracker}, - miniscript::{ - bitcoin::{secp256k1::Secp256k1, OutPoint, PackedLockTime, Transaction, TxOut}, - Descriptor, - }, - BlockId, ConfirmationTime, TxHeight, -}; -use bitcoin::TxIn; - -#[test] -fn test_insert_tx() { - let mut tracker = KeychainTracker::default(); - let secp = Secp256k1::new(); - let (descriptor, _) = Descriptor::parse_descriptor(&secp, "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/0/*)").unwrap(); - tracker.add_keychain((), descriptor.clone()); - let txout = TxOut { - value: 100_000, - script_pubkey: descriptor.at_derivation_index(5).script_pubkey(), - }; - - let tx = Transaction { - version: 0x01, - lock_time: PackedLockTime(0), - input: vec![], - output: vec![txout], - }; - - let _ = tracker.txout_index.reveal_to_target(&(), 5); - - let changeset = tracker - .insert_tx_preview(tx.clone(), ConfirmationTime::Unconfirmed { last_seen: 0 }) - .unwrap(); - tracker.apply_changeset(changeset); - assert_eq!( - tracker - .chain_graph() - .transactions_in_chain() - .collect::>(), - vec![(&ConfirmationTime::Unconfirmed { last_seen: 0 }, &tx,)] - ); - - assert_eq!( - tracker - .txout_index - .txouts_of_keychain(&()) - .collect::>(), - vec![( - 5, - OutPoint { - txid: tx.txid(), - vout: 0 - } - )] - ); -} - -#[test] 
-fn test_balance() { - use core::str::FromStr; - #[derive(Debug, Clone, PartialEq, Eq, Ord, PartialOrd)] - enum Keychain { - One, - Two, - } - let mut tracker = KeychainTracker::default(); - let one = Descriptor::from_str("tr([73c5da0a/86'/0'/0']xpub6BgBgsespWvERF3LHQu6CnqdvfEvtMcQjYrcRzx53QJjSxarj2afYWcLteoGVky7D3UKDP9QyrLprQ3VCECoY49yfdDEHGCtMMj92pReUsQ/0/*)#rg247h69").unwrap(); - let two = Descriptor::from_str("tr([73c5da0a/86'/0'/0']xpub6BgBgsespWvERF3LHQu6CnqdvfEvtMcQjYrcRzx53QJjSxarj2afYWcLteoGVky7D3UKDP9QyrLprQ3VCECoY49yfdDEHGCtMMj92pReUsQ/1/*)#ju05rz2a").unwrap(); - tracker.add_keychain(Keychain::One, one); - tracker.add_keychain(Keychain::Two, two); - - let tx1 = Transaction { - version: 0x01, - lock_time: PackedLockTime(0), - input: vec![], - output: vec![TxOut { - value: 13_000, - script_pubkey: tracker - .txout_index - .reveal_next_spk(&Keychain::One) - .0 - .1 - .clone(), - }], - }; - - let tx2 = Transaction { - version: 0x01, - lock_time: PackedLockTime(0), - input: vec![], - output: vec![TxOut { - value: 7_000, - script_pubkey: tracker - .txout_index - .reveal_next_spk(&Keychain::Two) - .0 - .1 - .clone(), - }], - }; - - let tx_coinbase = Transaction { - version: 0x01, - lock_time: PackedLockTime(0), - input: vec![TxIn::default()], - output: vec![TxOut { - value: 11_000, - script_pubkey: tracker - .txout_index - .reveal_next_spk(&Keychain::Two) - .0 - .1 - .clone(), - }], - }; - - assert!(tx_coinbase.is_coin_base()); - - let _ = tracker - .insert_checkpoint(BlockId { - height: 5, - hash: h!("1"), - }) - .unwrap(); - - let should_trust = |keychain: &Keychain| match *keychain { - Keychain::One => false, - Keychain::Two => true, - }; - - assert_eq!(tracker.balance(should_trust), Balance::default()); - - let _ = tracker - .insert_tx(tx1.clone(), TxHeight::Unconfirmed) - .unwrap(); - - assert_eq!( - tracker.balance(should_trust), - Balance { - untrusted_pending: 13_000, - ..Default::default() - } - ); - - let _ = tracker - .insert_tx(tx2.clone(), 
TxHeight::Unconfirmed) - .unwrap(); - - assert_eq!( - tracker.balance(should_trust), - Balance { - trusted_pending: 7_000, - untrusted_pending: 13_000, - ..Default::default() - } - ); - - let _ = tracker - .insert_tx(tx_coinbase, TxHeight::Confirmed(0)) - .unwrap(); - - assert_eq!( - tracker.balance(should_trust), - Balance { - trusted_pending: 7_000, - untrusted_pending: 13_000, - immature: 11_000, - ..Default::default() - } - ); - - let _ = tracker.insert_tx(tx1, TxHeight::Confirmed(1)).unwrap(); - - assert_eq!( - tracker.balance(should_trust), - Balance { - trusted_pending: 7_000, - untrusted_pending: 0, - immature: 11_000, - confirmed: 13_000, - } - ); - - let _ = tracker.insert_tx(tx2, TxHeight::Confirmed(2)).unwrap(); - - assert_eq!( - tracker.balance(should_trust), - Balance { - trusted_pending: 0, - untrusted_pending: 0, - immature: 11_000, - confirmed: 20_000, - } - ); - - let _ = tracker - .insert_checkpoint(BlockId { - height: 98, - hash: h!("98"), - }) - .unwrap(); - - assert_eq!( - tracker.balance(should_trust), - Balance { - trusted_pending: 0, - untrusted_pending: 0, - immature: 11_000, - confirmed: 20_000, - } - ); - - let _ = tracker - .insert_checkpoint(BlockId { - height: 99, - hash: h!("99"), - }) - .unwrap(); - - assert_eq!( - tracker.balance(should_trust), - Balance { - trusted_pending: 0, - untrusted_pending: 0, - immature: 0, - confirmed: 31_000, - } - ); - - assert_eq!(tracker.balance_at(0), 0); - assert_eq!(tracker.balance_at(1), 13_000); - assert_eq!(tracker.balance_at(2), 20_000); - assert_eq!(tracker.balance_at(98), 20_000); - assert_eq!(tracker.balance_at(99), 31_000); - assert_eq!(tracker.balance_at(100), 31_000); -} diff --git a/crates/chain/tests/test_sparse_chain.rs b/crates/chain/tests/test_sparse_chain.rs deleted file mode 100644 index ba8b23b8..00000000 --- a/crates/chain/tests/test_sparse_chain.rs +++ /dev/null @@ -1,773 +0,0 @@ -#[macro_use] -mod common; - -use bdk_chain::{collections::BTreeSet, sparse_chain::*, BlockId, 
TxHeight}; -use bitcoin::{hashes::Hash, Txid}; -use core::ops::Bound; - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Hash)] -pub struct TestIndex(TxHeight, u32); - -impl ChainPosition for TestIndex { - fn height(&self) -> TxHeight { - self.0 - } - - fn max_ord_of_height(height: TxHeight) -> Self { - Self(height, u32::MAX) - } - - fn min_ord_of_height(height: TxHeight) -> Self { - Self(height, u32::MIN) - } -} - -impl TestIndex { - pub fn new(height: H, ext: u32) -> Self - where - H: Into, - { - Self(height.into(), ext) - } -} - -#[test] -fn add_first_checkpoint() { - let chain = SparseChain::default(); - assert_eq!( - chain.determine_changeset(&chain!([0, h!("A")])), - Ok(changeset! { - checkpoints: [(0, Some(h!("A")))], - txids: [] - },), - "add first tip" - ); -} - -#[test] -fn add_second_tip() { - let chain = chain!([0, h!("A")]); - assert_eq!( - chain.determine_changeset(&chain!([0, h!("A")], [1, h!("B")])), - Ok(changeset! { - checkpoints: [(1, Some(h!("B")))], - txids: [] - },), - "extend tip by one" - ); -} - -#[test] -fn two_disjoint_chains_cannot_merge() { - let chain1 = chain!([0, h!("A")]); - let chain2 = chain!([1, h!("B")]); - assert_eq!( - chain1.determine_changeset(&chain2), - Err(UpdateError::NotConnected(0)) - ); -} - -#[test] -fn duplicate_chains_should_merge() { - let chain1 = chain!([0, h!("A")]); - let chain2 = chain!([0, h!("A")]); - assert_eq!( - chain1.determine_changeset(&chain2), - Ok(ChangeSet::default()) - ); -} - -#[test] -fn duplicate_chains_with_txs_should_merge() { - let chain1 = chain!(checkpoints: [[0,h!("A")]], txids: [(h!("tx0"), TxHeight::Confirmed(0))]); - let chain2 = chain!(checkpoints: [[0,h!("A")]], txids: [(h!("tx0"), TxHeight::Confirmed(0))]); - assert_eq!( - chain1.determine_changeset(&chain2), - Ok(ChangeSet::default()) - ); -} - -#[test] -fn duplicate_chains_with_different_txs_should_merge() { - let chain1 = chain!(checkpoints: [[0,h!("A")]], txids: [(h!("tx0"), TxHeight::Confirmed(0))]); - let chain2 
= chain!(checkpoints: [[0,h!("A")]], txids: [(h!("tx1"), TxHeight::Confirmed(0))]); - assert_eq!( - chain1.determine_changeset(&chain2), - Ok(changeset! { - checkpoints: [], - txids: [(h!("tx1"), Some(TxHeight::Confirmed(0)))] - }) - ); -} - -#[test] -fn invalidate_first_and_only_checkpoint_without_tx_changes() { - let chain1 = chain!(checkpoints: [[0,h!("A")]], txids: [(h!("tx0"), TxHeight::Confirmed(0))]); - let chain2 = chain!(checkpoints: [[0,h!("A'")]], txids: [(h!("tx0"), TxHeight::Confirmed(0))]); - assert_eq!( - chain1.determine_changeset(&chain2), - Ok(changeset! { - checkpoints: [(0, Some(h!("A'")))], - txids: [] - },) - ); -} - -#[test] -fn invalidate_first_and_only_checkpoint_with_tx_move_forward() { - let chain1 = chain!(checkpoints: [[0,h!("A")]], txids: [(h!("tx0"), TxHeight::Confirmed(0))]); - let chain2 = chain!(checkpoints: [[0,h!("A'")],[1, h!("B")]], txids: [(h!("tx0"), TxHeight::Confirmed(1))]); - assert_eq!( - chain1.determine_changeset(&chain2), - Ok(changeset! { - checkpoints: [(0, Some(h!("A'"))), (1, Some(h!("B")))], - txids: [(h!("tx0"), Some(TxHeight::Confirmed(1)))] - },) - ); -} - -#[test] -fn invalidate_first_and_only_checkpoint_with_tx_move_backward() { - let chain1 = chain!(checkpoints: [[1,h!("B")]], txids: [(h!("tx0"), TxHeight::Confirmed(1))]); - let chain2 = chain!(checkpoints: [[0,h!("A")],[1, h!("B'")]], txids: [(h!("tx0"), TxHeight::Confirmed(0))]); - assert_eq!( - chain1.determine_changeset(&chain2), - Ok(changeset! 
{ - checkpoints: [(0, Some(h!("A"))), (1, Some(h!("B'")))], - txids: [(h!("tx0"), Some(TxHeight::Confirmed(0)))] - },) - ); -} - -#[test] -fn invalidate_a_checkpoint_and_try_and_move_tx_when_it_wasnt_within_invalidation() { - let chain1 = chain!(checkpoints: [[0, h!("A")], [1, h!("B")]], txids: [(h!("tx0"), TxHeight::Confirmed(0))]); - let chain2 = chain!(checkpoints: [[0, h!("A")], [1, h!("B'")]], txids: [(h!("tx0"), TxHeight::Confirmed(1))]); - assert_eq!( - chain1.determine_changeset(&chain2), - Err(UpdateError::TxInconsistent { - txid: h!("tx0"), - original_pos: TxHeight::Confirmed(0), - update_pos: TxHeight::Confirmed(1), - }) - ); -} - -/// This test doesn't make much sense. We're invalidating a block at height 1 and moving it to -/// height 0. It should be impossible for it to be at height 1 at any point if it was at height 0 -/// all along. -#[test] -fn move_invalidated_tx_into_earlier_checkpoint() { - let chain1 = chain!(checkpoints: [[0, h!("A")], [1, h!("B")]], txids: [(h!("tx0"), TxHeight::Confirmed(1))]); - let chain2 = chain!(checkpoints: [[0, h!("A")], [1, h!("B'")]], txids: [(h!("tx0"), TxHeight::Confirmed(0))]); - assert_eq!( - chain1.determine_changeset(&chain2), - Ok(changeset! { - checkpoints: [(1, Some(h!("B'")))], - txids: [(h!("tx0"), Some(TxHeight::Confirmed(0)))] - },) - ); -} - -#[test] -fn invalidate_first_and_only_checkpoint_with_tx_move_to_mempool() { - let chain1 = chain!(checkpoints: [[0,h!("A")]], txids: [(h!("tx0"), TxHeight::Confirmed(0))]); - let chain2 = chain!(checkpoints: [[0,h!("A'")]], txids: [(h!("tx0"), TxHeight::Unconfirmed)]); - assert_eq!( - chain1.determine_changeset(&chain2), - Ok(changeset! 
{ - checkpoints: [(0, Some(h!("A'")))], - txids: [(h!("tx0"), Some(TxHeight::Unconfirmed))] - },) - ); -} - -#[test] -fn confirm_tx_without_extending_chain() { - let chain1 = chain!(checkpoints: [[0,h!("A")]], txids: [(h!("tx0"), TxHeight::Unconfirmed)]); - let chain2 = chain!(checkpoints: [[0,h!("A")]], txids: [(h!("tx0"), TxHeight::Confirmed(0))]); - assert_eq!( - chain1.determine_changeset(&chain2), - Ok(changeset! { - checkpoints: [], - txids: [(h!("tx0"), Some(TxHeight::Confirmed(0)))] - },) - ); -} - -#[test] -fn confirm_tx_backwards_while_extending_chain() { - let chain1 = chain!(checkpoints: [[0,h!("A")]], txids: [(h!("tx0"), TxHeight::Unconfirmed)]); - let chain2 = chain!(checkpoints: [[0,h!("A")],[1,h!("B")]], txids: [(h!("tx0"), TxHeight::Confirmed(0))]); - assert_eq!( - chain1.determine_changeset(&chain2), - Ok(changeset! { - checkpoints: [(1, Some(h!("B")))], - txids: [(h!("tx0"), Some(TxHeight::Confirmed(0)))] - },) - ); -} - -#[test] -fn confirm_tx_in_new_block() { - let chain1 = chain!(checkpoints: [[0,h!("A")]], txids: [(h!("tx0"), TxHeight::Unconfirmed)]); - let chain2 = chain! { - checkpoints: [[0,h!("A")], [1,h!("B")]], - txids: [(h!("tx0"), TxHeight::Confirmed(1))] - }; - assert_eq!( - chain1.determine_changeset(&chain2), - Ok(changeset! { - checkpoints: [(1, Some(h!("B")))], - txids: [(h!("tx0"), Some(TxHeight::Confirmed(1)))] - },) - ); -} - -#[test] -fn merging_mempool_of_empty_chains_doesnt_fail() { - let chain1 = chain!(checkpoints: [], txids: [(h!("tx0"), TxHeight::Unconfirmed)]); - let chain2 = chain!(checkpoints: [], txids: [(h!("tx1"), TxHeight::Unconfirmed)]); - - assert_eq!( - chain1.determine_changeset(&chain2), - Ok(changeset! 
{ - checkpoints: [], - txids: [(h!("tx1"), Some(TxHeight::Unconfirmed))] - },) - ); -} - -#[test] -fn cannot_insert_confirmed_tx_without_checkpoints() { - let chain = SparseChain::default(); - assert_eq!( - chain.insert_tx_preview(h!("A"), TxHeight::Confirmed(0)), - Err(InsertTxError::TxTooHigh { - txid: h!("A"), - tx_height: 0, - tip_height: None - }) - ); -} - -#[test] -fn empty_chain_can_add_unconfirmed_transactions() { - let chain1 = chain!(checkpoints: [[0, h!("A")]], txids: []); - let chain2 = chain!(checkpoints: [], txids: [(h!("tx0"), TxHeight::Unconfirmed)]); - - assert_eq!( - chain1.determine_changeset(&chain2), - Ok(changeset! { - checkpoints: [], - txids: [ (h!("tx0"), Some(TxHeight::Unconfirmed)) ] - },) - ); -} - -#[test] -fn can_update_with_shorter_chain() { - let chain1 = chain!(checkpoints: [[1, h!("B")],[2, h!("C")]], txids: []); - let chain2 = chain!(checkpoints: [[1, h!("B")]], txids: [(h!("tx0"), TxHeight::Confirmed(1))]); - - assert_eq!( - chain1.determine_changeset(&chain2), - Ok(changeset! { - checkpoints: [], - txids: [(h!("tx0"), Some(TxHeight::Confirmed(1)))] - },) - ) -} - -#[test] -fn can_introduce_older_checkpoints() { - let chain1 = chain!(checkpoints: [[2, h!("C")], [3, h!("D")]], txids: []); - let chain2 = chain!(checkpoints: [[1, h!("B")], [2, h!("C")]], txids: []); - - assert_eq!( - chain1.determine_changeset(&chain2), - Ok(changeset! { - checkpoints: [(1, Some(h!("B")))], - txids: [] - },) - ); -} - -#[test] -fn fix_blockhash_before_agreement_point() { - let chain1 = chain!([0, h!("im-wrong")], [1, h!("we-agree")]); - let chain2 = chain!([0, h!("fix")], [1, h!("we-agree")]); - - assert_eq!( - chain1.determine_changeset(&chain2), - Ok(changeset! 
{ - checkpoints: [(0, Some(h!("fix")))], - txids: [] - },) - ) -} - -// TODO: Use macro -#[test] -fn cannot_change_ext_index_of_confirmed_tx() { - let chain1 = chain!( - index: TestIndex, - checkpoints: [[1, h!("A")]], - txids: [(h!("tx0"), TestIndex(TxHeight::Confirmed(1), 10))] - ); - let chain2 = chain!( - index: TestIndex, - checkpoints: [[1, h!("A")]], - txids: [(h!("tx0"), TestIndex(TxHeight::Confirmed(1), 20))] - ); - - assert_eq!( - chain1.determine_changeset(&chain2), - Err(UpdateError::TxInconsistent { - txid: h!("tx0"), - original_pos: TestIndex(TxHeight::Confirmed(1), 10), - update_pos: TestIndex(TxHeight::Confirmed(1), 20), - }), - ) -} - -#[test] -fn can_change_index_of_unconfirmed_tx() { - let chain1 = chain!( - index: TestIndex, - checkpoints: [[1, h!("A")]], - txids: [(h!("tx1"), TestIndex(TxHeight::Unconfirmed, 10))] - ); - let chain2 = chain!( - index: TestIndex, - checkpoints: [[1, h!("A")]], - txids: [(h!("tx1"), TestIndex(TxHeight::Unconfirmed, 20))] - ); - - assert_eq!( - chain1.determine_changeset(&chain2), - Ok(ChangeSet { - checkpoints: [].into(), - txids: [(h!("tx1"), Some(TestIndex(TxHeight::Unconfirmed, 20)),)].into() - },), - ) -} - -/// B and C are in both chain and update -/// ``` -/// | 0 | 1 | 2 | 3 | 4 -/// chain | B C -/// update | A B C D -/// ``` -/// This should succeed with the point of agreement being C and A should be added in addition. -#[test] -fn two_points_of_agreement() { - let chain1 = chain!([1, h!("B")], [2, h!("C")]); - let chain2 = chain!([0, h!("A")], [1, h!("B")], [2, h!("C")], [3, h!("D")]); - - assert_eq!( - chain1.determine_changeset(&chain2), - Ok(changeset! 
{ - checkpoints: [(0, Some(h!("A"))), (3, Some(h!("D")))] - },), - ); -} - -/// Update and chain does not connect: -/// ``` -/// | 0 | 1 | 2 | 3 | 4 -/// chain | B C -/// update | A B D -/// ``` -/// This should fail as we cannot figure out whether C & D are on the same chain -#[test] -fn update_and_chain_does_not_connect() { - let chain1 = chain!([1, h!("B")], [2, h!("C")]); - let chain2 = chain!([0, h!("A")], [1, h!("B")], [3, h!("D")]); - - assert_eq!( - chain1.determine_changeset(&chain2), - Err(UpdateError::NotConnected(2)), - ); -} - -/// Transient invalidation: -/// ``` -/// | 0 | 1 | 2 | 3 | 4 | 5 -/// chain | A B C E -/// update | A B' C' D -/// ``` -/// This should succeed and invalidate B,C and E with point of agreement being A. -/// It should also invalidate transactions at height 1. -#[test] -fn transitive_invalidation_applies_to_checkpoints_higher_than_invalidation() { - let chain1 = chain! { - checkpoints: [[0, h!("A")], [2, h!("B")], [3, h!("C")], [5, h!("E")]], - txids: [ - (h!("a"), TxHeight::Confirmed(0)), - (h!("b1"), TxHeight::Confirmed(1)), - (h!("b2"), TxHeight::Confirmed(2)), - (h!("d"), TxHeight::Confirmed(3)), - (h!("e"), TxHeight::Confirmed(5)) - ] - }; - let chain2 = chain! { - checkpoints: [[0, h!("A")], [2, h!("B'")], [3, h!("C'")], [4, h!("D")]], - txids: [(h!("b1"), TxHeight::Confirmed(4)), (h!("b2"), TxHeight::Confirmed(3))] - }; - - assert_eq!( - chain1.determine_changeset(&chain2), - Ok(changeset! 
{ - checkpoints: [ - (2, Some(h!("B'"))), - (3, Some(h!("C'"))), - (4, Some(h!("D"))), - (5, None) - ], - txids: [ - (h!("b1"), Some(TxHeight::Confirmed(4))), - (h!("b2"), Some(TxHeight::Confirmed(3))), - (h!("d"), Some(TxHeight::Unconfirmed)), - (h!("e"), Some(TxHeight::Unconfirmed)) - ] - },) - ); -} - -/// Transient invalidation: -/// ``` -/// | 0 | 1 | 2 | 3 | 4 -/// chain | B C E -/// update | B' C' D -/// ``` -/// -/// This should succeed and invalidate B, C and E with no point of agreement -#[test] -fn transitive_invalidation_applies_to_checkpoints_higher_than_invalidation_no_point_of_agreement() { - let chain1 = chain!([1, h!("B")], [2, h!("C")], [4, h!("E")]); - let chain2 = chain!([1, h!("B'")], [2, h!("C'")], [3, h!("D")]); - - assert_eq!( - chain1.determine_changeset(&chain2), - Ok(changeset! { - checkpoints: [ - (1, Some(h!("B'"))), - (2, Some(h!("C'"))), - (3, Some(h!("D"))), - (4, None) - ] - },) - ) -} - -/// Transient invalidation: -/// ``` -/// | 0 | 1 | 2 | 3 | 4 -/// chain | A B C E -/// update | B' C' D -/// ``` -/// -/// This should fail since although it tells us that B and C are invalid it doesn't tell us whether -/// A was invalid. 
-#[test] -fn invalidation_but_no_connection() { - let chain1 = chain!([0, h!("A")], [1, h!("B")], [2, h!("C")], [4, h!("E")]); - let chain2 = chain!([1, h!("B'")], [2, h!("C'")], [3, h!("D")]); - - assert_eq!( - chain1.determine_changeset(&chain2), - Err(UpdateError::NotConnected(0)) - ) -} - -#[test] -fn checkpoint_limit_is_respected() { - let mut chain1 = SparseChain::default(); - let _ = chain1 - .apply_update(chain!( - [1, h!("A")], - [2, h!("B")], - [3, h!("C")], - [4, h!("D")], - [5, h!("E")] - )) - .unwrap(); - - assert_eq!(chain1.checkpoints().len(), 5); - chain1.set_checkpoint_limit(Some(4)); - assert_eq!(chain1.checkpoints().len(), 4); - - let _ = chain1 - .insert_checkpoint(BlockId { - height: 6, - hash: h!("F"), - }) - .unwrap(); - assert_eq!(chain1.checkpoints().len(), 4); - - let changeset = chain1.determine_changeset(&chain!([6, h!("F")], [7, h!("G")])); - assert_eq!(changeset, Ok(changeset!(checkpoints: [(7, Some(h!("G")))]))); - - chain1.apply_changeset(changeset.unwrap()); - - assert_eq!(chain1.checkpoints().len(), 4); -} - -#[test] -fn range_txids_by_height() { - let mut chain = chain!(index: TestIndex, checkpoints: [[1, h!("block 1")], [2, h!("block 2")]]); - - let txids: [(TestIndex, Txid); 4] = [ - ( - TestIndex(TxHeight::Confirmed(1), u32::MIN), - Txid::from_inner([0x00; 32]), - ), - ( - TestIndex(TxHeight::Confirmed(1), u32::MAX), - Txid::from_inner([0xfe; 32]), - ), - ( - TestIndex(TxHeight::Confirmed(2), u32::MIN), - Txid::from_inner([0x01; 32]), - ), - ( - TestIndex(TxHeight::Confirmed(2), u32::MAX), - Txid::from_inner([0xff; 32]), - ), - ]; - - // populate chain with txids - for (index, txid) in txids { - let _ = chain.insert_tx(txid, index).expect("should succeed"); - } - - // inclusive start - assert_eq!( - chain - .range_txids_by_height(TxHeight::Confirmed(1)..) 
- .collect::>(), - txids.iter().collect::>(), - ); - - // exclusive start - assert_eq!( - chain - .range_txids_by_height((Bound::Excluded(TxHeight::Confirmed(1)), Bound::Unbounded,)) - .collect::>(), - txids[2..].iter().collect::>(), - ); - - // inclusive end - assert_eq!( - chain - .range_txids_by_height((Bound::Unbounded, Bound::Included(TxHeight::Confirmed(2)))) - .collect::>(), - txids[..4].iter().collect::>(), - ); - - // exclusive end - assert_eq!( - chain - .range_txids_by_height(..TxHeight::Confirmed(2)) - .collect::>(), - txids[..2].iter().collect::>(), - ); -} - -#[test] -fn range_txids_by_index() { - let mut chain = chain!(index: TestIndex, checkpoints: [[1, h!("block 1")],[2, h!("block 2")]]); - - let txids: [(TestIndex, Txid); 4] = [ - (TestIndex(TxHeight::Confirmed(1), u32::MIN), h!("tx 1 min")), - (TestIndex(TxHeight::Confirmed(1), u32::MAX), h!("tx 1 max")), - (TestIndex(TxHeight::Confirmed(2), u32::MIN), h!("tx 2 min")), - (TestIndex(TxHeight::Confirmed(2), u32::MAX), h!("tx 2 max")), - ]; - - // populate chain with txids - for (index, txid) in txids { - let _ = chain.insert_tx(txid, index).expect("should succeed"); - } - - // inclusive start - assert_eq!( - chain - .range_txids_by_position(TestIndex(TxHeight::Confirmed(1), u32::MIN)..) - .collect::>(), - txids.iter().collect::>(), - ); - assert_eq!( - chain - .range_txids_by_position(TestIndex(TxHeight::Confirmed(1), u32::MAX)..) 
- .collect::>(), - txids[1..].iter().collect::>(), - ); - - // exclusive start - assert_eq!( - chain - .range_txids_by_position(( - Bound::Excluded(TestIndex(TxHeight::Confirmed(1), u32::MIN)), - Bound::Unbounded - )) - .collect::>(), - txids[1..].iter().collect::>(), - ); - assert_eq!( - chain - .range_txids_by_position(( - Bound::Excluded(TestIndex(TxHeight::Confirmed(1), u32::MAX)), - Bound::Unbounded - )) - .collect::>(), - txids[2..].iter().collect::>(), - ); - - // inclusive end - assert_eq!( - chain - .range_txids_by_position(( - Bound::Unbounded, - Bound::Included(TestIndex(TxHeight::Confirmed(2), u32::MIN)) - )) - .collect::>(), - txids[..3].iter().collect::>(), - ); - assert_eq!( - chain - .range_txids_by_position(( - Bound::Unbounded, - Bound::Included(TestIndex(TxHeight::Confirmed(2), u32::MAX)) - )) - .collect::>(), - txids[..4].iter().collect::>(), - ); - - // exclusive end - assert_eq!( - chain - .range_txids_by_position(..TestIndex(TxHeight::Confirmed(2), u32::MIN)) - .collect::>(), - txids[..2].iter().collect::>(), - ); - assert_eq!( - chain - .range_txids_by_position(..TestIndex(TxHeight::Confirmed(2), u32::MAX)) - .collect::>(), - txids[..3].iter().collect::>(), - ); -} - -#[test] -fn range_txids() { - let mut chain = SparseChain::default(); - - let txids = (0..100) - .map(|v| Txid::hash(v.to_string().as_bytes())) - .collect::>(); - - // populate chain - for txid in &txids { - let _ = chain - .insert_tx(*txid, TxHeight::Unconfirmed) - .expect("should succeed"); - } - - for txid in &txids { - assert_eq!( - chain - .range_txids((TxHeight::Unconfirmed, *txid)..) 
- .map(|(_, txid)| txid) - .collect::>(), - txids.range(*txid..).collect::>(), - "range with inclusive start should succeed" - ); - - assert_eq!( - chain - .range_txids(( - Bound::Excluded((TxHeight::Unconfirmed, *txid)), - Bound::Unbounded, - )) - .map(|(_, txid)| txid) - .collect::>(), - txids - .range((Bound::Excluded(*txid), Bound::Unbounded,)) - .collect::>(), - "range with exclusive start should succeed" - ); - - assert_eq!( - chain - .range_txids(..(TxHeight::Unconfirmed, *txid)) - .map(|(_, txid)| txid) - .collect::>(), - txids.range(..*txid).collect::>(), - "range with exclusive end should succeed" - ); - - assert_eq!( - chain - .range_txids(( - Bound::Included((TxHeight::Unconfirmed, *txid)), - Bound::Unbounded, - )) - .map(|(_, txid)| txid) - .collect::>(), - txids - .range((Bound::Included(*txid), Bound::Unbounded,)) - .collect::>(), - "range with inclusive end should succeed" - ); - } -} - -#[test] -fn invalidated_txs_move_to_unconfirmed() { - let chain1 = chain! { - checkpoints: [[0, h!("A")], [1, h!("B")], [2, h!("C")]], - txids: [ - (h!("a"), TxHeight::Confirmed(0)), - (h!("b"), TxHeight::Confirmed(1)), - (h!("c"), TxHeight::Confirmed(2)), - (h!("d"), TxHeight::Unconfirmed) - ] - }; - - let chain2 = chain!([0, h!("A")], [1, h!("B'")]); - - assert_eq!( - chain1.determine_changeset(&chain2), - Ok(changeset! 
{ - checkpoints: [ - (1, Some(h!("B'"))), - (2, None) - ], - txids: [ - (h!("b"), Some(TxHeight::Unconfirmed)), - (h!("c"), Some(TxHeight::Unconfirmed)) - ] - },) - ); -} - -#[test] -fn change_tx_position_from_unconfirmed_to_confirmed() { - let mut chain = SparseChain::::default(); - let txid = h!("txid"); - - let _ = chain.insert_tx(txid, TxHeight::Unconfirmed).unwrap(); - - assert_eq!(chain.tx_position(txid), Some(&TxHeight::Unconfirmed)); - let _ = chain - .insert_checkpoint(BlockId { - height: 0, - hash: h!("0"), - }) - .unwrap(); - let _ = chain.insert_tx(txid, TxHeight::Confirmed(0)).unwrap(); - - assert_eq!(chain.tx_position(txid), Some(&TxHeight::Confirmed(0))); -} diff --git a/crates/chain/tests/test_tx_graph.rs b/crates/chain/tests/test_tx_graph.rs index 2b845611..82a0f97d 100644 --- a/crates/chain/tests/test_tx_graph.rs +++ b/crates/chain/tests/test_tx_graph.rs @@ -4,7 +4,7 @@ use bdk_chain::{ collections::*, local_chain::LocalChain, tx_graph::{Additions, TxGraph}, - Append, BlockId, ConfirmationHeightAnchor, ObservedAs, + Append, BlockId, ChainPosition, ConfirmationHeightAnchor, }; use bitcoin::{ hashes::Hash, BlockHash, OutPoint, PackedLockTime, Script, Transaction, TxIn, TxOut, Txid, @@ -56,17 +56,17 @@ fn insert_txouts() { }; // Conf anchor used to mark the full transaction as confirmed. 
- let conf_anchor = ObservedAs::Confirmed(BlockId { + let conf_anchor = ChainPosition::Confirmed(BlockId { height: 100, hash: h!("random blockhash"), }); // Unconfirmed anchor to mark the partial transactions as unconfirmed - let unconf_anchor = ObservedAs::::Unconfirmed(1000000); + let unconf_anchor = ChainPosition::::Unconfirmed(1000000); // Make the original graph let mut graph = { - let mut graph = TxGraph::>::default(); + let mut graph = TxGraph::>::default(); for (outpoint, txout) in &original_ops { assert_eq!( graph.insert_txout(*outpoint, txout.clone()), @@ -707,7 +707,7 @@ fn test_chain_spends() { assert_eq!( graph.get_chain_spend(&local_chain, tip, OutPoint::new(tx_0.txid(), 0)), Some(( - ObservedAs::Confirmed(&ConfirmationHeightAnchor { + ChainPosition::Confirmed(&ConfirmationHeightAnchor { anchor_block: tip, confirmation_height: 98 }), @@ -719,7 +719,7 @@ fn test_chain_spends() { assert_eq!( graph.get_chain_position(&local_chain, tip, tx_0.txid()), // Some(ObservedAs::Confirmed(&local_chain.get_block(95).expect("block expected"))), - Some(ObservedAs::Confirmed(&ConfirmationHeightAnchor { + Some(ChainPosition::Confirmed(&ConfirmationHeightAnchor { anchor_block: tip, confirmation_height: 95 })) @@ -728,7 +728,7 @@ fn test_chain_spends() { // Even if unconfirmed tx has a last_seen of 0, it can still be part of a chain spend. assert_eq!( graph.get_chain_spend(&local_chain, tip, OutPoint::new(tx_0.txid(), 1)), - Some((ObservedAs::Unconfirmed(0), tx_2.txid())), + Some((ChainPosition::Unconfirmed(0), tx_2.txid())), ); // Mark the unconfirmed as seen and check correct ObservedAs status is returned. @@ -739,7 +739,7 @@ fn test_chain_spends() { graph .get_chain_spend(&local_chain, tip, OutPoint::new(tx_0.txid(), 1)) .unwrap(), - (ObservedAs::Unconfirmed(1234567), tx_2.txid()) + (ChainPosition::Unconfirmed(1234567), tx_2.txid()) ); // A conflicting transaction that conflicts with tx_1. 
@@ -775,7 +775,7 @@ fn test_chain_spends() { graph .get_chain_position(&local_chain, tip, tx_2_conflict.txid()) .expect("position expected"), - ObservedAs::Unconfirmed(1234568) + ChainPosition::Unconfirmed(1234568) ); // Chain_spend now catches the new transaction as the spend. @@ -783,7 +783,7 @@ fn test_chain_spends() { graph .get_chain_spend(&local_chain, tip, OutPoint::new(tx_0.txid(), 1)) .expect("expect observation"), - (ObservedAs::Unconfirmed(1234568), tx_2_conflict.txid()) + (ChainPosition::Unconfirmed(1234568), tx_2_conflict.txid()) ); // Chain position of the `tx_2` is now none, as it is older than `tx_2_conflict` diff --git a/crates/electrum/src/v2.rs b/crates/electrum/src/electrum_ext.rs similarity index 88% rename from crates/electrum/src/v2.rs rename to crates/electrum/src/electrum_ext.rs index bedfdfac..cc1cf987 100644 --- a/crates/electrum/src/v2.rs +++ b/crates/electrum/src/electrum_ext.rs @@ -11,8 +11,6 @@ use std::{ fmt::Debug, }; -use crate::InternalError; - #[derive(Debug, Clone)] pub struct ElectrumUpdate { pub graph_update: HashMap>, @@ -209,57 +207,42 @@ impl ElectrumExt for Client { if !request_spks.is_empty() { if !scanned_spks.is_empty() { - let mut scanned_spk_iter = scanned_spks - .iter() - .map(|(i, (spk, _))| (i.clone(), spk.clone())); - match populate_with_spks( + scanned_spks.append(&mut populate_with_spks( self, anchor_block, &mut update, - &mut scanned_spk_iter, + &mut scanned_spks + .iter() + .map(|(i, (spk, _))| (i.clone(), spk.clone())), stop_gap, batch_size, - ) { - Err(InternalError::Reorg) => continue, - Err(InternalError::ElectrumError(e)) => return Err(e), - Ok(mut spks) => scanned_spks.append(&mut spks), - }; + )?); } for (keychain, keychain_spks) in &mut request_spks { - match populate_with_spks( - self, - anchor_block, - &mut update, - keychain_spks, - stop_gap, - batch_size, - ) { - Err(InternalError::Reorg) => continue, - Err(InternalError::ElectrumError(e)) => return Err(e), - Ok(spks) => scanned_spks.extend( - 
spks.into_iter() - .map(|(spk_i, spk)| ((keychain.clone(), spk_i), spk)), - ), - }; + scanned_spks.extend( + populate_with_spks( + self, + anchor_block, + &mut update, + keychain_spks, + stop_gap, + batch_size, + )? + .into_iter() + .map(|(spk_i, spk)| ((keychain.clone(), spk_i), spk)), + ); } } - match populate_with_txids(self, anchor_block, &mut update, &mut txids.iter().cloned()) { - Err(InternalError::Reorg) => continue, - Err(InternalError::ElectrumError(e)) => return Err(e), - Ok(_) => {} - } + populate_with_txids(self, anchor_block, &mut update, &mut txids.iter().cloned())?; - match populate_with_outpoints( + // [TODO] cache transactions to reduce bandwidth + let _txs = populate_with_outpoints( self, anchor_block, &mut update, &mut outpoints.iter().cloned(), - ) { - Err(InternalError::Reorg) => continue, - Err(InternalError::ElectrumError(e)) => return Err(e), - Ok(_txs) => { /* [TODO] cache full txs to reduce bandwidth */ } - } + )?; // check for reorgs during scan process let server_blockhash = self @@ -366,7 +349,7 @@ fn populate_with_outpoints( anchor_block: BlockId, update: &mut ElectrumUpdate, outpoints: &mut impl Iterator, -) -> Result, InternalError> { +) -> Result, Error> { let mut full_txs = HashMap::new(); for outpoint in outpoints { let txid = outpoint.txid; @@ -428,12 +411,12 @@ fn populate_with_txids( anchor_block: BlockId, update: &mut ElectrumUpdate, txids: &mut impl Iterator, -) -> Result<(), InternalError> { +) -> Result<(), Error> { for txid in txids { let tx = match client.transaction_get(&txid) { Ok(tx) => tx, Err(electrum_client::Error::Protocol(_)) => continue, - Err(other_err) => return Err(other_err.into()), + Err(other_err) => return Err(other_err), }; let spk = tx @@ -466,7 +449,7 @@ fn populate_with_spks( spks: &mut impl Iterator, stop_gap: usize, batch_size: usize, -) -> Result, InternalError> { +) -> Result, Error> { let mut unused_spk_count = 0_usize; let mut scanned_spks = BTreeMap::new(); diff --git 
a/crates/electrum/src/lib.rs b/crates/electrum/src/lib.rs index df5e1d74..4826c6dd 100644 --- a/crates/electrum/src/lib.rs +++ b/crates/electrum/src/lib.rs @@ -20,306 +20,12 @@ //! [`batch_transaction_get`]: ElectrumApi::batch_transaction_get //! [`bdk_electrum_example`]: https://github.com/LLFourn/bdk_core_staging/tree/master/bdk_electrum_example -use bdk_chain::{ - bitcoin::{hashes::hex::FromHex, BlockHash, OutPoint, Script, Transaction, Txid}, - chain_graph::{self, ChainGraph}, - keychain::KeychainScan, - sparse_chain::{self, ChainPosition, SparseChain}, - tx_graph::TxGraph, - BlockId, ConfirmationTime, TxHeight, -}; +use bdk_chain::bitcoin::BlockHash; use electrum_client::{Client, ElectrumApi, Error}; -use std::{ - collections::{BTreeMap, HashMap}, - fmt::Debug, -}; - -pub mod v2; +mod electrum_ext; pub use bdk_chain; pub use electrum_client; - -/// Trait to extend [`electrum_client::Client`] functionality. -/// -/// Refer to [crate-level documentation] for more. -/// -/// [crate-level documentation]: crate -pub trait ElectrumExt { - /// Fetch the latest block height. - fn get_tip(&self) -> Result<(u32, BlockHash), Error>; - - /// Scan the blockchain (via electrum) for the data specified. This returns a [`ElectrumUpdate`] - /// which can be transformed into a [`KeychainScan`] after we find all the missing full - /// transactions. - /// - /// - `local_chain`: the most recent block hashes present locally - /// - `keychain_spks`: keychains that we want to scan transactions for - /// - `txids`: transactions for which we want the updated [`ChainPosition`]s - /// - `outpoints`: transactions associated with these outpoints (residing, spending) that we - /// want to included in the update - fn scan( - &self, - local_chain: &BTreeMap, - keychain_spks: BTreeMap>, - txids: impl IntoIterator, - outpoints: impl IntoIterator, - stop_gap: usize, - batch_size: usize, - ) -> Result, Error>; - - /// Convenience method to call [`scan`] without requiring a keychain. 
- /// - /// [`scan`]: ElectrumExt::scan - fn scan_without_keychain( - &self, - local_chain: &BTreeMap, - misc_spks: impl IntoIterator, - txids: impl IntoIterator, - outpoints: impl IntoIterator, - batch_size: usize, - ) -> Result { - let spk_iter = misc_spks - .into_iter() - .enumerate() - .map(|(i, spk)| (i as u32, spk)); - - self.scan( - local_chain, - [((), spk_iter)].into(), - txids, - outpoints, - usize::MAX, - batch_size, - ) - .map(|u| u.chain_update) - } -} - -impl ElectrumExt for Client { - fn get_tip(&self) -> Result<(u32, BlockHash), Error> { - // TODO: unsubscribe when added to the client, or is there a better call to use here? - self.block_headers_subscribe() - .map(|data| (data.height as u32, data.header.block_hash())) - } - - fn scan( - &self, - local_chain: &BTreeMap, - keychain_spks: BTreeMap>, - txids: impl IntoIterator, - outpoints: impl IntoIterator, - stop_gap: usize, - batch_size: usize, - ) -> Result, Error> { - let mut request_spks = keychain_spks - .into_iter() - .map(|(k, s)| { - let iter = s.into_iter(); - (k, iter) - }) - .collect::>(); - let mut scanned_spks = BTreeMap::<(K, u32), (Script, bool)>::new(); - - let txids = txids.into_iter().collect::>(); - let outpoints = outpoints.into_iter().collect::>(); - - let update = loop { - let mut update = prepare_update(self, local_chain)?; - - if !request_spks.is_empty() { - if !scanned_spks.is_empty() { - let mut scanned_spk_iter = scanned_spks - .iter() - .map(|(i, (spk, _))| (i.clone(), spk.clone())); - match populate_with_spks::<_, _>( - self, - &mut update, - &mut scanned_spk_iter, - stop_gap, - batch_size, - ) { - Err(InternalError::Reorg) => continue, - Err(InternalError::ElectrumError(e)) => return Err(e), - Ok(mut spks) => scanned_spks.append(&mut spks), - }; - } - for (keychain, keychain_spks) in &mut request_spks { - match populate_with_spks::( - self, - &mut update, - keychain_spks, - stop_gap, - batch_size, - ) { - Err(InternalError::Reorg) => continue, - 
Err(InternalError::ElectrumError(e)) => return Err(e), - Ok(spks) => scanned_spks.extend( - spks.into_iter() - .map(|(spk_i, spk)| ((keychain.clone(), spk_i), spk)), - ), - }; - } - } - - match populate_with_txids(self, &mut update, &mut txids.iter().cloned()) { - Err(InternalError::Reorg) => continue, - Err(InternalError::ElectrumError(e)) => return Err(e), - Ok(_) => {} - } - - match populate_with_outpoints(self, &mut update, &mut outpoints.iter().cloned()) { - Err(InternalError::Reorg) => continue, - Err(InternalError::ElectrumError(e)) => return Err(e), - Ok(_txs) => { /* [TODO] cache full txs to reduce bandwidth */ } - } - - // check for reorgs during scan process - let our_tip = update - .latest_checkpoint() - .expect("update must have atleast one checkpoint"); - let server_blockhash = self.block_header(our_tip.height as usize)?.block_hash(); - if our_tip.hash != server_blockhash { - continue; // reorg - } else { - break update; - } - }; - - let last_active_index = request_spks - .into_keys() - .filter_map(|k| { - scanned_spks - .range((k.clone(), u32::MIN)..=(k.clone(), u32::MAX)) - .rev() - .find(|(_, (_, active))| *active) - .map(|((_, i), _)| (k, *i)) - }) - .collect::>(); - - Ok(ElectrumUpdate { - chain_update: update, - last_active_indices: last_active_index, - }) - } -} - -/// The result of [`ElectrumExt::scan`]. -pub struct ElectrumUpdate { - /// The internal [`SparseChain`] update. - pub chain_update: SparseChain

, - /// The last keychain script pubkey indices, which had transaction histories. - pub last_active_indices: BTreeMap, -} - -impl Default for ElectrumUpdate { - fn default() -> Self { - Self { - chain_update: Default::default(), - last_active_indices: Default::default(), - } - } -} - -impl AsRef> for ElectrumUpdate { - fn as_ref(&self) -> &SparseChain

{ - &self.chain_update - } -} - -impl ElectrumUpdate { - /// Return a list of missing full transactions that are required to [`inflate_update`]. - /// - /// [`inflate_update`]: bdk_chain::chain_graph::ChainGraph::inflate_update - pub fn missing_full_txs(&self, graph: G) -> Vec<&Txid> - where - G: AsRef, - { - self.chain_update - .txids() - .filter(|(_, txid)| graph.as_ref().get_tx(*txid).is_none()) - .map(|(_, txid)| txid) - .collect() - } - - /// Transform the [`ElectrumUpdate`] into a [`KeychainScan`], which can be applied to a - /// `tracker`. - /// - /// This will fail if there are missing full transactions not provided via `new_txs`. - pub fn into_keychain_scan( - self, - new_txs: Vec, - chain_graph: &CG, - ) -> Result, chain_graph::NewError

> - where - CG: AsRef>, - { - Ok(KeychainScan { - update: chain_graph - .as_ref() - .inflate_update(self.chain_update, new_txs)?, - last_active_indices: self.last_active_indices, - }) - } -} - -impl ElectrumUpdate { - /// Creates [`ElectrumUpdate`] from [`ElectrumUpdate`]. - pub fn into_confirmation_time_update( - self, - client: &electrum_client::Client, - ) -> Result, Error> { - let heights = self - .chain_update - .range_txids_by_height(..TxHeight::Unconfirmed) - .map(|(h, _)| match h { - TxHeight::Confirmed(h) => *h, - _ => unreachable!("already filtered out unconfirmed"), - }) - .collect::>(); - - let height_to_time = heights - .clone() - .into_iter() - .zip( - client - .batch_block_header(heights)? - .into_iter() - .map(|bh| bh.time as u64), - ) - .collect::>(); - - let mut new_update = SparseChain::::from_checkpoints( - self.chain_update.range_checkpoints(..), - ); - - for &(tx_height, txid) in self.chain_update.txids() { - let conf_time = match tx_height { - TxHeight::Confirmed(height) => ConfirmationTime::Confirmed { - height, - time: height_to_time[&height], - }, - TxHeight::Unconfirmed => ConfirmationTime::Unconfirmed { last_seen: 0 }, - }; - let _ = new_update.insert_tx(txid, conf_time).expect("must insert"); - } - - Ok(ElectrumUpdate { - chain_update: new_update, - last_active_indices: self.last_active_indices, - }) - } -} - -#[derive(Debug)] -enum InternalError { - ElectrumError(Error), - Reorg, -} - -impl From for InternalError { - fn from(value: electrum_client::Error) -> Self { - Self::ElectrumError(value) - } -} +pub use electrum_ext::*; fn get_tip(client: &Client) -> Result<(u32, BlockHash), Error> { // TODO: unsubscribe when added to the client, or is there a better call to use here? 
@@ -327,262 +33,3 @@ fn get_tip(client: &Client) -> Result<(u32, BlockHash), Error> { .block_headers_subscribe() .map(|data| (data.height as u32, data.header.block_hash())) } - -/// Prepare an update sparsechain "template" based on the checkpoints of the `local_chain`. -fn prepare_update( - client: &Client, - local_chain: &BTreeMap, -) -> Result { - let mut update = SparseChain::default(); - - // Find the local chain block that is still there so our update can connect to the local chain. - for (&existing_height, &existing_hash) in local_chain.iter().rev() { - // TODO: a batch request may be safer, as a reorg that happens when we are obtaining - // `block_header`s will result in inconsistencies - let current_hash = client.block_header(existing_height as usize)?.block_hash(); - let _ = update - .insert_checkpoint(BlockId { - height: existing_height, - hash: current_hash, - }) - .expect("This never errors because we are working with a fresh chain"); - - if current_hash == existing_hash { - break; - } - } - - // Insert the new tip so new transactions will be accepted into the sparsechain. - let tip = { - let (height, hash) = get_tip(client)?; - BlockId { height, hash } - }; - if let Err(failure) = update.insert_checkpoint(tip) { - match failure { - sparse_chain::InsertCheckpointError::HashNotMatching { .. } => { - // There has been a re-org before we even begin scanning addresses. - // Just recursively call (this should never happen). - return prepare_update(client, local_chain); - } - } - } - - Ok(update) -} - -/// This atrocity is required because electrum thinks a height of 0 means "unconfirmed", but there is -/// such thing as a genesis block. -/// -/// We contain an expectation for the genesis coinbase txid to always have a chain position of -/// [`TxHeight::Confirmed(0)`]. 
-fn determine_tx_height(raw_height: i32, tip_height: u32, txid: Txid) -> TxHeight { - if txid - == Txid::from_hex("4a5e1e4baab89f3a32518a88c31bc87f618f76673e2cc77ab2127b7afdeda33b") - .expect("must deserialize genesis coinbase txid") - { - return TxHeight::Confirmed(0); - } - match raw_height { - h if h <= 0 => { - debug_assert!( - h == 0 || h == -1, - "unexpected height ({}) from electrum server", - h - ); - TxHeight::Unconfirmed - } - h => { - let h = h as u32; - if h > tip_height { - TxHeight::Unconfirmed - } else { - TxHeight::Confirmed(h) - } - } - } -} - -/// Populates the update [`SparseChain`] with related transactions and associated [`ChainPosition`]s -/// of the provided `outpoints` (this is the tx which contains the outpoint and the one spending the -/// outpoint). -/// -/// Unfortunately, this is awkward to implement as electrum does not provide such an API. Instead, we -/// will get the tx history of the outpoint's spk and try to find the containing tx and the -/// spending tx. -fn populate_with_outpoints( - client: &Client, - update: &mut SparseChain, - outpoints: &mut impl Iterator, -) -> Result, InternalError> { - let tip = update - .latest_checkpoint() - .expect("update must atleast have one checkpoint"); - - let mut full_txs = HashMap::new(); - for outpoint in outpoints { - let txid = outpoint.txid; - let tx = client.transaction_get(&txid)?; - debug_assert_eq!(tx.txid(), txid); - let txout = match tx.output.get(outpoint.vout as usize) { - Some(txout) => txout, - None => continue, - }; - - // attempt to find the following transactions (alongside their chain positions), and - // add to our sparsechain `update`: - let mut has_residing = false; // tx in which the outpoint resides - let mut has_spending = false; // tx that spends the outpoint - for res in client.script_get_history(&txout.script_pubkey)? 
{ - if has_residing && has_spending { - break; - } - - if res.tx_hash == txid { - if has_residing { - continue; - } - has_residing = true; - full_txs.insert(res.tx_hash, tx.clone()); - } else { - if has_spending { - continue; - } - let res_tx = match full_txs.get(&res.tx_hash) { - Some(tx) => tx, - None => { - let res_tx = client.transaction_get(&res.tx_hash)?; - full_txs.insert(res.tx_hash, res_tx); - full_txs.get(&res.tx_hash).expect("just inserted") - } - }; - has_spending = res_tx - .input - .iter() - .any(|txin| txin.previous_output == outpoint); - if !has_spending { - continue; - } - }; - - let tx_height = determine_tx_height(res.height, tip.height, res.tx_hash); - - if let Err(failure) = update.insert_tx(res.tx_hash, tx_height) { - match failure { - sparse_chain::InsertTxError::TxTooHigh { .. } => { - unreachable!("we should never encounter this as we ensured height <= tip"); - } - sparse_chain::InsertTxError::TxMovedUnexpectedly { .. } => { - return Err(InternalError::Reorg); - } - } - } - } - } - Ok(full_txs) -} - -/// Populate an update [`SparseChain`] with transactions (and associated block positions) from -/// the given `txids`. -fn populate_with_txids( - client: &Client, - update: &mut SparseChain, - txids: &mut impl Iterator, -) -> Result<(), InternalError> { - let tip = update - .latest_checkpoint() - .expect("update must have atleast one checkpoint"); - for txid in txids { - let tx = match client.transaction_get(&txid) { - Ok(tx) => tx, - Err(electrum_client::Error::Protocol(_)) => continue, - Err(other_err) => return Err(other_err.into()), - }; - - let spk = tx - .output - .get(0) - .map(|txo| &txo.script_pubkey) - .expect("tx must have an output"); - - let tx_height = match client - .script_get_history(spk)? 
- .into_iter() - .find(|r| r.tx_hash == txid) - { - Some(r) => determine_tx_height(r.height, tip.height, r.tx_hash), - None => continue, - }; - - if let Err(failure) = update.insert_tx(txid, tx_height) { - match failure { - sparse_chain::InsertTxError::TxTooHigh { .. } => { - unreachable!("we should never encounter this as we ensured height <= tip"); - } - sparse_chain::InsertTxError::TxMovedUnexpectedly { .. } => { - return Err(InternalError::Reorg); - } - } - } - } - Ok(()) -} - -/// Populate an update [`SparseChain`] with transactions (and associated block positions) from -/// the transaction history of the provided `spk`s. -fn populate_with_spks( - client: &Client, - update: &mut SparseChain, - spks: &mut S, - stop_gap: usize, - batch_size: usize, -) -> Result, InternalError> -where - I: Ord + Clone, - S: Iterator, -{ - let tip = update.latest_checkpoint().map_or(0, |cp| cp.height); - let mut unused_spk_count = 0_usize; - let mut scanned_spks = BTreeMap::new(); - - loop { - let spks = (0..batch_size) - .map_while(|_| spks.next()) - .collect::>(); - if spks.is_empty() { - return Ok(scanned_spks); - } - - let spk_histories = client.batch_script_get_history(spks.iter().map(|(_, s)| s))?; - - for ((spk_index, spk), spk_history) in spks.into_iter().zip(spk_histories) { - if spk_history.is_empty() { - scanned_spks.insert(spk_index, (spk, false)); - unused_spk_count += 1; - if unused_spk_count > stop_gap { - return Ok(scanned_spks); - } - continue; - } else { - scanned_spks.insert(spk_index, (spk, true)); - unused_spk_count = 0; - } - - for tx in spk_history { - let tx_height = determine_tx_height(tx.height, tip, tx.tx_hash); - - if let Err(failure) = update.insert_tx(tx.tx_hash, tx_height) { - match failure { - sparse_chain::InsertTxError::TxTooHigh { .. } => { - unreachable!( - "we should never encounter this as we ensured height <= tip" - ); - } - sparse_chain::InsertTxError::TxMovedUnexpectedly { .. 
} => { - return Err(InternalError::Reorg); - } - } - } - } - } - } -} diff --git a/crates/esplora/Cargo.toml b/crates/esplora/Cargo.toml index e55b6fa0..c5d42d35 100644 --- a/crates/esplora/Cargo.toml +++ b/crates/esplora/Cargo.toml @@ -13,12 +13,12 @@ readme = "README.md" [dependencies] bdk_chain = { path = "../chain", version = "0.4.0", features = ["serde", "miniscript"] } -esplora-client = { version = "0.3", default-features = false } +esplora-client = { version = "0.5", default-features = false } async-trait = { version = "0.1.66", optional = true } futures = { version = "0.3.26", optional = true } [features] -default = ["async-https", "blocking"] +default = ["blocking"] async = ["async-trait", "futures", "esplora-client/async"] async-https = ["async", "esplora-client/async-https"] blocking = ["esplora-client/blocking"] diff --git a/crates/esplora/README.md b/crates/esplora/README.md index 253832a8..e2ffef59 100644 --- a/crates/esplora/README.md +++ b/crates/esplora/README.md @@ -27,7 +27,7 @@ To use the extension traits: // for blocking use bdk_esplora::EsploraExt; // for async -use bdk_esplora::EsploraAsyncExt; +// use bdk_esplora::EsploraAsyncExt; ``` For full examples, refer to [`example-crates/wallet_esplora`](https://github.com/bitcoindevkit/bdk/tree/master/example-crates/wallet_esplora) (blocking) and [`example-crates/wallet_esplora_async`](https://github.com/bitcoindevkit/bdk/tree/master/example-crates/wallet_esplora_async). 
diff --git a/crates/esplora/src/async_ext.rs b/crates/esplora/src/async_ext.rs index 475f4443..e496e415 100644 --- a/crates/esplora/src/async_ext.rs +++ b/crates/esplora/src/async_ext.rs @@ -1,16 +1,14 @@ -use std::collections::BTreeMap; - use async_trait::async_trait; use bdk_chain::{ bitcoin::{BlockHash, OutPoint, Script, Txid}, - chain_graph::ChainGraph, - keychain::KeychainScan, - sparse_chain, BlockId, ConfirmationTime, + collections::BTreeMap, + keychain::LocalUpdate, + BlockId, ConfirmationTimeAnchor, }; -use esplora_client::{Error, OutputStatus}; -use futures::stream::{FuturesOrdered, TryStreamExt}; +use esplora_client::{Error, OutputStatus, TxStatus}; +use futures::{stream::FuturesOrdered, TryStreamExt}; -use crate::map_confirmation_time; +use crate::map_confirmation_time_anchor; /// Trait to extend [`esplora_client::AsyncClient`] functionality. /// @@ -22,19 +20,18 @@ use crate::map_confirmation_time; #[cfg_attr(target_arch = "wasm32", async_trait(?Send))] #[cfg_attr(not(target_arch = "wasm32"), async_trait)] pub trait EsploraAsyncExt { - /// Scan the blockchain (via esplora) for the data specified and returns a [`KeychainScan`]. + /// Scan the blockchain (via esplora) for the data specified and returns a + /// [`LocalUpdate`]. /// /// - `local_chain`: the most recent block hashes present locally /// - `keychain_spks`: keychains that we want to scan transactions for - /// - `txids`: transactions for which we want updated [`ChainPosition`]s + /// - `txids`: transactions for which we want updated [`ConfirmationTimeAnchor`]s /// - `outpoints`: transactions associated with these outpoints (residing, spending) that we /// want to included in the update /// /// The scan for each keychain stops after a gap of `stop_gap` script pubkeys with no associated /// transactions. `parallel_requests` specifies the max number of HTTP requests to make in /// parallel. 
- /// - /// [`ChainPosition`]: bdk_chain::sparse_chain::ChainPosition #[allow(clippy::result_large_err)] // FIXME async fn scan( &self, @@ -47,7 +44,7 @@ pub trait EsploraAsyncExt { outpoints: impl IntoIterator + Send> + Send, stop_gap: usize, parallel_requests: usize, - ) -> Result, Error>; + ) -> Result, Error>; /// Convenience method to call [`scan`] without requiring a keychain. /// @@ -60,26 +57,23 @@ pub trait EsploraAsyncExt { txids: impl IntoIterator + Send> + Send, outpoints: impl IntoIterator + Send> + Send, parallel_requests: usize, - ) -> Result, Error> { - let wallet_scan = self - .scan( - local_chain, - [( - (), - misc_spks - .into_iter() - .enumerate() - .map(|(i, spk)| (i as u32, spk)), - )] - .into(), - txids, - outpoints, - usize::MAX, - parallel_requests, - ) - .await?; - - Ok(wallet_scan.update) + ) -> Result, Error> { + self.scan( + local_chain, + [( + (), + misc_spks + .into_iter() + .enumerate() + .map(|(i, spk)| (i as u32, spk)), + )] + .into(), + txids, + outpoints, + usize::MAX, + parallel_requests, + ) + .await } } @@ -98,47 +92,35 @@ impl EsploraAsyncExt for esplora_client::AsyncClient { outpoints: impl IntoIterator + Send> + Send, stop_gap: usize, parallel_requests: usize, - ) -> Result, Error> { - let txids = txids.into_iter(); - let outpoints = outpoints.into_iter(); + ) -> Result, Error> { let parallel_requests = Ord::max(parallel_requests, 1); - let mut scan = KeychainScan::default(); - let update = &mut scan.update; - let last_active_indices = &mut scan.last_active_indices; - for (&height, &original_hash) in local_chain.iter().rev() { - let update_block_id = BlockId { - height, - hash: self.get_block_hash(height).await?, - }; - let _ = update - .insert_checkpoint(update_block_id) - .expect("cannot repeat height here"); - if update_block_id.hash == original_hash { - break; - } - } - let tip_at_start = BlockId { - height: self.get_height().await?, - hash: self.get_tip_hash().await?, - }; - if let Err(failure) = 
update.insert_checkpoint(tip_at_start) { - match failure { - sparse_chain::InsertCheckpointError::HashNotMatching { .. } => { - // there was a re-org before we started scanning. We haven't consumed any iterators, so calling this function recursively is safe. - return EsploraAsyncExt::scan( - self, - local_chain, - keychain_spks, - txids, - outpoints, - stop_gap, - parallel_requests, - ) - .await; + let (mut update, tip_at_start) = loop { + let mut update = LocalUpdate::::default(); + + for (&height, &original_hash) in local_chain.iter().rev() { + let update_block_id = BlockId { + height, + hash: self.get_block_hash(height).await?, + }; + let _ = update + .chain + .insert_block(update_block_id) + .expect("cannot repeat height here"); + if update_block_id.hash == original_hash { + break; } } - } + + let tip_at_start = BlockId { + height: self.get_height().await?, + hash: self.get_tip_hash().await?, + }; + + if update.chain.insert_block(tip_at_start).is_ok() { + break (update, tip_at_start); + } + }; for (keychain, spks) in keychain_spks { let mut spks = spks.into_iter(); @@ -147,7 +129,7 @@ impl EsploraAsyncExt for esplora_client::AsyncClient { type IndexWithTxs = (u32, Vec); loop { - let futures: FuturesOrdered<_> = (0..parallel_requests) + let futures = (0..parallel_requests) .filter_map(|_| { let (index, script) = spks.next()?; let client = self.clone(); @@ -178,13 +160,11 @@ impl EsploraAsyncExt for esplora_client::AsyncClient { Result::<_, esplora_client::Error>::Ok((index, related_txs)) }) }) - .collect(); + .collect::>(); let n_futures = futures.len(); - let idx_with_tx: Vec = futures.try_collect().await?; - - for (index, related_txs) in idx_with_tx { + for (index, related_txs) in futures.try_collect::>().await? 
{ if related_txs.is_empty() { empty_scripts += 1; } else { @@ -192,22 +172,11 @@ impl EsploraAsyncExt for esplora_client::AsyncClient { empty_scripts = 0; } for tx in related_txs { - let confirmation_time = - map_confirmation_time(&tx.status, tip_at_start.height); + let anchor = map_confirmation_time_anchor(&tx.status, tip_at_start); - if let Err(failure) = update.insert_tx(tx.to_tx(), confirmation_time) { - use bdk_chain::{ - chain_graph::InsertTxError, sparse_chain::InsertTxError::*, - }; - match failure { - InsertTxError::Chain(TxTooHigh { .. }) => { - unreachable!("chain position already checked earlier") - } - InsertTxError::Chain(TxMovedUnexpectedly { .. }) - | InsertTxError::UnresolvableConflict(_) => { - /* implies reorg during a scan. We deal with that below */ - } - } + let _ = update.graph.insert_tx(tx.to_tx()); + if let Some(anchor) = anchor { + let _ = update.graph.insert_anchor(tx.txid, anchor); } } } @@ -218,36 +187,37 @@ impl EsploraAsyncExt for esplora_client::AsyncClient { } if let Some(last_active_index) = last_active_index { - last_active_indices.insert(keychain, last_active_index); + update.keychain.insert(keychain, last_active_index); } } - for txid in txids { - let (tx, tx_status) = - match (self.get_tx(&txid).await?, self.get_tx_status(&txid).await?) { - (Some(tx), Some(tx_status)) => (tx, tx_status), - _ => continue, - }; - - let confirmation_time = map_confirmation_time(&tx_status, tip_at_start.height); - - if let Err(failure) = update.insert_tx(tx, confirmation_time) { - use bdk_chain::{chain_graph::InsertTxError, sparse_chain::InsertTxError::*}; - match failure { - InsertTxError::Chain(TxTooHigh { .. }) => { - unreachable!("chain position already checked earlier") - } - InsertTxError::Chain(TxMovedUnexpectedly { .. }) - | InsertTxError::UnresolvableConflict(_) => { - /* implies reorg during a scan. We deal with that below */ + for txid in txids.into_iter() { + if update.graph.get_tx(txid).is_none() { + match self.get_tx(&txid).await? 
{ + Some(tx) => { + let _ = update.graph.insert_tx(tx); } + None => continue, } } + match self.get_tx_status(&txid).await? { + tx_status if tx_status.confirmed => { + if let Some(anchor) = map_confirmation_time_anchor(&tx_status, tip_at_start) { + let _ = update.graph.insert_anchor(txid, anchor); + } + } + _ => continue, + } } - for op in outpoints { + for op in outpoints.into_iter() { let mut op_txs = Vec::with_capacity(2); - if let (Some(tx), Some(tx_status)) = ( + if let ( + Some(tx), + tx_status @ TxStatus { + confirmed: true, .. + }, + ) = ( self.get_tx(&op.txid).await?, self.get_tx_status(&op.txid).await?, ) { @@ -265,39 +235,24 @@ impl EsploraAsyncExt for esplora_client::AsyncClient { } for (tx, status) in op_txs { - let confirmation_time = map_confirmation_time(&status, tip_at_start.height); + let txid = tx.txid(); + let anchor = map_confirmation_time_anchor(&status, tip_at_start); - if let Err(failure) = update.insert_tx(tx, confirmation_time) { - use bdk_chain::{chain_graph::InsertTxError, sparse_chain::InsertTxError::*}; - match failure { - InsertTxError::Chain(TxTooHigh { .. }) => { - unreachable!("chain position already checked earlier") - } - InsertTxError::Chain(TxMovedUnexpectedly { .. }) - | InsertTxError::UnresolvableConflict(_) => { - /* implies reorg during a scan. We deal with that below */ - } - } + let _ = update.graph.insert_tx(tx); + if let Some(anchor) = anchor { + let _ = update.graph.insert_anchor(txid, anchor); } } } - let reorg_occurred = { - if let Some(checkpoint) = ChainGraph::chain(update).latest_checkpoint() { - self.get_block_hash(checkpoint.height).await? != checkpoint.hash - } else { - false - } - }; - - if reorg_occurred { - // A reorg occurred, so let's find out where all the txids we found are in the chain now. 
- // XXX: collect required because of weird type naming issues - let txids_found = ChainGraph::chain(update) - .txids() - .map(|(_, txid)| *txid) + if tip_at_start.hash != self.get_block_hash(tip_at_start.height).await? { + // A reorg occurred, so let's find out where all the txids we found are now in the chain + let txids_found = update + .graph + .full_txs() + .map(|tx_node| tx_node.txid) .collect::>(); - scan.update = EsploraAsyncExt::scan_without_keychain( + update.chain = EsploraAsyncExt::scan_without_keychain( self, local_chain, [], @@ -305,9 +260,10 @@ impl EsploraAsyncExt for esplora_client::AsyncClient { [], parallel_requests, ) - .await?; + .await? + .chain; } - Ok(scan) + Ok(update) } } diff --git a/crates/esplora/src/blocking_ext.rs b/crates/esplora/src/blocking_ext.rs index 092c6069..6e1c6199 100644 --- a/crates/esplora/src/blocking_ext.rs +++ b/crates/esplora/src/blocking_ext.rs @@ -1,14 +1,10 @@ -use std::collections::BTreeMap; +use bdk_chain::bitcoin::{BlockHash, OutPoint, Script, Txid}; +use bdk_chain::collections::BTreeMap; +use bdk_chain::BlockId; +use bdk_chain::{keychain::LocalUpdate, ConfirmationTimeAnchor}; +use esplora_client::{Error, OutputStatus, TxStatus}; -use bdk_chain::{ - bitcoin::{BlockHash, OutPoint, Script, Txid}, - chain_graph::ChainGraph, - keychain::KeychainScan, - sparse_chain, BlockId, ConfirmationTime, -}; -use esplora_client::{Error, OutputStatus}; - -use crate::map_confirmation_time; +use crate::map_confirmation_time_anchor; /// Trait to extend [`esplora_client::BlockingClient`] functionality. /// @@ -16,19 +12,18 @@ use crate::map_confirmation_time; /// /// [crate-level documentation]: crate pub trait EsploraExt { - /// Scan the blockchain (via esplora) for the data specified and returns a [`KeychainScan`]. + /// Scan the blockchain (via esplora) for the data specified and returns a + /// [`LocalUpdate`]. 
/// /// - `local_chain`: the most recent block hashes present locally /// - `keychain_spks`: keychains that we want to scan transactions for - /// - `txids`: transactions for which we want updated [`ChainPosition`]s + /// - `txids`: transactions for which we want updated [`ConfirmationTimeAnchor`]s /// - `outpoints`: transactions associated with these outpoints (residing, spending) that we /// want to included in the update /// /// The scan for each keychain stops after a gap of `stop_gap` script pubkeys with no associated /// transactions. `parallel_requests` specifies the max number of HTTP requests to make in /// parallel. - /// - /// [`ChainPosition`]: bdk_chain::sparse_chain::ChainPosition #[allow(clippy::result_large_err)] // FIXME fn scan( &self, @@ -38,7 +33,7 @@ pub trait EsploraExt { outpoints: impl IntoIterator, stop_gap: usize, parallel_requests: usize, - ) -> Result, Error>; + ) -> Result, Error>; /// Convenience method to call [`scan`] without requiring a keychain. /// @@ -51,8 +46,8 @@ pub trait EsploraExt { txids: impl IntoIterator, outpoints: impl IntoIterator, parallel_requests: usize, - ) -> Result, Error> { - let wallet_scan = self.scan( + ) -> Result, Error> { + self.scan( local_chain, [( (), @@ -66,9 +61,7 @@ pub trait EsploraExt { outpoints, usize::MAX, parallel_requests, - )?; - - Ok(wallet_scan.update) + ) } } @@ -81,44 +74,35 @@ impl EsploraExt for esplora_client::BlockingClient { outpoints: impl IntoIterator, stop_gap: usize, parallel_requests: usize, - ) -> Result, Error> { + ) -> Result, Error> { let parallel_requests = Ord::max(parallel_requests, 1); - let mut scan = KeychainScan::default(); - let update = &mut scan.update; - let last_active_indices = &mut scan.last_active_indices; - for (&height, &original_hash) in local_chain.iter().rev() { - let update_block_id = BlockId { - height, - hash: self.get_block_hash(height)?, - }; - let _ = update - .insert_checkpoint(update_block_id) - .expect("cannot repeat height here"); - if 
update_block_id.hash == original_hash { - break; - } - } - let tip_at_start = BlockId { - height: self.get_height()?, - hash: self.get_tip_hash()?, - }; - if let Err(failure) = update.insert_checkpoint(tip_at_start) { - match failure { - sparse_chain::InsertCheckpointError::HashNotMatching { .. } => { - // there was a re-org before we started scanning. We haven't consumed any iterators, so calling this function recursively is safe. - return EsploraExt::scan( - self, - local_chain, - keychain_spks, - txids, - outpoints, - stop_gap, - parallel_requests, - ); + let (mut update, tip_at_start) = loop { + let mut update = LocalUpdate::::default(); + + for (&height, &original_hash) in local_chain.iter().rev() { + let update_block_id = BlockId { + height, + hash: self.get_block_hash(height)?, + }; + let _ = update + .chain + .insert_block(update_block_id) + .expect("cannot repeat height here"); + if update_block_id.hash == original_hash { + break; } } - } + + let tip_at_start = BlockId { + height: self.get_height()?, + hash: self.get_tip_hash()?, + }; + + if update.chain.insert_block(tip_at_start).is_ok() { + break (update, tip_at_start); + } + }; for (keychain, spks) in keychain_spks { let mut spks = spks.into_iter(); @@ -171,22 +155,11 @@ impl EsploraExt for esplora_client::BlockingClient { empty_scripts = 0; } for tx in related_txs { - let confirmation_time = - map_confirmation_time(&tx.status, tip_at_start.height); + let anchor = map_confirmation_time_anchor(&tx.status, tip_at_start); - if let Err(failure) = update.insert_tx(tx.to_tx(), confirmation_time) { - use bdk_chain::{ - chain_graph::InsertTxError, sparse_chain::InsertTxError::*, - }; - match failure { - InsertTxError::Chain(TxTooHigh { .. }) => { - unreachable!("chain position already checked earlier") - } - InsertTxError::Chain(TxMovedUnexpectedly { .. }) - | InsertTxError::UnresolvableConflict(_) => { - /* implies reorg during a scan. 
We deal with that below */ - } - } + let _ = update.graph.insert_tx(tx.to_tx()); + if let Some(anchor) = anchor { + let _ = update.graph.insert_anchor(tx.txid, anchor); } } } @@ -197,36 +170,39 @@ impl EsploraExt for esplora_client::BlockingClient { } if let Some(last_active_index) = last_active_index { - last_active_indices.insert(keychain, last_active_index); + update.keychain.insert(keychain, last_active_index); } } for txid in txids.into_iter() { - let (tx, tx_status) = match (self.get_tx(&txid)?, self.get_tx_status(&txid)?) { - (Some(tx), Some(tx_status)) => (tx, tx_status), - _ => continue, - }; - - let confirmation_time = map_confirmation_time(&tx_status, tip_at_start.height); - - if let Err(failure) = update.insert_tx(tx, confirmation_time) { - use bdk_chain::{chain_graph::InsertTxError, sparse_chain::InsertTxError::*}; - match failure { - InsertTxError::Chain(TxTooHigh { .. }) => { - unreachable!("chain position already checked earlier") + if update.graph.get_tx(txid).is_none() { + match self.get_tx(&txid)? { + Some(tx) => { + let _ = update.graph.insert_tx(tx); } - InsertTxError::Chain(TxMovedUnexpectedly { .. }) - | InsertTxError::UnresolvableConflict(_) => { - /* implies reorg during a scan. We deal with that below */ + None => continue, + } + } + match self.get_tx_status(&txid)? { + tx_status @ TxStatus { + confirmed: true, .. + } => { + if let Some(anchor) = map_confirmation_time_anchor(&tx_status, tip_at_start) { + let _ = update.graph.insert_anchor(txid, anchor); } } + _ => continue, } } for op in outpoints.into_iter() { let mut op_txs = Vec::with_capacity(2); - if let (Some(tx), Some(tx_status)) = - (self.get_tx(&op.txid)?, self.get_tx_status(&op.txid)?) + if let ( + Some(tx), + tx_status @ TxStatus { + confirmed: true, .. + }, + ) = (self.get_tx(&op.txid)?, self.get_tx_status(&op.txid)?) 
{ op_txs.push((tx, tx_status)); if let Some(OutputStatus { @@ -242,48 +218,34 @@ impl EsploraExt for esplora_client::BlockingClient { } for (tx, status) in op_txs { - let confirmation_time = map_confirmation_time(&status, tip_at_start.height); + let txid = tx.txid(); + let anchor = map_confirmation_time_anchor(&status, tip_at_start); - if let Err(failure) = update.insert_tx(tx, confirmation_time) { - use bdk_chain::{chain_graph::InsertTxError, sparse_chain::InsertTxError::*}; - match failure { - InsertTxError::Chain(TxTooHigh { .. }) => { - unreachable!("chain position already checked earlier") - } - InsertTxError::Chain(TxMovedUnexpectedly { .. }) - | InsertTxError::UnresolvableConflict(_) => { - /* implies reorg during a scan. We deal with that below */ - } - } + let _ = update.graph.insert_tx(tx); + if let Some(anchor) = anchor { + let _ = update.graph.insert_anchor(txid, anchor); } } } - let reorg_occurred = { - if let Some(checkpoint) = ChainGraph::chain(update).latest_checkpoint() { - self.get_block_hash(checkpoint.height)? != checkpoint.hash - } else { - false - } - }; - - if reorg_occurred { - // A reorg occurred, so let's find out where all the txids we found are now in the chain. - // XXX: collect required because of weird type naming issues - let txids_found = ChainGraph::chain(update) - .txids() - .map(|(_, txid)| *txid) + if tip_at_start.hash != self.get_block_hash(tip_at_start.height)? { + // A reorg occurred, so let's find out where all the txids we found are now in the chain + let txids_found = update + .graph + .full_txs() + .map(|tx_node| tx_node.txid) .collect::>(); - scan.update = EsploraExt::scan_without_keychain( + update.chain = EsploraExt::scan_without_keychain( self, local_chain, [], txids_found, [], parallel_requests, - )?; + )? 
+ .chain; } - Ok(scan) + Ok(update) } } diff --git a/crates/esplora/src/lib.rs b/crates/esplora/src/lib.rs index 315d050d..d5f8d8af 100644 --- a/crates/esplora/src/lib.rs +++ b/crates/esplora/src/lib.rs @@ -1,9 +1,8 @@ #![doc = include_str!("../README.md")] -use bdk_chain::{BlockId, ConfirmationTime, ConfirmationTimeAnchor}; +use bdk_chain::{BlockId, ConfirmationTimeAnchor}; use esplora_client::TxStatus; pub use esplora_client; -pub mod v2; #[cfg(feature = "blocking")] mod blocking_ext; @@ -15,18 +14,6 @@ mod async_ext; #[cfg(feature = "async")] pub use async_ext::*; -pub(crate) fn map_confirmation_time( - tx_status: &TxStatus, - height_at_start: u32, -) -> ConfirmationTime { - match (tx_status.block_time, tx_status.block_height) { - (Some(time), Some(height)) if height <= height_at_start => { - ConfirmationTime::Confirmed { height, time } - } - _ => ConfirmationTime::Unconfirmed { last_seen: 0 }, - } -} - pub(crate) fn map_confirmation_time_anchor( tx_status: &TxStatus, tip_at_start: BlockId, diff --git a/crates/esplora/src/v2/async_ext.rs b/crates/esplora/src/v2/async_ext.rs deleted file mode 100644 index ac6eed91..00000000 --- a/crates/esplora/src/v2/async_ext.rs +++ /dev/null @@ -1,266 +0,0 @@ -use async_trait::async_trait; -use bdk_chain::{ - bitcoin::{BlockHash, OutPoint, Script, Txid}, - collections::BTreeMap, - keychain::LocalUpdate, - BlockId, ConfirmationTimeAnchor, -}; -use esplora_client::{Error, OutputStatus}; -use futures::{stream::FuturesOrdered, TryStreamExt}; - -use crate::map_confirmation_time_anchor; - -/// Trait to extend [`esplora_client::AsyncClient`] functionality. -/// -/// This is the async version of [`EsploraExt`]. Refer to -/// [crate-level documentation] for more. 
-/// -/// [`EsploraExt`]: crate::EsploraExt -/// [crate-level documentation]: crate -#[cfg_attr(target_arch = "wasm32", async_trait(?Send))] -#[cfg_attr(not(target_arch = "wasm32"), async_trait)] -pub trait EsploraAsyncExt { - /// Scan the blockchain (via esplora) for the data specified and returns a - /// [`LocalUpdate`]. - /// - /// - `local_chain`: the most recent block hashes present locally - /// - `keychain_spks`: keychains that we want to scan transactions for - /// - `txids`: transactions for which we want updated [`ChainPosition`]s - /// - `outpoints`: transactions associated with these outpoints (residing, spending) that we - /// want to included in the update - /// - /// The scan for each keychain stops after a gap of `stop_gap` script pubkeys with no associated - /// transactions. `parallel_requests` specifies the max number of HTTP requests to make in - /// parallel. - /// - /// [`ChainPosition`]: bdk_chain::sparse_chain::ChainPosition - #[allow(clippy::result_large_err)] // FIXME - async fn scan( - &self, - local_chain: &BTreeMap, - keychain_spks: BTreeMap< - K, - impl IntoIterator + Send> + Send, - >, - txids: impl IntoIterator + Send> + Send, - outpoints: impl IntoIterator + Send> + Send, - stop_gap: usize, - parallel_requests: usize, - ) -> Result, Error>; - - /// Convenience method to call [`scan`] without requiring a keychain. 
- /// - /// [`scan`]: EsploraAsyncExt::scan - #[allow(clippy::result_large_err)] // FIXME - async fn scan_without_keychain( - &self, - local_chain: &BTreeMap, - misc_spks: impl IntoIterator + Send> + Send, - txids: impl IntoIterator + Send> + Send, - outpoints: impl IntoIterator + Send> + Send, - parallel_requests: usize, - ) -> Result, Error> { - self.scan( - local_chain, - [( - (), - misc_spks - .into_iter() - .enumerate() - .map(|(i, spk)| (i as u32, spk)), - )] - .into(), - txids, - outpoints, - usize::MAX, - parallel_requests, - ) - .await - } -} - -#[cfg_attr(target_arch = "wasm32", async_trait(?Send))] -#[cfg_attr(not(target_arch = "wasm32"), async_trait)] -impl EsploraAsyncExt for esplora_client::AsyncClient { - #[allow(clippy::result_large_err)] // FIXME - async fn scan( - &self, - local_chain: &BTreeMap, - keychain_spks: BTreeMap< - K, - impl IntoIterator + Send> + Send, - >, - txids: impl IntoIterator + Send> + Send, - outpoints: impl IntoIterator + Send> + Send, - stop_gap: usize, - parallel_requests: usize, - ) -> Result, Error> { - let parallel_requests = Ord::max(parallel_requests, 1); - - let (mut update, tip_at_start) = loop { - let mut update = LocalUpdate::::default(); - - for (&height, &original_hash) in local_chain.iter().rev() { - let update_block_id = BlockId { - height, - hash: self.get_block_hash(height).await?, - }; - let _ = update - .chain - .insert_block(update_block_id) - .expect("cannot repeat height here"); - if update_block_id.hash == original_hash { - break; - } - } - - let tip_at_start = BlockId { - height: self.get_height().await?, - hash: self.get_tip_hash().await?, - }; - - if update.chain.insert_block(tip_at_start).is_ok() { - break (update, tip_at_start); - } - }; - - for (keychain, spks) in keychain_spks { - let mut spks = spks.into_iter(); - let mut last_active_index = None; - let mut empty_scripts = 0; - type IndexWithTxs = (u32, Vec); - - loop { - let futures = (0..parallel_requests) - .filter_map(|_| { - let (index, 
script) = spks.next()?; - let client = self.clone(); - Some(async move { - let mut related_txs = client.scripthash_txs(&script, None).await?; - - let n_confirmed = - related_txs.iter().filter(|tx| tx.status.confirmed).count(); - // esplora pages on 25 confirmed transactions. If there are 25 or more we - // keep requesting to see if there's more. - if n_confirmed >= 25 { - loop { - let new_related_txs = client - .scripthash_txs( - &script, - Some(related_txs.last().unwrap().txid), - ) - .await?; - let n = new_related_txs.len(); - related_txs.extend(new_related_txs); - // we've reached the end - if n < 25 { - break; - } - } - } - - Result::<_, esplora_client::Error>::Ok((index, related_txs)) - }) - }) - .collect::>(); - - let n_futures = futures.len(); - - for (index, related_txs) in futures.try_collect::>().await? { - if related_txs.is_empty() { - empty_scripts += 1; - } else { - last_active_index = Some(index); - empty_scripts = 0; - } - for tx in related_txs { - let anchor = map_confirmation_time_anchor(&tx.status, tip_at_start); - - let _ = update.graph.insert_tx(tx.to_tx()); - if let Some(anchor) = anchor { - let _ = update.graph.insert_anchor(tx.txid, anchor); - } - } - } - - if n_futures == 0 || empty_scripts >= stop_gap { - break; - } - } - - if let Some(last_active_index) = last_active_index { - update.keychain.insert(keychain, last_active_index); - } - } - - for txid in txids.into_iter() { - if update.graph.get_tx(txid).is_none() { - match self.get_tx(&txid).await? { - Some(tx) => { - let _ = update.graph.insert_tx(tx); - } - None => continue, - } - } - match self.get_tx_status(&txid).await? 
{ - Some(tx_status) => { - if let Some(anchor) = map_confirmation_time_anchor(&tx_status, tip_at_start) { - let _ = update.graph.insert_anchor(txid, anchor); - } - } - None => continue, - } - } - - for op in outpoints.into_iter() { - let mut op_txs = Vec::with_capacity(2); - if let (Some(tx), Some(tx_status)) = ( - self.get_tx(&op.txid).await?, - self.get_tx_status(&op.txid).await?, - ) { - op_txs.push((tx, tx_status)); - if let Some(OutputStatus { - txid: Some(txid), - status: Some(spend_status), - .. - }) = self.get_output_status(&op.txid, op.vout as _).await? - { - if let Some(spend_tx) = self.get_tx(&txid).await? { - op_txs.push((spend_tx, spend_status)); - } - } - } - - for (tx, status) in op_txs { - let txid = tx.txid(); - let anchor = map_confirmation_time_anchor(&status, tip_at_start); - - let _ = update.graph.insert_tx(tx); - if let Some(anchor) = anchor { - let _ = update.graph.insert_anchor(txid, anchor); - } - } - } - - if tip_at_start.hash != self.get_block_hash(tip_at_start.height).await? { - // A reorg occurred, so let's find out where all the txids we found are now in the chain - let txids_found = update - .graph - .full_txs() - .map(|tx_node| tx_node.txid) - .collect::>(); - update.chain = EsploraAsyncExt::scan_without_keychain( - self, - local_chain, - [], - txids_found, - [], - parallel_requests, - ) - .await? - .chain; - } - - Ok(update) - } -} diff --git a/crates/esplora/src/v2/blocking_ext.rs b/crates/esplora/src/v2/blocking_ext.rs deleted file mode 100644 index 63e4c923..00000000 --- a/crates/esplora/src/v2/blocking_ext.rs +++ /dev/null @@ -1,247 +0,0 @@ -use bdk_chain::bitcoin::{BlockHash, OutPoint, Script, Txid}; -use bdk_chain::collections::BTreeMap; -use bdk_chain::BlockId; -use bdk_chain::{keychain::LocalUpdate, ConfirmationTimeAnchor}; -use esplora_client::{Error, OutputStatus}; - -use crate::map_confirmation_time_anchor; - -/// Trait to extend [`esplora_client::BlockingClient`] functionality. 
-/// -/// Refer to [crate-level documentation] for more. -/// -/// [crate-level documentation]: crate -pub trait EsploraExt { - /// Scan the blockchain (via esplora) for the data specified and returns a - /// [`LocalUpdate`]. - /// - /// - `local_chain`: the most recent block hashes present locally - /// - `keychain_spks`: keychains that we want to scan transactions for - /// - `txids`: transactions for which we want updated [`ChainPosition`]s - /// - `outpoints`: transactions associated with these outpoints (residing, spending) that we - /// want to included in the update - /// - /// The scan for each keychain stops after a gap of `stop_gap` script pubkeys with no associated - /// transactions. `parallel_requests` specifies the max number of HTTP requests to make in - /// parallel. - /// - /// [`ChainPosition`]: bdk_chain::sparse_chain::ChainPosition - #[allow(clippy::result_large_err)] // FIXME - fn scan( - &self, - local_chain: &BTreeMap, - keychain_spks: BTreeMap>, - txids: impl IntoIterator, - outpoints: impl IntoIterator, - stop_gap: usize, - parallel_requests: usize, - ) -> Result, Error>; - - /// Convenience method to call [`scan`] without requiring a keychain. 
- /// - /// [`scan`]: EsploraExt::scan - #[allow(clippy::result_large_err)] // FIXME - fn scan_without_keychain( - &self, - local_chain: &BTreeMap, - misc_spks: impl IntoIterator, - txids: impl IntoIterator, - outpoints: impl IntoIterator, - parallel_requests: usize, - ) -> Result, Error> { - self.scan( - local_chain, - [( - (), - misc_spks - .into_iter() - .enumerate() - .map(|(i, spk)| (i as u32, spk)), - )] - .into(), - txids, - outpoints, - usize::MAX, - parallel_requests, - ) - } -} - -impl EsploraExt for esplora_client::BlockingClient { - fn scan( - &self, - local_chain: &BTreeMap, - keychain_spks: BTreeMap>, - txids: impl IntoIterator, - outpoints: impl IntoIterator, - stop_gap: usize, - parallel_requests: usize, - ) -> Result, Error> { - let parallel_requests = Ord::max(parallel_requests, 1); - - let (mut update, tip_at_start) = loop { - let mut update = LocalUpdate::::default(); - - for (&height, &original_hash) in local_chain.iter().rev() { - let update_block_id = BlockId { - height, - hash: self.get_block_hash(height)?, - }; - let _ = update - .chain - .insert_block(update_block_id) - .expect("cannot repeat height here"); - if update_block_id.hash == original_hash { - break; - } - } - - let tip_at_start = BlockId { - height: self.get_height()?, - hash: self.get_tip_hash()?, - }; - - if update.chain.insert_block(tip_at_start).is_ok() { - break (update, tip_at_start); - } - }; - - for (keychain, spks) in keychain_spks { - let mut spks = spks.into_iter(); - let mut last_active_index = None; - let mut empty_scripts = 0; - type IndexWithTxs = (u32, Vec); - - loop { - let handles = (0..parallel_requests) - .filter_map( - |_| -> Option>> { - let (index, script) = spks.next()?; - let client = self.clone(); - Some(std::thread::spawn(move || { - let mut related_txs = client.scripthash_txs(&script, None)?; - - let n_confirmed = - related_txs.iter().filter(|tx| tx.status.confirmed).count(); - // esplora pages on 25 confirmed transactions. 
If there are 25 or more we - // keep requesting to see if there's more. - if n_confirmed >= 25 { - loop { - let new_related_txs = client.scripthash_txs( - &script, - Some(related_txs.last().unwrap().txid), - )?; - let n = new_related_txs.len(); - related_txs.extend(new_related_txs); - // we've reached the end - if n < 25 { - break; - } - } - } - - Result::<_, esplora_client::Error>::Ok((index, related_txs)) - })) - }, - ) - .collect::>(); - - let n_handles = handles.len(); - - for handle in handles { - let (index, related_txs) = handle.join().unwrap()?; // TODO: don't unwrap - if related_txs.is_empty() { - empty_scripts += 1; - } else { - last_active_index = Some(index); - empty_scripts = 0; - } - for tx in related_txs { - let anchor = map_confirmation_time_anchor(&tx.status, tip_at_start); - - let _ = update.graph.insert_tx(tx.to_tx()); - if let Some(anchor) = anchor { - let _ = update.graph.insert_anchor(tx.txid, anchor); - } - } - } - - if n_handles == 0 || empty_scripts >= stop_gap { - break; - } - } - - if let Some(last_active_index) = last_active_index { - update.keychain.insert(keychain, last_active_index); - } - } - - for txid in txids.into_iter() { - if update.graph.get_tx(txid).is_none() { - match self.get_tx(&txid)? { - Some(tx) => { - let _ = update.graph.insert_tx(tx); - } - None => continue, - } - } - match self.get_tx_status(&txid)? { - Some(tx_status) => { - if let Some(anchor) = map_confirmation_time_anchor(&tx_status, tip_at_start) { - let _ = update.graph.insert_anchor(txid, anchor); - } - } - None => continue, - } - } - - for op in outpoints.into_iter() { - let mut op_txs = Vec::with_capacity(2); - if let (Some(tx), Some(tx_status)) = - (self.get_tx(&op.txid)?, self.get_tx_status(&op.txid)?) - { - op_txs.push((tx, tx_status)); - if let Some(OutputStatus { - txid: Some(txid), - status: Some(spend_status), - .. - }) = self.get_output_status(&op.txid, op.vout as _)? - { - if let Some(spend_tx) = self.get_tx(&txid)? 
{ - op_txs.push((spend_tx, spend_status)); - } - } - } - - for (tx, status) in op_txs { - let txid = tx.txid(); - let anchor = map_confirmation_time_anchor(&status, tip_at_start); - - let _ = update.graph.insert_tx(tx); - if let Some(anchor) = anchor { - let _ = update.graph.insert_anchor(txid, anchor); - } - } - } - - if tip_at_start.hash != self.get_block_hash(tip_at_start.height)? { - // A reorg occurred, so let's find out where all the txids we found are now in the chain - let txids_found = update - .graph - .full_txs() - .map(|tx_node| tx_node.txid) - .collect::>(); - update.chain = EsploraExt::scan_without_keychain( - self, - local_chain, - [], - txids_found, - [], - parallel_requests, - )? - .chain; - } - - Ok(update) - } -} diff --git a/crates/esplora/src/v2/mod.rs b/crates/esplora/src/v2/mod.rs deleted file mode 100644 index 5720e2dd..00000000 --- a/crates/esplora/src/v2/mod.rs +++ /dev/null @@ -1,9 +0,0 @@ -#[cfg(feature = "blocking")] -mod blocking_ext; -#[cfg(feature = "blocking")] -pub use blocking_ext::*; - -#[cfg(feature = "async")] -mod async_ext; -#[cfg(feature = "async")] -pub use async_ext::*; diff --git a/crates/file_store/README.md b/crates/file_store/README.md index 076142f8..4a334fcb 100644 --- a/crates/file_store/README.md +++ b/crates/file_store/README.md @@ -1,9 +1,9 @@ # BDK File Store This is a simple append-only flat file implementation of -[`Persist`](`bdk_chain::keychain::persist::Persist`). +[`Persist`](`bdk_chain::Persist`). -The main structure is [`KeychainStore`](`crate::KeychainStore`), which can be used with [`bdk`]'s +The main structure is [`Store`](`crate::Store`), which can be used with [`bdk`]'s `Wallet` to persist wallet data into a flat file. [`bdk`]: https://docs.rs/bdk/latest diff --git a/crates/file_store/src/keychain_store.rs b/crates/file_store/src/keychain_store.rs deleted file mode 100644 index 5f5074d5..00000000 --- a/crates/file_store/src/keychain_store.rs +++ /dev/null @@ -1,313 +0,0 @@ -//! 
Module for persisting data on disk. -//! -//! The star of the show is [`KeychainStore`], which maintains an append-only file of -//! [`KeychainChangeSet`]s which can be used to restore a [`KeychainTracker`]. -use bdk_chain::{ - keychain::{KeychainChangeSet, KeychainTracker}, - sparse_chain, -}; -use bincode::Options; -use std::{ - fs::{File, OpenOptions}, - io::{self, Read, Seek, Write}, - path::Path, -}; - -use crate::{bincode_options, EntryIter, IterError}; - -/// BDK File Store magic bytes length. -const MAGIC_BYTES_LEN: usize = 12; - -/// BDK File Store magic bytes. -const MAGIC_BYTES: [u8; MAGIC_BYTES_LEN] = [98, 100, 107, 102, 115, 48, 48, 48, 48, 48, 48, 48]; - -/// Persists an append only list of `KeychainChangeSet` to a single file. -/// [`KeychainChangeSet`] record the changes made to a [`KeychainTracker`]. -#[derive(Debug)] -pub struct KeychainStore { - db_file: File, - changeset_type_params: core::marker::PhantomData<(K, P)>, -} - -impl KeychainStore -where - K: Ord + Clone + core::fmt::Debug, - P: sparse_chain::ChainPosition, - KeychainChangeSet: serde::Serialize + serde::de::DeserializeOwned, -{ - /// Creates a new store from a [`File`]. - /// - /// The file must have been opened with read and write permissions. - /// - /// [`File`]: std::fs::File - pub fn new(mut file: File) -> Result { - file.rewind()?; - - let mut magic_bytes = [0_u8; MAGIC_BYTES_LEN]; - file.read_exact(&mut magic_bytes)?; - - if magic_bytes != MAGIC_BYTES { - return Err(FileError::InvalidMagicBytes(magic_bytes)); - } - - Ok(Self { - db_file: file, - changeset_type_params: Default::default(), - }) - } - - /// Creates or loads a store from `db_path`. If no file exists there, it will be created. 
- pub fn new_from_path>(db_path: D) -> Result { - let already_exists = db_path.as_ref().exists(); - - let mut db_file = OpenOptions::new() - .read(true) - .write(true) - .create(true) - .open(db_path)?; - - if !already_exists { - db_file.write_all(&MAGIC_BYTES)?; - } - - Self::new(db_file) - } - - /// Iterates over the stored changeset from first to last, changing the seek position at each - /// iteration. - /// - /// The iterator may fail to read an entry and therefore return an error. However, the first time - /// it returns an error will be the last. After doing so, the iterator will always yield `None`. - /// - /// **WARNING**: This method changes the write position in the underlying file. You should - /// always iterate over all entries until `None` is returned if you want your next write to go - /// at the end; otherwise, you will write over existing entries. - pub fn iter_changesets(&mut self) -> Result>, io::Error> { - Ok(EntryIter::new(MAGIC_BYTES_LEN as u64, &mut self.db_file)) - } - - /// Loads all the changesets that have been stored as one giant changeset. - /// - /// This function returns a tuple of the aggregate changeset and a result that indicates - /// whether an error occurred while reading or deserializing one of the entries. If so the - /// changeset will consist of all of those it was able to read. - /// - /// You should usually check the error. In many applications, it may make sense to do a full - /// wallet scan with a stop-gap after getting an error, since it is likely that one of the - /// changesets it was unable to read changed the derivation indices of the tracker. - /// - /// **WARNING**: This method changes the write position of the underlying file. The next - /// changeset will be written over the erroring entry (or the end of the file if none existed). 
- pub fn aggregate_changeset(&mut self) -> (KeychainChangeSet, Result<(), IterError>) { - let mut changeset = KeychainChangeSet::default(); - let result = (|| { - let iter_changeset = self.iter_changesets()?; - for next_changeset in iter_changeset { - changeset.append(next_changeset?); - } - Ok(()) - })(); - - (changeset, result) - } - - /// Reads and applies all the changesets stored sequentially to the tracker, stopping when it fails - /// to read the next one. - /// - /// **WARNING**: This method changes the write position of the underlying file. The next - /// changeset will be written over the erroring entry (or the end of the file if none existed). - pub fn load_into_keychain_tracker( - &mut self, - tracker: &mut KeychainTracker, - ) -> Result<(), IterError> { - for changeset in self.iter_changesets()? { - tracker.apply_changeset(changeset?) - } - Ok(()) - } - - /// Append a new changeset to the file and truncate the file to the end of the appended changeset. - /// - /// The truncation is to avoid the possibility of having a valid but inconsistent changeset - /// directly after the appended changeset. - pub fn append_changeset( - &mut self, - changeset: &KeychainChangeSet, - ) -> Result<(), io::Error> { - if changeset.is_empty() { - return Ok(()); - } - - bincode_options() - .serialize_into(&mut self.db_file, changeset) - .map_err(|e| match *e { - bincode::ErrorKind::Io(inner) => inner, - unexpected_err => panic!("unexpected bincode error: {}", unexpected_err), - })?; - - // truncate file after this changeset addition - // if this is not done, data after this changeset may represent valid changesets, however - // applying those changesets on top of this one may result in an inconsistent state - let pos = self.db_file.stream_position()?; - self.db_file.set_len(pos)?; - - // We want to make sure that derivation indices changes are written to disk as soon as - // possible, so you know about the write failure before you give out the address in the application. 
- if !changeset.derivation_indices.is_empty() { - self.db_file.sync_data()?; - } - - Ok(()) - } -} - -/// Error that occurs due to problems encountered with the file. -#[derive(Debug)] -pub enum FileError { - /// IO error, this may mean that the file is too short. - Io(io::Error), - /// Magic bytes do not match what is expected. - InvalidMagicBytes([u8; MAGIC_BYTES_LEN]), -} - -impl core::fmt::Display for FileError { - fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { - match self { - Self::Io(e) => write!(f, "io error trying to read file: {}", e), - Self::InvalidMagicBytes(b) => write!( - f, - "file has invalid magic bytes: expected={:?} got={:?}", - MAGIC_BYTES, b - ), - } - } -} - -impl From for FileError { - fn from(value: io::Error) -> Self { - Self::Io(value) - } -} - -impl std::error::Error for FileError {} - -#[cfg(test)] -mod test { - use super::*; - use bdk_chain::{ - keychain::{DerivationAdditions, KeychainChangeSet}, - TxHeight, - }; - use bincode::DefaultOptions; - use std::{ - io::{Read, Write}, - vec::Vec, - }; - use tempfile::NamedTempFile; - #[derive( - Debug, - Clone, - Copy, - PartialOrd, - Ord, - PartialEq, - Eq, - Hash, - serde::Serialize, - serde::Deserialize, - )] - enum TestKeychain { - External, - Internal, - } - - impl core::fmt::Display for TestKeychain { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - Self::External => write!(f, "external"), - Self::Internal => write!(f, "internal"), - } - } - } - - #[test] - fn magic_bytes() { - assert_eq!(&MAGIC_BYTES, "bdkfs0000000".as_bytes()); - } - - #[test] - fn new_fails_if_file_is_too_short() { - let mut file = NamedTempFile::new().unwrap(); - file.write_all(&MAGIC_BYTES[..MAGIC_BYTES_LEN - 1]) - .expect("should write"); - - match KeychainStore::::new(file.reopen().unwrap()) { - Err(FileError::Io(e)) => assert_eq!(e.kind(), std::io::ErrorKind::UnexpectedEof), - unexpected => panic!("unexpected result: {:?}", unexpected), - }; - } - - 
#[test] - fn new_fails_if_magic_bytes_are_invalid() { - let invalid_magic_bytes = "ldkfs0000000"; - - let mut file = NamedTempFile::new().unwrap(); - file.write_all(invalid_magic_bytes.as_bytes()) - .expect("should write"); - - match KeychainStore::::new(file.reopen().unwrap()) { - Err(FileError::InvalidMagicBytes(b)) => { - assert_eq!(b, invalid_magic_bytes.as_bytes()) - } - unexpected => panic!("unexpected result: {:?}", unexpected), - }; - } - - #[test] - fn append_changeset_truncates_invalid_bytes() { - // initial data to write to file (magic bytes + invalid data) - let mut data = [255_u8; 2000]; - data[..MAGIC_BYTES_LEN].copy_from_slice(&MAGIC_BYTES); - - let changeset = KeychainChangeSet { - derivation_indices: DerivationAdditions( - vec![(TestKeychain::External, 42)].into_iter().collect(), - ), - chain_graph: Default::default(), - }; - - let mut file = NamedTempFile::new().unwrap(); - file.write_all(&data).expect("should write"); - - let mut store = KeychainStore::::new(file.reopen().unwrap()) - .expect("should open"); - match store.iter_changesets().expect("seek should succeed").next() { - Some(Err(IterError::Bincode(_))) => {} - unexpected_res => panic!("unexpected result: {:?}", unexpected_res), - } - - store.append_changeset(&changeset).expect("should append"); - - drop(store); - - let got_bytes = { - let mut buf = Vec::new(); - file.reopen() - .unwrap() - .read_to_end(&mut buf) - .expect("should read"); - buf - }; - - let expected_bytes = { - let mut buf = MAGIC_BYTES.to_vec(); - DefaultOptions::new() - .with_varint_encoding() - .serialize_into(&mut buf, &changeset) - .expect("should encode"); - buf - }; - - assert_eq!(got_bytes, expected_bytes); - } -} diff --git a/crates/file_store/src/lib.rs b/crates/file_store/src/lib.rs index b10c8c29..de1c73ad 100644 --- a/crates/file_store/src/lib.rs +++ b/crates/file_store/src/lib.rs @@ -1,16 +1,10 @@ #![doc = include_str!("../README.md")] mod entry_iter; -mod keychain_store; mod store; use std::io; -use 
bdk_chain::{ - keychain::{KeychainChangeSet, KeychainTracker, PersistBackend}, - sparse_chain::ChainPosition, -}; use bincode::{DefaultOptions, Options}; pub use entry_iter::*; -pub use keychain_store::*; pub use store::*; pub(crate) fn bincode_options() -> impl bincode::Options { @@ -46,28 +40,3 @@ impl<'a> From for FileError<'a> { } impl<'a> std::error::Error for FileError<'a> {} - -impl PersistBackend for KeychainStore -where - K: Ord + Clone + core::fmt::Debug, - P: ChainPosition, - KeychainChangeSet: serde::Serialize + serde::de::DeserializeOwned, -{ - type WriteError = std::io::Error; - - type LoadError = IterError; - - fn append_changeset( - &mut self, - changeset: &KeychainChangeSet, - ) -> Result<(), Self::WriteError> { - KeychainStore::append_changeset(self, changeset) - } - - fn load_into_keychain_tracker( - &mut self, - tracker: &mut KeychainTracker, - ) -> Result<(), Self::LoadError> { - KeychainStore::load_into_keychain_tracker(self, tracker) - } -} diff --git a/example-crates/example_cli/src/lib.rs b/example-crates/example_cli/src/lib.rs index ea362a5e..4d5cd284 100644 --- a/example-crates/example_cli/src/lib.rs +++ b/example-crates/example_cli/src/lib.rs @@ -16,7 +16,7 @@ use bdk_chain::{ descriptor::{DescriptorSecretKey, KeyMap}, Descriptor, DescriptorPublicKey, }, - Anchor, Append, ChainOracle, DescriptorExt, FullTxOut, ObservedAs, Persist, PersistBackend, + Anchor, Append, ChainOracle, DescriptorExt, FullTxOut, Persist, PersistBackend, }; pub use bdk_file_store; pub use clap; @@ -607,7 +607,7 @@ pub fn planned_utxos, chain: &O, assets: &bdk_tmp_plan::Assets, -) -> Result, FullTxOut>)>, O::Error> { +) -> Result, FullTxOut)>, O::Error> { let chain_tip = chain.get_chain_tip()?.unwrap_or_default(); let outpoints = graph.index.outpoints().iter().cloned(); graph @@ -615,7 +615,7 @@ pub fn planned_utxos Option, FullTxOut>), _>> { + |r| -> Option, FullTxOut), _>> { let (k, i, full_txo) = match r { Err(err) => return Some(Err(err)), Ok(((k, i), full_txo)) 
=> (k, i, full_txo), diff --git a/example-crates/example_electrum/src/main.rs b/example-crates/example_electrum/src/main.rs index cfd06c30..41d39423 100644 --- a/example-crates/example_electrum/src/main.rs +++ b/example-crates/example_electrum/src/main.rs @@ -13,7 +13,7 @@ use bdk_chain::{ }; use bdk_electrum::{ electrum_client::{self, ElectrumApi}, - v2::{ElectrumExt, ElectrumUpdate}, + ElectrumExt, ElectrumUpdate, }; use example_cli::{ anyhow::{self, Context}, diff --git a/example-crates/keychain_tracker_electrum/.gitignore b/example-crates/keychain_tracker_electrum/.gitignore deleted file mode 100644 index ea8c4bf7..00000000 --- a/example-crates/keychain_tracker_electrum/.gitignore +++ /dev/null @@ -1 +0,0 @@ -/target diff --git a/example-crates/keychain_tracker_electrum/Cargo.toml b/example-crates/keychain_tracker_electrum/Cargo.toml deleted file mode 100644 index 10226b43..00000000 --- a/example-crates/keychain_tracker_electrum/Cargo.toml +++ /dev/null @@ -1,9 +0,0 @@ -[package] -name = "keychain_tracker_electrum_example" -version = "0.1.0" -edition = "2021" - -[dependencies] -bdk_chain = { path = "../../crates/chain", features = ["serde"] } -bdk_electrum = { path = "../../crates/electrum" } -keychain_tracker_example_cli = { path = "../keychain_tracker_example_cli"} diff --git a/example-crates/keychain_tracker_electrum/README.md b/example-crates/keychain_tracker_electrum/README.md deleted file mode 100644 index b8bdea21..00000000 --- a/example-crates/keychain_tracker_electrum/README.md +++ /dev/null @@ -1,6 +0,0 @@ -# Keychain Tracker with electrum - -This example shows how you use the `KeychainTracker` from `bdk_chain` to create a simple command -line wallet. 
- - diff --git a/example-crates/keychain_tracker_electrum/src/main.rs b/example-crates/keychain_tracker_electrum/src/main.rs deleted file mode 100644 index c8b9e068..00000000 --- a/example-crates/keychain_tracker_electrum/src/main.rs +++ /dev/null @@ -1,245 +0,0 @@ -use bdk_chain::bitcoin::{Address, OutPoint, Txid}; -use bdk_electrum::bdk_chain::{self, bitcoin::Network, TxHeight}; -use bdk_electrum::{ - electrum_client::{self, ElectrumApi}, - ElectrumExt, ElectrumUpdate, -}; -use keychain_tracker_example_cli::{ - self as cli, - anyhow::{self, Context}, - clap::{self, Parser, Subcommand}, -}; -use std::{collections::BTreeMap, fmt::Debug, io, io::Write}; - -#[derive(Subcommand, Debug, Clone)] -enum ElectrumCommands { - /// Scans the addresses in the wallet using the esplora API. - Scan { - /// When a gap this large has been found for a keychain, it will stop. - #[clap(long, default_value = "5")] - stop_gap: usize, - #[clap(flatten)] - scan_options: ScanOptions, - }, - /// Scans particular addresses using the esplora API. - Sync { - /// Scan all the unused addresses. - #[clap(long)] - unused_spks: bool, - /// Scan every address that you have derived. - #[clap(long)] - all_spks: bool, - /// Scan unspent outpoints for spends or changes to confirmation status of residing tx. - #[clap(long)] - utxos: bool, - /// Scan unconfirmed transactions for updates. - #[clap(long)] - unconfirmed: bool, - #[clap(flatten)] - scan_options: ScanOptions, - }, -} - -#[derive(Parser, Debug, Clone, PartialEq)] -pub struct ScanOptions { - /// Set batch size for each script_history call to electrum client. 
- #[clap(long, default_value = "25")] - pub batch_size: usize, -} - -fn main() -> anyhow::Result<()> { - let (args, keymap, tracker, db) = cli::init::()?; - - let electrum_url = match args.network { - Network::Bitcoin => "ssl://electrum.blockstream.info:50002", - Network::Testnet => "ssl://electrum.blockstream.info:60002", - Network::Regtest => "tcp://localhost:60401", - Network::Signet => "tcp://signet-electrumx.wakiyamap.dev:50001", - }; - let config = electrum_client::Config::builder() - .validate_domain(matches!(args.network, Network::Bitcoin)) - .build(); - - let client = electrum_client::Client::from_config(electrum_url, config)?; - - let electrum_cmd = match args.command.clone() { - cli::Commands::ChainSpecific(electrum_cmd) => electrum_cmd, - general_command => { - return cli::handle_commands( - general_command, - |transaction| { - let _txid = client.transaction_broadcast(transaction)?; - Ok(()) - }, - &tracker, - &db, - args.network, - &keymap, - ) - } - }; - - let response = match electrum_cmd { - ElectrumCommands::Scan { - stop_gap, - scan_options: scan_option, - } => { - let (spk_iterators, local_chain) = { - // Get a short lock on the tracker to get the spks iterators - // and local chain state - let tracker = &*tracker.lock().unwrap(); - let spk_iterators = tracker - .txout_index - .spks_of_all_keychains() - .into_iter() - .map(|(keychain, iter)| { - let mut first = true; - let spk_iter = iter.inspect(move |(i, _)| { - if first { - eprint!("\nscanning {}: ", keychain); - first = false; - } - - eprint!("{} ", i); - let _ = io::stdout().flush(); - }); - (keychain, spk_iter) - }) - .collect::>(); - let local_chain = tracker.chain().checkpoints().clone(); - (spk_iterators, local_chain) - }; - - // we scan the spks **without** a lock on the tracker - client.scan( - &local_chain, - spk_iterators, - core::iter::empty(), - core::iter::empty(), - stop_gap, - scan_option.batch_size, - )? 
- } - ElectrumCommands::Sync { - mut unused_spks, - mut utxos, - mut unconfirmed, - all_spks, - scan_options, - } => { - // Get a short lock on the tracker to get the spks we're interested in - let tracker = tracker.lock().unwrap(); - - if !(all_spks || unused_spks || utxos || unconfirmed) { - unused_spks = true; - unconfirmed = true; - utxos = true; - } else if all_spks { - unused_spks = false; - } - - let mut spks: Box> = - Box::new(core::iter::empty()); - if all_spks { - let all_spks = tracker - .txout_index - .all_spks() - .iter() - .map(|(k, v)| (*k, v.clone())) - .collect::>(); - spks = Box::new(spks.chain(all_spks.into_iter().map(|(index, script)| { - eprintln!("scanning {:?}", index); - script - }))); - } - if unused_spks { - let unused_spks = tracker - .txout_index - .unused_spks(..) - .map(|(k, v)| (*k, v.clone())) - .collect::>(); - spks = Box::new(spks.chain(unused_spks.into_iter().map(|(index, script)| { - eprintln!( - "Checking if address {} {:?} has been used", - Address::from_script(&script, args.network).unwrap(), - index - ); - - script - }))); - } - - let mut outpoints: Box> = Box::new(core::iter::empty()); - - if utxos { - let utxos = tracker - .full_utxos() - .map(|(_, utxo)| utxo) - .collect::>(); - outpoints = Box::new( - utxos - .into_iter() - .inspect(|utxo| { - eprintln!( - "Checking if outpoint {} (value: {}) has been spent", - utxo.outpoint, utxo.txout.value - ); - }) - .map(|utxo| utxo.outpoint), - ); - }; - - let mut txids: Box> = Box::new(core::iter::empty()); - - if unconfirmed { - let unconfirmed_txids = tracker - .chain() - .range_txids_by_height(TxHeight::Unconfirmed..) 
- .map(|(_, txid)| *txid) - .collect::>(); - - txids = Box::new(unconfirmed_txids.into_iter().inspect(|txid| { - eprintln!("Checking if {} is confirmed yet", txid); - })); - } - - let local_chain = tracker.chain().checkpoints().clone(); - // drop lock on tracker - drop(tracker); - - // we scan the spks **without** a lock on the tracker - ElectrumUpdate { - chain_update: client - .scan_without_keychain( - &local_chain, - spks, - txids, - outpoints, - scan_options.batch_size, - ) - .context("scanning the blockchain")?, - ..Default::default() - } - } - }; - - let missing_txids = response.missing_full_txs(&*tracker.lock().unwrap()); - - // fetch the missing full transactions **without** a lock on the tracker - let new_txs = client - .batch_transaction_get(missing_txids) - .context("fetching full transactions")?; - - { - // Get a final short lock to apply the changes - let mut tracker = tracker.lock().unwrap(); - let changeset = { - let scan = response.into_keychain_scan(new_txs, &*tracker)?; - tracker.determine_changeset(&scan)? 
- }; - db.lock().unwrap().append_changeset(&changeset)?; - tracker.apply_changeset(changeset); - }; - - Ok(()) -} diff --git a/example-crates/keychain_tracker_esplora/.gitignore b/example-crates/keychain_tracker_esplora/.gitignore deleted file mode 100644 index 8359723a..00000000 --- a/example-crates/keychain_tracker_esplora/.gitignore +++ /dev/null @@ -1,3 +0,0 @@ -/target -Cargo.lock -.bdk_example_db diff --git a/example-crates/keychain_tracker_esplora/Cargo.toml b/example-crates/keychain_tracker_esplora/Cargo.toml deleted file mode 100644 index e0a1e62d..00000000 --- a/example-crates/keychain_tracker_esplora/Cargo.toml +++ /dev/null @@ -1,11 +0,0 @@ -[package] -name = "keychain_tracker_esplora_example" -version = "0.1.0" -edition = "2021" - -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - -[dependencies] -bdk_chain = { path = "../../crates/chain", features = ["serde", "miniscript"] } -bdk_esplora = { path = "../../crates/esplora" } -keychain_tracker_example_cli = { path = "../keychain_tracker_example_cli" } diff --git a/example-crates/keychain_tracker_esplora/src/main.rs b/example-crates/keychain_tracker_esplora/src/main.rs deleted file mode 100644 index cae5e960..00000000 --- a/example-crates/keychain_tracker_esplora/src/main.rs +++ /dev/null @@ -1,241 +0,0 @@ -use bdk_chain::bitcoin::{Address, OutPoint, Txid}; -use bdk_chain::{bitcoin::Network, TxHeight}; -use bdk_esplora::esplora_client; -use bdk_esplora::EsploraExt; - -use std::io::{self, Write}; - -use keychain_tracker_example_cli::{ - self as cli, - anyhow::{self, Context}, - clap::{self, Parser, Subcommand}, -}; - -#[derive(Subcommand, Debug, Clone)] -enum EsploraCommands { - /// Scans the addresses in the wallet using the esplora API. - Scan { - /// When a gap this large has been found for a keychain, it will stop. 
- #[clap(long, default_value = "5")] - stop_gap: usize, - - #[clap(flatten)] - scan_options: ScanOptions, - }, - /// Scans particular addresses using esplora API. - Sync { - /// Scan all the unused addresses. - #[clap(long)] - unused_spks: bool, - /// Scan every address that you have derived. - #[clap(long)] - all_spks: bool, - /// Scan unspent outpoints for spends or changes to confirmation status of residing tx. - #[clap(long)] - utxos: bool, - /// Scan unconfirmed transactions for updates. - #[clap(long)] - unconfirmed: bool, - - #[clap(flatten)] - scan_options: ScanOptions, - }, -} - -#[derive(Parser, Debug, Clone, PartialEq)] -pub struct ScanOptions { - #[clap(long, default_value = "5")] - pub parallel_requests: usize, -} - -fn main() -> anyhow::Result<()> { - let (args, keymap, keychain_tracker, db) = cli::init::()?; - let esplora_url = match args.network { - Network::Bitcoin => "https://mempool.space/api", - Network::Testnet => "https://mempool.space/testnet/api", - Network::Regtest => "http://localhost:3002", - Network::Signet => "https://mempool.space/signet/api", - }; - - let client = esplora_client::Builder::new(esplora_url).build_blocking()?; - - let esplora_cmd = match args.command { - cli::Commands::ChainSpecific(esplora_cmd) => esplora_cmd, - general_command => { - return cli::handle_commands( - general_command, - |transaction| Ok(client.broadcast(transaction)?), - &keychain_tracker, - &db, - args.network, - &keymap, - ) - } - }; - - match esplora_cmd { - EsploraCommands::Scan { - stop_gap, - scan_options, - } => { - let (spk_iterators, local_chain) = { - // Get a short lock on the tracker to get the spks iterators - // and local chain state - let tracker = &*keychain_tracker.lock().unwrap(); - let spk_iterators = tracker - .txout_index - .spks_of_all_keychains() - .into_iter() - .map(|(keychain, iter)| { - let mut first = true; - ( - keychain, - iter.inspect(move |(i, _)| { - if first { - eprint!("\nscanning {}: ", keychain); - first = false; - } - 
- eprint!("{} ", i); - let _ = io::stdout().flush(); - }), - ) - }) - .collect(); - - let local_chain = tracker.chain().checkpoints().clone(); - (spk_iterators, local_chain) - }; - - // we scan the iterators **without** a lock on the tracker - let wallet_scan = client - .scan( - &local_chain, - spk_iterators, - core::iter::empty(), - core::iter::empty(), - stop_gap, - scan_options.parallel_requests, - ) - .context("scanning the blockchain")?; - eprintln!(); - - { - // we take a short lock to apply results to tracker and db - let tracker = &mut *keychain_tracker.lock().unwrap(); - let db = &mut *db.lock().unwrap(); - let changeset = tracker.apply_update(wallet_scan)?; - db.append_changeset(&changeset)?; - } - } - EsploraCommands::Sync { - mut unused_spks, - mut utxos, - mut unconfirmed, - all_spks, - scan_options, - } => { - // Get a short lock on the tracker to get the spks we're interested in - let tracker = keychain_tracker.lock().unwrap(); - - if !(all_spks || unused_spks || utxos || unconfirmed) { - unused_spks = true; - unconfirmed = true; - utxos = true; - } else if all_spks { - unused_spks = false; - } - - let mut spks: Box> = - Box::new(core::iter::empty()); - if all_spks { - let all_spks = tracker - .txout_index - .all_spks() - .iter() - .map(|(k, v)| (*k, v.clone())) - .collect::>(); - spks = Box::new(spks.chain(all_spks.into_iter().map(|(index, script)| { - eprintln!("scanning {:?}", index); - script - }))); - } - if unused_spks { - let unused_spks = tracker - .txout_index - .unused_spks(..) 
- .map(|(k, v)| (*k, v.clone())) - .collect::>(); - spks = Box::new(spks.chain(unused_spks.into_iter().map(|(index, script)| { - eprintln!( - "Checking if address {} {:?} has been used", - Address::from_script(&script, args.network).unwrap(), - index - ); - - script - }))); - } - - let mut outpoints: Box> = Box::new(core::iter::empty()); - - if utxos { - let utxos = tracker - .full_utxos() - .map(|(_, utxo)| utxo) - .collect::>(); - outpoints = Box::new( - utxos - .into_iter() - .inspect(|utxo| { - eprintln!( - "Checking if outpoint {} (value: {}) has been spent", - utxo.outpoint, utxo.txout.value - ); - }) - .map(|utxo| utxo.outpoint), - ); - }; - - let mut txids: Box> = Box::new(core::iter::empty()); - - if unconfirmed { - let unconfirmed_txids = tracker - .chain() - .range_txids_by_height(TxHeight::Unconfirmed..) - .map(|(_, txid)| *txid) - .collect::>(); - - txids = Box::new(unconfirmed_txids.into_iter().inspect(|txid| { - eprintln!("Checking if {} is confirmed yet", txid); - })); - } - - let local_chain = tracker.chain().checkpoints().clone(); - - // drop lock on tracker - drop(tracker); - - // we scan the desired spks **without** a lock on the tracker - let scan = client - .scan_without_keychain( - &local_chain, - spks, - txids, - outpoints, - scan_options.parallel_requests, - ) - .context("scanning the blockchain")?; - - { - // we take a short lock to apply the results to the tracker and db - let tracker = &mut *keychain_tracker.lock().unwrap(); - let changeset = tracker.apply_update(scan.into())?; - let db = &mut *db.lock().unwrap(); - db.append_changeset(&changeset)?; - } - } - } - - Ok(()) -} diff --git a/example-crates/keychain_tracker_example_cli/.gitignore b/example-crates/keychain_tracker_example_cli/.gitignore deleted file mode 100644 index ea8c4bf7..00000000 --- a/example-crates/keychain_tracker_example_cli/.gitignore +++ /dev/null @@ -1 +0,0 @@ -/target diff --git a/example-crates/keychain_tracker_example_cli/Cargo.toml 
b/example-crates/keychain_tracker_example_cli/Cargo.toml deleted file mode 100644 index 7e51fec5..00000000 --- a/example-crates/keychain_tracker_example_cli/Cargo.toml +++ /dev/null @@ -1,16 +0,0 @@ -[package] -name = "keychain_tracker_example_cli" -version = "0.1.0" -edition = "2021" - -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html -[dependencies] -bdk_chain = { path = "../../crates/chain", features = ["serde", "miniscript"]} -bdk_file_store = { path = "../../crates/file_store" } -bdk_tmp_plan = { path = "../../nursery/tmp_plan" } -bdk_coin_select = { path = "../../nursery/coin_select" } - -clap = { version = "3.2.23", features = ["derive", "env"] } -anyhow = "1" -serde = { version = "1", features = ["derive"] } -serde_json = { version = "^1.0" } diff --git a/example-crates/keychain_tracker_example_cli/README.md b/example-crates/keychain_tracker_example_cli/README.md deleted file mode 100644 index 1d9370d1..00000000 --- a/example-crates/keychain_tracker_example_cli/README.md +++ /dev/null @@ -1 +0,0 @@ -Provides common command line processing logic between examples using the `KeychainTracker` diff --git a/example-crates/keychain_tracker_example_cli/src/lib.rs b/example-crates/keychain_tracker_example_cli/src/lib.rs deleted file mode 100644 index 702cc2a2..00000000 --- a/example-crates/keychain_tracker_example_cli/src/lib.rs +++ /dev/null @@ -1,692 +0,0 @@ -pub extern crate anyhow; -use anyhow::{anyhow, Context, Result}; -use bdk_chain::{ - bitcoin::{ - secp256k1::Secp256k1, - util::sighash::{Prevouts, SighashCache}, - Address, LockTime, Network, Sequence, Transaction, TxIn, TxOut, - }, - chain_graph::InsertTxError, - keychain::{DerivationAdditions, KeychainChangeSet, KeychainTracker}, - miniscript::{ - descriptor::{DescriptorSecretKey, KeyMap}, - Descriptor, DescriptorPublicKey, - }, - sparse_chain::{self, ChainPosition}, - Append, DescriptorExt, FullTxOut, -}; -use bdk_coin_select::{coin_select_bnb, CoinSelector, 
CoinSelectorOpt, WeightedValue}; -use bdk_file_store::KeychainStore; -use clap::{Parser, Subcommand}; -use std::{ - cmp::Reverse, collections::HashMap, fmt::Debug, path::PathBuf, sync::Mutex, time::Duration, -}; - -pub use bdk_file_store; -pub use clap; - -#[derive(Parser)] -#[clap(author, version, about, long_about = None)] -#[clap(propagate_version = true)] -pub struct Args { - #[clap(env = "DESCRIPTOR")] - pub descriptor: String, - #[clap(env = "CHANGE_DESCRIPTOR")] - pub change_descriptor: Option, - - #[clap(env = "BITCOIN_NETWORK", long, default_value = "signet")] - pub network: Network, - - #[clap(env = "BDK_DB_PATH", long, default_value = ".bdk_example_db")] - pub db_path: PathBuf, - - #[clap(env = "BDK_CP_LIMIT", long, default_value = "20")] - pub cp_limit: usize, - - #[clap(subcommand)] - pub command: Commands, -} - -#[derive(Subcommand, Debug, Clone)] -pub enum Commands { - #[clap(flatten)] - ChainSpecific(C), - /// Address generation and inspection. - Address { - #[clap(subcommand)] - addr_cmd: AddressCmd, - }, - /// Get the wallet balance. - Balance, - /// TxOut related commands. - #[clap(name = "txout")] - TxOut { - #[clap(subcommand)] - txout_cmd: TxOutCmd, - }, - /// Send coins to an address. 
- Send { - value: u64, - address: Address, - #[clap(short, default_value = "largest-first")] - coin_select: CoinSelectionAlgo, - }, -} - -#[derive(Clone, Debug)] -pub enum CoinSelectionAlgo { - LargestFirst, - SmallestFirst, - OldestFirst, - NewestFirst, - BranchAndBound, -} - -impl Default for CoinSelectionAlgo { - fn default() -> Self { - Self::LargestFirst - } -} - -impl core::str::FromStr for CoinSelectionAlgo { - type Err = anyhow::Error; - - fn from_str(s: &str) -> Result { - use CoinSelectionAlgo::*; - Ok(match s { - "largest-first" => LargestFirst, - "smallest-first" => SmallestFirst, - "oldest-first" => OldestFirst, - "newest-first" => NewestFirst, - "bnb" => BranchAndBound, - unknown => return Err(anyhow!("unknown coin selection algorithm '{}'", unknown)), - }) - } -} - -impl core::fmt::Display for CoinSelectionAlgo { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - use CoinSelectionAlgo::*; - write!( - f, - "{}", - match self { - LargestFirst => "largest-first", - SmallestFirst => "smallest-first", - OldestFirst => "oldest-first", - NewestFirst => "newest-first", - BranchAndBound => "bnb", - } - ) - } -} - -#[derive(Subcommand, Debug, Clone)] -pub enum AddressCmd { - /// Get the next unused address. - Next, - /// Get a new address regardless of the existing unused addresses. - New, - /// List all addresses - List { - #[clap(long)] - change: bool, - }, - Index, -} - -#[derive(Subcommand, Debug, Clone)] -pub enum TxOutCmd { - List { - /// Return only spent outputs. - #[clap(short, long)] - spent: bool, - /// Return only unspent outputs. - #[clap(short, long)] - unspent: bool, - /// Return only confirmed outputs. - #[clap(long)] - confirmed: bool, - /// Return only unconfirmed outputs. 
- #[clap(long)] - unconfirmed: bool, - }, -} - -#[derive( - Debug, Clone, Copy, PartialOrd, Ord, PartialEq, Eq, serde::Deserialize, serde::Serialize, -)] -pub enum Keychain { - External, - Internal, -} - -impl core::fmt::Display for Keychain { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - Keychain::External => write!(f, "external"), - Keychain::Internal => write!(f, "internal"), - } - } -} - -/// A structure defining the output of an [`AddressCmd`]` execution. -#[derive(serde::Serialize, serde::Deserialize)] -pub struct AddrsOutput { - keychain: String, - index: u32, - addrs: Address, - used: bool, -} - -pub fn run_address_cmd

( - tracker: &Mutex>, - db: &Mutex>, - addr_cmd: AddressCmd, - network: Network, -) -> Result<()> -where - P: bdk_chain::sparse_chain::ChainPosition, - KeychainChangeSet: serde::Serialize + serde::de::DeserializeOwned, -{ - let mut tracker = tracker.lock().unwrap(); - let txout_index = &mut tracker.txout_index; - - let addr_cmmd_output = match addr_cmd { - AddressCmd::Next => Some(txout_index.next_unused_spk(&Keychain::External)), - AddressCmd::New => Some(txout_index.reveal_next_spk(&Keychain::External)), - _ => None, - }; - - if let Some(((index, spk), additions)) = addr_cmmd_output { - let mut db = db.lock().unwrap(); - // update database since we're about to give out a new address - db.append_changeset(&additions.into())?; - - let spk = spk.clone(); - let address = - Address::from_script(&spk, network).expect("should always be able to derive address"); - eprintln!("This is the address at index {}", index); - println!("{}", address); - } - - match addr_cmd { - AddressCmd::Next | AddressCmd::New => { - /* covered */ - Ok(()) - } - AddressCmd::Index => { - for (keychain, derivation_index) in txout_index.last_revealed_indices() { - println!("{:?}: {}", keychain, derivation_index); - } - Ok(()) - } - AddressCmd::List { change } => { - let target_keychain = match change { - true => Keychain::Internal, - false => Keychain::External, - }; - for (index, spk) in txout_index.revealed_spks_of_keychain(&target_keychain) { - let address = Address::from_script(spk, network) - .expect("should always be able to derive address"); - println!( - "{:?} {} used:{}", - index, - address, - txout_index.is_used(&(target_keychain, index)) - ); - } - Ok(()) - } - } -} - -pub fn run_balance_cmd(tracker: &Mutex>) { - let tracker = tracker.lock().unwrap(); - let (confirmed, unconfirmed) = - tracker - .full_utxos() - .fold((0, 0), |(confirmed, unconfirmed), (_, utxo)| { - if utxo.chain_position.height().is_confirmed() { - (confirmed + utxo.txout.value, unconfirmed) - } else { - (confirmed, 
unconfirmed + utxo.txout.value) - } - }); - - println!("confirmed: {}", confirmed); - println!("unconfirmed: {}", unconfirmed); -} - -pub fn run_txo_cmd( - txout_cmd: TxOutCmd, - tracker: &Mutex>, - network: Network, -) { - match txout_cmd { - TxOutCmd::List { - unspent, - spent, - confirmed, - unconfirmed, - } => { - let tracker = tracker.lock().unwrap(); - #[allow(clippy::type_complexity)] // FIXME - let txouts: Box)>> = match (unspent, spent) - { - (true, false) => Box::new(tracker.full_utxos()), - (false, true) => Box::new( - tracker - .full_txouts() - .filter(|(_, txout)| txout.spent_by.is_some()), - ), - _ => Box::new(tracker.full_txouts()), - }; - - #[allow(clippy::type_complexity)] // FIXME - let txouts: Box)>> = - match (confirmed, unconfirmed) { - (true, false) => Box::new( - txouts.filter(|(_, txout)| txout.chain_position.height().is_confirmed()), - ), - (false, true) => Box::new( - txouts.filter(|(_, txout)| !txout.chain_position.height().is_confirmed()), - ), - _ => txouts, - }; - - for (spk_index, full_txout) in txouts { - let address = - Address::from_script(&full_txout.txout.script_pubkey, network).unwrap(); - - println!( - "{:?} {} {} {} spent:{:?}", - spk_index, - full_txout.txout.value, - full_txout.outpoint, - address, - full_txout.spent_by - ) - } - } - } -} - -#[allow(clippy::type_complexity)] // FIXME -pub fn create_tx( - value: u64, - address: Address, - coin_select: CoinSelectionAlgo, - keychain_tracker: &mut KeychainTracker, - keymap: &HashMap, -) -> Result<( - Transaction, - Option<(DerivationAdditions, (Keychain, u32))>, -)> { - let mut additions = DerivationAdditions::default(); - - let assets = bdk_tmp_plan::Assets { - keys: keymap.iter().map(|(pk, _)| pk.clone()).collect(), - ..Default::default() - }; - - // TODO use planning module - let mut candidates = planned_utxos(keychain_tracker, &assets).collect::>(); - - // apply coin selection algorithm - match coin_select { - CoinSelectionAlgo::LargestFirst => { - 
candidates.sort_by_key(|(_, utxo)| Reverse(utxo.txout.value)) - } - CoinSelectionAlgo::SmallestFirst => candidates.sort_by_key(|(_, utxo)| utxo.txout.value), - CoinSelectionAlgo::OldestFirst => { - candidates.sort_by_key(|(_, utxo)| utxo.chain_position.clone()) - } - CoinSelectionAlgo::NewestFirst => { - candidates.sort_by_key(|(_, utxo)| Reverse(utxo.chain_position.clone())) - } - CoinSelectionAlgo::BranchAndBound => {} - } - - // turn the txos we chose into weight and value - let wv_candidates = candidates - .iter() - .map(|(plan, utxo)| { - WeightedValue::new( - utxo.txout.value, - plan.expected_weight() as _, - plan.witness_version().is_some(), - ) - }) - .collect(); - - let mut outputs = vec![TxOut { - value, - script_pubkey: address.script_pubkey(), - }]; - - let internal_keychain = if keychain_tracker - .txout_index - .keychains() - .get(&Keychain::Internal) - .is_some() - { - Keychain::Internal - } else { - Keychain::External - }; - - let ((change_index, change_script), change_additions) = keychain_tracker - .txout_index - .next_unused_spk(&internal_keychain); - additions.append(change_additions); - - // Clone to drop the immutable reference. - let change_script = change_script.clone(); - - let change_plan = bdk_tmp_plan::plan_satisfaction( - &keychain_tracker - .txout_index - .keychains() - .get(&internal_keychain) - .expect("must exist") - .at_derivation_index(change_index), - &assets, - ) - .expect("failed to obtain change plan"); - - let mut change_output = TxOut { - value: 0, - script_pubkey: change_script, - }; - - let cs_opts = CoinSelectorOpt { - target_feerate: 0.5, - min_drain_value: keychain_tracker - .txout_index - .keychains() - .get(&internal_keychain) - .expect("must exist") - .dust_value(), - ..CoinSelectorOpt::fund_outputs( - &outputs, - &change_output, - change_plan.expected_weight() as u32, - ) - }; - - // TODO: How can we make it easy to shuffle in order of inputs and outputs here? 
- // apply coin selection by saying we need to fund these outputs - let mut coin_selector = CoinSelector::new(&wv_candidates, &cs_opts); - - // just select coins in the order provided until we have enough - // only use the first result (least waste) - let selection = match coin_select { - CoinSelectionAlgo::BranchAndBound => { - coin_select_bnb(Duration::from_secs(10), coin_selector.clone()) - .map_or_else(|| coin_selector.select_until_finished(), |cs| cs.finish())? - } - _ => coin_selector.select_until_finished()?, - }; - let (_, selection_meta) = selection.best_strategy(); - - // get the selected utxos - let selected_txos = selection.apply_selection(&candidates).collect::>(); - - if let Some(drain_value) = selection_meta.drain_value { - change_output.value = drain_value; - // if the selection tells us to use change and the change value is sufficient, we add it as an output - outputs.push(change_output) - } - - let mut transaction = Transaction { - version: 0x02, - lock_time: keychain_tracker - .chain() - .latest_checkpoint() - .and_then(|block_id| LockTime::from_height(block_id.height).ok()) - .unwrap_or(LockTime::ZERO) - .into(), - input: selected_txos - .iter() - .map(|(_, utxo)| TxIn { - previous_output: utxo.outpoint, - sequence: Sequence::ENABLE_RBF_NO_LOCKTIME, - ..Default::default() - }) - .collect(), - output: outputs, - }; - - let prevouts = selected_txos - .iter() - .map(|(_, utxo)| utxo.txout.clone()) - .collect::>(); - let sighash_prevouts = Prevouts::All(&prevouts); - - // first, set tx values for the plan so that we don't change them while signing - for (i, (plan, _)) in selected_txos.iter().enumerate() { - if let Some(sequence) = plan.required_sequence() { - transaction.input[i].sequence = sequence - } - } - - // create a short lived transaction - let _sighash_tx = transaction.clone(); - let mut sighash_cache = SighashCache::new(&_sighash_tx); - - for (i, (plan, _)) in selected_txos.iter().enumerate() { - let requirements = plan.requirements(); - 
let mut auth_data = bdk_tmp_plan::SatisfactionMaterial::default(); - assert!( - !requirements.requires_hash_preimages(), - "can't have hash pre-images since we didn't provide any." - ); - assert!( - requirements.signatures.sign_with_keymap( - i, - keymap, - &sighash_prevouts, - None, - None, - &mut sighash_cache, - &mut auth_data, - &Secp256k1::default(), - )?, - "we should have signed with this input." - ); - - match plan.try_complete(&auth_data) { - bdk_tmp_plan::PlanState::Complete { - final_script_sig, - final_script_witness, - } => { - if let Some(witness) = final_script_witness { - transaction.input[i].witness = witness; - } - - if let Some(script_sig) = final_script_sig { - transaction.input[i].script_sig = script_sig; - } - } - bdk_tmp_plan::PlanState::Incomplete(_) => { - return Err(anyhow!( - "we weren't able to complete the plan with our keys." - )); - } - } - } - - let change_info = if selection_meta.drain_value.is_some() { - Some((additions, (internal_keychain, change_index))) - } else { - None - }; - - Ok((transaction, change_info)) -} - -pub fn handle_commands( - command: Commands, - broadcast: impl FnOnce(&Transaction) -> Result<()>, - // we Mutex around these not because we need them for a simple CLI app but to demonstrate how - // all the stuff we're doing can be made thread-safe and not keep locks up over an IO bound. 
- tracker: &Mutex>, - store: &Mutex>, - network: Network, - keymap: &HashMap, -) -> Result<()> -where - P: ChainPosition, - KeychainChangeSet: serde::Serialize + serde::de::DeserializeOwned, -{ - match command { - // TODO: Make these functions return stuffs - Commands::Address { addr_cmd } => run_address_cmd(tracker, store, addr_cmd, network), - Commands::Balance => { - run_balance_cmd(tracker); - Ok(()) - } - Commands::TxOut { txout_cmd } => { - run_txo_cmd(txout_cmd, tracker, network); - Ok(()) - } - Commands::Send { - value, - address, - coin_select, - } => { - let (transaction, change_index) = { - // take mutable ref to construct tx -- it is only open for a short time while building it. - let tracker = &mut *tracker.lock().unwrap(); - let (transaction, change_info) = - create_tx(value, address, coin_select, tracker, keymap)?; - - if let Some((change_derivation_changes, (change_keychain, index))) = change_info { - // We must first persist to disk the fact that we've got a new address from the - // change keychain so future scans will find the tx we're about to broadcast. - // If we're unable to persist this, then we don't want to broadcast. - let store = &mut *store.lock().unwrap(); - store.append_changeset(&change_derivation_changes.into())?; - - // We don't want other callers/threads to use this address while we're using it - // but we also don't want to scan the tx we just created because it's not - // technically in the blockchain yet. - tracker.txout_index.mark_used(&change_keychain, index); - (transaction, Some((change_keychain, index))) - } else { - (transaction, None) - } - }; - - match (broadcast)(&transaction) { - Ok(_) => { - println!("Broadcasted Tx : {}", transaction.txid()); - let mut tracker = tracker.lock().unwrap(); - match tracker.insert_tx(transaction.clone(), P::unconfirmed()) { - Ok(changeset) => { - let store = &mut *store.lock().unwrap(); - // We know the tx is at least unconfirmed now. 
Note if persisting here fails, - // it's not a big deal since we can always find it again form - // blockchain. - store.append_changeset(&changeset)?; - Ok(()) - } - Err(e) => match e { - InsertTxError::Chain(e) => match e { - // TODO: add insert_unconfirmed_tx to the chaingraph and sparsechain - sparse_chain::InsertTxError::TxTooHigh { .. } => unreachable!("we are inserting at unconfirmed position"), - sparse_chain::InsertTxError::TxMovedUnexpectedly { txid, original_pos, ..} => Err(anyhow!("the tx we created {} has already been confirmed at block {:?}", txid, original_pos)), - }, - InsertTxError::UnresolvableConflict(e) => Err(e).context("another tx that conflicts with the one we tried to create has been confirmed"), - } - } - } - Err(e) => { - let tracker = &mut *tracker.lock().unwrap(); - if let Some((keychain, index)) = change_index { - // We failed to broadcast, so allow our change address to be used in the future - tracker.txout_index.unmark_used(&keychain, index); - } - Err(e) - } - } - } - Commands::ChainSpecific(_) => { - todo!("example code is meant to handle this!") - } - } -} - -#[allow(clippy::type_complexity)] // FIXME -pub fn init() -> anyhow::Result<( - Args, - KeyMap, - // These don't need to have mutexes around them, but we want the cli example code to make it obvious how they - // are thread-safe, forcing the example developers to show where they would lock and unlock things. 
- Mutex>, - Mutex>, -)> -where - P: sparse_chain::ChainPosition, - KeychainChangeSet: serde::Serialize + serde::de::DeserializeOwned, -{ - let args = Args::::parse(); - let secp = Secp256k1::default(); - let (descriptor, mut keymap) = - Descriptor::::parse_descriptor(&secp, &args.descriptor)?; - - let mut tracker = KeychainTracker::default(); - tracker.set_checkpoint_limit(Some(args.cp_limit)); - - tracker - .txout_index - .add_keychain(Keychain::External, descriptor); - - let internal = args - .change_descriptor - .clone() - .map(|descriptor| Descriptor::::parse_descriptor(&secp, &descriptor)) - .transpose()?; - if let Some((internal_descriptor, internal_keymap)) = internal { - keymap.extend(internal_keymap); - tracker - .txout_index - .add_keychain(Keychain::Internal, internal_descriptor); - }; - - let mut db = KeychainStore::::new_from_path(args.db_path.as_path())?; - - if let Err(e) = db.load_into_keychain_tracker(&mut tracker) { - match tracker.chain().latest_checkpoint() { - Some(checkpoint) => eprintln!("Failed to load all changesets from {}. Last checkpoint was at height {}. Error: {}", args.db_path.display(), checkpoint.height, e), - None => eprintln!("Failed to load any checkpoints from {}: {}", args.db_path.display(), e), - - } - eprintln!("⚠ Consider running a rescan of chain data."); - } - - Ok((args, keymap, Mutex::new(tracker), Mutex::new(db))) -} - -pub fn planned_utxos<'a, AK: bdk_tmp_plan::CanDerive + Clone, P: ChainPosition>( - tracker: &'a KeychainTracker, - assets: &'a bdk_tmp_plan::Assets, -) -> impl Iterator, FullTxOut

)> + 'a { - tracker - .full_utxos() - .filter_map(move |((keychain, derivation_index), full_txout)| { - Some(( - bdk_tmp_plan::plan_satisfaction( - &tracker - .txout_index - .keychains() - .get(keychain) - .expect("must exist since we have a utxo for it") - .at_derivation_index(*derivation_index), - assets, - )?, - full_txout, - )) - }) -} diff --git a/example-crates/wallet_electrum/src/main.rs b/example-crates/wallet_electrum/src/main.rs index 7f352bc1..db80f106 100644 --- a/example-crates/wallet_electrum/src/main.rs +++ b/example-crates/wallet_electrum/src/main.rs @@ -10,7 +10,7 @@ use bdk::bitcoin::Address; use bdk::SignOptions; use bdk::{bitcoin::Network, Wallet}; use bdk_electrum::electrum_client::{self, ElectrumApi}; -use bdk_electrum::v2::ElectrumExt; +use bdk_electrum::ElectrumExt; use bdk_file_store::Store; fn main() -> Result<(), Box> { diff --git a/example-crates/wallet_esplora/src/main.rs b/example-crates/wallet_esplora/src/main.rs index f71033fb..119d9cbd 100644 --- a/example-crates/wallet_esplora/src/main.rs +++ b/example-crates/wallet_esplora/src/main.rs @@ -10,7 +10,7 @@ use bdk::{ wallet::AddressIndex, SignOptions, Wallet, }; -use bdk_esplora::{esplora_client, v2::EsploraExt}; +use bdk_esplora::{esplora_client, EsploraExt}; use bdk_file_store::Store; fn main() -> Result<(), Box> { diff --git a/example-crates/wallet_esplora_async/src/main.rs b/example-crates/wallet_esplora_async/src/main.rs index 02271c04..7cb218ec 100644 --- a/example-crates/wallet_esplora_async/src/main.rs +++ b/example-crates/wallet_esplora_async/src/main.rs @@ -5,7 +5,7 @@ use bdk::{ wallet::AddressIndex, SignOptions, Wallet, }; -use bdk_esplora::{esplora_client, v2::EsploraAsyncExt}; +use bdk_esplora::{esplora_client, EsploraAsyncExt}; use bdk_file_store::Store; const DB_MAGIC: &str = "bdk_wallet_esplora_async_example"; From ac80829caa4bc94de0acdd0459917d095358559c Mon Sep 17 00:00:00 2001 From: Shourya742 <84662239+Shourya742@users.noreply.github.com> Date: Sun, 30 Apr 2023 
09:43:28 +0530 Subject: [PATCH 14/17] Rename fields of `tx_graph::Additions` * Changed `tx` to `txs` * Changed `txout` to `txouts` --- crates/chain/src/indexed_tx_graph.rs | 8 ++--- crates/chain/src/tx_graph.rs | 34 ++++++++++----------- crates/chain/tests/test_indexed_tx_graph.rs | 2 +- crates/chain/tests/test_tx_graph.rs | 22 ++++++------- crates/electrum/src/electrum_ext.rs | 4 +-- 5 files changed, 35 insertions(+), 35 deletions(-) diff --git a/crates/chain/src/indexed_tx_graph.rs b/crates/chain/src/indexed_tx_graph.rs index 371ba295..000c1a6e 100644 --- a/crates/chain/src/indexed_tx_graph.rs +++ b/crates/chain/src/indexed_tx_graph.rs @@ -51,10 +51,10 @@ impl IndexedTxGraph { self.index.apply_additions(index_additions); - for tx in &graph_additions.tx { + for tx in &graph_additions.txs { self.index.index_tx(tx); } - for (&outpoint, txout) in &graph_additions.txout { + for (&outpoint, txout) in &graph_additions.txouts { self.index.index_txout(outpoint, txout); } @@ -73,10 +73,10 @@ where let graph_additions = self.graph.apply_update(update); let mut index_additions = I::Additions::default(); - for added_tx in &graph_additions.tx { + for added_tx in &graph_additions.txs { index_additions.append(self.index.index_tx(added_tx)); } - for (&added_outpoint, added_txout) in &graph_additions.txout { + for (&added_outpoint, added_txout) in &graph_additions.txouts { index_additions.append(self.index.index_txout(added_outpoint, added_txout)); } diff --git a/crates/chain/src/tx_graph.rs b/crates/chain/src/tx_graph.rs index 56a55568..a9475b00 100644 --- a/crates/chain/src/tx_graph.rs +++ b/crates/chain/src/tx_graph.rs @@ -482,7 +482,7 @@ impl TxGraph { /// Applies [`Additions`] to [`TxGraph`]. 
pub fn apply_additions(&mut self, additions: Additions) { - for tx in additions.tx { + for tx in additions.txs { let txid = tx.txid(); tx.input @@ -513,7 +513,7 @@ impl TxGraph { } } - for (outpoint, txout) in additions.txout { + for (outpoint, txout) in additions.txouts { let tx_entry = self .txs .entry(outpoint.txid) @@ -553,11 +553,11 @@ impl TxGraph { for (&txid, (update_tx_node, _, update_last_seen)) in &update.txs { let prev_last_seen: u64 = match (self.txs.get(&txid), update_tx_node) { (None, TxNodeInternal::Whole(update_tx)) => { - additions.tx.insert(update_tx.clone()); + additions.txs.insert(update_tx.clone()); 0 } (None, TxNodeInternal::Partial(update_txos)) => { - additions.txout.extend( + additions.txouts.extend( update_txos .iter() .map(|(&vout, txo)| (OutPoint::new(txid, vout), txo.clone())), @@ -569,14 +569,14 @@ impl TxGraph { Some((TxNodeInternal::Partial(_), _, last_seen)), TxNodeInternal::Whole(update_tx), ) => { - additions.tx.insert(update_tx.clone()); + additions.txs.insert(update_tx.clone()); *last_seen } ( Some((TxNodeInternal::Partial(txos), _, last_seen)), TxNodeInternal::Partial(update_txos), ) => { - additions.txout.extend( + additions.txouts.extend( update_txos .iter() .filter(|(vout, _)| !txos.contains_key(*vout)) @@ -983,8 +983,8 @@ impl TxGraph { )] #[must_use] pub struct Additions { - pub tx: BTreeSet, - pub txout: BTreeMap, + pub txs: BTreeSet, + pub txouts: BTreeMap, pub anchors: BTreeSet<(A, Txid)>, pub last_seen: BTreeMap, } @@ -992,8 +992,8 @@ pub struct Additions { impl Default for Additions { fn default() -> Self { Self { - tx: Default::default(), - txout: Default::default(), + txs: Default::default(), + txouts: Default::default(), anchors: Default::default(), last_seen: Default::default(), } @@ -1003,12 +1003,12 @@ impl Default for Additions { impl Additions { /// Returns true if the [`Additions`] is empty (no transactions or txouts). 
pub fn is_empty(&self) -> bool { - self.tx.is_empty() && self.txout.is_empty() + self.txs.is_empty() && self.txouts.is_empty() } /// Iterates over all outpoints contained within [`Additions`]. pub fn txouts(&self) -> impl Iterator { - self.tx + self.txs .iter() .flat_map(|tx| { tx.output @@ -1016,14 +1016,14 @@ impl Additions { .enumerate() .map(move |(vout, txout)| (OutPoint::new(tx.txid(), vout as _), txout)) }) - .chain(self.txout.iter().map(|(op, txout)| (*op, txout))) + .chain(self.txouts.iter().map(|(op, txout)| (*op, txout))) } } impl Append for Additions { fn append(&mut self, mut other: Self) { - self.tx.append(&mut other.tx); - self.txout.append(&mut other.txout); + self.txs.append(&mut other.txs); + self.txouts.append(&mut other.txouts); self.anchors.append(&mut other.anchors); // last_seen timestamps should only increase @@ -1037,8 +1037,8 @@ impl Append for Additions { } fn is_empty(&self) -> bool { - self.tx.is_empty() - && self.txout.is_empty() + self.txs.is_empty() + && self.txouts.is_empty() && self.anchors.is_empty() && self.last_seen.is_empty() } diff --git a/crates/chain/tests/test_indexed_tx_graph.rs b/crates/chain/tests/test_indexed_tx_graph.rs index dde66ddc..2ebd913c 100644 --- a/crates/chain/tests/test_indexed_tx_graph.rs +++ b/crates/chain/tests/test_indexed_tx_graph.rs @@ -68,7 +68,7 @@ fn insert_relevant_txs() { graph.insert_relevant_txs(txs.iter().map(|tx| (tx, None)), None), IndexedAdditions { graph_additions: Additions { - tx: txs.into(), + txs: txs.into(), ..Default::default() }, index_additions: DerivationAdditions([((), 9_u32)].into()), diff --git a/crates/chain/tests/test_tx_graph.rs b/crates/chain/tests/test_tx_graph.rs index 82a0f97d..c272f97a 100644 --- a/crates/chain/tests/test_tx_graph.rs +++ b/crates/chain/tests/test_tx_graph.rs @@ -71,7 +71,7 @@ fn insert_txouts() { assert_eq!( graph.insert_txout(*outpoint, txout.clone()), Additions { - txout: [(*outpoint, txout.clone())].into(), + txouts: [(*outpoint, 
txout.clone())].into(), ..Default::default() } ); @@ -87,7 +87,7 @@ fn insert_txouts() { assert_eq!( graph.insert_txout(*outpoint, txout.clone()), Additions { - txout: [(*outpoint, txout.clone())].into(), + txouts: [(*outpoint, txout.clone())].into(), ..Default::default() } ); @@ -95,8 +95,8 @@ fn insert_txouts() { assert_eq!( graph.insert_anchor(outpoint.txid, unconf_anchor), Additions { - tx: [].into(), - txout: [].into(), + txs: [].into(), + txouts: [].into(), anchors: [(unconf_anchor, outpoint.txid)].into(), last_seen: [].into() } @@ -105,8 +105,8 @@ fn insert_txouts() { assert_eq!( graph.insert_seen_at(outpoint.txid, 1000000), Additions { - tx: [].into(), - txout: [].into(), + txs: [].into(), + txouts: [].into(), anchors: [].into(), last_seen: [(outpoint.txid, 1000000)].into() } @@ -116,7 +116,7 @@ fn insert_txouts() { assert_eq!( graph.insert_tx(update_txs.clone()), Additions { - tx: [update_txs.clone()].into(), + txs: [update_txs.clone()].into(), ..Default::default() } ); @@ -125,8 +125,8 @@ fn insert_txouts() { assert_eq!( graph.insert_anchor(update_txs.txid(), conf_anchor), Additions { - tx: [].into(), - txout: [].into(), + txs: [].into(), + txouts: [].into(), anchors: [(conf_anchor, update_txs.txid())].into(), last_seen: [].into() } @@ -140,8 +140,8 @@ fn insert_txouts() { assert_eq!( additions, Additions { - tx: [update_txs.clone()].into(), - txout: update_ops.into(), + txs: [update_txs.clone()].into(), + txouts: update_ops.into(), anchors: [(conf_anchor, update_txs.txid()), (unconf_anchor, h!("tx2"))].into(), last_seen: [(h!("tx2"), 1000000)].into() } diff --git a/crates/electrum/src/electrum_ext.rs b/crates/electrum/src/electrum_ext.rs index cc1cf987..908fdddc 100644 --- a/crates/electrum/src/electrum_ext.rs +++ b/crates/electrum/src/electrum_ext.rs @@ -101,8 +101,8 @@ impl ElectrumUpdate { let graph_additions = { let old_additions = TxGraph::default().determine_additions(&update.graph); tx_graph::Additions { - tx: old_additions.tx, - txout: 
old_additions.txout, + txs: old_additions.txs, + txouts: old_additions.txouts, last_seen: old_additions.last_seen, anchors: old_additions .anchors From a7668a2f3e98b8950a139b7c88fbebff56f49a5f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E5=BF=97=E5=AE=87?= Date: Sun, 28 May 2023 15:57:46 +0800 Subject: [PATCH 15/17] [wallet_redesign] Modified `insert_tx` to use lowest checkpoint Also updated the documentation. --- crates/bdk/src/wallet/mod.rs | 48 ++++++++++++++++++++---------------- 1 file changed, 27 insertions(+), 21 deletions(-) diff --git a/crates/bdk/src/wallet/mod.rs b/crates/bdk/src/wallet/mod.rs index 550bc780..33546ce0 100644 --- a/crates/bdk/src/wallet/mod.rs +++ b/crates/bdk/src/wallet/mod.rs @@ -451,17 +451,20 @@ impl Wallet { Ok(changed) } - /// Add a transaction to the wallet's internal view of the chain. - /// This stages but does not [`commit`] the change. - /// - /// There are a number reasons `tx` could be rejected with an `Err(_)`. The most important one - /// is that the transaction is at a height that is greater than [`latest_checkpoint`]. Therefore - /// you should use [`insert_checkpoint`] to insert new checkpoints before manually inserting new - /// transactions. + /// Add a transaction to the wallet's internal view of the chain. This stages but does not + /// [`commit`] the change. /// /// Returns whether anything changed with the transaction insertion (e.g. `false` if the /// transaction was already inserted at the same position). /// + /// A `tx` can be rejected if `position` has a height greater than the [`latest_checkpoint`]. + /// Therefore you should use [`insert_checkpoint`] to insert new checkpoints before manually + /// inserting new transactions. + /// + /// **WARNING:** If `position` is confirmed, we anchor the `tx` to a the lowest checkpoint that + /// is >= the `position`'s height. The caller is responsible for ensuring the `tx` exists in our + /// local view of the best chain's history. 
+ /// /// [`commit`]: Self::commit /// [`latest_checkpoint`]: Self::latest_checkpoint /// [`insert_checkpoint`]: Self::insert_checkpoint @@ -473,25 +476,28 @@ impl Wallet { where D: PersistBackend, { - let tip = self.chain.tip(); - let (anchor, last_seen) = match position { ConfirmationTime::Confirmed { height, time } => { - let tip_height = tip.map(|b| b.height); - if Some(height) > tip_height { - return Err(InsertTxError::ConfirmationHeightCannotBeGreaterThanTip { - tip_height, + // anchor tx to checkpoint with lowest height that is >= position's height + let anchor = self + .chain + .blocks() + .range(height..) + .next() + .ok_or(InsertTxError::ConfirmationHeightCannotBeGreaterThanTip { + tip_height: self.chain.tip().map(|b| b.height), tx_height: height, - }); - } - ( - Some(ConfirmationTimeAnchor { - anchor_block: tip.expect("already checked if tip_height > height"), + }) + .map(|(&anchor_height, &anchor_hash)| ConfirmationTimeAnchor { + anchor_block: BlockId { + height: anchor_height, + hash: anchor_hash, + }, confirmation_height: height, confirmation_time: time, - }), - None, - ) + })?; + + (Some(anchor), None) } ConfirmationTime::Unconfirmed { last_seen } => (None, Some(last_seen)), }; From cff92111d500fbcdd70015c03bf57b386d473fba Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E5=BF=97=E5=AE=87?= Date: Mon, 29 May 2023 13:20:12 +0800 Subject: [PATCH 16/17] [wallet_redesign] Clean up and document address methods --- crates/bdk/src/types.rs | 4 +-- crates/bdk/src/wallet/mod.rs | 70 +++++++++++++++++++++--------------- 2 files changed, 44 insertions(+), 30 deletions(-) diff --git a/crates/bdk/src/types.rs b/crates/bdk/src/types.rs index 870734d6..e21bef90 100644 --- a/crates/bdk/src/types.rs +++ b/crates/bdk/src/types.rs @@ -22,9 +22,9 @@ use serde::{Deserialize, Serialize}; /// Types of keychains #[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)] pub enum KeychainKind { - /// External + /// External keychain, used for 
deriving recipient addresses. External = 0, - /// Internal, usually used for change outputs + /// Internal keychain, used for deriving change addresses. Internal = 1, } diff --git a/crates/bdk/src/wallet/mod.rs b/crates/bdk/src/wallet/mod.rs index 33546ce0..4bee9be8 100644 --- a/crates/bdk/src/wallet/mod.rs +++ b/crates/bdk/src/wallet/mod.rs @@ -273,7 +273,8 @@ impl Wallet { where D: PersistBackend, { - self._get_address(address_index, KeychainKind::External) + self._get_address(KeychainKind::External, address_index) + .expect("persistence backend must not fail") } /// Return a derived address using the internal (change) descriptor. @@ -287,50 +288,63 @@ impl Wallet { where D: PersistBackend, { - self._get_address(address_index, KeychainKind::Internal) + self._get_address(KeychainKind::Internal, address_index) + .expect("persistence backend must not fail") } - fn _get_address(&mut self, address_index: AddressIndex, keychain: KeychainKind) -> AddressInfo + /// Return a derived address using the specified `keychain` (external/internal). + /// + /// If `keychain` is [`KeychainKind::External`], external addresses will be derived (used for + /// receiving funds). + /// + /// If `keychain` is [`KeychainKind::Internal`], internal addresses will be derived (used for + /// creating change outputs). If the wallet does not have an internal keychain, it will use the + /// external keychain to derive change outputs. + /// + /// See [`AddressIndex`] for available address index selection strategies. If none of the keys + /// in the descriptor are derivable (i.e. does not end with /*) then the same address will + /// always be returned for any [`AddressIndex`]. 
+ fn _get_address( + &mut self, + keychain: KeychainKind, + address_index: AddressIndex, + ) -> Result where D: PersistBackend, { let keychain = self.map_keychain(keychain); let txout_index = &mut self.indexed_graph.index; - let (index, spk) = match address_index { + let (index, spk, additions) = match address_index { AddressIndex::New => { let ((index, spk), index_additions) = txout_index.reveal_next_spk(&keychain); - let spk = spk.clone(); - - self.persist - .stage(ChangeSet::from(IndexedAdditions::from(index_additions))); - self.persist.commit().expect("TODO"); - (index, spk) + (index, spk.clone(), Some(index_additions)) } AddressIndex::LastUnused => { - let index = txout_index.last_revealed_index(&keychain); - match index { - Some(index) if !txout_index.is_used(&(keychain, index)) => ( - index, - txout_index - .spk_at_index(&(keychain, index)) - .expect("must exist") - .clone(), - ), - _ => return self._get_address(AddressIndex::New, keychain), - } + let ((index, spk), index_additions) = txout_index.next_unused_spk(&keychain); + (index, spk.clone(), Some(index_additions)) + } + AddressIndex::Peek(index) => { + let (index, spk) = txout_index + .spks_of_keychain(&keychain) + .take(index as usize + 1) + .last() + .unwrap(); + (index, spk, None) } - AddressIndex::Peek(index) => txout_index - .spks_of_keychain(&keychain) - .take(index as usize + 1) - .last() - .unwrap(), }; - AddressInfo { + + if let Some(additions) = additions { + self.persist + .stage(ChangeSet::from(IndexedAdditions::from(additions))); + self.persist.commit()?; + } + + Ok(AddressInfo { index, address: Address::from_script(&spk, self.network) .expect("descriptor must have address form"), keychain, - } + }) } /// Return whether or not a `script` is part of this wallet (either internal or external) From 75f8b81d58a985669ce7302fe235ad68eddc0d47 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E5=BF=97=E5=AE=87?= Date: Mon, 29 May 2023 21:54:53 +0800 Subject: [PATCH 17/17] Update documentation * Add 
`warn(missing_docs)` for `bdk_wallet` and `bdk_chain`. * Add missing documentation. * Remove `LocalChain::heights` method. * Remove old TODOs. --- crates/bdk/src/lib.rs | 2 ++ crates/bdk/src/wallet/mod.rs | 15 ++++++++++----- crates/chain/src/chain_data.rs | 21 ++++++++++++++++++--- crates/chain/src/indexed_tx_graph.rs | 4 ++++ crates/chain/src/lib.rs | 5 +++++ crates/chain/src/local_chain.rs | 16 +++++++++++----- crates/chain/src/tx_graph.rs | 4 ++++ crates/electrum/src/electrum_ext.rs | 1 - 8 files changed, 54 insertions(+), 14 deletions(-) diff --git a/crates/bdk/src/lib.rs b/crates/bdk/src/lib.rs index 19aa5540..ecd3fb23 100644 --- a/crates/bdk/src/lib.rs +++ b/crates/bdk/src/lib.rs @@ -1,5 +1,7 @@ #![doc = include_str!("../README.md")] #![no_std] +#![warn(missing_docs)] + #[cfg(feature = "std")] #[macro_use] extern crate std; diff --git a/crates/bdk/src/wallet/mod.rs b/crates/bdk/src/wallet/mod.rs index 4bee9be8..3f58921a 100644 --- a/crates/bdk/src/wallet/mod.rs +++ b/crates/bdk/src/wallet/mod.rs @@ -21,12 +21,12 @@ use alloc::{ }; pub use bdk_chain::keychain::Balance; use bdk_chain::{ - indexed_tx_graph::{IndexedAdditions, IndexedTxGraph}, + indexed_tx_graph::IndexedAdditions, keychain::{KeychainTxOutIndex, LocalChangeSet, LocalUpdate}, local_chain::{self, LocalChain, UpdateNotConnectedError}, tx_graph::{CanonicalTx, TxGraph}, - Append, BlockId, ChainPosition, ConfirmationTime, ConfirmationTimeAnchor, FullTxOut, Persist, - PersistBackend, + Append, BlockId, ChainPosition, ConfirmationTime, ConfirmationTimeAnchor, FullTxOut, + IndexedTxGraph, Persist, PersistBackend, }; use bitcoin::consensus::encode::serialize; use bitcoin::secp256k1::Secp256k1; @@ -88,7 +88,7 @@ pub struct Wallet { change_signers: Arc, chain: LocalChain, indexed_graph: IndexedTxGraph>, - persist: Persist, // [TODO] Use a different `ChangeSet` + persist: Persist, network: Network, secp: SecpCtx, } @@ -96,7 +96,7 @@ pub struct Wallet { /// The update to a [`Wallet`] used in 
[`Wallet::apply_update`]. This is usually returned from blockchain data sources. pub type Update = LocalUpdate; -// /// The changeset produced internally by applying an update. +/// The changeset produced internally by [`Wallet`] when mutated. pub type ChangeSet = LocalChangeSet; /// The address index selection strategy to use to derived an address from the wallet's external @@ -184,10 +184,15 @@ where } } +/// An error that may occur when inserting a transaction into [`Wallet`]. #[derive(Debug)] pub enum InsertTxError { + /// The error variant that occurs when the caller attempts to insert a transaction with a + /// confirmation height that is greater than the internal chain tip. ConfirmationHeightCannotBeGreaterThanTip { + /// The internal chain's tip height. tip_height: Option, + /// The introduced transaction's confirmation height. tx_height: u32, }, } diff --git a/crates/chain/src/chain_data.rs b/crates/chain/src/chain_data.rs index 6decfd07..bd174c2e 100644 --- a/crates/chain/src/chain_data.rs +++ b/crates/chain/src/chain_data.rs @@ -21,6 +21,7 @@ impl ChainPosition { } impl ChainPosition<&A> { + /// Maps a [`ChainPosition<&A>`] into a [`ChainPosition`] by cloning the contents. pub fn cloned(self) -> ChainPosition { match self { ChainPosition::Confirmed(a) => ChainPosition::Confirmed(a.clone()), @@ -30,6 +31,7 @@ impl ChainPosition<&A> { } impl ChainPosition { + /// Determines the upper bound of the confirmation height. pub fn confirmation_height_upper_bound(&self) -> Option { match self { ChainPosition::Confirmed(a) => Some(a.confirmation_height_upper_bound()), @@ -46,15 +48,27 @@ impl ChainPosition { serde(crate = "serde_crate") )] pub enum ConfirmationTime { - Confirmed { height: u32, time: u64 }, - Unconfirmed { last_seen: u64 }, + /// The confirmed variant. + Confirmed { + /// Confirmation height. + height: u32, + /// Confirmation time in unix seconds. + time: u64, + }, + /// The unconfirmed variant. 
+ Unconfirmed { + /// The last-seen timestamp in unix seconds. + last_seen: u64, + }, } impl ConfirmationTime { + /// Construct an unconfirmed variant using the given `last_seen` time in unix seconds. pub fn unconfirmed(last_seen: u64) -> Self { Self::Unconfirmed { last_seen } } + /// Returns whether [`ConfirmationTime`] is the confirmed variant. pub fn is_confirmed(&self) -> bool { matches!(self, Self::Confirmed { .. }) } @@ -154,8 +168,9 @@ impl Anchor for ConfirmationHeightAnchor { pub struct ConfirmationTimeAnchor { /// The anchor block. pub anchor_block: BlockId, - + /// The confirmation height of the chain data being anchored. pub confirmation_height: u32, + /// The confirmation time of the chain data being anchored. pub confirmation_time: u64, } diff --git a/crates/chain/src/indexed_tx_graph.rs b/crates/chain/src/indexed_tx_graph.rs index 000c1a6e..730b0434 100644 --- a/crates/chain/src/indexed_tx_graph.rs +++ b/crates/chain/src/indexed_tx_graph.rs @@ -1,3 +1,7 @@ +//! Contains the [`IndexedTxGraph`] structure and associated types. +//! +//! This is essentially a [`TxGraph`] combined with an indexer. + use alloc::vec::Vec; use bitcoin::{OutPoint, Transaction, TxOut}; diff --git a/crates/chain/src/lib.rs b/crates/chain/src/lib.rs index dc5261e2..ed167ebf 100644 --- a/crates/chain/src/lib.rs +++ b/crates/chain/src/lib.rs @@ -17,18 +17,23 @@ //! cache or how you fetch it. //! //! 
[Bitcoin Dev Kit]: https://bitcoindevkit.org/ + #![no_std] +#![warn(missing_docs)] + pub use bitcoin; mod spk_txout_index; pub use spk_txout_index::*; mod chain_data; pub use chain_data::*; pub mod indexed_tx_graph; +pub use indexed_tx_graph::IndexedTxGraph; pub mod keychain; pub mod local_chain; mod tx_data_traits; pub mod tx_graph; pub use tx_data_traits::*; +pub use tx_graph::TxGraph; mod chain_oracle; pub use chain_oracle::*; mod persist; diff --git a/crates/chain/src/local_chain.rs b/crates/chain/src/local_chain.rs index 7623b294..fe97e3f2 100644 --- a/crates/chain/src/local_chain.rs +++ b/crates/chain/src/local_chain.rs @@ -1,6 +1,8 @@ +//! The [`LocalChain`] is a local implementation of [`ChainOracle`]. + use core::convert::Infallible; -use alloc::collections::{BTreeMap, BTreeSet}; +use alloc::collections::BTreeMap; use bitcoin::BlockHash; use crate::{BlockId, ChainOracle}; @@ -59,6 +61,7 @@ impl From> for LocalChain { } impl LocalChain { + /// Contruct a [`LocalChain`] from a list of [`BlockId`]s. pub fn from_blocks(blocks: B) -> Self where B: IntoIterator, @@ -73,6 +76,7 @@ impl LocalChain { &self.blocks } + /// Get the chain tip. pub fn tip(&self) -> Option { self.blocks .iter() @@ -158,6 +162,9 @@ impl LocalChain { Ok(changeset) } + /// Derives a [`ChangeSet`] that assumes that there are no preceding changesets. + /// + /// The changeset returned will record additions of all blocks included in [`Self`]. pub fn initial_changeset(&self) -> ChangeSet { self.blocks .iter() @@ -165,10 +172,6 @@ impl LocalChain { .collect() } - pub fn heights(&self) -> BTreeSet { - self.blocks.keys().cloned().collect() - } - /// Insert a block of [`BlockId`] into the [`LocalChain`]. /// /// # Error @@ -225,8 +228,11 @@ impl std::error::Error for UpdateNotConnectedError {} /// Represents a failure when trying to insert a checkpoint into [`LocalChain`]. #[derive(Clone, Debug, PartialEq)] pub struct InsertBlockNotMatchingError { + /// The checkpoints' height. 
pub height: u32, + /// Original checkpoint's block hash. pub original_hash: BlockHash, + /// Update checkpoint's block hash. pub update_hash: BlockHash, } diff --git a/crates/chain/src/tx_graph.rs b/crates/chain/src/tx_graph.rs index a9475b00..bc72cc50 100644 --- a/crates/chain/src/tx_graph.rs +++ b/crates/chain/src/tx_graph.rs @@ -983,9 +983,13 @@ impl TxGraph { )] #[must_use] pub struct Additions { + /// Added transactions. pub txs: BTreeSet, + /// Added txouts. pub txouts: BTreeMap, + /// Added anchors. pub anchors: BTreeSet<(A, Txid)>, + /// Added last-seen unix timestamps of transactions. pub last_seen: BTreeMap, } diff --git a/crates/electrum/src/electrum_ext.rs b/crates/electrum/src/electrum_ext.rs index 908fdddc..1ec44d85 100644 --- a/crates/electrum/src/electrum_ext.rs +++ b/crates/electrum/src/electrum_ext.rs @@ -236,7 +236,6 @@ impl ElectrumExt for Client { populate_with_txids(self, anchor_block, &mut update, &mut txids.iter().cloned())?; - // [TODO] cache transactions to reduce bandwidth let _txs = populate_with_outpoints( self, anchor_block,