[bdk_chain_redesign] Rm anchor type param for structs that don't use it

志宇 2023-03-28 14:58:59 +08:00
parent 3440a05711
commit 34d0277e44
No known key found for this signature in database
GPG Key ID: F6345C9837C2BDE8
16 changed files with 174 additions and 201 deletions

View File

@ -85,19 +85,19 @@ const COINBASE_MATURITY: u32 = 100;
pub struct Wallet<D = ()> {
signers: Arc<SignersContainer>,
change_signers: Arc<SignersContainer>,
keychain_tracker: KeychainTracker<KeychainKind, BlockId, ConfirmationTime>,
persist: persist::Persist<KeychainKind, BlockId, ConfirmationTime, D>,
keychain_tracker: KeychainTracker<KeychainKind, ConfirmationTime>,
persist: persist::Persist<KeychainKind, ConfirmationTime, D>,
network: Network,
secp: SecpCtx,
}
/// The update to a [`Wallet`] used in [`Wallet::apply_update`]. This is usually returned from blockchain data sources.
/// The type parameter `T` indicates the kind of transaction contained in the update. It's usually a [`bitcoin::Transaction`].
pub type Update = KeychainScan<KeychainKind, BlockId, ConfirmationTime>;
pub type Update = KeychainScan<KeychainKind, ConfirmationTime>;
/// Error indicating that something was wrong with an [`Update<T>`].
pub type UpdateError = chain_graph::UpdateError<ConfirmationTime>;
/// The changeset produced internally by applying an update
pub(crate) type ChangeSet = KeychainChangeSet<KeychainKind, BlockId, ConfirmationTime>;
pub(crate) type ChangeSet = KeychainChangeSet<KeychainKind, ConfirmationTime>;
/// The address index selection strategy to use to derive an address from the wallet's external
/// descriptor. See [`Wallet::get_address`]. If you're unsure which one to use, use `WalletIndex::New`.
@ -197,7 +197,7 @@ impl<D> Wallet<D> {
network: Network,
) -> Result<Self, NewError<D::LoadError>>
where
D: persist::PersistBackend<KeychainKind, BlockId, ConfirmationTime>,
D: persist::PersistBackend<KeychainKind, ConfirmationTime>,
{
let secp = Secp256k1::new();
@ -259,7 +259,7 @@ impl<D> Wallet<D> {
/// (i.e. does not end with /*) then the same address will always be returned for any [`AddressIndex`].
pub fn get_address(&mut self, address_index: AddressIndex) -> AddressInfo
where
D: persist::PersistBackend<KeychainKind, BlockId, ConfirmationTime>,
D: persist::PersistBackend<KeychainKind, ConfirmationTime>,
{
self._get_address(address_index, KeychainKind::External)
}
@ -273,14 +273,14 @@ impl<D> Wallet<D> {
/// be returned for any [`AddressIndex`].
pub fn get_internal_address(&mut self, address_index: AddressIndex) -> AddressInfo
where
D: persist::PersistBackend<KeychainKind, BlockId, ConfirmationTime>,
D: persist::PersistBackend<KeychainKind, ConfirmationTime>,
{
self._get_address(address_index, KeychainKind::Internal)
}
fn _get_address(&mut self, address_index: AddressIndex, keychain: KeychainKind) -> AddressInfo
where
D: persist::PersistBackend<KeychainKind, BlockId, ConfirmationTime>,
D: persist::PersistBackend<KeychainKind, ConfirmationTime>,
{
let keychain = self.map_keychain(keychain);
let txout_index = &mut self.keychain_tracker.txout_index;
@ -620,7 +620,7 @@ impl<D> Wallet<D> {
params: TxParams,
) -> Result<(psbt::PartiallySignedTransaction, TransactionDetails), Error>
where
D: persist::PersistBackend<KeychainKind, BlockId, ConfirmationTime>,
D: persist::PersistBackend<KeychainKind, ConfirmationTime>,
{
let external_descriptor = self
.keychain_tracker
@ -1694,7 +1694,7 @@ impl<D> Wallet<D> {
/// [`commit`]: Self::commit
pub fn apply_update(&mut self, update: Update) -> Result<(), UpdateError>
where
D: persist::PersistBackend<KeychainKind, BlockId, ConfirmationTime>,
D: persist::PersistBackend<KeychainKind, ConfirmationTime>,
{
let changeset = self.keychain_tracker.apply_update(update)?;
self.persist.stage(changeset);
@ -1706,7 +1706,7 @@ impl<D> Wallet<D> {
/// [`staged`]: Self::staged
pub fn commit(&mut self) -> Result<(), D::WriteError>
where
D: persist::PersistBackend<KeychainKind, BlockId, ConfirmationTime>,
D: persist::PersistBackend<KeychainKind, ConfirmationTime>,
{
self.persist.commit()
}
@ -1724,7 +1724,7 @@ impl<D> Wallet<D> {
}
/// Get a reference to the inner [`ChainGraph`](bdk_chain::chain_graph::ChainGraph).
pub fn as_chain_graph(&self) -> &bdk_chain::chain_graph::ChainGraph<BlockId, ConfirmationTime> {
pub fn as_chain_graph(&self) -> &bdk_chain::chain_graph::ChainGraph<ConfirmationTime> {
self.keychain_tracker.chain_graph()
}
}
@ -1735,8 +1735,8 @@ impl<D> AsRef<bdk_chain::tx_graph::TxGraph> for Wallet<D> {
}
}
impl<D> AsRef<bdk_chain::chain_graph::ChainGraph<BlockId, ConfirmationTime>> for Wallet<D> {
fn as_ref(&self) -> &bdk_chain::chain_graph::ChainGraph<BlockId, ConfirmationTime> {
impl<D> AsRef<bdk_chain::chain_graph::ChainGraph<ConfirmationTime>> for Wallet<D> {
fn as_ref(&self) -> &bdk_chain::chain_graph::ChainGraph<ConfirmationTime> {
self.keychain_tracker.chain_graph()
}
}
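
The hunks above shrink every `PersistBackend` bound on `Wallet` from three type parameters to two. A minimal sketch of the resulting call flow, assuming a `wallet: Wallet<D>` and an `update: Update` already obtained from a chain source (the function name and crate paths are illustrative, not part of this diff):

```rust
use bdk::wallet::{AddressIndex, Update, Wallet};
use bdk::KeychainKind;
use bdk_chain::{keychain::PersistBackend, ConfirmationTime};

// Sketch: the backend bound now names only the keychain and position types;
// `BlockId` is no longer threaded through the signature.
fn apply_and_persist<D>(wallet: &mut Wallet<D>, update: Update)
where
    D: PersistBackend<KeychainKind, ConfirmationTime>,
{
    wallet.apply_update(update).expect("update should be consistent");
    wallet.commit().expect("backend write should succeed");

    // Address derivation goes through the same (reduced) backend bound.
    let info = wallet.get_address(AddressIndex::New);
    println!("next external address: {}", info.address);
}
```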

View File

@ -39,7 +39,6 @@
use crate::collections::BTreeMap;
use crate::collections::HashSet;
use alloc::{boxed::Box, rc::Rc, string::String, vec::Vec};
use bdk_chain::BlockId;
use bdk_chain::ConfirmationTime;
use core::cell::RefCell;
use core::marker::PhantomData;
@ -527,7 +526,7 @@ impl<'a, D, Cs: CoinSelectionAlgorithm, Ctx: TxBuilderContext> TxBuilder<'a, D,
/// [`BIP174`]: https://github.com/bitcoin/bips/blob/master/bip-0174.mediawiki
pub fn finish(self) -> Result<(Psbt, TransactionDetails), Error>
where
D: persist::PersistBackend<KeychainKind, BlockId, ConfirmationTime>,
D: persist::PersistBackend<KeychainKind, ConfirmationTime>,
{
self.wallet
.borrow_mut()

View File

@ -3,7 +3,7 @@ use crate::{
collections::HashSet,
sparse_chain::{self, ChainPosition, SparseChain},
tx_graph::{self, TxGraph, TxInGraph},
BlockAnchor, BlockId, ForEachTxOut, FullTxOut, TxHeight,
BlockId, ForEachTxOut, FullTxOut, TxHeight,
};
use alloc::{string::ToString, vec::Vec};
use bitcoin::{OutPoint, Transaction, TxOut, Txid};
@ -25,12 +25,12 @@ use core::fmt::Debug;
/// `graph` but not the other way around. Transactions may fall out of the *chain* (via re-org or
/// mempool eviction) but will remain in the *graph*.
#[derive(Clone, Debug, PartialEq)]
pub struct ChainGraph<A = BlockId, P = TxHeight> {
pub struct ChainGraph<P = TxHeight> {
chain: SparseChain<P>,
graph: TxGraph<A>,
graph: TxGraph<BlockId>,
}
impl<A, P> Default for ChainGraph<A, P> {
impl<P> Default for ChainGraph<P> {
fn default() -> Self {
Self {
chain: Default::default(),
@ -39,39 +39,38 @@ impl<A, P> Default for ChainGraph<A, P> {
}
}
impl<A, P> AsRef<SparseChain<P>> for ChainGraph<A, P> {
impl<P> AsRef<SparseChain<P>> for ChainGraph<P> {
fn as_ref(&self) -> &SparseChain<P> {
&self.chain
}
}
impl<A, P> AsRef<TxGraph<A>> for ChainGraph<A, P> {
fn as_ref(&self) -> &TxGraph<A> {
impl<P> AsRef<TxGraph<BlockId>> for ChainGraph<P> {
fn as_ref(&self) -> &TxGraph<BlockId> {
&self.graph
}
}
impl<A, P> AsRef<ChainGraph<A, P>> for ChainGraph<A, P> {
fn as_ref(&self) -> &ChainGraph<A, P> {
impl<P> AsRef<ChainGraph<P>> for ChainGraph<P> {
fn as_ref(&self) -> &ChainGraph<P> {
self
}
}
impl<A, P> ChainGraph<A, P> {
impl<P> ChainGraph<P> {
/// Returns a reference to the internal [`SparseChain`].
pub fn chain(&self) -> &SparseChain<P> {
&self.chain
}
/// Returns a reference to the internal [`TxGraph`].
pub fn graph(&self) -> &TxGraph<A> {
pub fn graph(&self) -> &TxGraph<BlockId> {
&self.graph
}
}
impl<A, P> ChainGraph<A, P>
impl<P> ChainGraph<P>
where
A: BlockAnchor,
P: ChainPosition,
{
/// Create a new chain graph from a `chain` and a `graph`.
@ -82,7 +81,7 @@ where
/// transaction in `graph`.
/// 2. The `chain` has two transactions that are allegedly in it, but they conflict in the `graph`
/// (so could not possibly be in the same chain).
pub fn new(chain: SparseChain<P>, graph: TxGraph<A>) -> Result<Self, NewError<P>> {
pub fn new(chain: SparseChain<P>, graph: TxGraph<BlockId>) -> Result<Self, NewError<P>> {
let mut missing = HashSet::default();
for (pos, txid) in chain.txids() {
if let Some(graphed_tx) = graph.get_tx(*txid) {
@ -129,7 +128,7 @@ where
&self,
update: SparseChain<P>,
new_txs: impl IntoIterator<Item = Transaction>,
) -> Result<ChainGraph<A, P>, NewError<P>> {
) -> Result<ChainGraph<P>, NewError<P>> {
let mut inflated_chain = SparseChain::default();
let mut inflated_graph = TxGraph::default();
@ -188,7 +187,7 @@ where
/// Determines the changes required to invalidate checkpoints `from_height` (inclusive) and
/// above. Displaced transactions will have their positions moved to [`TxHeight::Unconfirmed`].
pub fn invalidate_checkpoints_preview(&self, from_height: u32) -> ChangeSet<A, P> {
pub fn invalidate_checkpoints_preview(&self, from_height: u32) -> ChangeSet<P> {
ChangeSet {
chain: self.chain.invalidate_checkpoints_preview(from_height),
..Default::default()
@ -200,9 +199,9 @@ where
///
/// This is equivalent to calling [`Self::invalidate_checkpoints_preview`] and
/// [`Self::apply_changeset`] in sequence.
pub fn invalidate_checkpoints(&mut self, from_height: u32) -> ChangeSet<A, P>
pub fn invalidate_checkpoints(&mut self, from_height: u32) -> ChangeSet<P>
where
ChangeSet<A, P>: Clone,
ChangeSet<P>: Clone,
{
let changeset = self.invalidate_checkpoints_preview(from_height);
self.apply_changeset(changeset.clone());
@ -213,7 +212,7 @@ where
///
/// This does not necessarily mean that it is *confirmed* in the blockchain; it might just be in
/// the unconfirmed transaction list within the [`SparseChain`].
pub fn get_tx_in_chain(&self, txid: Txid) -> Option<(&P, TxInGraph<'_, Transaction, A>)> {
pub fn get_tx_in_chain(&self, txid: Txid) -> Option<(&P, TxInGraph<'_, Transaction, BlockId>)> {
let position = self.chain.tx_position(txid)?;
let graphed_tx = self.graph.get_tx(txid).expect("must exist");
Some((position, graphed_tx))
@ -228,7 +227,7 @@ where
&self,
tx: Transaction,
pos: P,
) -> Result<ChangeSet<A, P>, InsertTxError<P>> {
) -> Result<ChangeSet<P>, InsertTxError<P>> {
let mut changeset = ChangeSet {
chain: self.chain.insert_tx_preview(tx.txid(), pos)?,
graph: self.graph.insert_tx_preview(tx),
@ -241,18 +240,14 @@ where
///
/// This is equivalent to calling [`Self::insert_tx_preview`] and [`Self::apply_changeset`] in
/// sequence.
pub fn insert_tx(
&mut self,
tx: Transaction,
pos: P,
) -> Result<ChangeSet<A, P>, InsertTxError<P>> {
pub fn insert_tx(&mut self, tx: Transaction, pos: P) -> Result<ChangeSet<P>, InsertTxError<P>> {
let changeset = self.insert_tx_preview(tx, pos)?;
self.apply_changeset(changeset.clone());
Ok(changeset)
}
/// Determines the changes required to insert a [`TxOut`] into the internal [`TxGraph`].
pub fn insert_txout_preview(&self, outpoint: OutPoint, txout: TxOut) -> ChangeSet<A, P> {
pub fn insert_txout_preview(&self, outpoint: OutPoint, txout: TxOut) -> ChangeSet<P> {
ChangeSet {
chain: Default::default(),
graph: self.graph.insert_txout_preview(outpoint, txout),
@ -263,7 +258,7 @@ where
///
/// This is equivalent to calling [`Self::insert_txout_preview`] and [`Self::apply_changeset`]
/// in sequence.
pub fn insert_txout(&mut self, outpoint: OutPoint, txout: TxOut) -> ChangeSet<A, P> {
pub fn insert_txout(&mut self, outpoint: OutPoint, txout: TxOut) -> ChangeSet<P> {
let changeset = self.insert_txout_preview(outpoint, txout);
self.apply_changeset(changeset.clone());
changeset
@ -276,7 +271,7 @@ where
pub fn insert_checkpoint_preview(
&self,
block_id: BlockId,
) -> Result<ChangeSet<A, P>, InsertCheckpointError> {
) -> Result<ChangeSet<P>, InsertCheckpointError> {
self.chain
.insert_checkpoint_preview(block_id)
.map(|chain_changeset| ChangeSet {
@ -292,7 +287,7 @@ where
pub fn insert_checkpoint(
&mut self,
block_id: BlockId,
) -> Result<ChangeSet<A, P>, InsertCheckpointError> {
) -> Result<ChangeSet<P>, InsertCheckpointError> {
let changeset = self.insert_checkpoint_preview(block_id)?;
self.apply_changeset(changeset.clone());
Ok(changeset)
@ -301,8 +296,8 @@ where
/// Calculates the difference between self and `update` in the form of a [`ChangeSet`].
pub fn determine_changeset(
&self,
update: &ChainGraph<A, P>,
) -> Result<ChangeSet<A, P>, UpdateError<P>> {
update: &ChainGraph<P>,
) -> Result<ChangeSet<P>, UpdateError<P>> {
let chain_changeset = self
.chain
.determine_changeset(&update.chain)
@ -337,10 +332,7 @@ where
///
/// **WARNING:** If there are any missing full txs, conflict resolution will not be complete. In
/// debug mode, this will result in a panic.
fn fix_conflicts(
&self,
changeset: &mut ChangeSet<A, P>,
) -> Result<(), UnresolvableConflict<P>> {
fn fix_conflicts(&self, changeset: &mut ChangeSet<P>) -> Result<(), UnresolvableConflict<P>> {
let mut chain_conflicts = vec![];
for (&txid, pos_change) in &changeset.chain.txids {
@ -416,17 +408,14 @@ where
///
/// **Warning** this method assumes that the changeset is correctly formed. If it is not, the
/// chain graph may behave incorrectly in the future and panic unexpectedly.
pub fn apply_changeset(&mut self, changeset: ChangeSet<A, P>) {
pub fn apply_changeset(&mut self, changeset: ChangeSet<P>) {
self.chain.apply_changeset(changeset.chain);
self.graph.apply_additions(changeset.graph);
}
/// Applies the `update` chain graph. Note this is shorthand for calling
/// [`Self::determine_changeset()`] and [`Self::apply_changeset()`] in sequence.
pub fn apply_update(
&mut self,
update: ChainGraph<A, P>,
) -> Result<ChangeSet<A, P>, UpdateError<P>> {
pub fn apply_update(&mut self, update: ChainGraph<P>) -> Result<ChangeSet<P>, UpdateError<P>> {
let changeset = self.determine_changeset(&update)?;
self.apply_changeset(changeset.clone());
Ok(changeset)
@ -441,7 +430,7 @@ where
/// in ascending order.
pub fn transactions_in_chain(
&self,
) -> impl DoubleEndedIterator<Item = (&P, TxInGraph<'_, Transaction, A>)> {
) -> impl DoubleEndedIterator<Item = (&P, TxInGraph<'_, Transaction, BlockId>)> {
self.chain
.txids()
.map(move |(pos, txid)| (pos, self.graph.get_tx(*txid).expect("must exist")))
@ -472,18 +461,18 @@ where
serde(
crate = "serde_crate",
bound(
deserialize = "A: Ord + serde::Deserialize<'de>, P: serde::Deserialize<'de>",
serialize = "A: Ord + serde::Serialize, P: serde::Serialize"
deserialize = "P: serde::Deserialize<'de>",
serialize = "P: serde::Serialize"
)
)
)]
#[must_use]
pub struct ChangeSet<A, P> {
pub struct ChangeSet<P> {
pub chain: sparse_chain::ChangeSet<P>,
pub graph: tx_graph::Additions<A>,
pub graph: tx_graph::Additions<BlockId>,
}
impl<A, P> ChangeSet<A, P> {
impl<P> ChangeSet<P> {
/// Returns `true` if this [`ChangeSet`] records no changes.
pub fn is_empty(&self) -> bool {
self.chain.is_empty() && self.graph.is_empty()
@ -499,7 +488,7 @@ impl<A, P> ChangeSet<A, P> {
/// Appends the changes in `other` into self such that applying `self` afterward has the same
/// effect as sequentially applying the original `self` and `other`.
pub fn append(&mut self, other: ChangeSet<A, P>)
pub fn append(&mut self, other: ChangeSet<P>)
where
P: ChainPosition,
{
@ -508,7 +497,7 @@ impl<A, P> ChangeSet<A, P> {
}
}
impl<A, P> Default for ChangeSet<A, P> {
impl<P> Default for ChangeSet<P> {
fn default() -> Self {
Self {
chain: Default::default(),
@ -523,7 +512,7 @@ impl<P> ForEachTxOut for ChainGraph<P> {
}
}
impl<A, P> ForEachTxOut for ChangeSet<A, P> {
impl<P> ForEachTxOut for ChangeSet<P> {
fn for_each_txout(&self, f: impl FnMut((OutPoint, &TxOut))) {
self.graph.for_each_txout(f)
}
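
With the anchor parameter gone, a chain graph is just `ChainGraph<P>` and its transaction graph is always a `TxGraph<BlockId>`. A small sketch of the insert/lookup API shown above, assuming the `bitcoin` version this workspace pins (the one still using `PackedLockTime`); the empty transaction is purely illustrative:

```rust
use bdk_chain::{chain_graph::ChainGraph, TxHeight};
use bitcoin::{PackedLockTime, Transaction};

fn main() {
    // Only the chain-position parameter `P` is left.
    let mut cg = ChainGraph::<TxHeight>::default();

    let tx = Transaction {
        version: 1,
        lock_time: PackedLockTime(0),
        input: vec![],
        output: vec![],
    };
    let txid = tx.txid();

    // `insert_tx` computes and applies a `ChangeSet<TxHeight>` in one go.
    let _changeset = cg
        .insert_tx(tx, TxHeight::Unconfirmed)
        .expect("an empty graph cannot conflict");

    // The transaction is now reachable through the chain side as well.
    assert!(cg.get_tx_in_chain(txid).is_some());
}
```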

View File

@ -105,14 +105,14 @@ impl<K> AsRef<BTreeMap<K, u32>> for DerivationAdditions<K> {
#[derive(Clone, Debug, PartialEq)]
/// An update that includes the last active indexes of each keychain.
pub struct KeychainScan<K, A, P> {
pub struct KeychainScan<K, P> {
/// The update data in the form of a chain that could be applied
pub update: ChainGraph<A, P>,
pub update: ChainGraph<P>,
/// The last active indexes of each keychain
pub last_active_indices: BTreeMap<K, u32>,
}
impl<K, A: Default, P> Default for KeychainScan<K, A, P> {
impl<K, P> Default for KeychainScan<K, P> {
fn default() -> Self {
Self {
update: Default::default(),
@ -121,8 +121,8 @@ impl<K, A: Default, P> Default for KeychainScan<K, A, P> {
}
}
impl<K, A, P> From<ChainGraph<A, P>> for KeychainScan<K, A, P> {
fn from(update: ChainGraph<A, P>) -> Self {
impl<K, P> From<ChainGraph<P>> for KeychainScan<K, P> {
fn from(update: ChainGraph<P>) -> Self {
KeychainScan {
update,
last_active_indices: Default::default(),
@ -140,20 +140,20 @@ impl<K, A, P> From<ChainGraph<A, P>> for KeychainScan<K, A, P> {
serde(
crate = "serde_crate",
bound(
deserialize = "K: Ord + serde::Deserialize<'de>, A: Ord + serde::Deserialize<'de>, P: serde::Deserialize<'de>",
serialize = "K: Ord + serde::Serialize, A: Ord + serde::Serialize, P: serde::Serialize"
deserialize = "K: Ord + serde::Deserialize<'de>, P: serde::Deserialize<'de>",
serialize = "K: Ord + serde::Serialize, P: serde::Serialize"
)
)
)]
#[must_use]
pub struct KeychainChangeSet<K, A, P> {
pub struct KeychainChangeSet<K, P> {
/// The changes in local keychain derivation indices
pub derivation_indices: DerivationAdditions<K>,
/// The changes that have occurred in the blockchain
pub chain_graph: chain_graph::ChangeSet<A, P>,
pub chain_graph: chain_graph::ChangeSet<P>,
}
impl<K, A, P> Default for KeychainChangeSet<K, A, P> {
impl<K, P> Default for KeychainChangeSet<K, P> {
fn default() -> Self {
Self {
chain_graph: Default::default(),
@ -162,7 +162,7 @@ impl<K, A, P> Default for KeychainChangeSet<K, A, P> {
}
}
impl<K, A, P> KeychainChangeSet<K, A, P> {
impl<K, P> KeychainChangeSet<K, P> {
/// Returns whether the [`KeychainChangeSet`] is empty (no changes recorded).
pub fn is_empty(&self) -> bool {
self.chain_graph.is_empty() && self.derivation_indices.is_empty()
@ -173,7 +173,7 @@ impl<K, A, P> KeychainChangeSet<K, A, P> {
///
/// Note the derivation indices cannot be decreased, so `other` will only change the derivation
/// index for a keychain if its value is higher than the one in `self`.
pub fn append(&mut self, other: KeychainChangeSet<K, A, P>)
pub fn append(&mut self, other: KeychainChangeSet<K, P>)
where
K: Ord,
P: ChainPosition,
@ -183,8 +183,8 @@ impl<K, A, P> KeychainChangeSet<K, A, P> {
}
}
impl<K, A, P> From<chain_graph::ChangeSet<A, P>> for KeychainChangeSet<K, A, P> {
fn from(changeset: chain_graph::ChangeSet<A, P>) -> Self {
impl<K, P> From<chain_graph::ChangeSet<P>> for KeychainChangeSet<K, P> {
fn from(changeset: chain_graph::ChangeSet<P>) -> Self {
Self {
chain_graph: changeset,
..Default::default()
@ -192,7 +192,7 @@ impl<K, A, P> From<chain_graph::ChangeSet<A, P>> for KeychainChangeSet<K, A, P>
}
}
impl<K, A, P> From<DerivationAdditions<K>> for KeychainChangeSet<K, A, P> {
impl<K, P> From<DerivationAdditions<K>> for KeychainChangeSet<K, P> {
fn from(additions: DerivationAdditions<K>) -> Self {
Self {
derivation_indices: additions,
@ -201,13 +201,13 @@ impl<K, A, P> From<DerivationAdditions<K>> for KeychainChangeSet<K, A, P> {
}
}
impl<K, A, P> AsRef<TxGraph<A>> for KeychainScan<K, A, P> {
fn as_ref(&self) -> &TxGraph<A> {
impl<K, P> AsRef<TxGraph> for KeychainScan<K, P> {
fn as_ref(&self) -> &TxGraph {
self.update.graph()
}
}
impl<K, A, P> ForEachTxOut for KeychainChangeSet<K, A, P> {
impl<K, P> ForEachTxOut for KeychainChangeSet<K, P> {
fn for_each_txout(&self, f: impl FnMut((bitcoin::OutPoint, &bitcoin::TxOut))) {
self.chain_graph.for_each_txout(f)
}
@ -293,12 +293,12 @@ mod test {
rhs_di.insert(Keychain::Four, 4);
let mut lhs = KeychainChangeSet {
derivation_indices: DerivationAdditions(lhs_di),
chain_graph: chain_graph::ChangeSet::<(), TxHeight>::default(),
chain_graph: chain_graph::ChangeSet::<TxHeight>::default(),
};
let rhs = KeychainChangeSet {
derivation_indices: DerivationAdditions(rhs_di),
chain_graph: chain_graph::ChangeSet::<(), TxHeight>::default(),
chain_graph: chain_graph::ChangeSet::<TxHeight>::default(),
};
lhs.append(rhs);
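
The `From` impls above lift a bare `chain_graph::ChangeSet<P>` or a `DerivationAdditions<K>` into a `KeychainChangeSet<K, P>` with the other half defaulted. A minimal sketch, using `()` as the keychain type just like the tracker tests further down:

```rust
use bdk_chain::{chain_graph, keychain::KeychainChangeSet, TxHeight};

fn main() {
    // Lift an (empty) chain-graph changeset into a keychain changeset.
    let cg_changeset = chain_graph::ChangeSet::<TxHeight>::default();
    let keychain_changeset: KeychainChangeSet<(), TxHeight> = cg_changeset.into();

    // Neither side recorded anything, so the combined changeset is empty too.
    assert!(keychain_changeset.is_empty());
}
```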

View File

@ -18,12 +18,12 @@ use crate::{keychain, sparse_chain::ChainPosition};
///
/// [`KeychainTracker`]: keychain::KeychainTracker
#[derive(Debug)]
pub struct Persist<K, A, P, B> {
pub struct Persist<K, P, B> {
backend: B,
stage: keychain::KeychainChangeSet<K, A, P>,
stage: keychain::KeychainChangeSet<K, P>,
}
impl<K, A, P, B> Persist<K, A, P, B> {
impl<K, P, B> Persist<K, P, B> {
/// Create a new `Persist` from a [`PersistBackend`].
pub fn new(backend: B) -> Self {
Self {
@ -35,7 +35,7 @@ impl<K, A, P, B> Persist<K, A, P, B> {
/// Stage a `changeset` for later persistence with [`commit`].
///
/// [`commit`]: Self::commit
pub fn stage(&mut self, changeset: keychain::KeychainChangeSet<K, A, P>)
pub fn stage(&mut self, changeset: keychain::KeychainChangeSet<K, P>)
where
K: Ord,
P: ChainPosition,
@ -44,7 +44,7 @@ impl<K, A, P, B> Persist<K, A, P, B> {
}
/// Get the changes that haven't been committed yet
pub fn staged(&self) -> &keychain::KeychainChangeSet<K, A, P> {
pub fn staged(&self) -> &keychain::KeychainChangeSet<K, P> {
&self.stage
}
@ -53,7 +53,7 @@ impl<K, A, P, B> Persist<K, A, P, B> {
/// Returns a backend-defined error if this fails.
pub fn commit(&mut self) -> Result<(), B::WriteError>
where
B: PersistBackend<K, A, P>,
B: PersistBackend<K, P>,
{
self.backend.append_changeset(&self.stage)?;
self.stage = Default::default();
@ -62,7 +62,7 @@ impl<K, A, P, B> Persist<K, A, P, B> {
}
/// A persistence backend for [`Persist`].
pub trait PersistBackend<K, A, P> {
pub trait PersistBackend<K, P> {
/// The error the backend returns when it fails to write.
type WriteError: core::fmt::Debug;
@ -79,29 +79,29 @@ pub trait PersistBackend<K, A, P> {
/// [`load_into_keychain_tracker`]: Self::load_into_keychain_tracker
fn append_changeset(
&mut self,
changeset: &keychain::KeychainChangeSet<K, A, P>,
changeset: &keychain::KeychainChangeSet<K, P>,
) -> Result<(), Self::WriteError>;
/// Applies all the changesets the backend has received to `tracker`.
fn load_into_keychain_tracker(
&mut self,
tracker: &mut keychain::KeychainTracker<K, A, P>,
tracker: &mut keychain::KeychainTracker<K, P>,
) -> Result<(), Self::LoadError>;
}
impl<K, A, P> PersistBackend<K, A, P> for () {
impl<K, P> PersistBackend<K, P> for () {
type WriteError = ();
type LoadError = ();
fn append_changeset(
&mut self,
_changeset: &keychain::KeychainChangeSet<K, A, P>,
_changeset: &keychain::KeychainChangeSet<K, P>,
) -> Result<(), Self::WriteError> {
Ok(())
}
fn load_into_keychain_tracker(
&mut self,
_tracker: &mut keychain::KeychainTracker<K, A, P>,
_tracker: &mut keychain::KeychainTracker<K, P>,
) -> Result<(), Self::LoadError> {
Ok(())
}
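
`Persist` buffers changesets in memory with `stage` and flushes them with `commit`; the unit backend defined above makes both a no-op. A short sketch of that flow, assuming `Persist` is reachable under `bdk_chain::keychain::persist`, alongside the `PersistBackend` re-export used by `bdk_file_store` below:

```rust
use bdk_chain::{
    keychain::{persist::Persist, KeychainChangeSet},
    TxHeight,
};

fn main() {
    // `()` implements `PersistBackend<K, P>` (see above) by discarding everything.
    let mut persist = Persist::<(), TxHeight, ()>::new(());

    // Stage an (empty) changeset, inspect it, then hand it to the backend.
    persist.stage(KeychainChangeSet::<(), TxHeight>::default());
    assert!(persist.staged().is_empty());
    persist.commit().expect("the unit backend never fails");
}
```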

View File

@ -17,16 +17,15 @@ use super::{Balance, DerivationAdditions};
/// The [`KeychainTracker`] atomically updates its [`KeychainTxOutIndex`] whenever new chain data is
/// incorporated into its internal [`ChainGraph`].
#[derive(Clone, Debug)]
pub struct KeychainTracker<K, A, P> {
pub struct KeychainTracker<K, P> {
/// Index mapping script pubkeys to transaction outputs
pub txout_index: KeychainTxOutIndex<K>,
chain_graph: ChainGraph<A, P>,
chain_graph: ChainGraph<P>,
}
impl<K, A, P> KeychainTracker<K, A, P>
impl<K, P> KeychainTracker<K, P>
where
P: sparse_chain::ChainPosition,
A: crate::BlockAnchor,
K: Ord + Clone + core::fmt::Debug,
{
/// Add a keychain to the tracker's `txout_index` with a descriptor to derive addresses.
@ -65,8 +64,8 @@ where
/// [`KeychainTxOutIndex`].
pub fn determine_changeset(
&self,
scan: &KeychainScan<K, A, P>,
) -> Result<KeychainChangeSet<K, A, P>, chain_graph::UpdateError<P>> {
scan: &KeychainScan<K, P>,
) -> Result<KeychainChangeSet<K, P>, chain_graph::UpdateError<P>> {
// TODO: `KeychainTxOutIndex::determine_additions`
let mut derivation_indices = scan.last_active_indices.clone();
derivation_indices.retain(|keychain, index| {
@ -90,8 +89,8 @@ where
/// [`apply_changeset`]: Self::apply_changeset
pub fn apply_update(
&mut self,
scan: KeychainScan<K, A, P>,
) -> Result<KeychainChangeSet<K, A, P>, chain_graph::UpdateError<P>> {
scan: KeychainScan<K, P>,
) -> Result<KeychainChangeSet<K, P>, chain_graph::UpdateError<P>> {
let changeset = self.determine_changeset(&scan)?;
self.apply_changeset(changeset.clone());
Ok(changeset)
@ -101,7 +100,7 @@ where
///
/// Internally, this calls [`KeychainTxOutIndex::apply_additions`] and
/// [`ChainGraph::apply_changeset`] in sequence.
pub fn apply_changeset(&mut self, changeset: KeychainChangeSet<K, A, P>) {
pub fn apply_changeset(&mut self, changeset: KeychainChangeSet<K, P>) {
let KeychainChangeSet {
derivation_indices,
chain_graph,
@ -133,12 +132,12 @@ where
}
/// Returns a reference to the internal [`ChainGraph`].
pub fn chain_graph(&self) -> &ChainGraph<A, P> {
pub fn chain_graph(&self) -> &ChainGraph<P> {
&self.chain_graph
}
/// Returns a reference to the internal [`TxGraph`] (which is part of the [`ChainGraph`]).
pub fn graph(&self) -> &TxGraph<A> {
pub fn graph(&self) -> &TxGraph {
self.chain_graph().graph()
}
@ -160,7 +159,7 @@ where
pub fn insert_checkpoint_preview(
&self,
block_id: BlockId,
) -> Result<KeychainChangeSet<K, A, P>, chain_graph::InsertCheckpointError> {
) -> Result<KeychainChangeSet<K, P>, chain_graph::InsertCheckpointError> {
Ok(KeychainChangeSet {
chain_graph: self.chain_graph.insert_checkpoint_preview(block_id)?,
..Default::default()
@ -177,7 +176,7 @@ where
pub fn insert_checkpoint(
&mut self,
block_id: BlockId,
) -> Result<KeychainChangeSet<K, A, P>, chain_graph::InsertCheckpointError> {
) -> Result<KeychainChangeSet<K, P>, chain_graph::InsertCheckpointError> {
let changeset = self.insert_checkpoint_preview(block_id)?;
self.apply_changeset(changeset.clone());
Ok(changeset)
@ -192,7 +191,7 @@ where
&self,
tx: Transaction,
pos: P,
) -> Result<KeychainChangeSet<K, A, P>, chain_graph::InsertTxError<P>> {
) -> Result<KeychainChangeSet<K, P>, chain_graph::InsertTxError<P>> {
Ok(KeychainChangeSet {
chain_graph: self.chain_graph.insert_tx_preview(tx, pos)?,
..Default::default()
@ -210,7 +209,7 @@ where
&mut self,
tx: Transaction,
pos: P,
) -> Result<KeychainChangeSet<K, A, P>, chain_graph::InsertTxError<P>> {
) -> Result<KeychainChangeSet<K, P>, chain_graph::InsertTxError<P>> {
let changeset = self.insert_tx_preview(tx, pos)?;
self.apply_changeset(changeset.clone());
Ok(changeset)
@ -281,7 +280,7 @@ where
}
}
impl<K, A, P> Default for KeychainTracker<K, A, P> {
impl<K, P> Default for KeychainTracker<K, P> {
fn default() -> Self {
Self {
txout_index: Default::default(),
@ -290,20 +289,20 @@ impl<K, A, P> Default for KeychainTracker<K, A, P> {
}
}
impl<K, A, P> AsRef<SparseChain<P>> for KeychainTracker<K, A, P> {
impl<K, P> AsRef<SparseChain<P>> for KeychainTracker<K, P> {
fn as_ref(&self) -> &SparseChain<P> {
self.chain_graph.chain()
}
}
impl<K, A, P> AsRef<TxGraph<A>> for KeychainTracker<K, A, P> {
fn as_ref(&self) -> &TxGraph<A> {
impl<K, P> AsRef<TxGraph> for KeychainTracker<K, P> {
fn as_ref(&self) -> &TxGraph {
self.chain_graph.graph()
}
}
impl<K, A, P> AsRef<ChainGraph<A, P>> for KeychainTracker<K, A, P> {
fn as_ref(&self) -> &ChainGraph<A, P> {
impl<K, P> AsRef<ChainGraph<P>> for KeychainTracker<K, P> {
fn as_ref(&self) -> &ChainGraph<P> {
&self.chain_graph
}
}
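
The tracker keeps the preview/apply split of the underlying `ChainGraph`: the `*_preview` methods compute a `KeychainChangeSet<K, P>` without mutating anything, which is exactly what a persistence backend is handed. A sketch, again with `()` as the keychain type and an illustrative empty transaction:

```rust
use bdk_chain::{keychain::KeychainTracker, TxHeight};
use bitcoin::{PackedLockTime, Transaction};

fn main() {
    let mut tracker = KeychainTracker::<(), TxHeight>::default();

    let tx = Transaction {
        version: 1,
        lock_time: PackedLockTime(0),
        input: vec![],
        output: vec![],
    };

    // Compute the changeset first (this is what a store would persist) ...
    let changeset = tracker
        .insert_tx_preview(tx, TxHeight::Unconfirmed)
        .expect("nothing to conflict with");

    // ... then apply it to the in-memory state.
    tracker.apply_changeset(changeset);
    assert_eq!(tracker.chain_graph().chain().txids().count(), 1);
}
```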

View File

@ -10,9 +10,7 @@ use bdk_chain::{
tx_graph::{self, TxGraph, TxInGraph},
BlockId, TxHeight,
};
use bitcoin::{
BlockHash, OutPoint, PackedLockTime, Script, Sequence, Transaction, TxIn, TxOut, Witness,
};
use bitcoin::{OutPoint, PackedLockTime, Script, Sequence, Transaction, TxIn, TxOut, Witness};
#[test]
fn test_spent_by() {
@ -47,7 +45,7 @@ fn test_spent_by() {
output: vec![],
};
let mut cg1 = ChainGraph::<(u32, BlockHash), _>::default();
let mut cg1 = ChainGraph::default();
let _ = cg1
.insert_tx(tx1, TxHeight::Unconfirmed)
.expect("should insert");
@ -128,7 +126,7 @@ fn update_evicts_conflicting_tx() {
cg
};
let changeset = ChangeSet::<(u32, BlockHash), TxHeight> {
let changeset = ChangeSet::<TxHeight> {
chain: sparse_chain::ChangeSet {
checkpoints: Default::default(),
txids: [
@ -137,7 +135,7 @@ fn update_evicts_conflicting_tx() {
]
.into(),
},
graph: tx_graph::Additions::<(u32, BlockHash)> {
graph: tx_graph::Additions {
tx: [tx_b2.clone()].into(),
txout: [].into(),
..Default::default()
@ -154,7 +152,7 @@ fn update_evicts_conflicting_tx() {
{
let cg1 = {
let mut cg = ChainGraph::<(u32, BlockHash), _>::default();
let mut cg = ChainGraph::default();
let _ = cg.insert_checkpoint(cp_a).expect("should insert cp");
let _ = cg.insert_checkpoint(cp_b).expect("should insert cp");
let _ = cg
@ -208,7 +206,7 @@ fn update_evicts_conflicting_tx() {
cg
};
let changeset = ChangeSet::<(u32, BlockHash), TxHeight> {
let changeset = ChangeSet::<TxHeight> {
chain: sparse_chain::ChangeSet {
checkpoints: [(1, Some(h!("B'")))].into(),
txids: [
@ -217,7 +215,7 @@ fn update_evicts_conflicting_tx() {
]
.into(),
},
graph: tx_graph::Additions::<(u32, BlockHash)> {
graph: tx_graph::Additions {
tx: [tx_b2].into(),
txout: [].into(),
..Default::default()
@ -256,7 +254,7 @@ fn chain_graph_new_missing() {
(tx_b.txid(), TxHeight::Confirmed(0))
]
);
let mut graph = TxGraph::<(u32, BlockHash)>::default();
let mut graph = TxGraph::default();
let mut expected_missing = HashSet::new();
expected_missing.insert(tx_a.txid());
@ -293,7 +291,7 @@ fn chain_graph_new_missing() {
let new_graph = ChainGraph::new(update.clone(), graph.clone()).unwrap();
let expected_graph = {
let mut cg = ChainGraph::<(u32, BlockHash), TxHeight>::default();
let mut cg = ChainGraph::<TxHeight>::default();
let _ = cg
.insert_checkpoint(update.latest_checkpoint().unwrap())
.unwrap();
@ -348,7 +346,7 @@ fn chain_graph_new_conflicts() {
]
);
let graph = TxGraph::<(u32, BlockHash)>::new([tx_a, tx_b, tx_b2]);
let graph = TxGraph::new([tx_a, tx_b, tx_b2]);
assert!(matches!(
ChainGraph::new(chain, graph),
@ -358,7 +356,7 @@ fn chain_graph_new_conflicts() {
#[test]
fn test_get_tx_in_chain() {
let mut cg = ChainGraph::<(u32, BlockHash), _>::default();
let mut cg = ChainGraph::default();
let tx = Transaction {
version: 0x01,
lock_time: PackedLockTime(0),
@ -383,7 +381,7 @@ fn test_get_tx_in_chain() {
#[test]
fn test_iterate_transactions() {
let mut cg = ChainGraph::<BlockId, _>::default();
let mut cg = ChainGraph::default();
let txs = (0..3)
.map(|i| Transaction {
version: i,
@ -480,7 +478,7 @@ fn test_apply_changes_reintroduce_tx() {
// block1, block2a, tx1, tx2a
let mut cg = {
let mut cg = ChainGraph::<(u32, BlockHash), _>::default();
let mut cg = ChainGraph::default();
let _ = cg.insert_checkpoint(block1).unwrap();
let _ = cg.insert_checkpoint(block2a).unwrap();
let _ = cg.insert_tx(tx1, TxHeight::Confirmed(1)).unwrap();
@ -636,7 +634,7 @@ fn test_evict_descendants() {
let txid_conflict = tx_conflict.txid();
let cg = {
let mut cg = ChainGraph::<(u32, BlockHash), TxHeight>::default();
let mut cg = ChainGraph::<TxHeight>::default();
let _ = cg.insert_checkpoint(block_1);
let _ = cg.insert_checkpoint(block_2a);
let _ = cg.insert_tx(tx_1, TxHeight::Confirmed(1));
@ -648,7 +646,7 @@ fn test_evict_descendants() {
};
let update = {
let mut cg = ChainGraph::<(u32, BlockHash), TxHeight>::default();
let mut cg = ChainGraph::<TxHeight>::default();
let _ = cg.insert_checkpoint(block_1);
let _ = cg.insert_checkpoint(block_2b);
let _ = cg.insert_tx(tx_conflict.clone(), TxHeight::Confirmed(2));

View File

@ -12,11 +12,11 @@ use bdk_chain::{
tx_graph::TxInGraph,
BlockId, ConfirmationTime, TxHeight,
};
use bitcoin::{BlockHash, TxIn};
use bitcoin::TxIn;
#[test]
fn test_insert_tx() {
let mut tracker = KeychainTracker::<_, BlockId, _>::default();
let mut tracker = KeychainTracker::default();
let secp = Secp256k1::new();
let (descriptor, _) = Descriptor::parse_descriptor(&secp, "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/0/*)").unwrap();
tracker.add_keychain((), descriptor.clone());
@ -72,7 +72,7 @@ fn test_balance() {
One,
Two,
}
let mut tracker = KeychainTracker::<Keychain, (u32, BlockHash), TxHeight>::default();
let mut tracker = KeychainTracker::default();
let one = Descriptor::from_str("tr([73c5da0a/86'/0'/0']xpub6BgBgsespWvERF3LHQu6CnqdvfEvtMcQjYrcRzx53QJjSxarj2afYWcLteoGVky7D3UKDP9QyrLprQ3VCECoY49yfdDEHGCtMMj92pReUsQ/0/*)#rg247h69").unwrap();
let two = Descriptor::from_str("tr([73c5da0a/86'/0'/0']xpub6BgBgsespWvERF3LHQu6CnqdvfEvtMcQjYrcRzx53QJjSxarj2afYWcLteoGVky7D3UKDP9QyrLprQ3VCECoY49yfdDEHGCtMMj92pReUsQ/1/*)#ju05rz2a").unwrap();
tracker.add_keychain(Keychain::One, one);

View File

@ -32,7 +32,7 @@ use bdk_chain::{
keychain::KeychainScan,
sparse_chain::{self, ChainPosition, SparseChain},
tx_graph::TxGraph,
BlockAnchor, BlockId, ConfirmationTime, TxHeight,
BlockId, ConfirmationTime, TxHeight,
};
pub use electrum_client;
use electrum_client::{Client, ElectrumApi, Error};
@ -243,14 +243,13 @@ impl<K: Ord + Clone + Debug, P: ChainPosition> ElectrumUpdate<K, P> {
/// `tracker`.
///
/// This will fail if there are missing full transactions not provided via `new_txs`.
pub fn into_keychain_scan<CG, A>(
pub fn into_keychain_scan<CG>(
self,
new_txs: Vec<Transaction>,
chain_graph: &CG,
) -> Result<KeychainScan<K, A, P>, chain_graph::NewError<P>>
) -> Result<KeychainScan<K, P>, chain_graph::NewError<P>>
where
CG: AsRef<ChainGraph<A, P>>,
A: BlockAnchor,
CG: AsRef<ChainGraph<P>>,
{
Ok(KeychainScan {
update: chain_graph

View File

@ -48,7 +48,7 @@ pub trait EsploraAsyncExt {
outpoints: impl IntoIterator<IntoIter = impl Iterator<Item = OutPoint> + Send> + Send,
stop_gap: usize,
parallel_requests: usize,
) -> Result<KeychainScan<K, BlockId, ConfirmationTime>, Error>;
) -> Result<KeychainScan<K, ConfirmationTime>, Error>;
/// Convenience method to call [`scan`] without requiring a keychain.
///
@ -61,7 +61,7 @@ pub trait EsploraAsyncExt {
txids: impl IntoIterator<IntoIter = impl Iterator<Item = Txid> + Send> + Send,
outpoints: impl IntoIterator<IntoIter = impl Iterator<Item = OutPoint> + Send> + Send,
parallel_requests: usize,
) -> Result<ChainGraph<BlockId, ConfirmationTime>, Error> {
) -> Result<ChainGraph<ConfirmationTime>, Error> {
let wallet_scan = self
.scan(
local_chain,
@ -100,7 +100,7 @@ impl EsploraAsyncExt for esplora_client::AsyncClient {
outpoints: impl IntoIterator<IntoIter = impl Iterator<Item = OutPoint> + Send> + Send,
stop_gap: usize,
parallel_requests: usize,
) -> Result<KeychainScan<K, BlockId, ConfirmationTime>, Error> {
) -> Result<KeychainScan<K, ConfirmationTime>, Error> {
let txids = txids.into_iter();
let outpoints = outpoints.into_iter();
let parallel_requests = parallel_requests.max(1);

View File

@ -38,7 +38,7 @@ pub trait EsploraExt {
outpoints: impl IntoIterator<Item = OutPoint>,
stop_gap: usize,
parallel_requests: usize,
) -> Result<KeychainScan<K, BlockId, ConfirmationTime>, Error>;
) -> Result<KeychainScan<K, ConfirmationTime>, Error>;
/// Convenience method to call [`scan`] without requiring a keychain.
///
@ -51,7 +51,7 @@ pub trait EsploraExt {
txids: impl IntoIterator<Item = Txid>,
outpoints: impl IntoIterator<Item = OutPoint>,
parallel_requests: usize,
) -> Result<ChainGraph<BlockId, ConfirmationTime>, Error> {
) -> Result<ChainGraph<ConfirmationTime>, Error> {
let wallet_scan = self.scan(
local_chain,
[(
@ -81,7 +81,7 @@ impl EsploraExt for esplora_client::BlockingClient {
outpoints: impl IntoIterator<Item = OutPoint>,
stop_gap: usize,
parallel_requests: usize,
) -> Result<KeychainScan<K, BlockId, ConfirmationTime>, Error> {
) -> Result<KeychainScan<K, ConfirmationTime>, Error> {
let parallel_requests = parallel_requests.max(1);
let mut scan = KeychainScan::default();
let update = &mut scan.update;

View File

@ -4,7 +4,7 @@
//! [`KeychainChangeSet`]s which can be used to restore a [`KeychainTracker`].
use bdk_chain::{
keychain::{KeychainChangeSet, KeychainTracker},
sparse_chain, BlockAnchor,
sparse_chain,
};
use bincode::{DefaultOptions, Options};
use core::marker::PhantomData;
@ -23,21 +23,20 @@ const MAGIC_BYTES: [u8; MAGIC_BYTES_LEN] = [98, 100, 107, 102, 115, 48, 48, 48,
/// Persists an append-only list of `KeychainChangeSet<K,P>` to a single file.
/// [`KeychainChangeSet<K,P>`] records the changes made to a [`KeychainTracker<K,P>`].
#[derive(Debug)]
pub struct KeychainStore<K, A, P> {
pub struct KeychainStore<K, P> {
db_file: File,
changeset_type_params: core::marker::PhantomData<(K, A, P)>,
changeset_type_params: core::marker::PhantomData<(K, P)>,
}
fn bincode() -> impl bincode::Options {
DefaultOptions::new().with_varint_encoding()
}
impl<K, A, P> KeychainStore<K, A, P>
impl<K, P> KeychainStore<K, P>
where
K: Ord + Clone + core::fmt::Debug,
A: BlockAnchor,
P: sparse_chain::ChainPosition,
KeychainChangeSet<K, A, P>: serde::Serialize + serde::de::DeserializeOwned,
KeychainChangeSet<K, P>: serde::Serialize + serde::de::DeserializeOwned,
{
/// Creates a new store from a [`File`].
///
@ -86,9 +85,7 @@ where
/// **WARNING**: This method changes the write position in the underlying file. You should
/// always iterate over all entries until `None` is returned if you want your next write to go
/// at the end; otherwise, you will write over existing entries.
pub fn iter_changesets(
&mut self,
) -> Result<EntryIter<'_, KeychainChangeSet<K, A, P>>, io::Error> {
pub fn iter_changesets(&mut self) -> Result<EntryIter<'_, KeychainChangeSet<K, P>>, io::Error> {
self.db_file
.seek(io::SeekFrom::Start(MAGIC_BYTES_LEN as _))?;
@ -107,7 +104,7 @@ where
///
/// **WARNING**: This method changes the write position of the underlying file. The next
/// changeset will be written over the erroring entry (or the end of the file if none existed).
pub fn aggregate_changeset(&mut self) -> (KeychainChangeSet<K, A, P>, Result<(), IterError>) {
pub fn aggregate_changeset(&mut self) -> (KeychainChangeSet<K, P>, Result<(), IterError>) {
let mut changeset = KeychainChangeSet::default();
let result = (|| {
let iter_changeset = self.iter_changesets()?;
@ -127,7 +124,7 @@ where
/// changeset will be written over the erroring entry (or the end of the file if none existed).
pub fn load_into_keychain_tracker(
&mut self,
tracker: &mut KeychainTracker<K, A, P>,
tracker: &mut KeychainTracker<K, P>,
) -> Result<(), IterError> {
for changeset in self.iter_changesets()? {
tracker.apply_changeset(changeset?)
@ -141,7 +138,7 @@ where
/// directly after the appended changeset.
pub fn append_changeset(
&mut self,
changeset: &KeychainChangeSet<K, A, P>,
changeset: &KeychainChangeSet<K, P>,
) -> Result<(), io::Error> {
if changeset.is_empty() {
return Ok(());
@ -291,7 +288,7 @@ mod test {
use super::*;
use bdk_chain::{
keychain::{DerivationAdditions, KeychainChangeSet},
BlockId, TxHeight,
TxHeight,
};
use std::{
io::{Read, Write},
@ -335,7 +332,7 @@ mod test {
file.write_all(&MAGIC_BYTES[..MAGIC_BYTES_LEN - 1])
.expect("should write");
match KeychainStore::<TestKeychain, BlockId, TxHeight>::new(file.reopen().unwrap()) {
match KeychainStore::<TestKeychain, TxHeight>::new(file.reopen().unwrap()) {
Err(FileError::Io(e)) => assert_eq!(e.kind(), std::io::ErrorKind::UnexpectedEof),
unexpected => panic!("unexpected result: {:?}", unexpected),
};
@ -349,7 +346,7 @@ mod test {
file.write_all(invalid_magic_bytes.as_bytes())
.expect("should write");
match KeychainStore::<TestKeychain, BlockId, TxHeight>::new(file.reopen().unwrap()) {
match KeychainStore::<TestKeychain, TxHeight>::new(file.reopen().unwrap()) {
Err(FileError::InvalidMagicBytes(b)) => {
assert_eq!(b, invalid_magic_bytes.as_bytes())
}
@ -373,9 +370,8 @@ mod test {
let mut file = NamedTempFile::new().unwrap();
file.write_all(&data).expect("should write");
let mut store =
KeychainStore::<TestKeychain, BlockId, TxHeight>::new(file.reopen().unwrap())
.expect("should open");
let mut store = KeychainStore::<TestKeychain, TxHeight>::new(file.reopen().unwrap())
.expect("should open");
match store.iter_changesets().expect("seek should succeed").next() {
Some(Err(IterError::Bincode(_))) => {}
unexpected_res => panic!("unexpected result: {:?}", unexpected_res),
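
`KeychainStore` is an append-only log of changesets sitting behind the `PersistBackend` impl in the next file. A sketch of the usual open/replay/append cycle, assuming the `serde` feature of `bdk_chain` is enabled and using a hypothetical `tracker.db` path:

```rust
use std::path::Path;

use bdk_chain::{
    keychain::{KeychainChangeSet, KeychainTracker},
    TxHeight,
};
use bdk_file_store::KeychainStore;

fn main() {
    // Hypothetical path; the magic bytes are written/validated on open.
    let mut store = KeychainStore::<(), TxHeight>::new_from_path(Path::new("tracker.db"))
        .expect("should open or create the store");

    // Replay every changeset on disk into a fresh tracker.
    let mut tracker = KeychainTracker::<(), TxHeight>::default();
    store
        .load_into_keychain_tracker(&mut tracker)
        .expect("entries should decode");

    // Empty changesets are skipped by `append_changeset`, so this writes nothing.
    store
        .append_changeset(&KeychainChangeSet::default())
        .expect("write should succeed");
}
```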

View File

@ -3,16 +3,14 @@ mod file_store;
use bdk_chain::{
keychain::{KeychainChangeSet, KeychainTracker, PersistBackend},
sparse_chain::ChainPosition,
BlockAnchor,
};
pub use file_store::*;
impl<K, A, P> PersistBackend<K, A, P> for KeychainStore<K, A, P>
impl<K, P> PersistBackend<K, P> for KeychainStore<K, P>
where
K: Ord + Clone + core::fmt::Debug,
A: BlockAnchor,
P: ChainPosition,
KeychainChangeSet<K, A, P>: serde::Serialize + serde::de::DeserializeOwned,
KeychainChangeSet<K, P>: serde::Serialize + serde::de::DeserializeOwned,
{
type WriteError = std::io::Error;
@ -20,14 +18,14 @@ where
fn append_changeset(
&mut self,
changeset: &KeychainChangeSet<K, A, P>,
changeset: &KeychainChangeSet<K, P>,
) -> Result<(), Self::WriteError> {
KeychainStore::append_changeset(self, changeset)
}
fn load_into_keychain_tracker(
&mut self,
tracker: &mut KeychainTracker<K, A, P>,
tracker: &mut KeychainTracker<K, P>,
) -> Result<(), Self::LoadError> {
KeychainStore::load_into_keychain_tracker(self, tracker)
}

View File

@ -48,7 +48,7 @@ pub struct ScanOptions {
}
fn main() -> anyhow::Result<()> {
let (args, keymap, tracker, db) = cli::init::<ElectrumCommands, _, _>()?;
let (args, keymap, tracker, db) = cli::init::<ElectrumCommands, _>()?;
let electrum_url = match args.network {
Network::Bitcoin => "ssl://electrum.blockstream.info:50002",

View File

@ -49,7 +49,7 @@ pub struct ScanOptions {
}
fn main() -> anyhow::Result<()> {
let (args, keymap, keychain_tracker, db) = cli::init::<EsploraCommands, _, _>()?;
let (args, keymap, keychain_tracker, db) = cli::init::<EsploraCommands, _>()?;
let esplora_url = match args.network {
Network::Bitcoin => "https://mempool.space/api",
Network::Testnet => "https://mempool.space/testnet/api",

View File

@ -13,7 +13,7 @@ use bdk_chain::{
Descriptor, DescriptorPublicKey,
},
sparse_chain::{self, ChainPosition},
BlockAnchor, DescriptorExt, FullTxOut,
DescriptorExt, FullTxOut,
};
use bdk_coin_select::{coin_select_bnb, CoinSelector, CoinSelectorOpt, WeightedValue};
use bdk_file_store::KeychainStore;
@ -179,16 +179,15 @@ pub struct AddrsOutput {
used: bool,
}
pub fn run_address_cmd<A, P>(
tracker: &Mutex<KeychainTracker<Keychain, A, P>>,
db: &Mutex<KeychainStore<Keychain, A, P>>,
pub fn run_address_cmd<P>(
tracker: &Mutex<KeychainTracker<Keychain, P>>,
db: &Mutex<KeychainStore<Keychain, P>>,
addr_cmd: AddressCmd,
network: Network,
) -> Result<()>
where
A: bdk_chain::BlockAnchor,
P: bdk_chain::sparse_chain::ChainPosition,
KeychainChangeSet<Keychain, A, P>: serde::Serialize + serde::de::DeserializeOwned,
KeychainChangeSet<Keychain, P>: serde::Serialize + serde::de::DeserializeOwned,
{
let mut tracker = tracker.lock().unwrap();
let txout_index = &mut tracker.txout_index;
@ -242,9 +241,7 @@ where
}
}
pub fn run_balance_cmd<A: BlockAnchor, P: ChainPosition>(
tracker: &Mutex<KeychainTracker<Keychain, A, P>>,
) {
pub fn run_balance_cmd<P: ChainPosition>(tracker: &Mutex<KeychainTracker<Keychain, P>>) {
let tracker = tracker.lock().unwrap();
let (confirmed, unconfirmed) =
tracker
@ -261,9 +258,9 @@ pub fn run_balance_cmd<A: BlockAnchor, P: ChainPosition>(
println!("unconfirmed: {}", unconfirmed);
}
pub fn run_txo_cmd<K: Debug + Clone + Ord, A: BlockAnchor, P: ChainPosition>(
pub fn run_txo_cmd<K: Debug + Clone + Ord, P: ChainPosition>(
txout_cmd: TxOutCmd,
tracker: &Mutex<KeychainTracker<K, A, P>>,
tracker: &Mutex<KeychainTracker<K, P>>,
network: Network,
) {
match txout_cmd {
@ -316,11 +313,11 @@ pub fn run_txo_cmd<K: Debug + Clone + Ord, A: BlockAnchor, P: ChainPosition>(
}
#[allow(clippy::type_complexity)] // FIXME
pub fn create_tx<A: BlockAnchor, P: ChainPosition>(
pub fn create_tx<P: ChainPosition>(
value: u64,
address: Address,
coin_select: CoinSelectionAlgo,
keychain_tracker: &mut KeychainTracker<Keychain, A, P>,
keychain_tracker: &mut KeychainTracker<Keychain, P>,
keymap: &HashMap<DescriptorPublicKey, DescriptorSecretKey>,
) -> Result<(
Transaction,
@ -529,20 +526,19 @@ pub fn create_tx<A: BlockAnchor, P: ChainPosition>(
Ok((transaction, change_info))
}
pub fn handle_commands<C: clap::Subcommand, A, P>(
pub fn handle_commands<C: clap::Subcommand, P>(
command: Commands<C>,
broadcast: impl FnOnce(&Transaction) -> Result<()>,
// we Mutex around these not because we need them for a simple CLI app but to demonstrate how
// all the stuff we're doing can be made thread-safe and not keep locks up over an IO bound.
tracker: &Mutex<KeychainTracker<Keychain, A, P>>,
store: &Mutex<KeychainStore<Keychain, A, P>>,
tracker: &Mutex<KeychainTracker<Keychain, P>>,
store: &Mutex<KeychainStore<Keychain, P>>,
network: Network,
keymap: &HashMap<DescriptorPublicKey, DescriptorSecretKey>,
) -> Result<()>
where
A: BlockAnchor,
P: ChainPosition,
KeychainChangeSet<Keychain, A, P>: serde::Serialize + serde::de::DeserializeOwned,
KeychainChangeSet<Keychain, P>: serde::Serialize + serde::de::DeserializeOwned,
{
match command {
// TODO: Make these functions return stuffs
@ -623,18 +619,17 @@ where
}
#[allow(clippy::type_complexity)] // FIXME
pub fn init<C: clap::Subcommand, A, P>() -> anyhow::Result<(
pub fn init<C: clap::Subcommand, P>() -> anyhow::Result<(
Args<C>,
KeyMap,
// These don't need to have mutexes around them, but we want the cli example code to make it obvious how they
// are thread-safe, forcing the example developers to show where they would lock and unlock things.
Mutex<KeychainTracker<Keychain, A, P>>,
Mutex<KeychainStore<Keychain, A, P>>,
Mutex<KeychainTracker<Keychain, P>>,
Mutex<KeychainStore<Keychain, P>>,
)>
where
A: BlockAnchor,
P: sparse_chain::ChainPosition,
KeychainChangeSet<Keychain, A, P>: serde::Serialize + serde::de::DeserializeOwned,
KeychainChangeSet<Keychain, P>: serde::Serialize + serde::de::DeserializeOwned,
{
let args = Args::<C>::parse();
let secp = Secp256k1::default();
@ -660,7 +655,7 @@ where
.add_keychain(Keychain::Internal, internal_descriptor);
};
let mut db = KeychainStore::<Keychain, A, P>::new_from_path(args.db_path.as_path())?;
let mut db = KeychainStore::<Keychain, P>::new_from_path(args.db_path.as_path())?;
if let Err(e) = db.load_into_keychain_tracker(&mut tracker) {
match tracker.chain().latest_checkpoint() {
@ -674,8 +669,8 @@ where
Ok((args, keymap, Mutex::new(tracker), Mutex::new(db)))
}
pub fn planned_utxos<'a, AK: bdk_tmp_plan::CanDerive + Clone, A: BlockAnchor, P: ChainPosition>(
tracker: &'a KeychainTracker<Keychain, A, P>,
pub fn planned_utxos<'a, AK: bdk_tmp_plan::CanDerive + Clone, P: ChainPosition>(
tracker: &'a KeychainTracker<Keychain, P>,
assets: &'a bdk_tmp_plan::Assets<AK>,
) -> impl Iterator<Item = (bdk_tmp_plan::Plan<AK>, FullTxOut<P>)> + 'a {
tracker