keychain::ChangeSet includes the descriptor
- The KeychainTxOutIndex's internal SpkTxOutIndex now uses DescriptorId instead of K. The DescriptorId -> K translation is done at the KeychainTxOutIndex level.
- The keychain::ChangeSet is now a struct, which includes a map for last revealed indexes, and one for newly added keychains and their descriptor.

API changes in bdk:
- Wallet::keychains returns an `impl Iterator` instead of a `BTreeMap`
- Wallet::load doesn't take descriptors anymore, since they're stored in the db
- Wallet::new_or_load checks that the descriptor loaded from the db is the same as the provided one

API changes in bdk_chain:
- `ChangeSet` is now a struct, which includes a map for last revealed indexes, and one for keychains and descriptors.
- `KeychainTxOutIndex::inner` returns a `SpkTxOutIndex<(DescriptorId, u32)>`
- `KeychainTxOutIndex::outpoints` returns an `impl Iterator` instead of a `&BTreeSet`
- `KeychainTxOutIndex::keychains` returns an `impl Iterator` instead of a `&BTreeMap`
- `KeychainTxOutIndex::txouts` doesn't return an `ExactSizeIterator` anymore
- `KeychainTxOutIndex::unbounded_spk_iter` returns an `Option`
- `KeychainTxOutIndex::next_index` returns an `Option`
- `KeychainTxOutIndex::last_revealed_indices` returns a `BTreeMap` instead of a `&BTreeMap`
- `KeychainTxOutIndex::reveal_to_target` returns an `Option`
- `KeychainTxOutIndex::reveal_next_spk` returns an `Option`
- `KeychainTxOutIndex::next_unused_spk` returns an `Option`
- `KeychainTxOutIndex::add_keychain` has been renamed to `KeychainTxOutIndex::insert_descriptor`, and it now returns a `ChangeSet`
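To make the new shape of the `bdk_chain` API concrete, here is a minimal sketch. The `MyKeychain` enum and the descriptor string are illustrative; the method names and return types follow the diff below, but exact module paths may differ between releases.

```rust
use bdk_chain::bitcoin::secp256k1::Secp256k1;
use bdk_chain::keychain::KeychainTxOutIndex;
use bdk_chain::miniscript::{Descriptor, DescriptorPublicKey};

#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
enum MyKeychain {
    External,
}

fn main() {
    let secp = Secp256k1::signing_only();
    let (descriptor, _keymap) = Descriptor::<DescriptorPublicKey>::parse_descriptor(
        &secp,
        "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/0/*)",
    )
    .expect("illustrative descriptor must parse");

    let mut index = KeychainTxOutIndex::<MyKeychain>::new(25);

    // `insert_descriptor` (formerly `add_keychain`) now returns a `ChangeSet`
    // recording the newly added keychain, which callers are expected to persist.
    let changeset = index.insert_descriptor(MyKeychain::External, descriptor);
    assert!(!changeset.keychains_added.is_empty());

    // Keychain-based methods now return `Option`: `None` means the keychain has
    // no descriptor associated with it.
    let ((next_index, _spk), _reveal_changeset) = index
        .reveal_next_spk(&MyKeychain::External)
        .expect("keychain was just inserted");
    assert_eq!(next_index, 0);
}
```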
This commit is contained in:
parent 8ff99f27df, commit 4f05441a00
@ -305,6 +305,8 @@ pub enum LoadError {
|
||||
MissingNetwork,
|
||||
/// Data loaded from persistence is missing genesis hash.
|
||||
MissingGenesis,
|
||||
/// Data loaded from persistence is missing descriptor.
|
||||
MissingDescriptor,
|
||||
}
|
||||
|
||||
impl fmt::Display for LoadError {
|
||||
@ -317,6 +319,7 @@ impl fmt::Display for LoadError {
|
||||
}
|
||||
LoadError::MissingNetwork => write!(f, "loaded data is missing network type"),
|
||||
LoadError::MissingGenesis => write!(f, "loaded data is missing genesis hash"),
|
||||
LoadError::MissingDescriptor => write!(f, "loaded data is missing descriptor"),
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -352,6 +355,13 @@ pub enum NewOrLoadError {
|
||||
/// The network type loaded from persistence.
|
||||
got: Option<Network>,
|
||||
},
|
||||
/// The loaded descriptor does not match what was provided.
|
||||
LoadedDescriptorDoesNotMatch {
|
||||
/// The descriptor loaded from persistence.
|
||||
got: Option<ExtendedDescriptor>,
|
||||
/// The keychain of the descriptor that did not match.
|
||||
keychain: KeychainKind,
|
||||
},
|
||||
}
|
||||
|
||||
impl fmt::Display for NewOrLoadError {
|
||||
@ -372,6 +382,13 @@ impl fmt::Display for NewOrLoadError {
|
||||
NewOrLoadError::LoadedNetworkDoesNotMatch { expected, got } => {
|
||||
write!(f, "loaded network type is not {}, got {:?}", expected, got)
|
||||
}
|
||||
NewOrLoadError::LoadedDescriptorDoesNotMatch { got, keychain } => {
|
||||
write!(
|
||||
f,
|
||||
"loaded descriptor is different from what was provided, got {:?} for keychain {:?}",
|
||||
got, keychain
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
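For callers of `Wallet::new_or_load`, the new variant can be matched on like any other error. A hedged sketch (the helper is illustrative, not part of bdk; the variant fields are the ones defined above):

```rust
use bdk::wallet::NewOrLoadError;

/// Illustrative helper: render the descriptor-mismatch case as a
/// human-readable message, falling back to the error's `Display` impl.
fn describe(err: &NewOrLoadError) -> String {
    match err {
        NewOrLoadError::LoadedDescriptorDoesNotMatch { got, keychain } => format!(
            "persisted {:?} descriptor ({:?}) differs from the one passed to `new_or_load`",
            keychain, got
        ),
        other => other.to_string(),
    }
}
```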
|
||||
@ -499,21 +516,17 @@ impl Wallet {
|
||||
}
|
||||
|
||||
/// Load [`Wallet`] from the given persistence backend.
|
||||
pub fn load<E: IntoWalletDescriptor>(
|
||||
descriptor: E,
|
||||
change_descriptor: Option<E>,
|
||||
pub fn load(
|
||||
mut db: impl PersistBackend<ChangeSet> + Send + Sync + 'static,
|
||||
) -> Result<Self, LoadError> {
|
||||
let changeset = db
|
||||
.load_from_persistence()
|
||||
.map_err(LoadError::Persist)?
|
||||
.ok_or(LoadError::NotInitialized)?;
|
||||
Self::load_from_changeset(descriptor, change_descriptor, db, changeset)
|
||||
Self::load_from_changeset(db, changeset)
|
||||
}
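With the descriptors persisted in the database, loading only needs the persistence backend. A minimal sketch (the helper name is illustrative; `PersistBackend` is assumed to be the `bdk_chain` trait used in the signature above):

```rust
use bdk::wallet::{ChangeSet, LoadError, Wallet};
use bdk_chain::PersistBackend;

/// Illustrative helper: descriptors are recovered from the persisted
/// changeset, so they are no longer passed to `Wallet::load`.
fn recover_wallet(
    db: impl PersistBackend<ChangeSet> + Send + Sync + 'static,
) -> Result<Wallet, LoadError> {
    Wallet::load(db)
}
```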
|
||||
|
||||
fn load_from_changeset<E: IntoWalletDescriptor>(
|
||||
descriptor: E,
|
||||
change_descriptor: Option<E>,
|
||||
fn load_from_changeset(
|
||||
db: impl PersistBackend<ChangeSet> + Send + Sync + 'static,
|
||||
changeset: ChangeSet,
|
||||
) -> Result<Self, LoadError> {
|
||||
@ -522,10 +535,23 @@ impl Wallet {
|
||||
let chain =
|
||||
LocalChain::from_changeset(changeset.chain).map_err(|_| LoadError::MissingGenesis)?;
|
||||
let mut index = KeychainTxOutIndex::<KeychainKind>::default();
|
||||
let descriptor = changeset
|
||||
.indexed_tx_graph
|
||||
.indexer
|
||||
.keychains_added
|
||||
.get(&KeychainKind::External)
|
||||
.ok_or(LoadError::MissingDescriptor)?
|
||||
.clone();
|
||||
let change_descriptor = changeset
|
||||
.indexed_tx_graph
|
||||
.indexer
|
||||
.keychains_added
|
||||
.get(&KeychainKind::Internal)
|
||||
.cloned();
|
||||
|
||||
let (signers, change_signers) =
|
||||
create_signers(&mut index, &secp, descriptor, change_descriptor, network)
|
||||
.map_err(LoadError::Descriptor)?;
|
||||
.expect("Can't fail: we passed in valid descriptors, recovered from the changeset");
|
||||
|
||||
let mut indexed_graph = IndexedTxGraph::new(index);
|
||||
indexed_graph.apply_changeset(changeset.indexed_tx_graph);
|
||||
@ -562,8 +588,8 @@ impl Wallet {
|
||||
)
|
||||
}
|
||||
|
||||
/// Either loads [`Wallet`] from persistence, or initializes it if it does not exist (with a
|
||||
/// custom genesis hash).
|
||||
/// Either loads [`Wallet`] from persistence, or initializes it if it does not exist, using the
|
||||
/// provided descriptor, change descriptor, network, and custom genesis hash.
|
||||
///
|
||||
/// This method will fail if the loaded [`Wallet`] has different parameters to those provided.
|
||||
/// This is like [`Wallet::new_or_load`] with an additional `genesis_hash` parameter. This is
|
||||
@ -580,25 +606,23 @@ impl Wallet {
|
||||
.map_err(NewOrLoadError::Persist)?;
|
||||
match changeset {
|
||||
Some(changeset) => {
|
||||
let wallet =
|
||||
Self::load_from_changeset(descriptor, change_descriptor, db, changeset)
|
||||
.map_err(|e| match e {
|
||||
LoadError::Descriptor(e) => NewOrLoadError::Descriptor(e),
|
||||
LoadError::Persist(e) => NewOrLoadError::Persist(e),
|
||||
LoadError::NotInitialized => NewOrLoadError::NotInitialized,
|
||||
LoadError::MissingNetwork => {
|
||||
NewOrLoadError::LoadedNetworkDoesNotMatch {
|
||||
expected: network,
|
||||
got: None,
|
||||
}
|
||||
}
|
||||
LoadError::MissingGenesis => {
|
||||
NewOrLoadError::LoadedGenesisDoesNotMatch {
|
||||
expected: genesis_hash,
|
||||
got: None,
|
||||
}
|
||||
}
|
||||
})?;
|
||||
let wallet = Self::load_from_changeset(db, changeset).map_err(|e| match e {
|
||||
LoadError::Descriptor(e) => NewOrLoadError::Descriptor(e),
|
||||
LoadError::Persist(e) => NewOrLoadError::Persist(e),
|
||||
LoadError::NotInitialized => NewOrLoadError::NotInitialized,
|
||||
LoadError::MissingNetwork => NewOrLoadError::LoadedNetworkDoesNotMatch {
|
||||
expected: network,
|
||||
got: None,
|
||||
},
|
||||
LoadError::MissingGenesis => NewOrLoadError::LoadedGenesisDoesNotMatch {
|
||||
expected: genesis_hash,
|
||||
got: None,
|
||||
},
|
||||
LoadError::MissingDescriptor => NewOrLoadError::LoadedDescriptorDoesNotMatch {
|
||||
got: None,
|
||||
keychain: KeychainKind::External,
|
||||
},
|
||||
})?;
|
||||
if wallet.network != network {
|
||||
return Err(NewOrLoadError::LoadedNetworkDoesNotMatch {
|
||||
expected: network,
|
||||
@ -611,6 +635,36 @@ impl Wallet {
|
||||
got: Some(wallet.chain.genesis_hash()),
|
||||
});
|
||||
}
|
||||
|
||||
let expected_descriptor = descriptor
|
||||
.into_wallet_descriptor(&wallet.secp, network)
|
||||
.map_err(NewOrLoadError::Descriptor)?
|
||||
.0;
|
||||
let wallet_descriptor = wallet.public_descriptor(KeychainKind::External).cloned();
|
||||
if wallet_descriptor != Some(expected_descriptor) {
|
||||
return Err(NewOrLoadError::LoadedDescriptorDoesNotMatch {
|
||||
got: wallet_descriptor,
|
||||
keychain: KeychainKind::External,
|
||||
});
|
||||
}
|
||||
|
||||
let expected_change_descriptor = if let Some(c) = change_descriptor {
|
||||
Some(
|
||||
c.into_wallet_descriptor(&wallet.secp, network)
|
||||
.map_err(NewOrLoadError::Descriptor)?
|
||||
.0,
|
||||
)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
let wallet_change_descriptor =
|
||||
wallet.public_descriptor(KeychainKind::Internal).cloned();
|
||||
if wallet_change_descriptor != expected_change_descriptor {
|
||||
return Err(NewOrLoadError::LoadedDescriptorDoesNotMatch {
|
||||
got: wallet_change_descriptor,
|
||||
keychain: KeychainKind::Internal,
|
||||
});
|
||||
}
|
||||
Ok(wallet)
|
||||
}
|
||||
None => Self::new_with_genesis_hash(
|
||||
@ -636,7 +690,7 @@ impl Wallet {
|
||||
}
|
||||
|
||||
/// Iterator over all keychains in this wallet
|
||||
pub fn keychains(&self) -> &BTreeMap<KeychainKind, ExtendedDescriptor> {
|
||||
pub fn keychains(&self) -> impl Iterator<Item = (&KeychainKind, &ExtendedDescriptor)> {
|
||||
self.indexed_graph.index.keychains()
|
||||
}
|
||||
|
||||
@ -650,7 +704,11 @@ impl Wallet {
|
||||
/// [BIP32](https://github.com/bitcoin/bips/blob/master/bip-0032.mediawiki) max index.
|
||||
pub fn peek_address(&self, keychain: KeychainKind, mut index: u32) -> AddressInfo {
|
||||
let keychain = self.map_keychain(keychain);
|
||||
let mut spk_iter = self.indexed_graph.index.unbounded_spk_iter(&keychain);
|
||||
let mut spk_iter = self
|
||||
.indexed_graph
|
||||
.index
|
||||
.unbounded_spk_iter(&keychain)
|
||||
.expect("Must exist (we called map_keychain)");
|
||||
if !spk_iter.descriptor().has_wildcard() {
|
||||
index = 0;
|
||||
}
|
||||
@ -677,7 +735,11 @@ impl Wallet {
|
||||
/// If writing to persistent storage fails.
|
||||
pub fn reveal_next_address(&mut self, keychain: KeychainKind) -> anyhow::Result<AddressInfo> {
|
||||
let keychain = self.map_keychain(keychain);
|
||||
let ((index, spk), index_changeset) = self.indexed_graph.index.reveal_next_spk(&keychain);
|
||||
let ((index, spk), index_changeset) = self
|
||||
.indexed_graph
|
||||
.index
|
||||
.reveal_next_spk(&keychain)
|
||||
.expect("Must exist (we called map_keychain)");
|
||||
|
||||
self.persist
|
||||
.stage_and_commit(indexed_tx_graph::ChangeSet::from(index_changeset).into())?;
|
||||
@ -705,8 +767,11 @@ impl Wallet {
|
||||
index: u32,
|
||||
) -> anyhow::Result<impl Iterator<Item = AddressInfo> + '_> {
|
||||
let keychain = self.map_keychain(keychain);
|
||||
let (spk_iter, index_changeset) =
|
||||
self.indexed_graph.index.reveal_to_target(&keychain, index);
|
||||
let (spk_iter, index_changeset) = self
|
||||
.indexed_graph
|
||||
.index
|
||||
.reveal_to_target(&keychain, index)
|
||||
.expect("must exist (we called map_keychain)");
|
||||
|
||||
self.persist
|
||||
.stage_and_commit(indexed_tx_graph::ChangeSet::from(index_changeset).into())?;
|
||||
@ -729,7 +794,11 @@ impl Wallet {
|
||||
/// If writing to persistent storage fails.
|
||||
pub fn next_unused_address(&mut self, keychain: KeychainKind) -> anyhow::Result<AddressInfo> {
|
||||
let keychain = self.map_keychain(keychain);
|
||||
let ((index, spk), index_changeset) = self.indexed_graph.index.next_unused_spk(&keychain);
|
||||
let ((index, spk), index_changeset) = self
|
||||
.indexed_graph
|
||||
.index
|
||||
.next_unused_spk(&keychain)
|
||||
.expect("must exist (we called map_keychain)");
|
||||
|
||||
self.persist
|
||||
.stage_and_commit(indexed_tx_graph::ChangeSet::from(index_changeset).into())?;
|
||||
@ -799,7 +868,7 @@ impl Wallet {
|
||||
.filter_chain_unspents(
|
||||
&self.chain,
|
||||
self.chain.tip().block_id(),
|
||||
self.indexed_graph.index.outpoints().iter().cloned(),
|
||||
self.indexed_graph.index.outpoints(),
|
||||
)
|
||||
.map(|((k, i), full_txo)| new_local_utxo(k, i, full_txo))
|
||||
}
|
||||
@ -813,7 +882,7 @@ impl Wallet {
|
||||
.filter_chain_txouts(
|
||||
&self.chain,
|
||||
self.chain.tip().block_id(),
|
||||
self.indexed_graph.index.outpoints().iter().cloned(),
|
||||
self.indexed_graph.index.outpoints(),
|
||||
)
|
||||
.map(|((k, i), full_txo)| new_local_utxo(k, i, full_txo))
|
||||
}
|
||||
@ -851,7 +920,11 @@ impl Wallet {
|
||||
&self,
|
||||
keychain: KeychainKind,
|
||||
) -> impl Iterator<Item = (u32, ScriptBuf)> + Clone {
|
||||
self.indexed_graph.index.unbounded_spk_iter(&keychain)
|
||||
let keychain = self.map_keychain(keychain);
|
||||
self.indexed_graph
|
||||
.index
|
||||
.unbounded_spk_iter(&keychain)
|
||||
.expect("Must exist (we called map_keychain)")
|
||||
}
|
||||
|
||||
/// Returns the utxo owned by this wallet corresponding to `outpoint` if it exists in the
|
||||
@ -1133,7 +1206,7 @@ impl Wallet {
|
||||
self.indexed_graph.graph().balance(
|
||||
&self.chain,
|
||||
self.chain.tip().block_id(),
|
||||
self.indexed_graph.index.outpoints().iter().cloned(),
|
||||
self.indexed_graph.index.outpoints(),
|
||||
|&(k, _), _| k == KeychainKind::Internal,
|
||||
)
|
||||
}
|
||||
@ -1220,17 +1293,9 @@ impl Wallet {
|
||||
coin_selection: Cs,
|
||||
params: TxParams,
|
||||
) -> Result<Psbt, CreateTxError> {
|
||||
let external_descriptor = self
|
||||
.indexed_graph
|
||||
.index
|
||||
.keychains()
|
||||
.get(&KeychainKind::External)
|
||||
.expect("must exist");
|
||||
let internal_descriptor = self
|
||||
.indexed_graph
|
||||
.index
|
||||
.keychains()
|
||||
.get(&KeychainKind::Internal);
|
||||
let keychains: BTreeMap<_, _> = self.indexed_graph.index.keychains().collect();
|
||||
let external_descriptor = keychains.get(&KeychainKind::External).expect("must exist");
|
||||
let internal_descriptor = keychains.get(&KeychainKind::Internal);
|
||||
|
||||
let external_policy = external_descriptor
|
||||
.extract_policy(&self.signers, BuildSatisfaction::None, &self.secp)?
|
||||
@ -1464,8 +1529,11 @@ impl Wallet {
|
||||
Some(ref drain_recipient) => drain_recipient.clone(),
|
||||
None => {
|
||||
let change_keychain = self.map_keychain(KeychainKind::Internal);
|
||||
let ((index, spk), index_changeset) =
|
||||
self.indexed_graph.index.next_unused_spk(&change_keychain);
|
||||
let ((index, spk), index_changeset) = self
|
||||
.indexed_graph
|
||||
.index
|
||||
.next_unused_spk(&change_keychain)
|
||||
.expect("Keychain exists (we called map_keychain)");
|
||||
let spk = spk.into();
|
||||
self.indexed_graph.index.mark_used(change_keychain, index);
|
||||
self.persist
|
||||
@ -1825,7 +1893,11 @@ impl Wallet {
|
||||
///
|
||||
/// This can be used to build a watch-only version of a wallet
|
||||
pub fn public_descriptor(&self, keychain: KeychainKind) -> Option<&ExtendedDescriptor> {
|
||||
self.indexed_graph.index.keychains().get(&keychain)
|
||||
self.indexed_graph
|
||||
.index
|
||||
.keychains()
|
||||
.find(|(k, _)| *k == &keychain)
|
||||
.map(|(_, d)| d)
|
||||
}
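As a small illustration of the watch-only use case mentioned in the doc comment, the public descriptors can be exported as strings. The helper below is not part of bdk; it only relies on the `public_descriptor` signature shown here:

```rust
use bdk::wallet::Wallet;
use bdk::KeychainKind;

/// Collect the public descriptor strings of a wallet, e.g. to import them
/// into a watch-only setup elsewhere (illustrative helper).
fn export_public_descriptors(wallet: &Wallet) -> Vec<String> {
    [KeychainKind::External, KeychainKind::Internal]
        .into_iter()
        .filter_map(|keychain| wallet.public_descriptor(keychain))
        .map(|descriptor| descriptor.to_string())
        .collect()
}
```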
|
||||
|
||||
/// Finalize a PSBT, i.e., for each input determine if sufficient data is available to pass
|
||||
@ -1876,17 +1948,9 @@ impl Wallet {
|
||||
.get_utxo_for(n)
|
||||
.and_then(|txout| self.get_descriptor_for_txout(&txout))
|
||||
.or_else(|| {
|
||||
self.indexed_graph
|
||||
.index
|
||||
.keychains()
|
||||
.iter()
|
||||
.find_map(|(_, desc)| {
|
||||
desc.derive_from_psbt_input(
|
||||
psbt_input,
|
||||
psbt.get_utxo_for(n),
|
||||
&self.secp,
|
||||
)
|
||||
})
|
||||
self.indexed_graph.index.keychains().find_map(|(_, desc)| {
|
||||
desc.derive_from_psbt_input(psbt_input, psbt.get_utxo_for(n), &self.secp)
|
||||
})
|
||||
});
|
||||
|
||||
match desc {
|
||||
@ -1952,7 +2016,12 @@ impl Wallet {
|
||||
|
||||
/// The index of the next address that you would get if you were to ask the wallet for a new address
|
||||
pub fn next_derivation_index(&self, keychain: KeychainKind) -> u32 {
|
||||
self.indexed_graph.index.next_index(&keychain).0
|
||||
let keychain = self.map_keychain(keychain);
|
||||
self.indexed_graph
|
||||
.index
|
||||
.next_index(&keychain)
|
||||
.expect("Keychain must exist (we called map_keychain)")
|
||||
.0
|
||||
}
|
||||
|
||||
/// Informs the wallet that you no longer intend to broadcast a tx that was built from it.
|
||||
@ -2119,7 +2188,6 @@ impl Wallet {
|
||||
if params.add_global_xpubs {
|
||||
let all_xpubs = self
|
||||
.keychains()
|
||||
.iter()
|
||||
.flat_map(|(_, desc)| desc.get_extended_keys())
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
@ -2496,13 +2564,13 @@ fn create_signers<E: IntoWalletDescriptor>(
|
||||
) -> Result<(Arc<SignersContainer>, Arc<SignersContainer>), crate::descriptor::error::Error> {
|
||||
let (descriptor, keymap) = into_wallet_descriptor_checked(descriptor, secp, network)?;
|
||||
let signers = Arc::new(SignersContainer::build(keymap, &descriptor, secp));
|
||||
index.add_keychain(KeychainKind::External, descriptor);
|
||||
let _ = index.insert_descriptor(KeychainKind::External, descriptor);
|
||||
|
||||
let change_signers = match change_descriptor {
|
||||
Some(descriptor) => {
|
||||
let (descriptor, keymap) = into_wallet_descriptor_checked(descriptor, secp, network)?;
|
||||
let signers = Arc::new(SignersContainer::build(keymap, &descriptor, secp));
|
||||
index.add_keychain(KeychainKind::Internal, descriptor);
|
||||
let _ = index.insert_descriptor(KeychainKind::Internal, descriptor);
|
||||
signers
|
||||
}
|
||||
None => Arc::new(SignersContainer::new()),
|
||||
|
@ -1,7 +1,7 @@
|
||||
use std::str::FromStr;
|
||||
|
||||
use assert_matches::assert_matches;
|
||||
use bdk::descriptor::calc_checksum;
|
||||
use bdk::descriptor::{calc_checksum, IntoWalletDescriptor};
|
||||
use bdk::psbt::PsbtUtils;
|
||||
use bdk::signer::{SignOptions, SignerError};
|
||||
use bdk::wallet::coin_selection::{self, LargestFirstCoinSelection};
|
||||
@ -10,9 +10,11 @@ use bdk::wallet::tx_builder::AddForeignUtxoError;
|
||||
use bdk::wallet::NewError;
|
||||
use bdk::wallet::{AddressInfo, Balance, Wallet};
|
||||
use bdk::KeychainKind;
|
||||
use bdk_chain::collections::BTreeMap;
|
||||
use bdk_chain::COINBASE_MATURITY;
|
||||
use bdk_chain::{BlockId, ConfirmationTime};
|
||||
use bitcoin::hashes::Hash;
|
||||
use bitcoin::key::Secp256k1;
|
||||
use bitcoin::psbt;
|
||||
use bitcoin::script::PushBytesBuf;
|
||||
use bitcoin::sighash::{EcdsaSighashType, TapSighashType};
|
||||
@ -84,14 +86,24 @@ fn load_recovers_wallet() {
|
||||
// recover wallet
|
||||
{
|
||||
let db = bdk_file_store::Store::open(DB_MAGIC, &file_path).expect("must recover db");
|
||||
let wallet =
|
||||
Wallet::load(get_test_tr_single_sig_xprv(), None, db).expect("must recover wallet");
|
||||
let wallet = Wallet::load(db).expect("must recover wallet");
|
||||
assert_eq!(wallet.network(), Network::Testnet);
|
||||
assert_eq!(wallet.spk_index().keychains(), wallet_spk_index.keychains());
|
||||
assert_eq!(
|
||||
wallet.spk_index().keychains().collect::<Vec<_>>(),
|
||||
wallet_spk_index.keychains().collect::<Vec<_>>()
|
||||
);
|
||||
assert_eq!(
|
||||
wallet.spk_index().last_revealed_indices(),
|
||||
wallet_spk_index.last_revealed_indices()
|
||||
);
|
||||
let secp = Secp256k1::new();
|
||||
assert_eq!(
|
||||
*wallet.get_descriptor_for_keychain(KeychainKind::External),
|
||||
get_test_tr_single_sig_xprv()
|
||||
.into_wallet_descriptor(&secp, wallet.network())
|
||||
.unwrap()
|
||||
.0
|
||||
);
|
||||
}
|
||||
|
||||
// `new` can only be called on empty db
|
||||
@ -108,12 +120,12 @@ fn new_or_load() {
|
||||
let file_path = temp_dir.path().join("store.db");
|
||||
|
||||
// init wallet when non-existent
|
||||
let wallet_keychains = {
|
||||
let wallet_keychains: BTreeMap<_, _> = {
|
||||
let db = bdk_file_store::Store::open_or_create_new(DB_MAGIC, &file_path)
|
||||
.expect("must create db");
|
||||
let wallet = Wallet::new_or_load(get_test_wpkh(), None, db, Network::Testnet)
|
||||
.expect("must init wallet");
|
||||
wallet.keychains().clone()
|
||||
wallet.keychains().map(|(k, v)| (*k, v.clone())).collect()
|
||||
};
|
||||
|
||||
// wrong network
|
||||
@ -162,6 +174,49 @@ fn new_or_load() {
|
||||
);
|
||||
}
|
||||
|
||||
// wrong external descriptor
|
||||
{
|
||||
let exp_descriptor = get_test_tr_single_sig();
|
||||
let got_descriptor = get_test_wpkh()
|
||||
.into_wallet_descriptor(&Secp256k1::new(), Network::Testnet)
|
||||
.unwrap()
|
||||
.0;
|
||||
|
||||
let db =
|
||||
bdk_file_store::Store::open_or_create_new(DB_MAGIC, &file_path).expect("must open db");
|
||||
let err = Wallet::new_or_load(exp_descriptor, None, db, Network::Testnet)
|
||||
.expect_err("wrong external descriptor");
|
||||
assert!(
|
||||
matches!(
|
||||
err,
|
||||
bdk::wallet::NewOrLoadError::LoadedDescriptorDoesNotMatch { ref got, keychain }
|
||||
if got == &Some(got_descriptor) && keychain == KeychainKind::External
|
||||
),
|
||||
"err: {}",
|
||||
err,
|
||||
);
|
||||
}
|
||||
|
||||
// wrong internal descriptor
|
||||
{
|
||||
let exp_descriptor = Some(get_test_tr_single_sig());
|
||||
let got_descriptor = None;
|
||||
|
||||
let db =
|
||||
bdk_file_store::Store::open_or_create_new(DB_MAGIC, &file_path).expect("must open db");
|
||||
let err = Wallet::new_or_load(get_test_wpkh(), exp_descriptor, db, Network::Testnet)
|
||||
.expect_err("wrong internal descriptor");
|
||||
assert!(
|
||||
matches!(
|
||||
err,
|
||||
bdk::wallet::NewOrLoadError::LoadedDescriptorDoesNotMatch { ref got, keychain }
|
||||
if got == &got_descriptor && keychain == KeychainKind::Internal
|
||||
),
|
||||
"err: {}",
|
||||
err,
|
||||
);
|
||||
}
|
||||
|
||||
// all parameters match
|
||||
{
|
||||
let db =
|
||||
@ -169,7 +224,10 @@ fn new_or_load() {
|
||||
let wallet = Wallet::new_or_load(get_test_wpkh(), None, db, Network::Testnet)
|
||||
.expect("must recover wallet");
|
||||
assert_eq!(wallet.network(), Network::Testnet);
|
||||
assert_eq!(wallet.keychains(), &wallet_keychains);
|
||||
assert!(wallet
|
||||
.keychains()
|
||||
.map(|(k, v)| (*k, v.clone()))
|
||||
.eq(wallet_keychains));
|
||||
}
|
||||
}
|
||||
|
||||
@ -181,7 +239,6 @@ fn test_descriptor_checksum() {
|
||||
|
||||
let raw_descriptor = wallet
|
||||
.keychains()
|
||||
.iter()
|
||||
.next()
|
||||
.unwrap()
|
||||
.1
|
||||
|
@ -27,5 +27,5 @@ proptest = "1.2.0"
|
||||
|
||||
[features]
|
||||
default = ["std"]
|
||||
std = ["bitcoin/std", "miniscript/std"]
|
||||
serde = ["serde_crate", "bitcoin/serde"]
|
||||
std = ["bitcoin/std", "miniscript?/std"]
|
||||
serde = ["serde_crate", "bitcoin/serde", "miniscript?/serde"]
|
||||
|
@ -1,10 +1,29 @@
|
||||
use crate::miniscript::{Descriptor, DescriptorPublicKey};
|
||||
use crate::{
|
||||
alloc::{string::ToString, vec::Vec},
|
||||
miniscript::{Descriptor, DescriptorPublicKey},
|
||||
};
|
||||
use bitcoin::hashes::{hash_newtype, sha256, Hash};
|
||||
|
||||
hash_newtype! {
|
||||
/// Represents the ID of a descriptor, defined as the sha256 hash of
|
||||
/// the descriptor string, checksum excluded.
|
||||
///
|
||||
/// This is useful for having a fixed-length unique representation of a descriptor,
|
||||
/// in particular, we use it to persist application state changes related to the
|
||||
/// descriptor without having to re-write the whole descriptor each time.
|
||||
///
|
||||
pub struct DescriptorId(pub sha256::Hash);
|
||||
}
|
||||
|
||||
/// A trait to extend the functionality of a miniscript descriptor.
|
||||
pub trait DescriptorExt {
|
||||
/// Returns the minimum value (in satoshis) at which an output is broadcastable.
|
||||
/// Panics if the descriptor wildcard is hardened.
|
||||
fn dust_value(&self) -> u64;
|
||||
|
||||
/// Returns the descriptor id, calculated as the sha256 of the descriptor, checksum not
|
||||
/// included.
|
||||
fn descriptor_id(&self) -> DescriptorId;
|
||||
}
|
||||
|
||||
impl DescriptorExt for Descriptor<DescriptorPublicKey> {
|
||||
@ -15,4 +34,11 @@ impl DescriptorExt for Descriptor<DescriptorPublicKey> {
|
||||
.dust_value()
|
||||
.to_sat()
|
||||
}
|
||||
|
||||
fn descriptor_id(&self) -> DescriptorId {
|
||||
let desc = self.to_string();
|
||||
let desc_without_checksum = desc.split('#').next().expect("Must be here");
|
||||
let descriptor_bytes = <Vec<u8>>::from(desc_without_checksum.as_bytes());
|
||||
DescriptorId(sha256::Hash::hash(&descriptor_bytes))
|
||||
}
|
||||
}
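To make the ID definition concrete, the hash can be recomputed by hand and compared with `descriptor_id()`. The descriptor string below is the illustrative one used elsewhere in these docs:

```rust
use bdk_chain::bitcoin::hashes::{sha256, Hash};
use bdk_chain::bitcoin::secp256k1::Secp256k1;
use bdk_chain::miniscript::{Descriptor, DescriptorPublicKey};
use bdk_chain::{DescriptorExt, DescriptorId};

fn main() {
    let secp = Secp256k1::signing_only();
    let (descriptor, _keymap) = Descriptor::<DescriptorPublicKey>::parse_descriptor(
        &secp,
        "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/0/*)",
    )
    .expect("illustrative descriptor must parse");

    // sha256 over the descriptor string with the trailing "#checksum" removed.
    let desc_string = descriptor.to_string();
    let without_checksum = desc_string.split('#').next().expect("non-empty string");
    let by_hand = DescriptorId(sha256::Hash::hash(without_checksum.as_bytes()));

    assert_eq!(by_hand, descriptor.descriptor_id());
}
```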
|
||||
|
@ -3,9 +3,9 @@ use crate::{
|
||||
indexed_tx_graph::Indexer,
|
||||
miniscript::{Descriptor, DescriptorPublicKey},
|
||||
spk_iter::BIP32_MAX_INDEX,
|
||||
SpkIterator, SpkTxOutIndex,
|
||||
DescriptorExt, DescriptorId, SpkIterator, SpkTxOutIndex,
|
||||
};
|
||||
use bitcoin::{Amount, OutPoint, Script, SignedAmount, Transaction, TxOut, Txid};
|
||||
use bitcoin::{hashes::Hash, Amount, OutPoint, Script, SignedAmount, Transaction, TxOut, Txid};
|
||||
use core::{
|
||||
fmt::Debug,
|
||||
ops::{Bound, RangeBounds},
|
||||
@ -13,9 +13,8 @@ use core::{
|
||||
|
||||
use crate::Append;
|
||||
|
||||
|
||||
/// Represents updates to the derivation index of a [`KeychainTxOutIndex`].
|
||||
/// It maps each keychain `K` to its last revealed index.
|
||||
/// It maps each keychain `K` to a descriptor and its last revealed index.
|
||||
///
|
||||
/// It can be applied to [`KeychainTxOutIndex`] with [`apply_changeset`]. [`ChangeSet`]s are
|
||||
/// monotone in that they will never decrease the revealed derivation index.
|
||||
@ -35,46 +34,52 @@ use crate::Append;
|
||||
)
|
||||
)]
|
||||
#[must_use]
|
||||
pub struct ChangeSet<K>(pub BTreeMap<K, u32>);
|
||||
|
||||
impl<K> ChangeSet<K> {
|
||||
/// Get the inner map of the keychain to its new derivation index.
|
||||
pub fn as_inner(&self) -> &BTreeMap<K, u32> {
|
||||
&self.0
|
||||
}
|
||||
pub struct ChangeSet<K> {
|
||||
/// Contains the keychains that have been added and their respective descriptor
|
||||
pub keychains_added: BTreeMap<K, Descriptor<DescriptorPublicKey>>,
|
||||
/// Contains for each descriptor_id the last revealed index of derivation
|
||||
pub last_revealed: BTreeMap<DescriptorId, u32>,
|
||||
}
|
||||
|
||||
impl<K: Ord> Append for ChangeSet<K> {
|
||||
/// Append another [`ChangeSet`] into self.
|
||||
///
|
||||
/// For each keychain in `keychains_added` in the given [`ChangeSet`]:
|
||||
/// If the keychain already exists with a different descriptor, we overwrite the old descriptor.
|
||||
///
|
||||
/// For each `last_revealed` in the given [`ChangeSet`]:
|
||||
/// If the descriptor ID already exists, keep the larger of the two indexes.
/// If the descriptor ID did not exist, insert the new entry.
|
||||
fn append(&mut self, mut other: Self) {
|
||||
self.0.iter_mut().for_each(|(key, index)| {
|
||||
if let Some(other_index) = other.0.remove(key) {
|
||||
for (keychain, descriptor) in &mut self.keychains_added {
|
||||
if let Some(other_descriptor) = other.keychains_added.remove(keychain) {
|
||||
*descriptor = other_descriptor;
|
||||
}
|
||||
}
|
||||
|
||||
for (descriptor_id, index) in &mut self.last_revealed {
|
||||
if let Some(other_index) = other.last_revealed.remove(descriptor_id) {
|
||||
*index = other_index.max(*index);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// We use `extend` instead of `BTreeMap::append` due to performance issues with `append`.
|
||||
// Refer to https://github.com/rust-lang/rust/issues/34666#issuecomment-675658420
|
||||
self.0.extend(other.0);
|
||||
self.keychains_added.extend(other.keychains_added);
|
||||
self.last_revealed.extend(other.last_revealed);
|
||||
}
|
||||
|
||||
/// Returns whether the changeset is empty.
|
||||
fn is_empty(&self) -> bool {
|
||||
self.0.is_empty()
|
||||
self.last_revealed.is_empty() && self.keychains_added.is_empty()
|
||||
}
|
||||
}
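A tiny example of the merge rule for `last_revealed` (the string keychain type and the dummy descriptor ID are illustrative; `Append` is the bdk_chain trait implemented just above):

```rust
use bdk_chain::bitcoin::hashes::Hash;
use bdk_chain::keychain::ChangeSet;
use bdk_chain::{Append, DescriptorId};
use std::collections::BTreeMap;

fn main() {
    // A dummy descriptor ID is enough to show the rule.
    let desc_id = DescriptorId::from_byte_array([0x01; 32]);

    let mut a: ChangeSet<&'static str> = ChangeSet {
        keychains_added: BTreeMap::new(),
        last_revealed: [(desc_id, 5)].into(),
    };
    let b: ChangeSet<&'static str> = ChangeSet {
        keychains_added: BTreeMap::new(),
        last_revealed: [(desc_id, 3)].into(),
    };

    // `append` keeps the larger revealed index: derivation indices are
    // monotone and never move backwards.
    a.append(b);
    assert_eq!(a.last_revealed.get(&desc_id), Some(&5));
}
```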
|
||||
|
||||
impl<K> Default for ChangeSet<K> {
|
||||
fn default() -> Self {
|
||||
Self(Default::default())
|
||||
}
|
||||
}
|
||||
|
||||
impl<K> AsRef<BTreeMap<K, u32>> for ChangeSet<K> {
|
||||
fn as_ref(&self) -> &BTreeMap<K, u32> {
|
||||
&self.0
|
||||
Self {
|
||||
last_revealed: BTreeMap::default(),
|
||||
keychains_added: BTreeMap::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -119,7 +124,7 @@ const DEFAULT_LOOKAHEAD: u32 = 25;
|
||||
///
|
||||
/// # Change sets
|
||||
///
|
||||
/// Methods that can update the last revealed index will return [`super::ChangeSet`] to report
|
||||
/// Methods that can update the last revealed index or add keychains will return [`super::ChangeSet`] to report
|
||||
/// these changes. This can be persisted for future recovery.
|
||||
///
|
||||
/// ## Synopsis
|
||||
@ -144,10 +149,10 @@ const DEFAULT_LOOKAHEAD: u32 = 25;
|
||||
/// # let secp = bdk_chain::bitcoin::secp256k1::Secp256k1::signing_only();
|
||||
/// # let (external_descriptor,_) = Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/0/*)").unwrap();
|
||||
/// # let (internal_descriptor,_) = Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/1/*)").unwrap();
|
||||
/// # let (descriptor_for_user_42, _) = Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/2/*)").unwrap();
|
||||
/// txout_index.add_keychain(MyKeychain::External, external_descriptor);
|
||||
/// txout_index.add_keychain(MyKeychain::Internal, internal_descriptor);
|
||||
/// txout_index.add_keychain(MyKeychain::MyAppUser { user_id: 42 }, descriptor_for_user_42);
|
||||
/// # let (descriptor_42, _) = Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/2/*)").unwrap();
|
||||
/// let _ = txout_index.insert_descriptor(MyKeychain::External, external_descriptor);
|
||||
/// let _ = txout_index.insert_descriptor(MyKeychain::Internal, internal_descriptor);
|
||||
/// let _ = txout_index.insert_descriptor(MyKeychain::MyAppUser { user_id: 42 }, descriptor_42);
|
||||
///
|
||||
/// let new_spk_for_user = txout_index.reveal_next_spk(&MyKeychain::MyAppUser{ user_id: 42 });
|
||||
/// ```
|
||||
@ -164,13 +169,24 @@ const DEFAULT_LOOKAHEAD: u32 = 25;
|
||||
/// [`new`]: KeychainTxOutIndex::new
|
||||
/// [`unbounded_spk_iter`]: KeychainTxOutIndex::unbounded_spk_iter
|
||||
/// [`all_unbounded_spk_iters`]: KeychainTxOutIndex::all_unbounded_spk_iters
|
||||
// Under the hood, the KeychainTxOutIndex uses a SpkTxOutIndex that keeps track of spks, indexed by
|
||||
// descriptors. Users then assign or unassign keychains to those descriptors. It's important to
|
||||
// note that descriptors, once added, never get removed from the SpkTxOutIndex; this is useful in
|
||||
// case a user unassigns a keychain from a descriptor and after some time assigns it again.
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct KeychainTxOutIndex<K> {
|
||||
inner: SpkTxOutIndex<(K, u32)>,
|
||||
// descriptors of each keychain
|
||||
keychains: BTreeMap<K, Descriptor<DescriptorPublicKey>>,
|
||||
inner: SpkTxOutIndex<(DescriptorId, u32)>,
|
||||
// keychain -> (descriptor id, descriptor) map
|
||||
keychains_to_descriptors: BTreeMap<K, (DescriptorId, Descriptor<DescriptorPublicKey>)>,
|
||||
// descriptor id -> (keychain, descriptor) map
|
||||
descriptor_ids_to_keychain: BTreeMap<DescriptorId, (K, Descriptor<DescriptorPublicKey>)>,
|
||||
// descriptor_id -> descriptor map
|
||||
// This is a "monotone" map, meaning that its size keeps growing, i.e., we never delete
|
||||
// descriptors from it. This is useful for revealing spks for descriptors that don't have
|
||||
// keychains associated.
|
||||
descriptor_ids_to_descriptors: BTreeMap<DescriptorId, Descriptor<DescriptorPublicKey>>,
|
||||
// last revealed indexes
|
||||
last_revealed: BTreeMap<K, u32>,
|
||||
last_revealed: BTreeMap<DescriptorId, u32>,
|
||||
// lookahead settings for each keychain
|
||||
lookahead: u32,
|
||||
}
|
||||
@ -186,7 +202,15 @@ impl<K: Clone + Ord + Debug> Indexer for KeychainTxOutIndex<K> {
|
||||
|
||||
fn index_txout(&mut self, outpoint: OutPoint, txout: &TxOut) -> Self::ChangeSet {
|
||||
match self.inner.scan_txout(outpoint, txout).cloned() {
|
||||
Some((keychain, index)) => self.reveal_to_target(&keychain, index).1,
|
||||
Some((descriptor_id, index)) => {
|
||||
// We want to reveal spks for descriptors that aren't tracked by any keychain, and
|
||||
// so we call reveal with descriptor_id
|
||||
if let Some((_, changeset)) = self.reveal_to_target_with_id(descriptor_id, index) {
|
||||
changeset
|
||||
} else {
|
||||
super::ChangeSet::default()
|
||||
}
|
||||
}
|
||||
None => super::ChangeSet::default(),
|
||||
}
|
||||
}
|
||||
@ -200,7 +224,13 @@ impl<K: Clone + Ord + Debug> Indexer for KeychainTxOutIndex<K> {
|
||||
}
|
||||
|
||||
fn initial_changeset(&self) -> Self::ChangeSet {
|
||||
super::ChangeSet(self.last_revealed.clone())
|
||||
super::ChangeSet {
|
||||
keychains_added: self
|
||||
.keychains()
|
||||
.map(|(k, v)| (k.clone(), v.clone()))
|
||||
.collect(),
|
||||
last_revealed: self.last_revealed.clone(),
|
||||
}
|
||||
}
|
||||
|
||||
fn apply_changeset(&mut self, changeset: Self::ChangeSet) {
|
||||
@ -226,7 +256,9 @@ impl<K> KeychainTxOutIndex<K> {
|
||||
pub fn new(lookahead: u32) -> Self {
|
||||
Self {
|
||||
inner: SpkTxOutIndex::default(),
|
||||
keychains: BTreeMap::new(),
|
||||
descriptor_ids_to_keychain: BTreeMap::new(),
|
||||
descriptor_ids_to_descriptors: BTreeMap::new(),
|
||||
keychains_to_descriptors: BTreeMap::new(),
|
||||
last_revealed: BTreeMap::new(),
|
||||
lookahead,
|
||||
}
|
||||
@ -239,22 +271,29 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
|
||||
///
|
||||
/// **WARNING:** The internal index will contain lookahead spks. Refer to
|
||||
/// [struct-level docs](KeychainTxOutIndex) for more about `lookahead`.
|
||||
pub fn inner(&self) -> &SpkTxOutIndex<(K, u32)> {
|
||||
pub fn inner(&self) -> &SpkTxOutIndex<(DescriptorId, u32)> {
|
||||
&self.inner
|
||||
}
|
||||
|
||||
/// Get a reference to the set of indexed outpoints.
|
||||
pub fn outpoints(&self) -> &BTreeSet<((K, u32), OutPoint)> {
|
||||
self.inner.outpoints()
|
||||
/// Get the set of indexed outpoints, corresponding to tracked keychains.
|
||||
pub fn outpoints(&self) -> impl DoubleEndedIterator<Item = ((K, u32), OutPoint)> + '_ {
|
||||
self.inner
|
||||
.outpoints()
|
||||
.iter()
|
||||
.filter_map(|((desc_id, index), op)| {
|
||||
self.descriptor_ids_to_keychain
|
||||
.get(desc_id)
|
||||
.map(|(k, _)| ((k.clone(), *index), *op))
|
||||
})
|
||||
}
|
||||
|
||||
/// Iterate over known txouts that spend to tracked script pubkeys.
|
||||
pub fn txouts(
|
||||
&self,
|
||||
) -> impl DoubleEndedIterator<Item = (K, u32, OutPoint, &TxOut)> + ExactSizeIterator {
|
||||
self.inner
|
||||
.txouts()
|
||||
.map(|((k, i), op, txo)| (k.clone(), *i, op, txo))
|
||||
pub fn txouts(&self) -> impl DoubleEndedIterator<Item = (K, u32, OutPoint, &TxOut)> + '_ {
|
||||
self.inner.txouts().filter_map(|((desc_id, i), op, txo)| {
|
||||
self.descriptor_ids_to_keychain
|
||||
.get(desc_id)
|
||||
.map(|(k, _)| (k.clone(), *i, op, txo))
|
||||
})
|
||||
}
|
||||
|
||||
/// Finds all txouts on a transaction that has previously been scanned and indexed.
|
||||
@ -264,32 +303,41 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
|
||||
) -> impl DoubleEndedIterator<Item = (K, u32, OutPoint, &TxOut)> {
|
||||
self.inner
|
||||
.txouts_in_tx(txid)
|
||||
.map(|((k, i), op, txo)| (k.clone(), *i, op, txo))
|
||||
.filter_map(|((desc_id, i), op, txo)| {
|
||||
self.descriptor_ids_to_keychain
|
||||
.get(desc_id)
|
||||
.map(|(k, _)| (k.clone(), *i, op, txo))
|
||||
})
|
||||
}
|
||||
|
||||
/// Return the [`TxOut`] of `outpoint` if it has been indexed.
|
||||
/// Return the [`TxOut`] of `outpoint` if it has been indexed, and if it corresponds to a
|
||||
/// tracked keychain.
|
||||
///
|
||||
/// The associated keychain and keychain index of the txout's spk is also returned.
|
||||
///
|
||||
/// This calls [`SpkTxOutIndex::txout`] internally.
|
||||
pub fn txout(&self, outpoint: OutPoint) -> Option<(K, u32, &TxOut)> {
|
||||
self.inner
|
||||
.txout(outpoint)
|
||||
.map(|((k, i), txo)| (k.clone(), *i, txo))
|
||||
let ((descriptor_id, index), txo) = self.inner.txout(outpoint)?;
|
||||
let (keychain, _) = self.descriptor_ids_to_keychain.get(descriptor_id)?;
|
||||
Some((keychain.clone(), *index, txo))
|
||||
}
|
||||
|
||||
/// Return the script that exists under the given `keychain`'s `index`.
|
||||
///
|
||||
/// This calls [`SpkTxOutIndex::spk_at_index`] internally.
|
||||
pub fn spk_at_index(&self, keychain: K, index: u32) -> Option<&Script> {
|
||||
self.inner.spk_at_index(&(keychain, index))
|
||||
let descriptor_id = self.keychains_to_descriptors.get(&keychain)?.0;
|
||||
self.inner.spk_at_index(&(descriptor_id, index))
|
||||
}
|
||||
|
||||
/// Returns the keychain and keychain index associated with the spk.
|
||||
///
|
||||
/// This calls [`SpkTxOutIndex::index_of_spk`] internally.
|
||||
pub fn index_of_spk(&self, script: &Script) -> Option<(K, u32)> {
|
||||
self.inner.index_of_spk(script).cloned()
|
||||
let (desc_id, last_index) = self.inner.index_of_spk(script)?;
|
||||
self.descriptor_ids_to_keychain
|
||||
.get(desc_id)
|
||||
.map(|(k, _)| (k.clone(), *last_index))
|
||||
}
|
||||
|
||||
/// Returns whether the spk under the `keychain`'s `index` has been used.
|
||||
@ -299,7 +347,11 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
|
||||
///
|
||||
/// This calls [`SpkTxOutIndex::is_used`] internally.
|
||||
pub fn is_used(&self, keychain: K, index: u32) -> bool {
|
||||
self.inner.is_used(&(keychain, index))
|
||||
let descriptor_id = self.keychains_to_descriptors.get(&keychain).map(|k| k.0);
|
||||
match descriptor_id {
|
||||
Some(descriptor_id) => self.inner.is_used(&(descriptor_id, index)),
|
||||
None => false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Marks the script pubkey at `index` as used even though the tracker hasn't seen an output
|
||||
@ -307,7 +359,9 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
|
||||
///
|
||||
/// This only has an effect when the `index` had been added to `self` already and was unused.
|
||||
///
|
||||
/// Returns whether the `index` was initially present as `unused`.
|
||||
/// Returns whether the spk under the given `keychain` and `index` is successfully
|
||||
/// marked as used. Returns false either when there is no descriptor under the given
|
||||
/// keychain, or when the spk is already marked as used.
|
||||
///
|
||||
/// This is useful when you want to reserve a script pubkey for something but don't want to add
|
||||
/// the transaction output using it to the index yet. Other callers will consider `index` on
|
||||
@ -317,7 +371,11 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
|
||||
///
|
||||
/// [`unmark_used`]: Self::unmark_used
|
||||
pub fn mark_used(&mut self, keychain: K, index: u32) -> bool {
|
||||
self.inner.mark_used(&(keychain, index))
|
||||
let descriptor_id = self.keychains_to_descriptors.get(&keychain).map(|k| k.0);
|
||||
match descriptor_id {
|
||||
Some(descriptor_id) => self.inner.mark_used(&(descriptor_id, index)),
|
||||
None => false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Undoes the effect of [`mark_used`]. Returns whether the `index` is inserted back into
|
||||
@ -330,7 +388,11 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
|
||||
///
|
||||
/// [`mark_used`]: Self::mark_used
|
||||
pub fn unmark_used(&mut self, keychain: K, index: u32) -> bool {
|
||||
self.inner.unmark_used(&(keychain, index))
|
||||
let descriptor_id = self.keychains_to_descriptors.get(&keychain).map(|k| k.0);
|
||||
match descriptor_id {
|
||||
Some(descriptor_id) => self.inner.unmark_used(&(descriptor_id, index)),
|
||||
None => false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Computes the total value transfer effect `tx` has on the script pubkeys belonging to the
|
||||
@ -344,7 +406,7 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
|
||||
range: impl RangeBounds<K>,
|
||||
) -> (Amount, Amount) {
|
||||
self.inner
|
||||
.sent_and_received(tx, Self::map_to_inner_bounds(range))
|
||||
.sent_and_received(tx, self.map_to_inner_bounds(range))
|
||||
}
|
||||
|
||||
/// Computes the net value that this transaction gives to the script pubkeys in the index and
|
||||
@ -355,34 +417,79 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
|
||||
///
|
||||
/// [`sent_and_received`]: Self::sent_and_received
|
||||
pub fn net_value(&self, tx: &Transaction, range: impl RangeBounds<K>) -> SignedAmount {
|
||||
self.inner.net_value(tx, Self::map_to_inner_bounds(range))
|
||||
self.inner.net_value(tx, self.map_to_inner_bounds(range))
|
||||
}
|
||||
}
|
||||
|
||||
impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
|
||||
/// Return a reference to the internal map of keychain to descriptors.
|
||||
pub fn keychains(&self) -> &BTreeMap<K, Descriptor<DescriptorPublicKey>> {
|
||||
&self.keychains
|
||||
/// Return an iterator over the keychains and their descriptors.
|
||||
pub fn keychains(
|
||||
&self,
|
||||
) -> impl DoubleEndedIterator<Item = (&K, &Descriptor<DescriptorPublicKey>)> + ExactSizeIterator + '_
|
||||
{
|
||||
self.keychains_to_descriptors
|
||||
.iter()
|
||||
.map(|(k, (_, d))| (k, d))
|
||||
}
|
||||
|
||||
/// Add a keychain to the tracker's `txout_index` with a descriptor to derive addresses.
|
||||
/// Insert a descriptor with a keychain associated to it.
|
||||
///
|
||||
/// Adding a keychain means you will be able to derive new script pubkeys under that keychain
|
||||
/// Adding a descriptor means you will be able to derive new script pubkeys under it
|
||||
/// and the txout index will discover transaction outputs with those script pubkeys.
|
||||
///
|
||||
/// # Panics
|
||||
///
|
||||
/// This will panic if a different `descriptor` is introduced to the same `keychain`.
|
||||
pub fn add_keychain(&mut self, keychain: K, descriptor: Descriptor<DescriptorPublicKey>) {
|
||||
let old_descriptor = &*self
|
||||
.keychains
|
||||
.entry(keychain.clone())
|
||||
.or_insert_with(|| descriptor.clone());
|
||||
assert_eq!(
|
||||
&descriptor, old_descriptor,
|
||||
"keychain already contains a different descriptor"
|
||||
);
|
||||
/// When trying to add a keychain that already existed under a different descriptor, or a descriptor
|
||||
/// that already existed with a different keychain, the old keychain (or descriptor) will be
|
||||
/// overwritten.
|
||||
pub fn insert_descriptor(
|
||||
&mut self,
|
||||
keychain: K,
|
||||
descriptor: Descriptor<DescriptorPublicKey>,
|
||||
) -> super::ChangeSet<K> {
|
||||
let descriptor_id = descriptor.descriptor_id();
|
||||
// First, we fill the keychain -> (desc_id, descriptor) map
|
||||
let old_descriptor_opt = self
|
||||
.keychains_to_descriptors
|
||||
.insert(keychain.clone(), (descriptor_id, descriptor.clone()));
|
||||
|
||||
// Then, we fill the descriptor_id -> (keychain, descriptor) map
|
||||
let old_keychain_opt = self
|
||||
.descriptor_ids_to_keychain
|
||||
.insert(descriptor_id, (keychain.clone(), descriptor.clone()));
|
||||
|
||||
// If `keychain` already had a `descriptor` associated, different from the `descriptor`
|
||||
// passed in, we remove it from the descriptor -> keychain map
|
||||
if let Some((old_desc_id, _)) = old_descriptor_opt {
|
||||
if old_desc_id != descriptor_id {
|
||||
self.descriptor_ids_to_keychain.remove(&old_desc_id);
|
||||
}
|
||||
}
|
||||
|
||||
// Lastly, we fill the desc_id -> desc map
|
||||
self.descriptor_ids_to_descriptors
|
||||
.insert(descriptor_id, descriptor.clone());
|
||||
|
||||
self.replenish_lookahead(&keychain, self.lookahead);
|
||||
|
||||
// If both the keychain and descriptor were already inserted and associated, the
|
||||
// keychains_added changeset must be empty
|
||||
let keychains_added = if old_keychain_opt.map(|(k, _)| k) == Some(keychain.clone())
|
||||
&& old_descriptor_opt.map(|(_, d)| d) == Some(descriptor.clone())
|
||||
{
|
||||
[].into()
|
||||
} else {
|
||||
[(keychain, descriptor)].into()
|
||||
};
|
||||
|
||||
super::ChangeSet {
|
||||
keychains_added,
|
||||
last_revealed: [].into(),
|
||||
}
|
||||
}
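A short example of the overwrite/no-op behavior described above (the string keychain type and descriptor are illustrative):

```rust
use bdk_chain::bitcoin::secp256k1::Secp256k1;
use bdk_chain::keychain::KeychainTxOutIndex;
use bdk_chain::miniscript::{Descriptor, DescriptorPublicKey};

fn main() {
    let secp = Secp256k1::signing_only();
    let (descriptor, _keymap) = Descriptor::<DescriptorPublicKey>::parse_descriptor(
        &secp,
        "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/1/*)",
    )
    .expect("illustrative descriptor must parse");

    let mut index = KeychainTxOutIndex::<&'static str>::new(10);

    // First insertion: the returned changeset records the new assignment.
    let first = index.insert_descriptor("external", descriptor.clone());
    assert_eq!(first.keychains_added.len(), 1);

    // Re-inserting the exact same (keychain, descriptor) pair is a no-op,
    // so the returned changeset is empty.
    let second = index.insert_descriptor("external", descriptor);
    assert!(second.keychains_added.is_empty() && second.last_revealed.is_empty());
}
```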
|
||||
|
||||
/// Gets the descriptor associated with the keychain. Returns `None` if the keychain doesn't
|
||||
/// have a descriptor associated with it.
|
||||
pub fn get_descriptor(&self, keychain: &K) -> Option<&Descriptor<DescriptorPublicKey>> {
|
||||
self.keychains_to_descriptors.get(keychain).map(|(_, d)| d)
|
||||
}
|
||||
|
||||
/// Get the lookahead setting.
|
||||
@ -398,63 +505,60 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
|
||||
///
|
||||
/// This does not change the global `lookahead` setting.
|
||||
pub fn lookahead_to_target(&mut self, keychain: &K, target_index: u32) {
|
||||
let (next_index, _) = self.next_index(keychain);
|
||||
if let Some((next_index, _)) = self.next_index(keychain) {
|
||||
let temp_lookahead = (target_index + 1)
|
||||
.checked_sub(next_index)
|
||||
.filter(|&index| index > 0);
|
||||
|
||||
let temp_lookahead = (target_index + 1)
|
||||
.checked_sub(next_index)
|
||||
.filter(|&index| index > 0);
|
||||
|
||||
if let Some(temp_lookahead) = temp_lookahead {
|
||||
self.replenish_lookahead(keychain, temp_lookahead);
|
||||
if let Some(temp_lookahead) = temp_lookahead {
|
||||
self.replenish_lookahead(keychain, temp_lookahead);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn replenish_lookahead(&mut self, keychain: &K, lookahead: u32) {
|
||||
let descriptor = self.keychains.get(keychain).expect("keychain must exist");
|
||||
let next_store_index = self.next_store_index(keychain);
|
||||
let next_reveal_index = self.last_revealed.get(keychain).map_or(0, |v| *v + 1);
|
||||
let descriptor_opt = self.keychains_to_descriptors.get(keychain).cloned();
|
||||
if let Some((descriptor_id, descriptor)) = descriptor_opt {
|
||||
let next_store_index = self.next_store_index(descriptor_id);
|
||||
let next_reveal_index = self.last_revealed.get(&descriptor_id).map_or(0, |v| *v + 1);
|
||||
|
||||
for (new_index, new_spk) in
|
||||
SpkIterator::new_with_range(descriptor, next_store_index..next_reveal_index + lookahead)
|
||||
{
|
||||
let _inserted = self
|
||||
.inner
|
||||
.insert_spk((keychain.clone(), new_index), new_spk);
|
||||
debug_assert!(_inserted, "replenish lookahead: must not have existing spk: keychain={:?}, lookahead={}, next_store_index={}, next_reveal_index={}", keychain, lookahead, next_store_index, next_reveal_index);
|
||||
for (new_index, new_spk) in SpkIterator::new_with_range(
|
||||
descriptor,
|
||||
next_store_index..next_reveal_index + lookahead,
|
||||
) {
|
||||
let _inserted = self.inner.insert_spk((descriptor_id, new_index), new_spk);
|
||||
debug_assert!(_inserted, "replenish lookahead: must not have existing spk: keychain={:?}, lookahead={}, next_store_index={}, next_reveal_index={}", keychain, lookahead, next_store_index, next_reveal_index);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn next_store_index(&self, keychain: &K) -> u32 {
|
||||
fn next_store_index(&self, descriptor_id: DescriptorId) -> u32 {
|
||||
self.inner()
|
||||
.all_spks()
|
||||
// This range is filtering out the spks with a keychain different than
|
||||
// `keychain`. We don't use filter here as range is more optimized.
|
||||
.range((keychain.clone(), u32::MIN)..(keychain.clone(), u32::MAX))
|
||||
// This range is keeping only the spks with descriptor_id equal to
|
||||
// `descriptor_id`. We don't use filter here as range is more optimized.
|
||||
.range((descriptor_id, u32::MIN)..(descriptor_id, u32::MAX))
|
||||
.last()
|
||||
.map_or(0, |((_, index), _)| *index + 1)
|
||||
}
|
||||
|
||||
/// Get an unbounded spk iterator over a given `keychain`.
|
||||
///
|
||||
/// # Panics
|
||||
///
|
||||
/// This will panic if the given `keychain`'s descriptor does not exist.
|
||||
pub fn unbounded_spk_iter(&self, keychain: &K) -> SpkIterator<Descriptor<DescriptorPublicKey>> {
|
||||
SpkIterator::new(
|
||||
self.keychains
|
||||
.get(keychain)
|
||||
.expect("keychain does not exist")
|
||||
.clone(),
|
||||
)
|
||||
/// Get an unbounded spk iterator over a given `keychain`. Returns `None` if the provided
|
||||
/// keychain doesn't exist
|
||||
pub fn unbounded_spk_iter(
|
||||
&self,
|
||||
keychain: &K,
|
||||
) -> Option<SpkIterator<Descriptor<DescriptorPublicKey>>> {
|
||||
let descriptor = self.keychains_to_descriptors.get(keychain)?.1.clone();
|
||||
Some(SpkIterator::new(descriptor))
|
||||
}
|
||||
|
||||
/// Get unbounded spk iterators for all keychains.
|
||||
pub fn all_unbounded_spk_iters(
|
||||
&self,
|
||||
) -> BTreeMap<K, SpkIterator<Descriptor<DescriptorPublicKey>>> {
|
||||
self.keychains
|
||||
self.keychains_to_descriptors
|
||||
.iter()
|
||||
.map(|(k, descriptor)| (k.clone(), SpkIterator::new(descriptor.clone())))
|
||||
.map(|(k, (_, descriptor))| (k.clone(), SpkIterator::new(descriptor.clone())))
|
||||
.collect()
|
||||
}
|
||||
|
||||
@ -463,18 +567,30 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
|
||||
&self,
|
||||
range: impl RangeBounds<K>,
|
||||
) -> impl DoubleEndedIterator<Item = (&K, u32, &Script)> + Clone {
|
||||
self.keychains.range(range).flat_map(|(keychain, _)| {
|
||||
let start = Bound::Included((keychain.clone(), u32::MIN));
|
||||
let end = match self.last_revealed.get(keychain) {
|
||||
Some(last_revealed) => Bound::Included((keychain.clone(), *last_revealed)),
|
||||
None => Bound::Excluded((keychain.clone(), u32::MIN)),
|
||||
};
|
||||
self.keychains_to_descriptors
|
||||
.range(range)
|
||||
.flat_map(|(_, (descriptor_id, _))| {
|
||||
let start = Bound::Included((*descriptor_id, u32::MIN));
|
||||
let end = match self.last_revealed.get(descriptor_id) {
|
||||
Some(last_revealed) => Bound::Included((*descriptor_id, *last_revealed)),
|
||||
None => Bound::Excluded((*descriptor_id, u32::MIN)),
|
||||
};
|
||||
|
||||
self.inner
|
||||
.all_spks()
|
||||
.range((start, end))
|
||||
.map(|((keychain, i), spk)| (keychain, *i, spk.as_script()))
|
||||
})
|
||||
self.inner
|
||||
.all_spks()
|
||||
.range((start, end))
|
||||
.map(|((descriptor_id, i), spk)| {
|
||||
(
|
||||
&self
|
||||
.descriptor_ids_to_keychain
|
||||
.get(descriptor_id)
|
||||
.expect("Must be here")
|
||||
.0,
|
||||
*i,
|
||||
spk.as_script(),
|
||||
)
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
/// Iterate over revealed spks of the given `keychain`.
|
||||
@ -488,20 +604,29 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
|
||||
|
||||
/// Iterate over revealed, but unused, spks of all keychains.
|
||||
pub fn unused_spks(&self) -> impl DoubleEndedIterator<Item = (K, u32, &Script)> + Clone {
|
||||
self.keychains.keys().flat_map(|keychain| {
|
||||
self.keychains_to_descriptors.keys().flat_map(|keychain| {
|
||||
self.unused_keychain_spks(keychain)
|
||||
.map(|(i, spk)| (keychain.clone(), i, spk))
|
||||
})
|
||||
}
|
||||
|
||||
/// Iterate over revealed, but unused, spks of the given `keychain`.
|
||||
/// Returns an empty iterator if the provided keychain doesn't exist.
|
||||
pub fn unused_keychain_spks(
|
||||
&self,
|
||||
keychain: &K,
|
||||
) -> impl DoubleEndedIterator<Item = (u32, &Script)> + Clone {
|
||||
let next_i = self.last_revealed.get(keychain).map_or(0, |&i| i + 1);
|
||||
let desc_id = self
|
||||
.keychains_to_descriptors
|
||||
.get(keychain)
|
||||
.map(|(desc_id, _)| *desc_id)
|
||||
// We use a dummy desc id if we can't find the real one in our map. In this way,
|
||||
// if this method was to be called with a non-existent keychain, we would return an
|
||||
// empty iterator
|
||||
.unwrap_or_else(|| DescriptorId::from_byte_array([0; 32]));
|
||||
let next_i = self.last_revealed.get(&desc_id).map_or(0, |&i| i + 1);
|
||||
self.inner
|
||||
.unused_spks((keychain.clone(), u32::MIN)..(keychain.clone(), next_i))
|
||||
.unused_spks((desc_id, u32::MIN)..(desc_id, next_i))
|
||||
.map(|((_, i), spk)| (*i, spk))
|
||||
}
|
||||
|
||||
@ -516,17 +641,15 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
|
||||
///
|
||||
/// Not checking the second field of the tuple may result in address reuse.
|
||||
///
|
||||
/// # Panics
|
||||
///
|
||||
/// Panics if the `keychain` does not exist.
|
||||
pub fn next_index(&self, keychain: &K) -> (u32, bool) {
|
||||
let descriptor = self.keychains.get(keychain).expect("keychain must exist");
|
||||
let last_index = self.last_revealed.get(keychain).cloned();
|
||||
/// Returns None if the provided `keychain` doesn't exist.
|
||||
pub fn next_index(&self, keychain: &K) -> Option<(u32, bool)> {
|
||||
let (descriptor_id, descriptor) = self.keychains_to_descriptors.get(keychain)?;
|
||||
let last_index = self.last_revealed.get(descriptor_id).cloned();
|
||||
|
||||
// we can only get the next index if the wildcard exists.
|
||||
let has_wildcard = descriptor.has_wildcard();
|
||||
|
||||
match last_index {
|
||||
Some(match last_index {
|
||||
// if there is no index, next_index is always 0.
|
||||
None => (0, true),
|
||||
// descriptors without wildcards can only have one index.
|
||||
@ -538,19 +661,28 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
|
||||
Some(index) if index == BIP32_MAX_INDEX => (index, false),
|
||||
// get the next derivation index.
|
||||
Some(index) => (index + 1, true),
|
||||
}
|
||||
})
|
||||
}
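A short sketch of the new `Option`-based contract: a known keychain yields the next index, while an unknown keychain yields `None` instead of panicking (the string keychain and descriptor are illustrative):

```rust
use bdk_chain::bitcoin::secp256k1::Secp256k1;
use bdk_chain::keychain::KeychainTxOutIndex;
use bdk_chain::miniscript::{Descriptor, DescriptorPublicKey};

fn main() {
    let secp = Secp256k1::signing_only();
    let (descriptor, _keymap) = Descriptor::<DescriptorPublicKey>::parse_descriptor(
        &secp,
        "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/0/*)",
    )
    .expect("illustrative descriptor must parse");

    let mut index = KeychainTxOutIndex::<&'static str>::new(10);
    let _ = index.insert_descriptor("external", descriptor);

    // Wildcard descriptor with nothing revealed yet: next index is 0 and a new
    // script pubkey can still be derived.
    assert_eq!(index.next_index(&"external"), Some((0, true)));

    // Unknown keychain: `None` instead of a panic.
    assert_eq!(index.next_index(&"unknown"), None);
}
```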
|
||||
|
||||
/// Get the last derivation index that is revealed for each keychain.
|
||||
///
|
||||
/// Keychains with no revealed indices will not be included in the returned [`BTreeMap`].
|
||||
pub fn last_revealed_indices(&self) -> &BTreeMap<K, u32> {
|
||||
&self.last_revealed
|
||||
pub fn last_revealed_indices(&self) -> BTreeMap<K, u32> {
|
||||
self.last_revealed
|
||||
.iter()
|
||||
.filter_map(|(descriptor_id, index)| {
|
||||
self.descriptor_ids_to_keychain
|
||||
.get(descriptor_id)
|
||||
.map(|(k, _)| (k.clone(), *index))
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Get the last derivation index revealed for `keychain`.
|
||||
/// Get the last derivation index revealed for `keychain`. Returns None if the keychain doesn't
|
||||
/// exist, or if the keychain doesn't have any revealed scripts.
|
||||
pub fn last_revealed_index(&self, keychain: &K) -> Option<u32> {
|
||||
self.last_revealed.get(keychain).cloned()
|
||||
let descriptor_id = self.keychains_to_descriptors.get(keychain)?.0;
|
||||
self.last_revealed.get(&descriptor_id).cloned()
|
||||
}
|
||||
|
||||
/// Convenience method to call [`Self::reveal_to_target`] on multiple keychains.
|
||||
@ -565,16 +697,77 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
|
||||
let mut spks = BTreeMap::new();
|
||||
|
||||
for (keychain, &index) in keychains {
|
||||
let (new_spks, new_changeset) = self.reveal_to_target(keychain, index);
|
||||
if !new_changeset.is_empty() {
|
||||
spks.insert(keychain.clone(), new_spks);
|
||||
changeset.append(new_changeset.clone());
|
||||
if let Some((new_spks, new_changeset)) = self.reveal_to_target(keychain, index) {
|
||||
if !new_changeset.is_empty() {
|
||||
spks.insert(keychain.clone(), new_spks);
|
||||
changeset.append(new_changeset.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
(spks, changeset)
|
||||
}
|
||||
|
||||
/// Convenience method to call `reveal_to_target` with a descriptor_id instead of a keychain.
|
||||
/// This is useful for revealing spks of descriptors for which we don't have a keychain
|
||||
/// tracked.
|
||||
/// Refer to the `reveal_to_target` documentation for more.
|
||||
///
|
||||
/// Returns None if the provided `descriptor_id` doesn't correspond to a tracked descriptor.
|
||||
fn reveal_to_target_with_id(
|
||||
&mut self,
|
||||
descriptor_id: DescriptorId,
|
||||
target_index: u32,
|
||||
) -> Option<(
|
||||
SpkIterator<Descriptor<DescriptorPublicKey>>,
|
||||
super::ChangeSet<K>,
|
||||
)> {
|
||||
let descriptor = self
|
||||
.descriptor_ids_to_descriptors
|
||||
.get(&descriptor_id)?
|
||||
.clone();
|
||||
let has_wildcard = descriptor.has_wildcard();
|
||||
|
||||
let target_index = if has_wildcard { target_index } else { 0 };
|
||||
let next_reveal_index = self
|
||||
.last_revealed
|
||||
.get(&descriptor_id)
|
||||
.map_or(0, |index| *index + 1);
|
||||
|
||||
debug_assert!(next_reveal_index + self.lookahead >= self.next_store_index(descriptor_id));
|
||||
|
||||
// If the target_index is already revealed, we are done
|
||||
if next_reveal_index > target_index {
|
||||
return Some((
|
||||
SpkIterator::new_with_range(descriptor, next_reveal_index..next_reveal_index),
|
||||
super::ChangeSet::default(),
|
||||
));
|
||||
}
|
||||
|
||||
// We range over the indexes that are not stored and insert their spks in the index.
|
||||
// Indexes from next_reveal_index to next_reveal_index + lookahead are already stored (due
|
||||
// to lookahead), so we only range from next_reveal_index + lookahead to target + lookahead
|
||||
let range = next_reveal_index + self.lookahead..=target_index + self.lookahead;
|
||||
for (new_index, new_spk) in SpkIterator::new_with_range(descriptor.clone(), range) {
|
||||
let _inserted = self.inner.insert_spk((descriptor_id, new_index), new_spk);
|
||||
debug_assert!(_inserted, "must not have existing spk");
|
||||
debug_assert!(
|
||||
has_wildcard || new_index == 0,
|
||||
"non-wildcard descriptors must not iterate past index 0"
|
||||
);
|
||||
}
|
||||
|
||||
let _old_index = self.last_revealed.insert(descriptor_id, target_index);
|
||||
debug_assert!(_old_index < Some(target_index));
|
||||
Some((
|
||||
SpkIterator::new_with_range(descriptor, next_reveal_index..target_index + 1),
|
||||
super::ChangeSet {
|
||||
keychains_added: BTreeMap::new(),
|
||||
last_revealed: core::iter::once((descriptor_id, target_index)).collect(),
|
||||
},
|
||||
))
|
||||
}
|
||||
|
||||
/// Reveals script pubkeys of the `keychain`'s descriptor **up to and including** the
|
||||
/// `target_index`.
|
||||
///
|
||||
@ -586,84 +779,46 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
|
||||
/// [`super::ChangeSet`], which reports updates to the latest revealed index. If no new script
|
||||
/// pubkeys are revealed, then both of these will be empty.
|
||||
///
|
||||
/// # Panics
|
||||
///
|
||||
/// Panics if `keychain` does not exist.
|
||||
/// Returns None if the provided `keychain` doesn't exist.
|
||||
pub fn reveal_to_target(
|
||||
&mut self,
|
||||
keychain: &K,
|
||||
target_index: u32,
|
||||
) -> (
|
||||
) -> Option<(
|
||||
SpkIterator<Descriptor<DescriptorPublicKey>>,
|
||||
super::ChangeSet<K>,
|
||||
) {
|
||||
let descriptor = self.keychains.get(keychain).expect("keychain must exist");
|
||||
let has_wildcard = descriptor.has_wildcard();
|
||||
|
||||
let target_index = if has_wildcard { target_index } else { 0 };
|
||||
let next_reveal_index = self
|
||||
.last_revealed
|
||||
.get(keychain)
|
||||
.map_or(0, |index| *index + 1);
|
||||
|
||||
debug_assert!(next_reveal_index + self.lookahead >= self.next_store_index(keychain));
|
||||
|
||||
// If the target_index is already revealed, we are done
|
||||
if next_reveal_index > target_index {
|
||||
return (
|
||||
SpkIterator::new_with_range(
|
||||
descriptor.clone(),
|
||||
next_reveal_index..next_reveal_index,
|
||||
),
|
||||
super::ChangeSet::default(),
|
||||
);
|
||||
}
|
||||
|
||||
// We range over the indexes that are not stored and insert their spks in the index.
|
||||
// Indexes from next_reveal_index to next_reveal_index + lookahead are already stored (due
|
||||
// to lookahead), so we only range from next_reveal_index + lookahead to target + lookahead
|
||||
let range = next_reveal_index + self.lookahead..=target_index + self.lookahead;
|
||||
for (new_index, new_spk) in SpkIterator::new_with_range(descriptor, range) {
|
||||
let _inserted = self
|
||||
.inner
|
||||
.insert_spk((keychain.clone(), new_index), new_spk);
|
||||
debug_assert!(_inserted, "must not have existing spk");
|
||||
debug_assert!(
|
||||
has_wildcard || new_index == 0,
|
||||
"non-wildcard descriptors must not iterate past index 0"
|
||||
);
|
||||
}
|
||||
|
||||
let _old_index = self.last_revealed.insert(keychain.clone(), target_index);
|
||||
debug_assert!(_old_index < Some(target_index));
|
||||
(
|
||||
SpkIterator::new_with_range(descriptor.clone(), next_reveal_index..target_index + 1),
|
||||
super::ChangeSet(core::iter::once((keychain.clone(), target_index)).collect()),
|
||||
)
|
||||
)> {
|
||||
let descriptor_id = self.keychains_to_descriptors.get(keychain)?.0;
|
||||
self.reveal_to_target_with_id(descriptor_id, target_index)
|
||||
}
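
With the fallible signature, callers branch on whether the keychain is tracked instead of relying on the old panicking behaviour. A sketch, continuing from the `index` assumed in the earlier snippet:

match index.reveal_to_target(&TestKeychain::External, 20) {
    Some((new_spks, changeset)) => {
        // `new_spks` yields only the newly revealed (derivation index, script pubkey) pairs.
        for (i, spk) in new_spks {
            println!("revealed index {}: {:?}", i, spk);
        }
        // `changeset.last_revealed` is keyed by `DescriptorId` and can be persisted as-is.
        let _ = changeset;
    }
    None => println!("no descriptor is tracked under this keychain"),
}
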
|
||||
|
||||
/// Attempts to reveal the next script pubkey for `keychain`.
|
||||
///
|
||||
/// Returns the derivation index of the revealed script pubkey, the revealed script pubkey and a
|
||||
/// [`super::ChangeSet`] which represents changes in the last revealed index (if any).
|
||||
/// Returns None if the provided keychain doesn't exist.
|
||||
///
|
||||
/// When a new script cannot be revealed, we return the last revealed script and an empty
|
||||
/// [`super::ChangeSet`]. There are two scenarios when a new script pubkey cannot be derived:
|
||||
///
|
||||
/// 1. The descriptor has no wildcard and already has one script revealed.
|
||||
/// 2. The descriptor has already revealed scripts up to the numeric bound.
|
||||
///
|
||||
/// # Panics
|
||||
///
|
||||
/// Panics if the `keychain` does not exist.
|
||||
pub fn reveal_next_spk(&mut self, keychain: &K) -> ((u32, &Script), super::ChangeSet<K>) {
|
||||
let (next_index, _) = self.next_index(keychain);
|
||||
let changeset = self.reveal_to_target(keychain, next_index).1;
|
||||
/// 3. There is no descriptor associated with the given keychain.
|
||||
pub fn reveal_next_spk(
|
||||
&mut self,
|
||||
keychain: &K,
|
||||
) -> Option<((u32, &Script), super::ChangeSet<K>)> {
|
||||
let descriptor_id = self.keychains_to_descriptors.get(keychain)?.0;
|
||||
let (next_index, _) = self.next_index(keychain).expect("We know keychain exists");
|
||||
let changeset = self
|
||||
.reveal_to_target(keychain, next_index)
|
||||
.expect("We know keychain exists")
|
||||
.1;
|
||||
let script = self
|
||||
.inner
|
||||
.spk_at_index(&(keychain.clone(), next_index))
|
||||
.spk_at_index(&(descriptor_id, next_index))
|
||||
.expect("script must already be stored");
|
||||
((next_index, script), changeset)
|
||||
Some(((next_index, script), changeset))
|
||||
}
|
||||
|
||||
/// Gets the next unused script pubkey in the keychain. I.e., the script pubkey with the lowest
|
||||
@ -675,21 +830,22 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
|
||||
/// has used all scripts up to the derivation bounds, then the last derived script pubkey will be
|
||||
/// returned.
|
||||
///
|
||||
/// # Panics
|
||||
///
|
||||
/// Panics if `keychain` has never been added to the index
|
||||
pub fn next_unused_spk(&mut self, keychain: &K) -> ((u32, &Script), super::ChangeSet<K>) {
|
||||
/// Returns None if the provided keychain doesn't exist.
|
||||
pub fn next_unused_spk(
|
||||
&mut self,
|
||||
keychain: &K,
|
||||
) -> Option<((u32, &Script), super::ChangeSet<K>)> {
|
||||
let need_new = self.unused_keychain_spks(keychain).next().is_none();
|
||||
// this rather strange branch is needed because of some lifetime issues
|
||||
if need_new {
|
||||
self.reveal_next_spk(keychain)
|
||||
} else {
|
||||
(
|
||||
Some((
|
||||
self.unused_keychain_spks(keychain)
|
||||
.next()
|
||||
.expect("we already know next exists"),
|
||||
super::ChangeSet::default(),
|
||||
)
|
||||
))
|
||||
}
|
||||
}
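
The address-handout helpers follow the same pattern: `None` signals a keychain with no descriptor rather than a panic. A sketch under the same assumed setup:

// Reveal exactly one new script pubkey for the external keychain.
if let Some(((next_index, spk), changeset)) = index.reveal_next_spk(&TestKeychain::External) {
    println!("revealed index {}: {:?}", next_index, spk);
    // Persist `changeset` (hypothetical persistence hook) before handing out `spk`.
    let _ = changeset;
}

// Hand out the lowest-index script pubkey with no observed use, revealing one if needed.
let ((unused_index, unused_spk), _changeset) = index
    .next_unused_spk(&TestKeychain::External)
    .expect("a descriptor was inserted for this keychain");
println!("next unused index {}: {:?}", unused_index, unused_spk);
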
|
||||
|
||||
@ -708,21 +864,40 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
|
||||
&'a self,
|
||||
range: impl RangeBounds<K> + 'a,
|
||||
) -> impl DoubleEndedIterator<Item = (&'a K, u32, OutPoint)> + 'a {
|
||||
let bounds = Self::map_to_inner_bounds(range);
|
||||
let bounds = self.map_to_inner_bounds(range);
|
||||
self.inner
|
||||
.outputs_in_range(bounds)
|
||||
.map(move |((keychain, i), op)| (keychain, *i, op))
|
||||
.map(move |((descriptor_id, i), op)| {
|
||||
(
|
||||
&self
|
||||
.descriptor_ids_to_keychain
|
||||
.get(descriptor_id)
|
||||
.expect("must be here")
|
||||
.0,
|
||||
*i,
|
||||
op,
|
||||
)
|
||||
})
|
||||
}
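
Callers still express the range in keychain space; the translation to `DescriptorId` bounds happens internally via `map_to_inner_bounds`. A brief sketch with the same assumed index:

// List every indexed outpoint that belongs to the external keychain.
for (keychain, derivation_index, outpoint) in
    index.outputs_in_range(TestKeychain::External..=TestKeychain::External)
{
    println!("{:?} at {} -> {}", keychain, derivation_index, outpoint);
}
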
|
||||
|
||||
fn map_to_inner_bounds(bound: impl RangeBounds<K>) -> impl RangeBounds<(K, u32)> {
|
||||
fn map_to_inner_bounds(
|
||||
&self,
|
||||
bound: impl RangeBounds<K>,
|
||||
) -> impl RangeBounds<(DescriptorId, u32)> {
|
||||
let get_desc_id = |keychain| {
|
||||
self.keychains_to_descriptors
|
||||
.get(keychain)
|
||||
.map(|(desc_id, _)| *desc_id)
|
||||
.unwrap_or_else(|| DescriptorId::from_byte_array([0; 32]))
|
||||
};
|
||||
let start = match bound.start_bound() {
|
||||
Bound::Included(keychain) => Bound::Included((keychain.clone(), u32::MIN)),
|
||||
Bound::Excluded(keychain) => Bound::Excluded((keychain.clone(), u32::MAX)),
|
||||
Bound::Included(keychain) => Bound::Included((get_desc_id(keychain), u32::MIN)),
|
||||
Bound::Excluded(keychain) => Bound::Excluded((get_desc_id(keychain), u32::MAX)),
|
||||
Bound::Unbounded => Bound::Unbounded,
|
||||
};
|
||||
let end = match bound.end_bound() {
|
||||
Bound::Included(keychain) => Bound::Included((keychain.clone(), u32::MAX)),
|
||||
Bound::Excluded(keychain) => Bound::Excluded((keychain.clone(), u32::MIN)),
|
||||
Bound::Included(keychain) => Bound::Included((get_desc_id(keychain), u32::MAX)),
|
||||
Bound::Excluded(keychain) => Bound::Excluded((get_desc_id(keychain), u32::MIN)),
|
||||
Bound::Unbounded => Bound::Unbounded,
|
||||
};
|
||||
|
||||
@ -738,7 +913,7 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
|
||||
/// Returns the highest derivation index of each keychain for which [`KeychainTxOutIndex`] has
/// found a [`TxOut`] with its script pubkey.
pub fn last_used_indices(&self) -> BTreeMap<K, u32> {
|
||||
self.keychains
|
||||
self.keychains_to_descriptors
|
||||
.iter()
|
||||
.filter_map(|(keychain, _)| {
|
||||
self.last_used_index(keychain)
|
||||
@ -747,9 +922,28 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Applies the derivation changeset to the [`KeychainTxOutIndex`], extending the number of
/// derived scripts per keychain, as specified in the `changeset`.
/// Applies the derivation changeset to the [`KeychainTxOutIndex`], as specified in the
/// [`ChangeSet::append`] documentation:
/// - Extends the number of derived scripts per keychain
/// - Adds new descriptors introduced
/// - If a descriptor is introduced for a keychain that already had a descriptor, overwrites
///   the old descriptor
pub fn apply_changeset(&mut self, changeset: super::ChangeSet<K>) {
|
||||
let _ = self.reveal_to_target_multi(&changeset.0);
|
||||
let ChangeSet {
|
||||
keychains_added,
|
||||
last_revealed,
|
||||
} = changeset;
|
||||
for (keychain, descriptor) in keychains_added {
|
||||
let _ = self.insert_descriptor(keychain, descriptor);
|
||||
}
|
||||
let last_revealed = last_revealed
|
||||
.into_iter()
|
||||
.filter_map(|(descriptor_id, index)| {
|
||||
self.descriptor_ids_to_keychain
|
||||
.get(&descriptor_id)
|
||||
.map(|(k, _)| (k.clone(), index))
|
||||
})
|
||||
.collect();
|
||||
let _ = self.reveal_to_target_multi(&last_revealed);
|
||||
}
|
||||
}
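
A sketch of building the struct-form changeset by hand and replaying it onto an index (field values chosen purely for illustration, reusing `external` and `index` from the earlier snippets):

use bdk_chain::{keychain::ChangeSet, DescriptorExt};

let changeset = ChangeSet {
    // Introduce (or overwrite) the descriptor tracked under the external keychain.
    keychains_added: [(TestKeychain::External, external.clone())].into(),
    // Record that derivation index 9 of that descriptor has been revealed.
    last_revealed: [(external.descriptor_id(), 9)].into(),
};
// `last_revealed` is merged with `Append` semantics, so an index that has already
// revealed past 9 is left unchanged.
index.apply_changeset(changeset);
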
|
||||
|
@ -44,7 +44,7 @@ pub use miniscript;
|
||||
#[cfg(feature = "miniscript")]
|
||||
mod descriptor_ext;
|
||||
#[cfg(feature = "miniscript")]
|
||||
pub use descriptor_ext::DescriptorExt;
|
||||
pub use descriptor_ext::{DescriptorExt, DescriptorId};
|
||||
#[cfg(feature = "miniscript")]
|
||||
mod spk_iter;
|
||||
#[cfg(feature = "miniscript")]
|
||||
|
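
`DescriptorId` is re-exported alongside `DescriptorExt` because changesets now key `last_revealed` by it. A small sketch of computing an id (descriptor string taken from the test fixtures):

use bdk_chain::{DescriptorExt, DescriptorId};
use miniscript::{Descriptor, DescriptorPublicKey};

let secp = bdk_chain::bitcoin::secp256k1::Secp256k1::signing_only();
let (descriptor, _keymap) = Descriptor::<DescriptorPublicKey>::parse_descriptor(
    &secp,
    "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/0/*)",
)
.expect("valid test descriptor");

// The id depends only on the descriptor itself, not on which keychain `K` tracks it.
let id: DescriptorId = descriptor.descriptor_id();
println!("descriptor id: {:?}", id);
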
@ -158,8 +158,8 @@ mod test {
|
||||
let (external_descriptor,_) = Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/0/*)").unwrap();
|
||||
let (internal_descriptor,_) = Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/1/*)").unwrap();
|
||||
|
||||
txout_index.add_keychain(TestKeychain::External, external_descriptor.clone());
|
||||
txout_index.add_keychain(TestKeychain::Internal, internal_descriptor.clone());
|
||||
let _ = txout_index.insert_descriptor(TestKeychain::External, external_descriptor.clone());
|
||||
let _ = txout_index.insert_descriptor(TestKeychain::Internal, internal_descriptor.clone());
|
||||
|
||||
(txout_index, external_descriptor, internal_descriptor)
|
||||
}
|
||||
|
@ -75,10 +75,13 @@ pub fn new_tx(lt: u32) -> bitcoin::Transaction {
|
||||
}
|
||||
|
||||
#[allow(unused)]
|
||||
pub const DESCRIPTORS: [&str; 5] = [
|
||||
pub const DESCRIPTORS: [&str; 7] = [
|
||||
"tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/0/*)",
|
||||
"tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/1/*)",
|
||||
"wpkh([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/1/0/*)",
|
||||
"tr(tprv8ZgxMBicQKsPd3krDUsBAmtnRsK3rb8u5yi1zhQgMhF1tR8MW7xfE4rnrbbsrbPR52e7rKapu6ztw1jXveJSCGHEriUGZV7mCe88duLp5pj/86'/1'/0'/0/*)",
|
||||
"tr(tprv8ZgxMBicQKsPd3krDUsBAmtnRsK3rb8u5yi1zhQgMhF1tR8MW7xfE4rnrbbsrbPR52e7rKapu6ztw1jXveJSCGHEriUGZV7mCe88duLp5pj/86'/1'/0'/1/*)",
|
||||
"wpkh(xprv9s21ZrQH143K4EXURwMHuLS469fFzZyXk7UUpdKfQwhoHcAiYTakpe8pMU2RiEdvrU9McyuE7YDoKcXkoAwEGoK53WBDnKKv2zZbb9BzttX/1/0/*)",
|
||||
// non-wildcard
|
||||
"wpkh([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/1/0)",
|
||||
];
|
||||
|
@ -52,7 +52,8 @@ impl TxOutTemplate {
|
||||
pub fn init_graph<'a, A: Anchor + Clone + 'a>(
|
||||
tx_templates: impl IntoIterator<Item = &'a TxTemplate<'a, A>>,
|
||||
) -> (TxGraph<A>, SpkTxOutIndex<u32>, HashMap<&'a str, Txid>) {
|
||||
let (descriptor, _) = Descriptor::parse_descriptor(&Secp256k1::signing_only(), super::DESCRIPTORS[2]).unwrap();
|
||||
let (descriptor, _) =
|
||||
Descriptor::parse_descriptor(&Secp256k1::signing_only(), super::DESCRIPTORS[2]).unwrap();
|
||||
let mut graph = TxGraph::<A>::default();
|
||||
let mut spk_index = SpkTxOutIndex::default();
|
||||
(0..10).for_each(|index| {
|
||||
|
@ -3,11 +3,12 @@ mod common;
|
||||
|
||||
use std::{collections::BTreeSet, sync::Arc};
|
||||
|
||||
use crate::common::DESCRIPTORS;
|
||||
use bdk_chain::{
|
||||
indexed_tx_graph::{self, IndexedTxGraph},
|
||||
keychain::{self, Balance, KeychainTxOutIndex},
|
||||
local_chain::LocalChain,
|
||||
tx_graph, ChainPosition, ConfirmationHeightAnchor,
|
||||
tx_graph, ChainPosition, ConfirmationHeightAnchor, DescriptorExt,
|
||||
};
|
||||
use bitcoin::{
|
||||
secp256k1::Secp256k1, Amount, OutPoint, Script, ScriptBuf, Transaction, TxIn, TxOut,
|
||||
@ -23,16 +24,15 @@ use miniscript::Descriptor;
|
||||
/// agnostic.
|
||||
#[test]
|
||||
fn insert_relevant_txs() {
|
||||
let (descriptor, _) =
|
||||
Descriptor::parse_descriptor(&Secp256k1::signing_only(), common::DESCRIPTORS[0])
|
||||
.expect("must be valid");
|
||||
let (descriptor, _) = Descriptor::parse_descriptor(&Secp256k1::signing_only(), DESCRIPTORS[0])
|
||||
.expect("must be valid");
|
||||
let spk_0 = descriptor.at_derivation_index(0).unwrap().script_pubkey();
|
||||
let spk_1 = descriptor.at_derivation_index(9).unwrap().script_pubkey();
|
||||
|
||||
let mut graph = IndexedTxGraph::<ConfirmationHeightAnchor, KeychainTxOutIndex<()>>::new(
|
||||
KeychainTxOutIndex::new(10),
|
||||
);
|
||||
graph.index.add_keychain((), descriptor);
|
||||
let _ = graph.index.insert_descriptor((), descriptor.clone());
|
||||
|
||||
let tx_a = Transaction {
|
||||
output: vec![
|
||||
@ -71,7 +71,10 @@ fn insert_relevant_txs() {
|
||||
txs: txs.iter().cloned().map(Arc::new).collect(),
|
||||
..Default::default()
|
||||
},
|
||||
indexer: keychain::ChangeSet([((), 9_u32)].into()),
|
||||
indexer: keychain::ChangeSet {
|
||||
last_revealed: [(descriptor.descriptor_id(), 9_u32)].into(),
|
||||
keychains_added: [].into(),
|
||||
},
|
||||
};
|
||||
|
||||
assert_eq!(
|
||||
@ -79,7 +82,16 @@ fn insert_relevant_txs() {
|
||||
changeset,
|
||||
);
|
||||
|
||||
assert_eq!(graph.initial_changeset(), changeset,);
|
||||
// The initial changeset will also contain info about the keychain we added
|
||||
let initial_changeset = indexed_tx_graph::ChangeSet {
|
||||
graph: changeset.graph,
|
||||
indexer: keychain::ChangeSet {
|
||||
last_revealed: changeset.indexer.last_revealed,
|
||||
keychains_added: [((), descriptor)].into(),
|
||||
},
|
||||
};
|
||||
|
||||
assert_eq!(graph.initial_changeset(), initial_changeset);
|
||||
}
|
||||
|
||||
/// Ensure consistency IndexedTxGraph list_* and balance methods. These methods lists
|
||||
@ -126,8 +138,8 @@ fn test_list_owned_txouts() {
|
||||
KeychainTxOutIndex::new(10),
|
||||
);
|
||||
|
||||
graph.index.add_keychain("keychain_1".into(), desc_1);
|
||||
graph.index.add_keychain("keychain_2".into(), desc_2);
|
||||
let _ = graph.index.insert_descriptor("keychain_1".into(), desc_1);
|
||||
let _ = graph.index.insert_descriptor("keychain_2".into(), desc_2);
|
||||
|
||||
// Get trusted and untrusted addresses
|
||||
|
||||
@ -137,14 +149,20 @@ fn test_list_owned_txouts() {
|
||||
{
|
||||
// we need to scope here to take an immutable reference to the graph
|
||||
for _ in 0..10 {
|
||||
let ((_, script), _) = graph.index.reveal_next_spk(&"keychain_1".to_string());
|
||||
let ((_, script), _) = graph
|
||||
.index
|
||||
.reveal_next_spk(&"keychain_1".to_string())
|
||||
.unwrap();
|
||||
// TODO Assert indexes
|
||||
trusted_spks.push(script.to_owned());
|
||||
}
|
||||
}
|
||||
{
|
||||
for _ in 0..10 {
|
||||
let ((_, script), _) = graph.index.reveal_next_spk(&"keychain_2".to_string());
|
||||
let ((_, script), _) = graph
|
||||
.index
|
||||
.reveal_next_spk(&"keychain_2".to_string())
|
||||
.unwrap();
|
||||
untrusted_spks.push(script.to_owned());
|
||||
}
|
||||
}
|
||||
@ -237,26 +255,18 @@ fn test_list_owned_txouts() {
|
||||
.unwrap_or_else(|| panic!("block must exist at {}", height));
|
||||
let txouts = graph
|
||||
.graph()
|
||||
.filter_chain_txouts(
|
||||
&local_chain,
|
||||
chain_tip,
|
||||
graph.index.outpoints().iter().cloned(),
|
||||
)
|
||||
.filter_chain_txouts(&local_chain, chain_tip, graph.index.outpoints())
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let utxos = graph
|
||||
.graph()
|
||||
.filter_chain_unspents(
|
||||
&local_chain,
|
||||
chain_tip,
|
||||
graph.index.outpoints().iter().cloned(),
|
||||
)
|
||||
.filter_chain_unspents(&local_chain, chain_tip, graph.index.outpoints())
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let balance = graph.graph().balance(
|
||||
&local_chain,
|
||||
chain_tip,
|
||||
graph.index.outpoints().iter().cloned(),
|
||||
graph.index.outpoints(),
|
||||
|_, spk: &Script| trusted_spks.contains(&spk.to_owned()),
|
||||
);
|
||||
|
||||
|
@ -6,35 +6,38 @@ use bdk_chain::{
|
||||
collections::BTreeMap,
|
||||
indexed_tx_graph::Indexer,
|
||||
keychain::{self, ChangeSet, KeychainTxOutIndex},
|
||||
Append,
|
||||
Append, DescriptorExt, DescriptorId,
|
||||
};
|
||||
|
||||
use bitcoin::{secp256k1::Secp256k1, Amount, OutPoint, ScriptBuf, Transaction, TxOut};
|
||||
use miniscript::{Descriptor, DescriptorPublicKey};
|
||||
|
||||
use crate::common::DESCRIPTORS;
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Ord, PartialOrd)]
|
||||
enum TestKeychain {
|
||||
External,
|
||||
Internal,
|
||||
}
|
||||
|
||||
fn parse_descriptor(descriptor: &str) -> Descriptor<DescriptorPublicKey> {
|
||||
let secp = bdk_chain::bitcoin::secp256k1::Secp256k1::signing_only();
|
||||
Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, descriptor)
|
||||
.unwrap()
|
||||
.0
|
||||
}
|
||||
|
||||
fn init_txout_index(
|
||||
external_descriptor: Descriptor<DescriptorPublicKey>,
|
||||
internal_descriptor: Descriptor<DescriptorPublicKey>,
|
||||
lookahead: u32,
|
||||
) -> (
|
||||
bdk_chain::keychain::KeychainTxOutIndex<TestKeychain>,
|
||||
Descriptor<DescriptorPublicKey>,
|
||||
Descriptor<DescriptorPublicKey>,
|
||||
) {
|
||||
) -> bdk_chain::keychain::KeychainTxOutIndex<TestKeychain> {
|
||||
let mut txout_index = bdk_chain::keychain::KeychainTxOutIndex::<TestKeychain>::new(lookahead);
|
||||
|
||||
let secp = bdk_chain::bitcoin::secp256k1::Secp256k1::signing_only();
|
||||
let (external_descriptor,_) = Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/0/*)").unwrap();
|
||||
let (internal_descriptor,_) = Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/1/*)").unwrap();
|
||||
let _ = txout_index.insert_descriptor(TestKeychain::External, external_descriptor);
|
||||
let _ = txout_index.insert_descriptor(TestKeychain::Internal, internal_descriptor);
|
||||
|
||||
txout_index.add_keychain(TestKeychain::External, external_descriptor.clone());
|
||||
txout_index.add_keychain(TestKeychain::Internal, internal_descriptor.clone());
|
||||
|
||||
(txout_index, external_descriptor, internal_descriptor)
|
||||
txout_index
|
||||
}
|
||||
|
||||
fn spk_at_index(descriptor: &Descriptor<DescriptorPublicKey>, index: u32) -> ScriptBuf {
|
||||
@ -44,61 +47,136 @@ fn spk_at_index(descriptor: &Descriptor<DescriptorPublicKey>, index: u32) -> Scr
|
||||
.script_pubkey()
|
||||
}
|
||||
|
||||
// We create two empty changesets lhs and rhs, then insert various descriptors with various
// last_revealed values, append rhs to lhs, and check that the result is consistent with these rules:
// - Existing index doesn't update if the new index in `other` is lower than `self`.
|
||||
// - Existing index updates if the new index in `other` is higher than `self`.
|
||||
// - Existing index is unchanged if keychain doesn't exist in `other`.
|
||||
// - New keychain gets added if the keychain is in `other` but not in `self`.
|
||||
#[test]
|
||||
fn append_keychain_derivation_indices() {
|
||||
#[derive(Ord, PartialOrd, Eq, PartialEq, Clone, Debug)]
|
||||
enum Keychain {
|
||||
One,
|
||||
Two,
|
||||
Three,
|
||||
Four,
|
||||
}
|
||||
let mut lhs_di = BTreeMap::<Keychain, u32>::default();
|
||||
let mut rhs_di = BTreeMap::<Keychain, u32>::default();
|
||||
lhs_di.insert(Keychain::One, 7);
|
||||
lhs_di.insert(Keychain::Two, 0);
|
||||
rhs_di.insert(Keychain::One, 3);
|
||||
rhs_di.insert(Keychain::Two, 5);
|
||||
lhs_di.insert(Keychain::Three, 3);
|
||||
rhs_di.insert(Keychain::Four, 4);
|
||||
fn append_changesets_check_last_revealed() {
|
||||
let secp = bitcoin::secp256k1::Secp256k1::signing_only();
|
||||
let descriptor_ids: Vec<_> = DESCRIPTORS
|
||||
.iter()
|
||||
.take(4)
|
||||
.map(|d| {
|
||||
Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, d)
|
||||
.unwrap()
|
||||
.0
|
||||
.descriptor_id()
|
||||
})
|
||||
.collect();
|
||||
|
||||
let mut lhs = ChangeSet(lhs_di);
|
||||
let rhs = ChangeSet(rhs_di);
|
||||
let mut lhs_di = BTreeMap::<DescriptorId, u32>::default();
|
||||
let mut rhs_di = BTreeMap::<DescriptorId, u32>::default();
|
||||
lhs_di.insert(descriptor_ids[0], 7);
|
||||
lhs_di.insert(descriptor_ids[1], 0);
|
||||
lhs_di.insert(descriptor_ids[2], 3);
|
||||
|
||||
rhs_di.insert(descriptor_ids[0], 3); // value less than lhs desc 0
|
||||
rhs_di.insert(descriptor_ids[1], 5); // value more than lhs desc 1
|
||||
rhs_di.insert(descriptor_ids[3], 4); // key doesn't exist in lhs
|
||||
|
||||
let mut lhs = ChangeSet {
|
||||
keychains_added: BTreeMap::<(), _>::new(),
|
||||
last_revealed: lhs_di,
|
||||
};
|
||||
let rhs = ChangeSet {
|
||||
keychains_added: BTreeMap::<(), _>::new(),
|
||||
last_revealed: rhs_di,
|
||||
};
|
||||
lhs.append(rhs);
|
||||
|
||||
// Exiting index doesn't update if the new index in `other` is lower than `self`.
|
||||
assert_eq!(lhs.0.get(&Keychain::One), Some(&7));
|
||||
// Existing index doesn't update if the new index in `other` is lower than `self`.
|
||||
assert_eq!(lhs.last_revealed.get(&descriptor_ids[0]), Some(&7));
|
||||
// Existing index updates if the new index in `other` is higher than `self`.
|
||||
assert_eq!(lhs.0.get(&Keychain::Two), Some(&5));
|
||||
assert_eq!(lhs.last_revealed.get(&descriptor_ids[1]), Some(&5));
|
||||
// Existing index is unchanged if keychain doesn't exist in `other`.
|
||||
assert_eq!(lhs.0.get(&Keychain::Three), Some(&3));
|
||||
assert_eq!(lhs.last_revealed.get(&descriptor_ids[2]), Some(&3));
|
||||
// New keychain gets added if the keychain is in `other` but not in `self`.
|
||||
assert_eq!(lhs.0.get(&Keychain::Four), Some(&4));
|
||||
assert_eq!(lhs.last_revealed.get(&descriptor_ids[3]), Some(&4));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_apply_changeset_with_different_descriptors_to_same_keychain() {
|
||||
let external_descriptor = parse_descriptor(DESCRIPTORS[0]);
|
||||
let internal_descriptor = parse_descriptor(DESCRIPTORS[1]);
|
||||
let mut txout_index =
|
||||
init_txout_index(external_descriptor.clone(), internal_descriptor.clone(), 0);
|
||||
assert_eq!(
|
||||
txout_index.keychains().collect::<Vec<_>>(),
|
||||
vec![
|
||||
(&TestKeychain::External, &external_descriptor),
|
||||
(&TestKeychain::Internal, &internal_descriptor)
|
||||
]
|
||||
);
|
||||
|
||||
let changeset = ChangeSet {
|
||||
keychains_added: [(TestKeychain::External, internal_descriptor.clone())].into(),
|
||||
last_revealed: [].into(),
|
||||
};
|
||||
txout_index.apply_changeset(changeset);
|
||||
|
||||
assert_eq!(
|
||||
txout_index.keychains().collect::<Vec<_>>(),
|
||||
vec![
|
||||
(&TestKeychain::External, &internal_descriptor),
|
||||
(&TestKeychain::Internal, &internal_descriptor)
|
||||
]
|
||||
);
|
||||
|
||||
let changeset = ChangeSet {
|
||||
keychains_added: [(TestKeychain::Internal, external_descriptor.clone())].into(),
|
||||
last_revealed: [].into(),
|
||||
};
|
||||
txout_index.apply_changeset(changeset);
|
||||
|
||||
assert_eq!(
|
||||
txout_index.keychains().collect::<Vec<_>>(),
|
||||
vec![
|
||||
(&TestKeychain::External, &internal_descriptor),
|
||||
(&TestKeychain::Internal, &external_descriptor)
|
||||
]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_set_all_derivation_indices() {
|
||||
use bdk_chain::indexed_tx_graph::Indexer;
|
||||
|
||||
let (mut txout_index, _, _) = init_txout_index(0);
|
||||
let external_descriptor = parse_descriptor(DESCRIPTORS[0]);
|
||||
let internal_descriptor = parse_descriptor(DESCRIPTORS[1]);
|
||||
let mut txout_index =
|
||||
init_txout_index(external_descriptor.clone(), internal_descriptor.clone(), 0);
|
||||
let derive_to: BTreeMap<_, _> =
|
||||
[(TestKeychain::External, 12), (TestKeychain::Internal, 24)].into();
|
||||
let last_revealed: BTreeMap<_, _> = [
|
||||
(external_descriptor.descriptor_id(), 12),
|
||||
(internal_descriptor.descriptor_id(), 24),
|
||||
]
|
||||
.into();
|
||||
assert_eq!(
|
||||
txout_index.reveal_to_target_multi(&derive_to).1.as_inner(),
|
||||
&derive_to
|
||||
txout_index.reveal_to_target_multi(&derive_to).1,
|
||||
ChangeSet {
|
||||
keychains_added: BTreeMap::new(),
|
||||
last_revealed: last_revealed.clone()
|
||||
}
|
||||
);
|
||||
assert_eq!(txout_index.last_revealed_indices(), &derive_to);
|
||||
assert_eq!(txout_index.last_revealed_indices(), derive_to);
|
||||
assert_eq!(
|
||||
txout_index.reveal_to_target_multi(&derive_to).1,
|
||||
keychain::ChangeSet::default(),
|
||||
"no changes if we set to the same thing"
|
||||
);
|
||||
assert_eq!(txout_index.initial_changeset().as_inner(), &derive_to);
|
||||
assert_eq!(txout_index.initial_changeset().last_revealed, last_revealed);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_lookahead() {
|
||||
let (mut txout_index, external_desc, internal_desc) = init_txout_index(10);
|
||||
let external_descriptor = parse_descriptor(DESCRIPTORS[0]);
|
||||
let internal_descriptor = parse_descriptor(DESCRIPTORS[1]);
|
||||
let mut txout_index =
|
||||
init_txout_index(external_descriptor.clone(), internal_descriptor.clone(), 10);
|
||||
|
||||
// given:
|
||||
// - external lookahead set to 10
|
||||
@ -108,15 +186,16 @@ fn test_lookahead() {
|
||||
// - scripts cached in spk_txout_index should increase correctly
|
||||
// - stored scripts of external keychain should be of expected counts
|
||||
for index in (0..20).skip_while(|i| i % 2 == 1) {
|
||||
let (revealed_spks, revealed_changeset) =
|
||||
txout_index.reveal_to_target(&TestKeychain::External, index);
|
||||
let (revealed_spks, revealed_changeset) = txout_index
|
||||
.reveal_to_target(&TestKeychain::External, index)
|
||||
.unwrap();
|
||||
assert_eq!(
|
||||
revealed_spks.collect::<Vec<_>>(),
|
||||
vec![(index, spk_at_index(&external_desc, index))],
|
||||
vec![(index, spk_at_index(&external_descriptor, index))],
|
||||
);
|
||||
assert_eq!(
|
||||
revealed_changeset.as_inner(),
|
||||
&[(TestKeychain::External, index)].into()
|
||||
&revealed_changeset.last_revealed,
|
||||
&[(external_descriptor.descriptor_id(), index)].into()
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
@ -158,17 +237,18 @@ fn test_lookahead() {
|
||||
// - derivation index is set ahead of current derivation index + lookahead
|
||||
// expect:
|
||||
// - scripts cached in spk_txout_index should increase correctly, a.k.a. no scripts are skipped
|
||||
let (revealed_spks, revealed_changeset) =
|
||||
txout_index.reveal_to_target(&TestKeychain::Internal, 24);
|
||||
let (revealed_spks, revealed_changeset) = txout_index
|
||||
.reveal_to_target(&TestKeychain::Internal, 24)
|
||||
.unwrap();
|
||||
assert_eq!(
|
||||
revealed_spks.collect::<Vec<_>>(),
|
||||
(0..=24)
|
||||
.map(|index| (index, spk_at_index(&internal_desc, index)))
|
||||
.map(|index| (index, spk_at_index(&internal_descriptor, index)))
|
||||
.collect::<Vec<_>>(),
|
||||
);
|
||||
assert_eq!(
|
||||
revealed_changeset.as_inner(),
|
||||
&[(TestKeychain::Internal, 24)].into()
|
||||
&revealed_changeset.last_revealed,
|
||||
&[(internal_descriptor.descriptor_id(), 24)].into()
|
||||
);
|
||||
assert_eq!(
|
||||
txout_index.inner().all_spks().len(),
|
||||
@ -204,14 +284,14 @@ fn test_lookahead() {
|
||||
let tx = Transaction {
|
||||
output: vec![
|
||||
TxOut {
|
||||
script_pubkey: external_desc
|
||||
script_pubkey: external_descriptor
|
||||
.at_derivation_index(external_index)
|
||||
.unwrap()
|
||||
.script_pubkey(),
|
||||
value: Amount::from_sat(10_000),
|
||||
},
|
||||
TxOut {
|
||||
script_pubkey: internal_desc
|
||||
script_pubkey: internal_descriptor
|
||||
.at_derivation_index(internal_index)
|
||||
.unwrap()
|
||||
.script_pubkey(),
|
||||
@ -251,14 +331,17 @@ fn test_lookahead() {
|
||||
// - last used index should change as expected
|
||||
#[test]
|
||||
fn test_scan_with_lookahead() {
|
||||
let (mut txout_index, external_desc, _) = init_txout_index(10);
|
||||
let external_descriptor = parse_descriptor(DESCRIPTORS[0]);
|
||||
let internal_descriptor = parse_descriptor(DESCRIPTORS[1]);
|
||||
let mut txout_index =
|
||||
init_txout_index(external_descriptor.clone(), internal_descriptor.clone(), 10);
|
||||
|
||||
let spks: BTreeMap<u32, ScriptBuf> = [0, 10, 20, 30]
|
||||
.into_iter()
|
||||
.map(|i| {
|
||||
(
|
||||
i,
|
||||
external_desc
|
||||
external_descriptor
|
||||
.at_derivation_index(i)
|
||||
.unwrap()
|
||||
.script_pubkey(),
|
||||
@ -275,8 +358,8 @@ fn test_scan_with_lookahead() {
|
||||
|
||||
let changeset = txout_index.index_txout(op, &txout);
|
||||
assert_eq!(
|
||||
changeset.as_inner(),
|
||||
&[(TestKeychain::External, spk_i)].into()
|
||||
&changeset.last_revealed,
|
||||
&[(external_descriptor.descriptor_id(), spk_i)].into()
|
||||
);
|
||||
assert_eq!(
|
||||
txout_index.last_revealed_index(&TestKeychain::External),
|
||||
@ -289,7 +372,7 @@ fn test_scan_with_lookahead() {
|
||||
}
|
||||
|
||||
// now try with index 41 (lookahead surpassed), we expect that the txout to not be indexed
|
||||
let spk_41 = external_desc
|
||||
let spk_41 = external_descriptor
|
||||
.at_derivation_index(41)
|
||||
.unwrap()
|
||||
.script_pubkey();
|
||||
@ -305,11 +388,13 @@ fn test_scan_with_lookahead() {
|
||||
#[test]
|
||||
#[rustfmt::skip]
|
||||
fn test_wildcard_derivations() {
|
||||
let (mut txout_index, external_desc, _) = init_txout_index(0);
|
||||
let external_spk_0 = external_desc.at_derivation_index(0).unwrap().script_pubkey();
|
||||
let external_spk_16 = external_desc.at_derivation_index(16).unwrap().script_pubkey();
|
||||
let external_spk_26 = external_desc.at_derivation_index(26).unwrap().script_pubkey();
|
||||
let external_spk_27 = external_desc.at_derivation_index(27).unwrap().script_pubkey();
|
||||
let external_descriptor = parse_descriptor(DESCRIPTORS[0]);
|
||||
let internal_descriptor = parse_descriptor(DESCRIPTORS[1]);
|
||||
let mut txout_index = init_txout_index(external_descriptor.clone(), internal_descriptor.clone(), 0);
|
||||
let external_spk_0 = external_descriptor.at_derivation_index(0).unwrap().script_pubkey();
|
||||
let external_spk_16 = external_descriptor.at_derivation_index(16).unwrap().script_pubkey();
|
||||
let external_spk_26 = external_descriptor.at_derivation_index(26).unwrap().script_pubkey();
|
||||
let external_spk_27 = external_descriptor.at_derivation_index(27).unwrap().script_pubkey();
|
||||
|
||||
// - nothing is derived
|
||||
// - unused list is also empty
|
||||
@ -317,13 +402,13 @@ fn test_wildcard_derivations() {
|
||||
// - next_derivation_index() == (0, true)
|
||||
// - derive_new() == ((0, <spk>), keychain::ChangeSet)
|
||||
// - next_unused() == ((0, <spk>), keychain::ChangeSet:is_empty())
|
||||
assert_eq!(txout_index.next_index(&TestKeychain::External), (0, true));
|
||||
let (spk, changeset) = txout_index.reveal_next_spk(&TestKeychain::External);
|
||||
assert_eq!(txout_index.next_index(&TestKeychain::External).unwrap(), (0, true));
|
||||
let (spk, changeset) = txout_index.reveal_next_spk(&TestKeychain::External).unwrap();
|
||||
assert_eq!(spk, (0_u32, external_spk_0.as_script()));
|
||||
assert_eq!(changeset.as_inner(), &[(TestKeychain::External, 0)].into());
|
||||
let (spk, changeset) = txout_index.next_unused_spk(&TestKeychain::External);
|
||||
assert_eq!(&changeset.last_revealed, &[(external_descriptor.descriptor_id(), 0)].into());
|
||||
let (spk, changeset) = txout_index.next_unused_spk(&TestKeychain::External).unwrap();
|
||||
assert_eq!(spk, (0_u32, external_spk_0.as_script()));
|
||||
assert_eq!(changeset.as_inner(), &[].into());
|
||||
assert_eq!(&changeset.last_revealed, &[].into());
|
||||
|
||||
// - derived till 25
|
||||
// - used all spks till 15.
|
||||
@ -339,16 +424,16 @@ fn test_wildcard_derivations() {
|
||||
.chain([17, 20, 23])
|
||||
.for_each(|index| assert!(txout_index.mark_used(TestKeychain::External, index)));
|
||||
|
||||
assert_eq!(txout_index.next_index(&TestKeychain::External), (26, true));
|
||||
assert_eq!(txout_index.next_index(&TestKeychain::External).unwrap(), (26, true));
|
||||
|
||||
let (spk, changeset) = txout_index.reveal_next_spk(&TestKeychain::External);
|
||||
let (spk, changeset) = txout_index.reveal_next_spk(&TestKeychain::External).unwrap();
|
||||
assert_eq!(spk, (26, external_spk_26.as_script()));
|
||||
|
||||
assert_eq!(changeset.as_inner(), &[(TestKeychain::External, 26)].into());
|
||||
assert_eq!(&changeset.last_revealed, &[(external_descriptor.descriptor_id(), 26)].into());
|
||||
|
||||
let (spk, changeset) = txout_index.next_unused_spk(&TestKeychain::External);
|
||||
let (spk, changeset) = txout_index.next_unused_spk(&TestKeychain::External).unwrap();
|
||||
assert_eq!(spk, (16, external_spk_16.as_script()));
|
||||
assert_eq!(changeset.as_inner(), &[].into());
|
||||
assert_eq!(&changeset.last_revealed, &[].into());
|
||||
|
||||
// - Use all the derived till 26.
|
||||
// - next_unused() = ((27, <spk>), keychain::ChangeSet)
|
||||
@ -356,9 +441,9 @@ fn test_wildcard_derivations() {
|
||||
txout_index.mark_used(TestKeychain::External, index);
|
||||
});
|
||||
|
||||
let (spk, changeset) = txout_index.next_unused_spk(&TestKeychain::External);
|
||||
let (spk, changeset) = txout_index.next_unused_spk(&TestKeychain::External).unwrap();
|
||||
assert_eq!(spk, (27, external_spk_27.as_script()));
|
||||
assert_eq!(changeset.as_inner(), &[(TestKeychain::External, 27)].into());
|
||||
assert_eq!(&changeset.last_revealed, &[(external_descriptor.descriptor_id(), 27)].into());
|
||||
}
|
||||
|
||||
#[test]
|
||||
@ -366,13 +451,14 @@ fn test_non_wildcard_derivations() {
|
||||
let mut txout_index = KeychainTxOutIndex::<TestKeychain>::new(0);
|
||||
|
||||
let secp = bitcoin::secp256k1::Secp256k1::signing_only();
|
||||
let (no_wildcard_descriptor, _) = Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, "wpkh([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/1/0)").unwrap();
|
||||
let (no_wildcard_descriptor, _) =
|
||||
Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, DESCRIPTORS[6]).unwrap();
|
||||
let external_spk = no_wildcard_descriptor
|
||||
.at_derivation_index(0)
|
||||
.unwrap()
|
||||
.script_pubkey();
|
||||
|
||||
txout_index.add_keychain(TestKeychain::External, no_wildcard_descriptor);
|
||||
let _ = txout_index.insert_descriptor(TestKeychain::External, no_wildcard_descriptor.clone());
|
||||
|
||||
// given:
|
||||
// - `txout_index` with no stored scripts
|
||||
@ -380,14 +466,24 @@ fn test_non_wildcard_derivations() {
|
||||
// - next derivation index should be new
|
||||
// - when we derive a new script, script @ index 0
|
||||
// - when we get the next unused script, script @ index 0
|
||||
assert_eq!(txout_index.next_index(&TestKeychain::External), (0, true));
|
||||
let (spk, changeset) = txout_index.reveal_next_spk(&TestKeychain::External);
|
||||
assert_eq!(
|
||||
txout_index.next_index(&TestKeychain::External).unwrap(),
|
||||
(0, true)
|
||||
);
|
||||
let (spk, changeset) = txout_index
|
||||
.reveal_next_spk(&TestKeychain::External)
|
||||
.unwrap();
|
||||
assert_eq!(spk, (0, external_spk.as_script()));
|
||||
assert_eq!(changeset.as_inner(), &[(TestKeychain::External, 0)].into());
|
||||
assert_eq!(
|
||||
&changeset.last_revealed,
|
||||
&[(no_wildcard_descriptor.descriptor_id(), 0)].into()
|
||||
);
|
||||
|
||||
let (spk, changeset) = txout_index.next_unused_spk(&TestKeychain::External);
|
||||
let (spk, changeset) = txout_index
|
||||
.next_unused_spk(&TestKeychain::External)
|
||||
.unwrap();
|
||||
assert_eq!(spk, (0, external_spk.as_script()));
|
||||
assert_eq!(changeset.as_inner(), &[].into());
|
||||
assert_eq!(&changeset.last_revealed, &[].into());
|
||||
|
||||
// given:
|
||||
// - the non-wildcard descriptor already has a stored and used script
|
||||
@ -395,18 +491,26 @@ fn test_non_wildcard_derivations() {
|
||||
// - next derivation index should not be new
|
||||
// - derive new and next unused should return the old script
|
||||
// - store_up_to should not panic and return empty changeset
|
||||
assert_eq!(txout_index.next_index(&TestKeychain::External), (0, false));
|
||||
assert_eq!(
|
||||
txout_index.next_index(&TestKeychain::External).unwrap(),
|
||||
(0, false)
|
||||
);
|
||||
txout_index.mark_used(TestKeychain::External, 0);
|
||||
|
||||
let (spk, changeset) = txout_index.reveal_next_spk(&TestKeychain::External);
|
||||
let (spk, changeset) = txout_index
|
||||
.reveal_next_spk(&TestKeychain::External)
|
||||
.unwrap();
|
||||
assert_eq!(spk, (0, external_spk.as_script()));
|
||||
assert_eq!(changeset.as_inner(), &[].into());
|
||||
assert_eq!(&changeset.last_revealed, &[].into());
|
||||
|
||||
let (spk, changeset) = txout_index.next_unused_spk(&TestKeychain::External);
|
||||
let (spk, changeset) = txout_index
|
||||
.next_unused_spk(&TestKeychain::External)
|
||||
.unwrap();
|
||||
assert_eq!(spk, (0, external_spk.as_script()));
|
||||
assert_eq!(changeset.as_inner(), &[].into());
|
||||
let (revealed_spks, revealed_changeset) =
|
||||
txout_index.reveal_to_target(&TestKeychain::External, 200);
|
||||
assert_eq!(&changeset.last_revealed, &[].into());
|
||||
let (revealed_spks, revealed_changeset) = txout_index
|
||||
.reveal_to_target(&TestKeychain::External, 200)
|
||||
.unwrap();
|
||||
assert_eq!(revealed_spks.count(), 0);
|
||||
assert!(revealed_changeset.is_empty());
|
||||
|
||||
@ -470,7 +574,13 @@ fn lookahead_to_target() {
|
||||
];
|
||||
|
||||
for t in test_cases {
|
||||
let (mut index, _, _) = init_txout_index(t.lookahead);
|
||||
let external_descriptor = parse_descriptor(DESCRIPTORS[0]);
|
||||
let internal_descriptor = parse_descriptor(DESCRIPTORS[1]);
|
||||
let mut index = init_txout_index(
|
||||
external_descriptor.clone(),
|
||||
internal_descriptor.clone(),
|
||||
t.lookahead,
|
||||
);
|
||||
|
||||
if let Some(last_revealed) = t.external_last_revealed {
|
||||
let _ = index.reveal_to_target(&TestKeychain::External, last_revealed);
|
||||
@ -481,17 +591,19 @@ fn lookahead_to_target() {
|
||||
|
||||
let keychain_test_cases = [
|
||||
(
|
||||
external_descriptor.descriptor_id(),
|
||||
TestKeychain::External,
|
||||
t.external_last_revealed,
|
||||
t.external_target,
|
||||
),
|
||||
(
|
||||
internal_descriptor.descriptor_id(),
|
||||
TestKeychain::Internal,
|
||||
t.internal_last_revealed,
|
||||
t.internal_target,
|
||||
),
|
||||
];
|
||||
for (keychain, last_revealed, target) in keychain_test_cases {
|
||||
for (descriptor_id, keychain, last_revealed, target) in keychain_test_cases {
|
||||
if let Some(target) = target {
|
||||
let original_last_stored_index = match last_revealed {
|
||||
Some(last_revealed) => Some(last_revealed + t.lookahead),
|
||||
@ -507,10 +619,10 @@ fn lookahead_to_target() {
|
||||
let keys = index
|
||||
.inner()
|
||||
.all_spks()
|
||||
.range((keychain.clone(), 0)..=(keychain.clone(), u32::MAX))
|
||||
.map(|(k, _)| k.clone())
|
||||
.range((descriptor_id, 0)..=(descriptor_id, u32::MAX))
|
||||
.map(|(k, _)| *k)
|
||||
.collect::<Vec<_>>();
|
||||
let exp_keys = core::iter::repeat(keychain)
|
||||
let exp_keys = core::iter::repeat(descriptor_id)
|
||||
.zip(0_u32..=exp_last_stored_index)
|
||||
.collect::<Vec<_>>();
|
||||
assert_eq!(keys, exp_keys);
|
||||
@ -518,3 +630,67 @@ fn lookahead_to_target() {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// `::index_txout` should still index txouts with spks derived from descriptors without keychains.
|
||||
/// This includes properly refilling the lookahead for said descriptors.
|
||||
#[test]
|
||||
fn index_txout_after_changing_descriptor_under_keychain() {
|
||||
let secp = bdk_chain::bitcoin::secp256k1::Secp256k1::signing_only();
|
||||
let (desc_a, _) = Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, DESCRIPTORS[0])
|
||||
.expect("descriptor 0 must be valid");
|
||||
let (desc_b, _) = Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, DESCRIPTORS[1])
|
||||
.expect("descriptor 1 must be valid");
|
||||
let desc_id_a = desc_a.descriptor_id();
|
||||
|
||||
let mut txout_index = bdk_chain::keychain::KeychainTxOutIndex::<()>::new(10);
|
||||
|
||||
// Introduce `desc_a` under keychain `()` and replace the descriptor.
|
||||
let _ = txout_index.insert_descriptor((), desc_a.clone());
|
||||
let _ = txout_index.insert_descriptor((), desc_b.clone());
|
||||
|
||||
// Loop through spks in intervals of `lookahead` to create outputs with. We should always be
|
||||
// able to index these outputs if `lookahead` is respected.
|
||||
let spk_indices = [9, 19, 29, 39];
|
||||
for i in spk_indices {
|
||||
let spk_at_index = desc_a
|
||||
.at_derivation_index(i)
|
||||
.expect("must derive")
|
||||
.script_pubkey();
|
||||
let index_changeset = txout_index.index_txout(
|
||||
// Use the spk derivation index as the vout, as we just want a unique outpoint.
|
||||
OutPoint::new(h!("mock_tx"), i as _),
|
||||
&TxOut {
|
||||
value: Amount::from_sat(10_000),
|
||||
script_pubkey: spk_at_index,
|
||||
},
|
||||
);
|
||||
assert_eq!(
|
||||
index_changeset,
|
||||
bdk_chain::keychain::ChangeSet {
|
||||
keychains_added: BTreeMap::default(),
|
||||
last_revealed: [(desc_id_a, i)].into(),
|
||||
},
|
||||
"must always increase last active if impl respects lookahead"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn insert_descriptor_no_change() {
|
||||
let secp = Secp256k1::signing_only();
|
||||
let (desc, _) =
|
||||
Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, DESCRIPTORS[0]).unwrap();
|
||||
let mut txout_index = KeychainTxOutIndex::<()>::default();
|
||||
assert_eq!(
|
||||
txout_index.insert_descriptor((), desc.clone()),
|
||||
keychain::ChangeSet {
|
||||
keychains_added: [((), desc.clone())].into(),
|
||||
last_revealed: Default::default()
|
||||
},
|
||||
);
|
||||
assert_eq!(
|
||||
txout_index.insert_descriptor((), desc.clone()),
|
||||
keychain::ChangeSet::default(),
|
||||
"inserting the same descriptor for keychain should return an empty changeset",
|
||||
);
|
||||
}
|
||||
|
@ -212,7 +212,7 @@ fn main() -> anyhow::Result<()> {
|
||||
graph.graph().balance(
|
||||
&*chain,
|
||||
synced_to.block_id(),
|
||||
graph.index.outpoints().iter().cloned(),
|
||||
graph.index.outpoints(),
|
||||
|(k, _), _| k == &Keychain::Internal,
|
||||
)
|
||||
};
|
||||
@ -336,7 +336,7 @@ fn main() -> anyhow::Result<()> {
|
||||
graph.graph().balance(
|
||||
&*chain,
|
||||
synced_to.block_id(),
|
||||
graph.index.outpoints().iter().cloned(),
|
||||
graph.index.outpoints(),
|
||||
|(k, _), _| k == &Keychain::Internal,
|
||||
)
|
||||
};
|
||||
|
@ -249,14 +249,20 @@ where
|
||||
script_pubkey: address.script_pubkey(),
|
||||
}];
|
||||
|
||||
let internal_keychain = if graph.index.keychains().get(&Keychain::Internal).is_some() {
|
||||
let internal_keychain = if graph
|
||||
.index
|
||||
.keychains()
|
||||
.any(|(k, _)| *k == Keychain::Internal)
|
||||
{
|
||||
Keychain::Internal
|
||||
} else {
|
||||
Keychain::External
|
||||
};
|
||||
|
||||
let ((change_index, change_script), change_changeset) =
|
||||
graph.index.next_unused_spk(&internal_keychain);
|
||||
let ((change_index, change_script), change_changeset) = graph
|
||||
.index
|
||||
.next_unused_spk(&internal_keychain)
|
||||
.expect("Must exist");
|
||||
changeset.append(change_changeset);
|
||||
|
||||
// Clone to drop the immutable reference.
|
||||
@ -266,8 +272,9 @@ where
|
||||
&graph
|
||||
.index
|
||||
.keychains()
|
||||
.get(&internal_keychain)
|
||||
.find(|(k, _)| *k == &internal_keychain)
|
||||
.expect("must exist")
|
||||
.1
|
||||
.at_derivation_index(change_index)
|
||||
.expect("change_index can't be hardened"),
|
||||
&assets,
|
||||
@ -284,8 +291,9 @@ where
|
||||
min_drain_value: graph
|
||||
.index
|
||||
.keychains()
|
||||
.get(&internal_keychain)
|
||||
.find(|(k, _)| *k == &internal_keychain)
|
||||
.expect("must exist")
|
||||
.1
|
||||
.dust_value(),
|
||||
..CoinSelectorOpt::fund_outputs(
|
||||
&outputs,
|
||||
@ -416,7 +424,7 @@ pub fn planned_utxos<A: Anchor, O: ChainOracle, K: Clone + bdk_tmp_plan::CanDeri
|
||||
assets: &bdk_tmp_plan::Assets<K>,
|
||||
) -> Result<Vec<PlannedUtxo<K, A>>, O::Error> {
|
||||
let chain_tip = chain.get_chain_tip()?;
|
||||
let outpoints = graph.index.outpoints().iter().cloned();
|
||||
let outpoints = graph.index.outpoints();
|
||||
graph
|
||||
.graph()
|
||||
.try_filter_chain_unspents(chain, chain_tip, outpoints)
|
||||
@ -428,8 +436,9 @@ pub fn planned_utxos<A: Anchor, O: ChainOracle, K: Clone + bdk_tmp_plan::CanDeri
|
||||
let desc = graph
|
||||
.index
|
||||
.keychains()
|
||||
.get(&k)
|
||||
.find(|(keychain, _)| *keychain == &k)
|
||||
.expect("keychain must exist")
|
||||
.1
|
||||
.at_derivation_index(i)
|
||||
.expect("i can't be hardened");
|
||||
let plan = bdk_tmp_plan::plan_satisfaction(&desc, assets)?;
|
||||
@ -465,7 +474,8 @@ where
|
||||
_ => unreachable!("only these two variants exist in match arm"),
|
||||
};
|
||||
|
||||
let ((spk_i, spk), index_changeset) = spk_chooser(index, &Keychain::External);
|
||||
let ((spk_i, spk), index_changeset) =
|
||||
spk_chooser(index, &Keychain::External).expect("Must exist");
|
||||
let db = &mut *db.lock().unwrap();
|
||||
db.stage_and_commit(C::from((
|
||||
local_chain::ChangeSet::default(),
|
||||
@ -517,7 +527,7 @@ where
|
||||
let balance = graph.graph().try_balance(
|
||||
chain,
|
||||
chain.get_chain_tip()?,
|
||||
graph.index.outpoints().iter().cloned(),
|
||||
graph.index.outpoints(),
|
||||
|(k, _), _| k == &Keychain::Internal,
|
||||
)?;
|
||||
|
||||
@ -547,7 +557,7 @@ where
|
||||
let graph = &*graph.lock().unwrap();
|
||||
let chain = &*chain.lock().unwrap();
|
||||
let chain_tip = chain.get_chain_tip()?;
|
||||
let outpoints = graph.index.outpoints().iter().cloned();
|
||||
let outpoints = graph.index.outpoints();
|
||||
|
||||
match txout_cmd {
|
||||
TxOutCmd::List {
|
||||
@ -695,9 +705,11 @@ where
|
||||
|
||||
let mut index = KeychainTxOutIndex::<Keychain>::default();
|
||||
|
||||
// TODO: descriptors are already stored in the db, so we shouldn't re-insert
|
||||
// them in the index here. However, the keymap is not stored in the database.
|
||||
let (descriptor, mut keymap) =
|
||||
Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, &args.descriptor)?;
|
||||
index.add_keychain(Keychain::External, descriptor);
|
||||
let _ = index.insert_descriptor(Keychain::External, descriptor);
|
||||
|
||||
if let Some((internal_descriptor, internal_keymap)) = args
|
||||
.change_descriptor
|
||||
@ -706,7 +718,7 @@ where
|
||||
.transpose()?
|
||||
{
|
||||
keymap.extend(internal_keymap);
|
||||
index.add_keychain(Keychain::Internal, internal_descriptor);
|
||||
let _ = index.insert_descriptor(Keychain::Internal, internal_descriptor);
|
||||
}
|
||||
|
||||
let mut db_backend = match Store::<C>::open_or_create_new(db_magic, &args.db_path) {
|
||||
|
@ -238,7 +238,7 @@ fn main() -> anyhow::Result<()> {
|
||||
let mut outpoints: Box<dyn Iterator<Item = OutPoint>> = Box::new(core::iter::empty());
|
||||
|
||||
if utxos {
|
||||
let init_outpoints = graph.index.outpoints().iter().cloned();
|
||||
let init_outpoints = graph.index.outpoints();
|
||||
|
||||
let utxos = graph
|
||||
.graph()
|
||||
|
@ -277,7 +277,7 @@ fn main() -> anyhow::Result<()> {
|
||||
// We want to search for whether the UTXO is spent, and spent by which
|
||||
// transaction. We provide the outpoint of the UTXO to
|
||||
// `EsploraExt::update_tx_graph_without_keychain`.
|
||||
let init_outpoints = graph.index.outpoints().iter().cloned();
|
||||
let init_outpoints = graph.index.outpoints();
|
||||
let utxos = graph
|
||||
.graph()
|
||||
.filter_chain_unspents(&*chain, local_tip.block_id(), init_outpoints)