Merge bitcoindevkit/bdk#1203: Include the descriptor in keychain::Changeset
86711d4f46f467c651238ad3804fdbe1d22a8600 doc(chain): add section for non-recommended K to descriptor assignments (Daniela Brozzoni)
de53d721913537f56281a134270eafd356f908ad test: Only the highest ord keychain is returned (Daniela Brozzoni)
9d8023bf56a693f1cb2ba340ed024c654307c069 fix(chain): introduce keychain-variant-ranking to `KeychainTxOutIndex` (志宇)
6c8748124fd40e0fee37f78ca30457441b13fbcb chore(chain): move `use` in `indexed_tx_graph.rs` so clippy is happy (志宇)
537aa03ae0f8bec4dc799d33738e9bb7977bdac1 chore(chain): update test so clippy does not complain (志宇)
ed117de7a5b1756482b2e6487855b80e97c597ba test(chain): applying changesets one-by-one vs aggregate should be same (志宇)
6a3fb849e86e0bc21086519ae0201b95ddde5bf4 fix(chain): simplify `Append::append` impl for `keychain::ChangeSet` (志宇)
1d294b734dd6f4639075cba271e2b40f437f998f fix: Run tests only if the miniscript feature is.. ..enabled, enable it by default (Daniela Brozzoni)
0e3e136f6fa7215f6391dbcc1c4781262111ce64 doc(bdk): Add instructions for manually inserting... ...secret keys in the wallet in Wallet::load (Daniela Brozzoni)
76afccc555feff084867d6f9406e4e74bee938cc fix(wallet): add expected descriptors as signers after creating from wallet::ChangeSet (Steve Myers)
4f05441a00b921efd661da0dff94d9c28e38b70d keychain::ChangeSet includes the descriptor (Daniela Brozzoni)
8ff99f27dfe45643bf401409ca72429a9b812873 ref(chain): Define test descriptors, use them... ...everywhere (Daniela Brozzoni)
b9902936a0d17498eec8866233d89b2882d0af8f ref(chain): move `keychain::ChangeSet` into `txout_index.rs` (志宇)

Pull request description:

Fixes #1101

- Moves `keychain::ChangeSet` inside `keychain/txout_index.rs`, as the `ChangeSet` now depends on miniscript
- Slightly cleans up tests by introducing some constant descriptors
- The `KeychainTxOutIndex`'s internal `SpkIterator` now uses `DescriptorId` instead of `K`. The `DescriptorId` -> `K` translation is made at the `KeychainTxOutIndex` level.
- The `keychain::ChangeSet` is now a struct, which includes a map for last revealed indexes, and one for newly added keychains and their descriptor.

### Changelog notice

API changes in bdk:

- `Wallet::keychains` returns an `impl Iterator` instead of a `BTreeMap`
- `Wallet::load` doesn't take descriptors anymore, since they're stored in the db
- `Wallet::new_or_load` checks if the loaded descriptor from db is the same as the provided one

API changes in bdk_chain:

- `ChangeSet` is now a struct, which includes a map for last revealed indexes, and one for keychains and descriptors
- `KeychainTxOutIndex::inner` returns a `SpkIterator<(DescriptorId, u32)>`
- `KeychainTxOutIndex::outpoints` returns a `BTreeSet` instead of a `&BTreeSet`
- `KeychainTxOutIndex::keychains` returns an `impl Iterator` instead of a `&BTreeMap`
- `KeychainTxOutIndex::txouts` doesn't return an `ExactSizeIterator` anymore
- `KeychainTxOutIndex::last_revealed_indices` returns a `BTreeMap` instead of a `&BTreeMap`
- `KeychainTxOutIndex::add_keychain` has been renamed to `KeychainTxOutIndex::insert_descriptor`, and now it returns a `ChangeSet`
- `KeychainTxOutIndex::reveal_next_spk` returns `Option`
- `KeychainTxOutIndex::next_unused_spk` returns `Option`
- `KeychainTxOutIndex::unbounded_spk_iter` returns `Option`
- `KeychainTxOutIndex::next_index` returns `Option`
- `KeychainTxOutIndex::reveal_to_target` returns `Option`
- `KeychainTxOutIndex::revealed_keychain_spks` returns `Option`
- `KeychainTxOutIndex::unused_keychain_spks` returns `Option`
- `KeychainTxOutIndex::last_revealed_index` returns `Option`
- `KeychainTxOutIndex::keychain_outpoints` returns `Option`
- `KeychainTxOutIndex::keychain_outpoints_in_range` returns `Option`
- `KeychainTxOutIndex::last_used_index` returns `None` if the keychain has never been used, or if it doesn't exist

### Checklists

#### All Submissions:

* [x] I've signed all my commits
* [x] I followed the [contribution guidelines](https://github.com/bitcoindevkit/bdk/blob/master/CONTRIBUTING.md)
* [x] I ran `cargo fmt` and `cargo clippy` before committing

#### New Features:

* [x] I've added tests for the new feature
* [x] I've added docs for the new feature

ACKs for top commit:
  evanlinjin:
    ACK 86711d4f46f467c651238ad3804fdbe1d22a8600

Tree-SHA512: 4b1c9a31951f67b18037b7dd9837acbc35823f21de644ab833754b74d20f5373549f81e66965ecd3953ebf4f99644c9fd834812acfa65f9188950f1bda17ab60
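To make the changelog above concrete, here is a minimal, self-contained sketch of the reshaped `keychain::ChangeSet` and its merge semantics. It is not the real bdk_chain code: `Descriptor` and `DescriptorId` are simplified stand-ins (the actual type stores `miniscript::Descriptor<DescriptorPublicKey>` values and a sha256-based `DescriptorId`), but the field names and the "last revealed index never decreases" rule follow the description in this PR.

```rust
use std::collections::BTreeMap;

// Simplified stand-ins for the real types used by bdk_chain.
type Descriptor = String;
type DescriptorId = [u8; 32];

/// Shape of the new `keychain::ChangeSet`: newly added keychains (with their
/// descriptor) plus the last revealed derivation index per descriptor.
#[derive(Debug, Default)]
struct ChangeSet<K: Ord> {
    keychains_added: BTreeMap<K, Descriptor>,
    last_revealed: BTreeMap<DescriptorId, u32>,
}

impl<K: Ord> ChangeSet<K> {
    /// Merge semantics sketched from the PR: later `keychains_added` entries
    /// win, while `last_revealed` is monotone and only ever increases.
    fn append(&mut self, other: Self) {
        self.keychains_added.extend(other.keychains_added);
        for (desc_id, index) in other.last_revealed {
            let entry = self.last_revealed.entry(desc_id).or_insert(index);
            *entry = (*entry).max(index);
        }
    }
}

fn main() {
    let mut a = ChangeSet::<&str>::default();
    a.last_revealed.insert([0u8; 32], 7);

    let mut b = ChangeSet::<&str>::default();
    b.keychains_added.insert("external", "wpkh(...)".to_string());
    b.last_revealed.insert([0u8; 32], 3);

    a.append(b);
    // The lower revealed index from `b` does not win over the existing 7.
    assert_eq!(a.last_revealed[&[0u8; 32]], 7);
    println!("{a:?}");
}
```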
commit fb7ff298a4
@ -54,6 +54,7 @@ pub mod tx_builder;
|
||||
pub(crate) mod utils;
|
||||
|
||||
pub mod error;
|
||||
|
||||
pub use utils::IsDust;
|
||||
|
||||
use coin_selection::DefaultCoinSelectionAlgorithm;
|
||||
@ -305,6 +306,8 @@ pub enum LoadError {
|
||||
MissingNetwork,
|
||||
/// Data loaded from persistence is missing genesis hash.
|
||||
MissingGenesis,
|
||||
/// Data loaded from persistence is missing descriptor.
|
||||
MissingDescriptor,
|
||||
}
|
||||
|
||||
impl fmt::Display for LoadError {
|
||||
@ -317,6 +320,7 @@ impl fmt::Display for LoadError {
|
||||
}
|
||||
LoadError::MissingNetwork => write!(f, "loaded data is missing network type"),
|
||||
LoadError::MissingGenesis => write!(f, "loaded data is missing genesis hash"),
|
||||
LoadError::MissingDescriptor => write!(f, "loaded data is missing descriptor"),
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -352,6 +356,13 @@ pub enum NewOrLoadError {
|
||||
/// The network type loaded from persistence.
|
||||
got: Option<Network>,
|
||||
},
|
||||
/// The loaded descriptor does not match what was provided.
|
||||
LoadedDescriptorDoesNotMatch {
|
||||
/// The descriptor loaded from persistence.
|
||||
got: Option<ExtendedDescriptor>,
|
||||
/// The keychain of the descriptor not matching
|
||||
keychain: KeychainKind,
|
||||
},
|
||||
}
|
||||
|
||||
impl fmt::Display for NewOrLoadError {
|
||||
@ -372,6 +383,13 @@ impl fmt::Display for NewOrLoadError {
|
||||
NewOrLoadError::LoadedNetworkDoesNotMatch { expected, got } => {
|
||||
write!(f, "loaded network type is not {}, got {:?}", expected, got)
|
||||
}
|
||||
NewOrLoadError::LoadedDescriptorDoesNotMatch { got, keychain } => {
|
||||
write!(
|
||||
f,
|
||||
"loaded descriptor is different from what was provided, got {:?} for keychain {:?}",
|
||||
got, keychain
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -499,21 +517,57 @@ impl Wallet {
|
||||
}
|
||||
|
||||
/// Load [`Wallet`] from the given persistence backend.
|
||||
pub fn load<E: IntoWalletDescriptor>(
|
||||
descriptor: E,
|
||||
change_descriptor: Option<E>,
|
||||
///
|
||||
/// Note that the descriptor secret keys are not persisted to the db; this means that after
|
||||
/// calling this method the [`Wallet`] **won't** know the secret keys, and as such, won't be
|
||||
/// able to sign transactions.
|
||||
///
|
||||
/// If you wish to use the wallet to sign transactions, you need to add the secret keys
|
||||
/// manually to the [`Wallet`]:
|
||||
///
|
||||
/// ```rust,no_run
|
||||
/// # use bdk::Wallet;
|
||||
/// # use bdk::signer::{SignersContainer, SignerOrdering};
|
||||
/// # use bdk::descriptor::Descriptor;
|
||||
/// # use bitcoin::key::Secp256k1;
|
||||
/// # use bdk::KeychainKind;
|
||||
/// # use bdk_file_store::Store;
|
||||
/// #
|
||||
/// # fn main() -> Result<(), anyhow::Error> {
|
||||
/// # let temp_dir = tempfile::tempdir().expect("must create tempdir");
|
||||
/// # let file_path = temp_dir.path().join("store.db");
|
||||
/// # let db: Store<bdk::wallet::ChangeSet> = Store::create_new(&[], &file_path).expect("must create db");
|
||||
/// let secp = Secp256k1::new();
|
||||
///
|
||||
/// let (external_descriptor, external_keymap) = Descriptor::parse_descriptor(&secp, "wpkh(tprv8ZgxMBicQKsPdy6LMhUtFHAgpocR8GC6QmwMSFpZs7h6Eziw3SpThFfczTDh5rW2krkqffa11UpX3XkeTTB2FvzZKWXqPY54Y6Rq4AQ5R8L/84'/1'/0'/0/*)").unwrap();
|
||||
/// let (internal_descriptor, internal_keymap) = Descriptor::parse_descriptor(&secp, "wpkh(tprv8ZgxMBicQKsPdy6LMhUtFHAgpocR8GC6QmwMSFpZs7h6Eziw3SpThFfczTDh5rW2krkqffa11UpX3XkeTTB2FvzZKWXqPY54Y6Rq4AQ5R8L/84'/1'/0'/1/*)").unwrap();
|
||||
///
|
||||
/// let external_signer_container = SignersContainer::build(external_keymap, &external_descriptor, &secp);
|
||||
/// let internal_signer_container = SignersContainer::build(internal_keymap, &internal_descriptor, &secp);
|
||||
///
|
||||
/// let mut wallet = Wallet::load(db)?;
|
||||
///
|
||||
/// external_signer_container.signers().into_iter()
|
||||
/// .for_each(|s| wallet.add_signer(KeychainKind::External, SignerOrdering::default(), s.clone()));
|
||||
/// internal_signer_container.signers().into_iter()
|
||||
/// .for_each(|s| wallet.add_signer(KeychainKind::Internal, SignerOrdering::default(), s.clone()));
|
||||
/// # Ok(())
|
||||
/// # }
|
||||
/// ```
|
||||
///
|
||||
/// Alternatively, you can call [`Wallet::new_or_load`], which will add the private keys of the
|
||||
/// passed-in descriptors to the [`Wallet`].
|
||||
pub fn load(
|
||||
mut db: impl PersistBackend<ChangeSet> + Send + Sync + 'static,
|
||||
) -> Result<Self, LoadError> {
|
||||
let changeset = db
|
||||
.load_from_persistence()
|
||||
.map_err(LoadError::Persist)?
|
||||
.ok_or(LoadError::NotInitialized)?;
|
||||
Self::load_from_changeset(descriptor, change_descriptor, db, changeset)
|
||||
Self::load_from_changeset(db, changeset)
|
||||
}
|
||||
|
||||
fn load_from_changeset<E: IntoWalletDescriptor>(
|
||||
descriptor: E,
|
||||
change_descriptor: Option<E>,
|
||||
fn load_from_changeset(
|
||||
db: impl PersistBackend<ChangeSet> + Send + Sync + 'static,
|
||||
changeset: ChangeSet,
|
||||
) -> Result<Self, LoadError> {
|
||||
@ -522,10 +576,23 @@ impl Wallet {
|
||||
let chain =
|
||||
LocalChain::from_changeset(changeset.chain).map_err(|_| LoadError::MissingGenesis)?;
|
||||
let mut index = KeychainTxOutIndex::<KeychainKind>::default();
|
||||
let descriptor = changeset
|
||||
.indexed_tx_graph
|
||||
.indexer
|
||||
.keychains_added
|
||||
.get(&KeychainKind::External)
|
||||
.ok_or(LoadError::MissingDescriptor)?
|
||||
.clone();
|
||||
let change_descriptor = changeset
|
||||
.indexed_tx_graph
|
||||
.indexer
|
||||
.keychains_added
|
||||
.get(&KeychainKind::Internal)
|
||||
.cloned();
|
||||
|
||||
let (signers, change_signers) =
|
||||
create_signers(&mut index, &secp, descriptor, change_descriptor, network)
|
||||
.map_err(LoadError::Descriptor)?;
|
||||
.expect("Can't fail: we passed in valid descriptors, recovered from the changeset");
|
||||
|
||||
let mut indexed_graph = IndexedTxGraph::new(index);
|
||||
indexed_graph.apply_changeset(changeset.indexed_tx_graph);
|
||||
@ -562,8 +629,8 @@ impl Wallet {
|
||||
)
|
||||
}
|
||||
|
||||
/// Either loads [`Wallet`] from persistence, or initializes it if it does not exist (with a
|
||||
/// custom genesis hash).
|
||||
/// Either loads [`Wallet`] from persistence, or initializes it if it does not exist, using the
|
||||
/// provided descriptor, change descriptor, network, and custom genesis hash.
|
||||
///
|
||||
/// This method will fail if the loaded [`Wallet`] has different parameters to those provided.
|
||||
/// This is like [`Wallet::new_or_load`] with an additional `genesis_hash` parameter. This is
|
||||
@ -580,24 +647,22 @@ impl Wallet {
|
||||
.map_err(NewOrLoadError::Persist)?;
|
||||
match changeset {
|
||||
Some(changeset) => {
|
||||
let wallet =
|
||||
Self::load_from_changeset(descriptor, change_descriptor, db, changeset)
|
||||
.map_err(|e| match e {
|
||||
let mut wallet = Self::load_from_changeset(db, changeset).map_err(|e| match e {
|
||||
LoadError::Descriptor(e) => NewOrLoadError::Descriptor(e),
|
||||
LoadError::Persist(e) => NewOrLoadError::Persist(e),
|
||||
LoadError::NotInitialized => NewOrLoadError::NotInitialized,
|
||||
LoadError::MissingNetwork => {
|
||||
NewOrLoadError::LoadedNetworkDoesNotMatch {
|
||||
LoadError::MissingNetwork => NewOrLoadError::LoadedNetworkDoesNotMatch {
|
||||
expected: network,
|
||||
got: None,
|
||||
}
|
||||
}
|
||||
LoadError::MissingGenesis => {
|
||||
NewOrLoadError::LoadedGenesisDoesNotMatch {
|
||||
},
|
||||
LoadError::MissingGenesis => NewOrLoadError::LoadedGenesisDoesNotMatch {
|
||||
expected: genesis_hash,
|
||||
got: None,
|
||||
}
|
||||
}
|
||||
},
|
||||
LoadError::MissingDescriptor => NewOrLoadError::LoadedDescriptorDoesNotMatch {
|
||||
got: None,
|
||||
keychain: KeychainKind::External,
|
||||
},
|
||||
})?;
|
||||
if wallet.network != network {
|
||||
return Err(NewOrLoadError::LoadedNetworkDoesNotMatch {
|
||||
@ -611,6 +676,73 @@ impl Wallet {
|
||||
got: Some(wallet.chain.genesis_hash()),
|
||||
});
|
||||
}
|
||||
|
||||
let (expected_descriptor, expected_descriptor_keymap) = descriptor
|
||||
.into_wallet_descriptor(&wallet.secp, network)
|
||||
.map_err(NewOrLoadError::Descriptor)?;
|
||||
let wallet_descriptor = wallet.public_descriptor(KeychainKind::External).cloned();
|
||||
if wallet_descriptor != Some(expected_descriptor.clone()) {
|
||||
return Err(NewOrLoadError::LoadedDescriptorDoesNotMatch {
|
||||
got: wallet_descriptor,
|
||||
keychain: KeychainKind::External,
|
||||
});
|
||||
}
|
||||
// if expected descriptor has private keys add them as new signers
|
||||
if !expected_descriptor_keymap.is_empty() {
|
||||
let signer_container = SignersContainer::build(
|
||||
expected_descriptor_keymap,
|
||||
&expected_descriptor,
|
||||
&wallet.secp,
|
||||
);
|
||||
signer_container.signers().into_iter().for_each(|signer| {
|
||||
wallet.add_signer(
|
||||
KeychainKind::External,
|
||||
SignerOrdering::default(),
|
||||
signer.clone(),
|
||||
)
|
||||
});
|
||||
}
|
||||
|
||||
let expected_change_descriptor = if let Some(c) = change_descriptor {
|
||||
Some(
|
||||
c.into_wallet_descriptor(&wallet.secp, network)
|
||||
.map_err(NewOrLoadError::Descriptor)?,
|
||||
)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
let wallet_change_descriptor =
|
||||
wallet.public_descriptor(KeychainKind::Internal).cloned();
|
||||
|
||||
match (expected_change_descriptor, wallet_change_descriptor) {
|
||||
(Some((expected_descriptor, expected_keymap)), Some(wallet_descriptor))
|
||||
if wallet_descriptor == expected_descriptor =>
|
||||
{
|
||||
// if expected change descriptor has private keys add them as new signers
|
||||
if !expected_keymap.is_empty() {
|
||||
let signer_container = SignersContainer::build(
|
||||
expected_keymap,
|
||||
&expected_descriptor,
|
||||
&wallet.secp,
|
||||
);
|
||||
signer_container.signers().into_iter().for_each(|signer| {
|
||||
wallet.add_signer(
|
||||
KeychainKind::Internal,
|
||||
SignerOrdering::default(),
|
||||
signer.clone(),
|
||||
)
|
||||
});
|
||||
}
|
||||
}
|
||||
(None, None) => (),
|
||||
(_, wallet_descriptor) => {
|
||||
return Err(NewOrLoadError::LoadedDescriptorDoesNotMatch {
|
||||
got: wallet_descriptor,
|
||||
keychain: KeychainKind::Internal,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
Ok(wallet)
|
||||
}
|
||||
None => Self::new_with_genesis_hash(
|
||||
@ -636,7 +768,7 @@ impl Wallet {
|
||||
}
|
||||
|
||||
/// Iterator over all keychains in this wallet
|
||||
pub fn keychains(&self) -> &BTreeMap<KeychainKind, ExtendedDescriptor> {
|
||||
pub fn keychains(&self) -> impl Iterator<Item = (&KeychainKind, &ExtendedDescriptor)> {
|
||||
self.indexed_graph.index.keychains()
|
||||
}
|
||||
|
||||
@ -650,7 +782,11 @@ impl Wallet {
|
||||
/// [BIP32](https://github.com/bitcoin/bips/blob/master/bip-0032.mediawiki) max index.
|
||||
pub fn peek_address(&self, keychain: KeychainKind, mut index: u32) -> AddressInfo {
|
||||
let keychain = self.map_keychain(keychain);
|
||||
let mut spk_iter = self.indexed_graph.index.unbounded_spk_iter(&keychain);
|
||||
let mut spk_iter = self
|
||||
.indexed_graph
|
||||
.index
|
||||
.unbounded_spk_iter(&keychain)
|
||||
.expect("Must exist (we called map_keychain)");
|
||||
if !spk_iter.descriptor().has_wildcard() {
|
||||
index = 0;
|
||||
}
|
||||
@ -677,7 +813,11 @@ impl Wallet {
|
||||
/// If writing to persistent storage fails.
|
||||
pub fn reveal_next_address(&mut self, keychain: KeychainKind) -> anyhow::Result<AddressInfo> {
|
||||
let keychain = self.map_keychain(keychain);
|
||||
let ((index, spk), index_changeset) = self.indexed_graph.index.reveal_next_spk(&keychain);
|
||||
let ((index, spk), index_changeset) = self
|
||||
.indexed_graph
|
||||
.index
|
||||
.reveal_next_spk(&keychain)
|
||||
.expect("Must exist (we called map_keychain)");
|
||||
|
||||
self.persist
|
||||
.stage_and_commit(indexed_tx_graph::ChangeSet::from(index_changeset).into())?;
|
||||
@ -705,8 +845,11 @@ impl Wallet {
|
||||
index: u32,
|
||||
) -> anyhow::Result<impl Iterator<Item = AddressInfo> + '_> {
|
||||
let keychain = self.map_keychain(keychain);
|
||||
let (spk_iter, index_changeset) =
|
||||
self.indexed_graph.index.reveal_to_target(&keychain, index);
|
||||
let (spk_iter, index_changeset) = self
|
||||
.indexed_graph
|
||||
.index
|
||||
.reveal_to_target(&keychain, index)
|
||||
.expect("must exist (we called map_keychain)");
|
||||
|
||||
self.persist
|
||||
.stage_and_commit(indexed_tx_graph::ChangeSet::from(index_changeset).into())?;
|
||||
@ -729,7 +872,11 @@ impl Wallet {
|
||||
/// If writing to persistent storage fails.
|
||||
pub fn next_unused_address(&mut self, keychain: KeychainKind) -> anyhow::Result<AddressInfo> {
|
||||
let keychain = self.map_keychain(keychain);
|
||||
let ((index, spk), index_changeset) = self.indexed_graph.index.next_unused_spk(&keychain);
|
||||
let ((index, spk), index_changeset) = self
|
||||
.indexed_graph
|
||||
.index
|
||||
.next_unused_spk(&keychain)
|
||||
.expect("must exist (we called map_keychain)");
|
||||
|
||||
self.persist
|
||||
.stage_and_commit(indexed_tx_graph::ChangeSet::from(index_changeset).into())?;
|
||||
@ -799,7 +946,7 @@ impl Wallet {
|
||||
.filter_chain_unspents(
|
||||
&self.chain,
|
||||
self.chain.tip().block_id(),
|
||||
self.indexed_graph.index.outpoints().iter().cloned(),
|
||||
self.indexed_graph.index.outpoints(),
|
||||
)
|
||||
.map(|((k, i), full_txo)| new_local_utxo(k, i, full_txo))
|
||||
}
|
||||
@ -813,7 +960,7 @@ impl Wallet {
|
||||
.filter_chain_txouts(
|
||||
&self.chain,
|
||||
self.chain.tip().block_id(),
|
||||
self.indexed_graph.index.outpoints().iter().cloned(),
|
||||
self.indexed_graph.index.outpoints(),
|
||||
)
|
||||
.map(|((k, i), full_txo)| new_local_utxo(k, i, full_txo))
|
||||
}
|
||||
@ -851,7 +998,11 @@ impl Wallet {
|
||||
&self,
|
||||
keychain: KeychainKind,
|
||||
) -> impl Iterator<Item = (u32, ScriptBuf)> + Clone {
|
||||
self.indexed_graph.index.unbounded_spk_iter(&keychain)
|
||||
let keychain = self.map_keychain(keychain);
|
||||
self.indexed_graph
|
||||
.index
|
||||
.unbounded_spk_iter(&keychain)
|
||||
.expect("Must exist (we called map_keychain)")
|
||||
}
|
||||
|
||||
/// Returns the utxo owned by this wallet corresponding to `outpoint` if it exists in the
|
||||
@ -1133,7 +1284,7 @@ impl Wallet {
|
||||
self.indexed_graph.graph().balance(
|
||||
&self.chain,
|
||||
self.chain.tip().block_id(),
|
||||
self.indexed_graph.index.outpoints().iter().cloned(),
|
||||
self.indexed_graph.index.outpoints(),
|
||||
|&(k, _), _| k == KeychainKind::Internal,
|
||||
)
|
||||
}
|
||||
@ -1220,17 +1371,9 @@ impl Wallet {
|
||||
coin_selection: Cs,
|
||||
params: TxParams,
|
||||
) -> Result<Psbt, CreateTxError> {
|
||||
let external_descriptor = self
|
||||
.indexed_graph
|
||||
.index
|
||||
.keychains()
|
||||
.get(&KeychainKind::External)
|
||||
.expect("must exist");
|
||||
let internal_descriptor = self
|
||||
.indexed_graph
|
||||
.index
|
||||
.keychains()
|
||||
.get(&KeychainKind::Internal);
|
||||
let keychains: BTreeMap<_, _> = self.indexed_graph.index.keychains().collect();
|
||||
let external_descriptor = keychains.get(&KeychainKind::External).expect("must exist");
|
||||
let internal_descriptor = keychains.get(&KeychainKind::Internal);
|
||||
|
||||
let external_policy = external_descriptor
|
||||
.extract_policy(&self.signers, BuildSatisfaction::None, &self.secp)?
|
||||
@ -1464,8 +1607,11 @@ impl Wallet {
|
||||
Some(ref drain_recipient) => drain_recipient.clone(),
|
||||
None => {
|
||||
let change_keychain = self.map_keychain(KeychainKind::Internal);
|
||||
let ((index, spk), index_changeset) =
|
||||
self.indexed_graph.index.next_unused_spk(&change_keychain);
|
||||
let ((index, spk), index_changeset) = self
|
||||
.indexed_graph
|
||||
.index
|
||||
.next_unused_spk(&change_keychain)
|
||||
.expect("Keychain exists (we called map_keychain)");
|
||||
let spk = spk.into();
|
||||
self.indexed_graph.index.mark_used(change_keychain, index);
|
||||
self.persist
|
||||
@ -1825,7 +1971,11 @@ impl Wallet {
|
||||
///
|
||||
/// This can be used to build a watch-only version of a wallet
|
||||
pub fn public_descriptor(&self, keychain: KeychainKind) -> Option<&ExtendedDescriptor> {
|
||||
self.indexed_graph.index.keychains().get(&keychain)
|
||||
self.indexed_graph
|
||||
.index
|
||||
.keychains()
|
||||
.find(|(k, _)| *k == &keychain)
|
||||
.map(|(_, d)| d)
|
||||
}
|
||||
|
||||
/// Finalize a PSBT, i.e., for each input determine if sufficient data is available to pass
|
||||
@ -1876,16 +2026,8 @@ impl Wallet {
|
||||
.get_utxo_for(n)
|
||||
.and_then(|txout| self.get_descriptor_for_txout(&txout))
|
||||
.or_else(|| {
|
||||
self.indexed_graph
|
||||
.index
|
||||
.keychains()
|
||||
.iter()
|
||||
.find_map(|(_, desc)| {
|
||||
desc.derive_from_psbt_input(
|
||||
psbt_input,
|
||||
psbt.get_utxo_for(n),
|
||||
&self.secp,
|
||||
)
|
||||
self.indexed_graph.index.keychains().find_map(|(_, desc)| {
|
||||
desc.derive_from_psbt_input(psbt_input, psbt.get_utxo_for(n), &self.secp)
|
||||
})
|
||||
});
|
||||
|
||||
@ -1952,7 +2094,12 @@ impl Wallet {
|
||||
|
||||
/// The index of the next address that you would get if you were to ask the wallet for a new address
|
||||
pub fn next_derivation_index(&self, keychain: KeychainKind) -> u32 {
|
||||
self.indexed_graph.index.next_index(&keychain).0
|
||||
let keychain = self.map_keychain(keychain);
|
||||
self.indexed_graph
|
||||
.index
|
||||
.next_index(&keychain)
|
||||
.expect("Keychain must exist (we called map_keychain)")
|
||||
.0
|
||||
}
|
||||
|
||||
/// Informs the wallet that you no longer intend to broadcast a tx that was built from it.
|
||||
@ -2119,7 +2266,6 @@ impl Wallet {
|
||||
if params.add_global_xpubs {
|
||||
let all_xpubs = self
|
||||
.keychains()
|
||||
.iter()
|
||||
.flat_map(|(_, desc)| desc.get_extended_keys())
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
@ -2496,13 +2642,13 @@ fn create_signers<E: IntoWalletDescriptor>(
|
||||
) -> Result<(Arc<SignersContainer>, Arc<SignersContainer>), crate::descriptor::error::Error> {
|
||||
let (descriptor, keymap) = into_wallet_descriptor_checked(descriptor, secp, network)?;
|
||||
let signers = Arc::new(SignersContainer::build(keymap, &descriptor, secp));
|
||||
index.add_keychain(KeychainKind::External, descriptor);
|
||||
let _ = index.insert_descriptor(KeychainKind::External, descriptor);
|
||||
|
||||
let change_signers = match change_descriptor {
|
||||
Some(descriptor) => {
|
||||
let (descriptor, keymap) = into_wallet_descriptor_checked(descriptor, secp, network)?;
|
||||
let signers = Arc::new(SignersContainer::build(keymap, &descriptor, secp));
|
||||
index.add_keychain(KeychainKind::Internal, descriptor);
|
||||
let _ = index.insert_descriptor(KeychainKind::Internal, descriptor);
|
||||
signers
|
||||
}
|
||||
None => Arc::new(SignersContainer::new()),
|
||||
|
@ -1,7 +1,7 @@
|
||||
use std::str::FromStr;
|
||||
|
||||
use assert_matches::assert_matches;
|
||||
use bdk::descriptor::calc_checksum;
|
||||
use bdk::descriptor::{calc_checksum, IntoWalletDescriptor};
|
||||
use bdk::psbt::PsbtUtils;
|
||||
use bdk::signer::{SignOptions, SignerError};
|
||||
use bdk::wallet::coin_selection::{self, LargestFirstCoinSelection};
|
||||
@ -10,9 +10,11 @@ use bdk::wallet::tx_builder::AddForeignUtxoError;
|
||||
use bdk::wallet::NewError;
|
||||
use bdk::wallet::{AddressInfo, Balance, Wallet};
|
||||
use bdk::KeychainKind;
|
||||
use bdk_chain::collections::BTreeMap;
|
||||
use bdk_chain::COINBASE_MATURITY;
|
||||
use bdk_chain::{BlockId, ConfirmationTime};
|
||||
use bitcoin::hashes::Hash;
|
||||
use bitcoin::key::Secp256k1;
|
||||
use bitcoin::psbt;
|
||||
use bitcoin::script::PushBytesBuf;
|
||||
use bitcoin::sighash::{EcdsaSighashType, TapSighashType};
|
||||
@ -84,14 +86,24 @@ fn load_recovers_wallet() {
|
||||
// recover wallet
|
||||
{
|
||||
let db = bdk_file_store::Store::open(DB_MAGIC, &file_path).expect("must recover db");
|
||||
let wallet =
|
||||
Wallet::load(get_test_tr_single_sig_xprv(), None, db).expect("must recover wallet");
|
||||
let wallet = Wallet::load(db).expect("must recover wallet");
|
||||
assert_eq!(wallet.network(), Network::Testnet);
|
||||
assert_eq!(wallet.spk_index().keychains(), wallet_spk_index.keychains());
|
||||
assert_eq!(
|
||||
wallet.spk_index().keychains().collect::<Vec<_>>(),
|
||||
wallet_spk_index.keychains().collect::<Vec<_>>()
|
||||
);
|
||||
assert_eq!(
|
||||
wallet.spk_index().last_revealed_indices(),
|
||||
wallet_spk_index.last_revealed_indices()
|
||||
);
|
||||
let secp = Secp256k1::new();
|
||||
assert_eq!(
|
||||
*wallet.get_descriptor_for_keychain(KeychainKind::External),
|
||||
get_test_tr_single_sig_xprv()
|
||||
.into_wallet_descriptor(&secp, wallet.network())
|
||||
.unwrap()
|
||||
.0
|
||||
);
|
||||
}
|
||||
|
||||
// `new` can only be called on empty db
|
||||
@ -108,12 +120,12 @@ fn new_or_load() {
|
||||
let file_path = temp_dir.path().join("store.db");
|
||||
|
||||
// init wallet when non-existent
|
||||
let wallet_keychains = {
|
||||
let wallet_keychains: BTreeMap<_, _> = {
|
||||
let db = bdk_file_store::Store::open_or_create_new(DB_MAGIC, &file_path)
|
||||
.expect("must create db");
|
||||
let wallet = Wallet::new_or_load(get_test_wpkh(), None, db, Network::Testnet)
|
||||
.expect("must init wallet");
|
||||
wallet.keychains().clone()
|
||||
wallet.keychains().map(|(k, v)| (*k, v.clone())).collect()
|
||||
};
|
||||
|
||||
// wrong network
|
||||
@ -162,6 +174,49 @@ fn new_or_load() {
|
||||
);
|
||||
}
|
||||
|
||||
// wrong external descriptor
|
||||
{
|
||||
let exp_descriptor = get_test_tr_single_sig();
|
||||
let got_descriptor = get_test_wpkh()
|
||||
.into_wallet_descriptor(&Secp256k1::new(), Network::Testnet)
|
||||
.unwrap()
|
||||
.0;
|
||||
|
||||
let db =
|
||||
bdk_file_store::Store::open_or_create_new(DB_MAGIC, &file_path).expect("must open db");
|
||||
let err = Wallet::new_or_load(exp_descriptor, None, db, Network::Testnet)
|
||||
.expect_err("wrong external descriptor");
|
||||
assert!(
|
||||
matches!(
|
||||
err,
|
||||
bdk::wallet::NewOrLoadError::LoadedDescriptorDoesNotMatch { ref got, keychain }
|
||||
if got == &Some(got_descriptor) && keychain == KeychainKind::External
|
||||
),
|
||||
"err: {}",
|
||||
err,
|
||||
);
|
||||
}
|
||||
|
||||
// wrong internal descriptor
|
||||
{
|
||||
let exp_descriptor = Some(get_test_tr_single_sig());
|
||||
let got_descriptor = None;
|
||||
|
||||
let db =
|
||||
bdk_file_store::Store::open_or_create_new(DB_MAGIC, &file_path).expect("must open db");
|
||||
let err = Wallet::new_or_load(get_test_wpkh(), exp_descriptor, db, Network::Testnet)
|
||||
.expect_err("wrong internal descriptor");
|
||||
assert!(
|
||||
matches!(
|
||||
err,
|
||||
bdk::wallet::NewOrLoadError::LoadedDescriptorDoesNotMatch { ref got, keychain }
|
||||
if got == &got_descriptor && keychain == KeychainKind::Internal
|
||||
),
|
||||
"err: {}",
|
||||
err,
|
||||
);
|
||||
}
|
||||
|
||||
// all parameters match
|
||||
{
|
||||
let db =
|
||||
@ -169,7 +224,10 @@ fn new_or_load() {
|
||||
let wallet = Wallet::new_or_load(get_test_wpkh(), None, db, Network::Testnet)
|
||||
.expect("must recover wallet");
|
||||
assert_eq!(wallet.network(), Network::Testnet);
|
||||
assert_eq!(wallet.keychains(), &wallet_keychains);
|
||||
assert!(wallet
|
||||
.keychains()
|
||||
.map(|(k, v)| (*k, v.clone()))
|
||||
.eq(wallet_keychains));
|
||||
}
|
||||
}
|
||||
|
||||
@ -181,7 +239,6 @@ fn test_descriptor_checksum() {
|
||||
|
||||
let raw_descriptor = wallet
|
||||
.keychains()
|
||||
.iter()
|
||||
.next()
|
||||
.unwrap()
|
||||
.1
|
||||
|
@ -26,6 +26,6 @@ rand = "0.8"
|
||||
proptest = "1.2.0"
|
||||
|
||||
[features]
|
||||
default = ["std"]
|
||||
std = ["bitcoin/std", "miniscript/std"]
|
||||
serde = ["serde_crate", "bitcoin/serde"]
|
||||
default = ["std", "miniscript"]
|
||||
std = ["bitcoin/std", "miniscript?/std"]
|
||||
serde = ["serde_crate", "bitcoin/serde", "miniscript?/serde"]
|
||||
|
@ -1,10 +1,29 @@
use crate::miniscript::{Descriptor, DescriptorPublicKey};
use crate::{
    alloc::{string::ToString, vec::Vec},
    miniscript::{Descriptor, DescriptorPublicKey},
};
use bitcoin::hashes::{hash_newtype, sha256, Hash};

hash_newtype! {
    /// Represents the ID of a descriptor, defined as the sha256 hash of
    /// the descriptor string, checksum excluded.
    ///
    /// This is useful for having a fixed-length unique representation of a descriptor,
    /// in particular, we use it to persist application state changes related to the
    /// descriptor without having to re-write the whole descriptor each time.
    ///
    pub struct DescriptorId(pub sha256::Hash);
}

/// A trait to extend the functionality of a miniscript descriptor.
pub trait DescriptorExt {
    /// Returns the minimum value (in satoshis) at which an output is broadcastable.
    /// Panics if the descriptor wildcard is hardened.
    fn dust_value(&self) -> u64;

    /// Returns the descriptor id, calculated as the sha256 of the descriptor, checksum not
    /// included.
    fn descriptor_id(&self) -> DescriptorId;
}

impl DescriptorExt for Descriptor<DescriptorPublicKey> {
@ -15,4 +34,11 @@ impl DescriptorExt for Descriptor<DescriptorPublicKey> {
            .dust_value()
            .to_sat()
    }

    fn descriptor_id(&self) -> DescriptorId {
        let desc = self.to_string();
        let desc_without_checksum = desc.split('#').next().expect("Must be here");
        let descriptor_bytes = <Vec<u8>>::from(desc_without_checksum.as_bytes());
        DescriptorId(sha256::Hash::hash(&descriptor_bytes))
    }
}
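As a usage sketch of the new `DescriptorExt::descriptor_id` hunk above: the id is the sha256 of the descriptor string with any `#checksum` suffix stripped. The exact re-export paths (`bdk_chain::DescriptorExt`, `bdk_chain::DescriptorId`, `bdk_chain::miniscript`, `bdk_chain::bitcoin`) are assumed from the imports used elsewhere in this diff, and the descriptor string is the test vector that already appears in the docs here.

```rust
use bdk_chain::miniscript::{Descriptor, DescriptorPublicKey};
use bdk_chain::{DescriptorExt, DescriptorId};

fn main() {
    let secp = bdk_chain::bitcoin::secp256k1::Secp256k1::signing_only();
    let (descriptor, _keymap) = Descriptor::<DescriptorPublicKey>::parse_descriptor(
        &secp,
        "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/0/*)",
    )
    .expect("valid descriptor");

    // Stable across checksum-annotated and checksum-free spellings of the
    // same descriptor, since the `#checksum` suffix is stripped before hashing.
    let id: DescriptorId = descriptor.descriptor_id();
    println!("descriptor id: {}", id);
}
```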
@ -4,7 +4,6 @@ use alloc::vec::Vec;
|
||||
use bitcoin::{Block, OutPoint, Transaction, TxOut, Txid};
|
||||
|
||||
use crate::{
|
||||
keychain,
|
||||
tx_graph::{self, TxGraph},
|
||||
Anchor, AnchorFromBlockPosition, Append, BlockId,
|
||||
};
|
||||
@ -320,8 +319,9 @@ impl<A, IA: Default> From<tx_graph::ChangeSet<A>> for ChangeSet<A, IA> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<A, K> From<keychain::ChangeSet<K>> for ChangeSet<A, keychain::ChangeSet<K>> {
|
||||
fn from(indexer: keychain::ChangeSet<K>) -> Self {
|
||||
#[cfg(feature = "miniscript")]
|
||||
impl<A, K> From<crate::keychain::ChangeSet<K>> for ChangeSet<A, crate::keychain::ChangeSet<K>> {
|
||||
fn from(indexer: crate::keychain::ChangeSet<K>) -> Self {
|
||||
Self {
|
||||
graph: Default::default(),
|
||||
indexer,
|
||||
|
@ -10,78 +10,12 @@
|
||||
//!
|
||||
//! [`SpkTxOutIndex`]: crate::SpkTxOutIndex
|
||||
|
||||
use crate::{collections::BTreeMap, Append};
|
||||
|
||||
#[cfg(feature = "miniscript")]
|
||||
mod txout_index;
|
||||
use bitcoin::Amount;
|
||||
#[cfg(feature = "miniscript")]
|
||||
pub use txout_index::*;
|
||||
|
||||
/// Represents updates to the derivation index of a [`KeychainTxOutIndex`].
|
||||
/// It maps each keychain `K` to its last revealed index.
|
||||
///
|
||||
/// It can be applied to [`KeychainTxOutIndex`] with [`apply_changeset`]. [`ChangeSet`]s are
|
||||
/// monotone in that they will never decrease the revealed derivation index.
|
||||
///
|
||||
/// [`KeychainTxOutIndex`]: crate::keychain::KeychainTxOutIndex
|
||||
/// [`apply_changeset`]: crate::keychain::KeychainTxOutIndex::apply_changeset
|
||||
#[derive(Clone, Debug, PartialEq)]
|
||||
#[cfg_attr(
|
||||
feature = "serde",
|
||||
derive(serde::Deserialize, serde::Serialize),
|
||||
serde(
|
||||
crate = "serde_crate",
|
||||
bound(
|
||||
deserialize = "K: Ord + serde::Deserialize<'de>",
|
||||
serialize = "K: Ord + serde::Serialize"
|
||||
)
|
||||
)
|
||||
)]
|
||||
#[must_use]
|
||||
pub struct ChangeSet<K>(pub BTreeMap<K, u32>);
|
||||
|
||||
impl<K> ChangeSet<K> {
|
||||
/// Get the inner map of the keychain to its new derivation index.
|
||||
pub fn as_inner(&self) -> &BTreeMap<K, u32> {
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
|
||||
impl<K: Ord> Append for ChangeSet<K> {
|
||||
/// Append another [`ChangeSet`] into self.
|
||||
///
|
||||
/// If the keychain already exists, increase the index when the other's index > self's index.
|
||||
/// If the keychain did not exist, append the new keychain.
|
||||
fn append(&mut self, mut other: Self) {
|
||||
self.0.iter_mut().for_each(|(key, index)| {
|
||||
if let Some(other_index) = other.0.remove(key) {
|
||||
*index = other_index.max(*index);
|
||||
}
|
||||
});
|
||||
// We use `extend` instead of `BTreeMap::append` due to performance issues with `append`.
|
||||
// Refer to https://github.com/rust-lang/rust/issues/34666#issuecomment-675658420
|
||||
self.0.extend(other.0);
|
||||
}
|
||||
|
||||
/// Returns whether the changeset is empty.
|
||||
fn is_empty(&self) -> bool {
|
||||
self.0.is_empty()
|
||||
}
|
||||
}
|
||||
|
||||
impl<K> Default for ChangeSet<K> {
|
||||
fn default() -> Self {
|
||||
Self(Default::default())
|
||||
}
|
||||
}
|
||||
|
||||
impl<K> AsRef<BTreeMap<K, u32>> for ChangeSet<K> {
|
||||
fn as_ref(&self) -> &BTreeMap<K, u32> {
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
|
||||
/// Balance, differentiated into various categories.
|
||||
#[derive(Debug, PartialEq, Eq, Clone, Default)]
|
||||
#[cfg_attr(
|
||||
@ -137,40 +71,3 @@ impl core::ops::Add for Balance {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn append_keychain_derivation_indices() {
|
||||
#[derive(Ord, PartialOrd, Eq, PartialEq, Clone, Debug)]
|
||||
enum Keychain {
|
||||
One,
|
||||
Two,
|
||||
Three,
|
||||
Four,
|
||||
}
|
||||
let mut lhs_di = BTreeMap::<Keychain, u32>::default();
|
||||
let mut rhs_di = BTreeMap::<Keychain, u32>::default();
|
||||
lhs_di.insert(Keychain::One, 7);
|
||||
lhs_di.insert(Keychain::Two, 0);
|
||||
rhs_di.insert(Keychain::One, 3);
|
||||
rhs_di.insert(Keychain::Two, 5);
|
||||
lhs_di.insert(Keychain::Three, 3);
|
||||
rhs_di.insert(Keychain::Four, 4);
|
||||
|
||||
let mut lhs = ChangeSet(lhs_di);
|
||||
let rhs = ChangeSet(rhs_di);
|
||||
lhs.append(rhs);
|
||||
|
||||
// Existing index doesn't update if the new index in `other` is lower than `self`.
|
||||
assert_eq!(lhs.0.get(&Keychain::One), Some(&7));
|
||||
// Existing index updates if the new index in `other` is higher than `self`.
|
||||
assert_eq!(lhs.0.get(&Keychain::Two), Some(&5));
|
||||
// Existing index is unchanged if keychain doesn't exist in `other`.
|
||||
assert_eq!(lhs.0.get(&Keychain::Three), Some(&3));
|
||||
// New keychain gets added if the keychain is in `other` but not in `self`.
|
||||
assert_eq!(lhs.0.get(&Keychain::Four), Some(&4));
|
||||
}
|
||||
}
|
||||
|
@ -3,9 +3,9 @@ use crate::{
|
||||
indexed_tx_graph::Indexer,
|
||||
miniscript::{Descriptor, DescriptorPublicKey},
|
||||
spk_iter::BIP32_MAX_INDEX,
|
||||
SpkIterator, SpkTxOutIndex,
|
||||
DescriptorExt, DescriptorId, SpkIterator, SpkTxOutIndex,
|
||||
};
|
||||
use bitcoin::{Amount, OutPoint, Script, SignedAmount, Transaction, TxOut, Txid};
|
||||
use bitcoin::{hashes::Hash, Amount, OutPoint, Script, SignedAmount, Transaction, TxOut, Txid};
|
||||
use core::{
|
||||
fmt::Debug,
|
||||
ops::{Bound, RangeBounds},
|
||||
@ -13,6 +13,79 @@ use core::{
|
||||
|
||||
use crate::Append;
|
||||
|
||||
/// Represents updates to the derivation index of a [`KeychainTxOutIndex`].
|
||||
/// It maps each keychain `K` to a descriptor and its last revealed index.
|
||||
///
|
||||
/// It can be applied to [`KeychainTxOutIndex`] with [`apply_changeset`]. [`ChangeSet`]s are
|
||||
/// monotone in that they will never decrease the revealed derivation index.
|
||||
///
|
||||
/// [`KeychainTxOutIndex`]: crate::keychain::KeychainTxOutIndex
|
||||
/// [`apply_changeset`]: crate::keychain::KeychainTxOutIndex::apply_changeset
|
||||
#[derive(Clone, Debug, PartialEq)]
|
||||
#[cfg_attr(
|
||||
feature = "serde",
|
||||
derive(serde::Deserialize, serde::Serialize),
|
||||
serde(
|
||||
crate = "serde_crate",
|
||||
bound(
|
||||
deserialize = "K: Ord + serde::Deserialize<'de>",
|
||||
serialize = "K: Ord + serde::Serialize"
|
||||
)
|
||||
)
|
||||
)]
|
||||
#[must_use]
|
||||
pub struct ChangeSet<K> {
|
||||
/// Contains the keychains that have been added and their respective descriptor
|
||||
pub keychains_added: BTreeMap<K, Descriptor<DescriptorPublicKey>>,
|
||||
/// Contains for each descriptor_id the last revealed index of derivation
|
||||
pub last_revealed: BTreeMap<DescriptorId, u32>,
|
||||
}
|
||||
|
||||
impl<K: Ord> Append for ChangeSet<K> {
|
||||
/// Append another [`ChangeSet`] into self.
|
||||
///
|
||||
/// For each keychain in `keychains_added` in the given [`ChangeSet`]:
|
||||
/// If the keychain already exists with a different descriptor, we overwrite the old descriptor.
|
||||
///
|
||||
/// For each `last_revealed` in the given [`ChangeSet`]:
|
||||
/// If the keychain already exists, increase the index when the other's index > self's index.
|
||||
fn append(&mut self, other: Self) {
|
||||
// We use `extend` instead of `BTreeMap::append` due to performance issues with `append`.
|
||||
// Refer to https://github.com/rust-lang/rust/issues/34666#issuecomment-675658420
|
||||
self.keychains_added.extend(other.keychains_added);
|
||||
|
||||
// for `last_revealed`, entries of `other` will take precedence ONLY if it is greater than
|
||||
// what was originally in `self`.
|
||||
for (desc_id, index) in other.last_revealed {
|
||||
use crate::collections::btree_map::Entry;
|
||||
match self.last_revealed.entry(desc_id) {
|
||||
Entry::Vacant(entry) => {
|
||||
entry.insert(index);
|
||||
}
|
||||
Entry::Occupied(mut entry) => {
|
||||
if *entry.get() < index {
|
||||
entry.insert(index);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns whether the changeset is empty.
|
||||
fn is_empty(&self) -> bool {
|
||||
self.last_revealed.is_empty() && self.keychains_added.is_empty()
|
||||
}
|
||||
}
|
||||
|
||||
impl<K> Default for ChangeSet<K> {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
last_revealed: BTreeMap::default(),
|
||||
keychains_added: BTreeMap::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const DEFAULT_LOOKAHEAD: u32 = 25;
|
||||
|
||||
/// [`KeychainTxOutIndex`] controls how script pubkeys are revealed for multiple keychains, and
|
||||
@ -54,7 +127,7 @@ const DEFAULT_LOOKAHEAD: u32 = 25;
|
||||
///
|
||||
/// # Change sets
|
||||
///
|
||||
/// Methods that can update the last revealed index will return [`super::ChangeSet`] to report
|
||||
/// Methods that can update the last revealed index or add keychains will return [`super::ChangeSet`] to report
|
||||
/// these changes. This can be persisted for future recovery.
|
||||
///
|
||||
/// ## Synopsis
|
||||
@ -79,14 +152,43 @@ const DEFAULT_LOOKAHEAD: u32 = 25;
|
||||
/// # let secp = bdk_chain::bitcoin::secp256k1::Secp256k1::signing_only();
|
||||
/// # let (external_descriptor,_) = Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/0/*)").unwrap();
|
||||
/// # let (internal_descriptor,_) = Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/1/*)").unwrap();
|
||||
/// # let (descriptor_for_user_42, _) = Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/2/*)").unwrap();
|
||||
/// txout_index.add_keychain(MyKeychain::External, external_descriptor);
|
||||
/// txout_index.add_keychain(MyKeychain::Internal, internal_descriptor);
|
||||
/// txout_index.add_keychain(MyKeychain::MyAppUser { user_id: 42 }, descriptor_for_user_42);
|
||||
/// # let (descriptor_42, _) = Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/2/*)").unwrap();
|
||||
/// let _ = txout_index.insert_descriptor(MyKeychain::External, external_descriptor);
|
||||
/// let _ = txout_index.insert_descriptor(MyKeychain::Internal, internal_descriptor);
|
||||
/// let _ = txout_index.insert_descriptor(MyKeychain::MyAppUser { user_id: 42 }, descriptor_42);
|
||||
///
|
||||
/// let new_spk_for_user = txout_index.reveal_next_spk(&MyKeychain::MyAppUser{ user_id: 42 });
|
||||
/// ```
|
||||
///
/// # Non-recommended keychain to descriptor assignments
///
/// A keychain (`K`) is used to identify a descriptor. However, the following keychain to descriptor
/// arrangements result in behavior that is harder to reason about and is not recommended.
///
/// ## Multiple keychains identifying the same descriptor
///
/// Although a single keychain variant can only identify a single descriptor, multiple keychain
/// variants can identify the same descriptor.
///
/// If multiple keychains identify the same descriptor:
/// 1. Methods that take in a keychain (such as [`reveal_next_spk`]) will work normally when any
/// keychain (that identifies that descriptor) is passed in.
/// 2. Methods that return data associated with a descriptor (such as [`outpoints`], [`txouts`],
/// [`unused_spks`], etc.) will return it under the highest-ranked keychain variant that identifies
/// the descriptor. Rank is determined by the [`Ord`] implementation of the keychain type.
///
/// This arrangement is not recommended since some methods will return a single keychain variant
/// even though multiple keychain variants identify the same descriptor.
///
/// ## Reassigning the descriptor of a single keychain
///
/// Descriptors added to [`KeychainTxOutIndex`] are never removed. However, a keychain that
/// identifies a descriptor can be reassigned to identify a different descriptor. This may result in
/// a situation where a descriptor has no associated keychain(s), and relevant [`TxOut`]s,
/// [`OutPoint`]s and [`Script`]s (of that descriptor) will not be returned by [`KeychainTxOutIndex`].
/// Therefore, reassigning the descriptor of a single keychain is not recommended.
|
||||
///
|
||||
/// [`Ord`]: core::cmp::Ord
|
||||
/// [`SpkTxOutIndex`]: crate::spk_txout_index::SpkTxOutIndex
|
||||
/// [`Descriptor`]: crate::miniscript::Descriptor
|
||||
@ -99,13 +201,27 @@ const DEFAULT_LOOKAHEAD: u32 = 25;
|
||||
/// [`new`]: KeychainTxOutIndex::new
|
||||
/// [`unbounded_spk_iter`]: KeychainTxOutIndex::unbounded_spk_iter
|
||||
/// [`all_unbounded_spk_iters`]: KeychainTxOutIndex::all_unbounded_spk_iters
|
||||
/// [`outpoints`]: KeychainTxOutIndex::outpoints
|
||||
/// [`txouts`]: KeychainTxOutIndex::txouts
|
||||
/// [`unused_spks`]: KeychainTxOutIndex::unused_spks
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct KeychainTxOutIndex<K> {
|
||||
inner: SpkTxOutIndex<(K, u32)>,
|
||||
// descriptors of each keychain
|
||||
keychains: BTreeMap<K, Descriptor<DescriptorPublicKey>>,
|
||||
inner: SpkTxOutIndex<(DescriptorId, u32)>,
|
||||
// keychain -> (descriptor, descriptor id) map
|
||||
keychains_to_descriptors: BTreeMap<K, (DescriptorId, Descriptor<DescriptorPublicKey>)>,
|
||||
// descriptor id -> keychain set
|
||||
// Because different keychains can have the same descriptor, we rank keychains by `Ord` so that
|
||||
// the first keychain variant (according to `Ord`) has the highest rank. When associated
|
||||
// data (such as spks, outpoints) are returned with a keychain, we return the highest-ranked
|
||||
// keychain with it.
|
||||
descriptor_ids_to_keychain_set: HashMap<DescriptorId, BTreeSet<K>>,
|
||||
// descriptor_id -> descriptor map
|
||||
// This is a "monotone" map, meaning that its size keeps growing, i.e., we never delete
|
||||
// descriptors from it. This is useful for revealing spks for descriptors that don't have
|
||||
// keychains associated.
|
||||
descriptor_ids_to_descriptors: BTreeMap<DescriptorId, Descriptor<DescriptorPublicKey>>,
|
||||
// last revealed indexes
|
||||
last_revealed: BTreeMap<K, u32>,
|
||||
last_revealed: BTreeMap<DescriptorId, u32>,
|
||||
// lookahead settings for each keychain
|
||||
lookahead: u32,
|
||||
}
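To illustrate the keychain-variant ranking that the docs and struct comments above describe, here is a rough sketch. It assumes the method signatures listed in the PR changelog (`insert_descriptor` returning a changeset, `reveal_next_spk` and `last_revealed_index` returning `Option`); it is not taken verbatim from the test suite, and the descriptor string is the test vector used elsewhere in this diff.

```rust
use bdk_chain::keychain::KeychainTxOutIndex;
use bdk_chain::miniscript::{Descriptor, DescriptorPublicKey};

#[derive(Clone, Ord, PartialOrd, Eq, PartialEq, Debug)]
enum MyKeychain {
    First,  // ranks highest because it is smallest by `Ord`
    Second,
}

fn main() {
    let secp = bdk_chain::bitcoin::secp256k1::Secp256k1::signing_only();
    let (descriptor, _) = Descriptor::<DescriptorPublicKey>::parse_descriptor(
        &secp,
        "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/0/*)",
    )
    .expect("valid descriptor");

    let mut index = KeychainTxOutIndex::<MyKeychain>::new(10);
    // Two keychain variants identifying the same descriptor: the
    // "non-recommended" arrangement the docs above warn about.
    let _ = index.insert_descriptor(MyKeychain::First, descriptor.clone());
    let _ = index.insert_descriptor(MyKeychain::Second, descriptor);

    // Revealing through either variant advances the shared descriptor's
    // last-revealed index, so it is visible through the other variant too.
    let _ = index.reveal_next_spk(&MyKeychain::Second);
    assert_eq!(index.last_revealed_index(&MyKeychain::First), Some(0));

    // Data returned *with* a keychain (outpoints, txouts, unused spks, ...)
    // comes back under the highest-ranked variant, i.e. `MyKeychain::First`.
}
```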
|
||||
@ -121,7 +237,13 @@ impl<K: Clone + Ord + Debug> Indexer for KeychainTxOutIndex<K> {
|
||||
|
||||
fn index_txout(&mut self, outpoint: OutPoint, txout: &TxOut) -> Self::ChangeSet {
|
||||
match self.inner.scan_txout(outpoint, txout).cloned() {
|
||||
Some((keychain, index)) => self.reveal_to_target(&keychain, index).1,
|
||||
Some((descriptor_id, index)) => {
|
||||
// We want to reveal spks for descriptors that aren't tracked by any keychain, and
|
||||
// so we call reveal with descriptor_id
|
||||
let (_, changeset) = self.reveal_to_target_with_id(descriptor_id, index)
|
||||
.expect("descriptors are added in a monotone manner, there cannot be a descriptor id with no corresponding descriptor");
|
||||
changeset
|
||||
}
|
||||
None => super::ChangeSet::default(),
|
||||
}
|
||||
}
|
||||
@ -135,7 +257,13 @@ impl<K: Clone + Ord + Debug> Indexer for KeychainTxOutIndex<K> {
|
||||
}
|
||||
|
||||
fn initial_changeset(&self) -> Self::ChangeSet {
|
||||
super::ChangeSet(self.last_revealed.clone())
|
||||
super::ChangeSet {
|
||||
keychains_added: self
|
||||
.keychains()
|
||||
.map(|(k, v)| (k.clone(), v.clone()))
|
||||
.collect(),
|
||||
last_revealed: self.last_revealed.clone(),
|
||||
}
|
||||
}
|
||||
|
||||
fn apply_changeset(&mut self, changeset: Self::ChangeSet) {
|
||||
@ -161,7 +289,9 @@ impl<K> KeychainTxOutIndex<K> {
|
||||
pub fn new(lookahead: u32) -> Self {
|
||||
Self {
|
||||
inner: SpkTxOutIndex::default(),
|
||||
keychains: BTreeMap::new(),
|
||||
keychains_to_descriptors: BTreeMap::new(),
|
||||
descriptor_ids_to_keychain_set: HashMap::new(),
|
||||
descriptor_ids_to_descriptors: BTreeMap::new(),
|
||||
last_revealed: BTreeMap::new(),
|
||||
lookahead,
|
||||
}
|
||||
@ -170,26 +300,37 @@ impl<K> KeychainTxOutIndex<K> {
|
||||
|
||||
/// Methods that are *re-exposed* from the internal [`SpkTxOutIndex`].
|
||||
impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
|
||||
/// Get the highest-ranked keychain that is currently associated with the given `desc_id`.
|
||||
fn keychain_of_desc_id(&self, desc_id: &DescriptorId) -> Option<&K> {
|
||||
let keychains = self.descriptor_ids_to_keychain_set.get(desc_id)?;
|
||||
keychains.iter().next()
|
||||
}
|
||||
|
||||
/// Return a reference to the internal [`SpkTxOutIndex`].
|
||||
///
|
||||
/// **WARNING:** The internal index will contain lookahead spks. Refer to
|
||||
/// [struct-level docs](KeychainTxOutIndex) for more about `lookahead`.
|
||||
pub fn inner(&self) -> &SpkTxOutIndex<(K, u32)> {
|
||||
pub fn inner(&self) -> &SpkTxOutIndex<(DescriptorId, u32)> {
|
||||
&self.inner
|
||||
}
|
||||
|
||||
/// Get a reference to the set of indexed outpoints.
|
||||
pub fn outpoints(&self) -> &BTreeSet<((K, u32), OutPoint)> {
|
||||
self.inner.outpoints()
|
||||
/// Get the set of indexed outpoints, corresponding to tracked keychains.
|
||||
pub fn outpoints(&self) -> impl DoubleEndedIterator<Item = ((K, u32), OutPoint)> + '_ {
|
||||
self.inner
|
||||
.outpoints()
|
||||
.iter()
|
||||
.filter_map(|((desc_id, index), op)| {
|
||||
let keychain = self.keychain_of_desc_id(desc_id)?;
|
||||
Some(((keychain.clone(), *index), *op))
|
||||
})
|
||||
}
|
||||
|
||||
/// Iterate over known txouts that spend to tracked script pubkeys.
|
||||
pub fn txouts(
|
||||
&self,
|
||||
) -> impl DoubleEndedIterator<Item = (K, u32, OutPoint, &TxOut)> + ExactSizeIterator {
|
||||
self.inner
|
||||
.txouts()
|
||||
.map(|((k, i), op, txo)| (k.clone(), *i, op, txo))
|
||||
pub fn txouts(&self) -> impl DoubleEndedIterator<Item = (K, u32, OutPoint, &TxOut)> + '_ {
|
||||
self.inner.txouts().filter_map(|((desc_id, i), op, txo)| {
|
||||
let keychain = self.keychain_of_desc_id(desc_id)?;
|
||||
Some((keychain.clone(), *i, op, txo))
|
||||
})
|
||||
}
|
||||
|
||||
/// Finds all txouts on a transaction that has previously been scanned and indexed.
|
||||
@ -199,32 +340,39 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
|
||||
) -> impl DoubleEndedIterator<Item = (K, u32, OutPoint, &TxOut)> {
|
||||
self.inner
|
||||
.txouts_in_tx(txid)
|
||||
.map(|((k, i), op, txo)| (k.clone(), *i, op, txo))
|
||||
.filter_map(|((desc_id, i), op, txo)| {
|
||||
let keychain = self.keychain_of_desc_id(desc_id)?;
|
||||
Some((keychain.clone(), *i, op, txo))
|
||||
})
|
||||
}
|
||||
|
||||
/// Return the [`TxOut`] of `outpoint` if it has been indexed.
|
||||
/// Return the [`TxOut`] of `outpoint` if it has been indexed, and if it corresponds to a
|
||||
/// tracked keychain.
|
||||
///
|
||||
/// The associated keychain and keychain index of the txout's spk is also returned.
|
||||
///
|
||||
/// This calls [`SpkTxOutIndex::txout`] internally.
|
||||
pub fn txout(&self, outpoint: OutPoint) -> Option<(K, u32, &TxOut)> {
|
||||
self.inner
|
||||
.txout(outpoint)
|
||||
.map(|((k, i), txo)| (k.clone(), *i, txo))
|
||||
let ((descriptor_id, index), txo) = self.inner.txout(outpoint)?;
|
||||
let keychain = self.keychain_of_desc_id(descriptor_id)?;
|
||||
Some((keychain.clone(), *index, txo))
|
||||
}
|
||||
|
||||
/// Return the script that exists under the given `keychain`'s `index`.
|
||||
///
|
||||
/// This calls [`SpkTxOutIndex::spk_at_index`] internally.
|
||||
pub fn spk_at_index(&self, keychain: K, index: u32) -> Option<&Script> {
|
||||
self.inner.spk_at_index(&(keychain, index))
|
||||
let descriptor_id = self.keychains_to_descriptors.get(&keychain)?.0;
|
||||
self.inner.spk_at_index(&(descriptor_id, index))
|
||||
}
|
||||
|
||||
/// Returns the keychain and keychain index associated with the spk.
|
||||
///
|
||||
/// This calls [`SpkTxOutIndex::index_of_spk`] internally.
|
||||
pub fn index_of_spk(&self, script: &Script) -> Option<(K, u32)> {
|
||||
self.inner.index_of_spk(script).cloned()
|
||||
let (desc_id, last_index) = self.inner.index_of_spk(script)?;
|
||||
let keychain = self.keychain_of_desc_id(desc_id)?;
|
||||
Some((keychain.clone(), *last_index))
|
||||
}
|
||||
|
||||
/// Returns whether the spk under the `keychain`'s `index` has been used.
|
||||
@ -234,7 +382,11 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
|
||||
///
|
||||
/// This calls [`SpkTxOutIndex::is_used`] internally.
|
||||
pub fn is_used(&self, keychain: K, index: u32) -> bool {
|
||||
self.inner.is_used(&(keychain, index))
|
||||
let descriptor_id = self.keychains_to_descriptors.get(&keychain).map(|k| k.0);
|
||||
match descriptor_id {
|
||||
Some(descriptor_id) => self.inner.is_used(&(descriptor_id, index)),
|
||||
None => false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Marks the script pubkey at `index` as used even though the tracker hasn't seen an output
|
||||
@ -242,7 +394,9 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
|
||||
///
|
||||
/// This only has an effect when the `index` had been added to `self` already and was unused.
|
||||
///
|
||||
/// Returns whether the `index` was initially present as `unused`.
|
||||
/// Returns whether the spk under the given `keychain` and `index` is successfully
|
||||
/// marked as used. Returns false either when there is no descriptor under the given
|
||||
/// keychain, or when the spk is already marked as used.
|
||||
///
|
||||
/// This is useful when you want to reserve a script pubkey for something but don't want to add
|
||||
/// the transaction output using it to the index yet. Other callers will consider `index` on
|
||||
@ -252,7 +406,11 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
|
||||
///
|
||||
/// [`unmark_used`]: Self::unmark_used
|
||||
pub fn mark_used(&mut self, keychain: K, index: u32) -> bool {
|
||||
self.inner.mark_used(&(keychain, index))
|
||||
let descriptor_id = self.keychains_to_descriptors.get(&keychain).map(|k| k.0);
|
||||
match descriptor_id {
|
||||
Some(descriptor_id) => self.inner.mark_used(&(descriptor_id, index)),
|
||||
None => false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Undoes the effect of [`mark_used`]. Returns whether the `index` is inserted back into
|
||||
@ -265,7 +423,11 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
|
||||
///
|
||||
/// [`mark_used`]: Self::mark_used
|
||||
pub fn unmark_used(&mut self, keychain: K, index: u32) -> bool {
|
||||
self.inner.unmark_used(&(keychain, index))
|
||||
let descriptor_id = self.keychains_to_descriptors.get(&keychain).map(|k| k.0);
|
||||
match descriptor_id {
|
||||
Some(descriptor_id) => self.inner.unmark_used(&(descriptor_id, index)),
|
||||
None => false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Computes the total value transfer effect `tx` has on the script pubkeys belonging to the
|
||||
@ -279,7 +441,7 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
|
||||
range: impl RangeBounds<K>,
|
||||
) -> (Amount, Amount) {
|
||||
self.inner
|
||||
.sent_and_received(tx, Self::map_to_inner_bounds(range))
|
||||
.sent_and_received(tx, self.map_to_inner_bounds(range))
|
||||
}
|
||||
|
||||
/// Computes the net value that this transaction gives to the script pubkeys in the index and
|
||||
@ -290,34 +452,76 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
|
||||
///
|
||||
/// [`sent_and_received`]: Self::sent_and_received
|
||||
pub fn net_value(&self, tx: &Transaction, range: impl RangeBounds<K>) -> SignedAmount {
|
||||
self.inner.net_value(tx, Self::map_to_inner_bounds(range))
|
||||
self.inner.net_value(tx, self.map_to_inner_bounds(range))
|
||||
}
|
||||
}
|
||||
|
||||
impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
|
||||
/// Return a reference to the internal map of keychain to descriptors.
|
||||
pub fn keychains(&self) -> &BTreeMap<K, Descriptor<DescriptorPublicKey>> {
|
||||
&self.keychains
|
||||
/// Return the map of the keychain to descriptors.
|
||||
pub fn keychains(
|
||||
&self,
|
||||
) -> impl DoubleEndedIterator<Item = (&K, &Descriptor<DescriptorPublicKey>)> + ExactSizeIterator + '_
|
||||
{
|
||||
self.keychains_to_descriptors
|
||||
.iter()
|
||||
.map(|(k, (_, d))| (k, d))
|
||||
}
|
||||
|
||||
/// Add a keychain to the tracker's `txout_index` with a descriptor to derive addresses.
|
||||
/// Insert a descriptor with a keychain associated to it.
|
||||
///
|
||||
/// Adding a keychain means you will be able to derive new script pubkeys under that keychain
|
||||
/// Adding a descriptor means you will be able to derive new script pubkeys under it
|
||||
/// and the txout index will discover transaction outputs with those script pubkeys.
|
||||
///
|
||||
/// # Panics
|
||||
///
|
||||
/// This will panic if a different `descriptor` is introduced to the same `keychain`.
|
||||
pub fn add_keychain(&mut self, keychain: K, descriptor: Descriptor<DescriptorPublicKey>) {
|
||||
let old_descriptor = &*self
|
||||
.keychains
|
||||
.entry(keychain.clone())
|
||||
.or_insert_with(|| descriptor.clone());
|
||||
assert_eq!(
|
||||
&descriptor, old_descriptor,
|
||||
"keychain already contains a different descriptor"
|
||||
);
|
||||
/// When trying to add a keychain that already existed under a different descriptor, or a descriptor
|
||||
/// that already existed with a different keychain, the old keychain (or descriptor) will be
|
||||
/// overwritten.
|
||||
pub fn insert_descriptor(
|
||||
&mut self,
|
||||
keychain: K,
|
||||
descriptor: Descriptor<DescriptorPublicKey>,
|
||||
) -> super::ChangeSet<K> {
|
||||
let mut changeset = super::ChangeSet::<K>::default();
|
||||
let desc_id = descriptor.descriptor_id();
|
||||
|
||||
let old_desc = self
|
||||
.keychains_to_descriptors
|
||||
.insert(keychain.clone(), (desc_id, descriptor.clone()));
|
||||
|
||||
if let Some((old_desc_id, _)) = old_desc {
|
||||
// nothing needs to be done if caller reinserted the same descriptor under the same
|
||||
// keychain
|
||||
if old_desc_id == desc_id {
|
||||
return changeset;
|
||||
}
|
||||
// we should remove old descriptor that is associated with this keychain as the index
|
||||
// is designed to track one descriptor per keychain (however different keychains can
|
||||
// share the same descriptor)
|
||||
let _is_keychain_removed = self
|
||||
.descriptor_ids_to_keychain_set
|
||||
.get_mut(&old_desc_id)
|
||||
.expect("we must have already inserted this descriptor")
|
||||
.remove(&keychain);
|
||||
debug_assert!(_is_keychain_removed);
|
||||
}
|
||||
|
||||
self.descriptor_ids_to_keychain_set
|
||||
.entry(desc_id)
|
||||
.or_default()
|
||||
.insert(keychain.clone());
|
||||
self.descriptor_ids_to_descriptors
|
||||
.insert(desc_id, descriptor.clone());
|
||||
self.replenish_lookahead(&keychain, self.lookahead);
|
||||
|
||||
changeset
|
||||
.keychains_added
|
||||
.insert(keychain.clone(), descriptor);
|
||||
changeset
|
||||
}
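For illustration, a minimal sketch of how a caller might consume the `ChangeSet` returned by the new `insert_descriptor` (the `Keychain` enum and the two-step flow are made up for this example; the descriptor string is one of the test descriptors used later in this diff, and the paths assume `bdk_chain` with the `miniscript` feature enabled):

```rust
use bdk_chain::bitcoin::secp256k1::Secp256k1;
use bdk_chain::keychain::KeychainTxOutIndex;
use bdk_chain::miniscript::{Descriptor, DescriptorPublicKey};

// Hypothetical keychain type; any `Clone + Ord + Debug` type works.
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
enum Keychain {
    External,
}

fn main() {
    let secp = Secp256k1::signing_only();
    let (desc, _) = Descriptor::<DescriptorPublicKey>::parse_descriptor(
        &secp,
        "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/0/*)",
    )
    .expect("descriptor must parse");

    let mut index = KeychainTxOutIndex::<Keychain>::new(10);

    // The first insertion reports the new assignment in `keychains_added`,
    // so it can be persisted alongside `last_revealed`.
    let changeset = index.insert_descriptor(Keychain::External, desc.clone());
    assert!(changeset.keychains_added.contains_key(&Keychain::External));

    // Re-inserting the same descriptor under the same keychain is a no-op.
    let changeset = index.insert_descriptor(Keychain::External, desc);
    assert!(changeset.keychains_added.is_empty());
}
```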
|
||||
|
||||
/// Gets the descriptor associated with the keychain. Returns `None` if the keychain doesn't
|
||||
/// have a descriptor associated with it.
|
||||
pub fn get_descriptor(&self, keychain: &K) -> Option<&Descriptor<DescriptorPublicKey>> {
|
||||
self.keychains_to_descriptors.get(keychain).map(|(_, d)| d)
|
||||
}
|
||||
|
||||
/// Get the lookahead setting.
|
||||
@ -333,8 +537,7 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
|
||||
///
|
||||
/// This does not change the global `lookahead` setting.
|
||||
pub fn lookahead_to_target(&mut self, keychain: &K, target_index: u32) {
|
||||
let (next_index, _) = self.next_index(keychain);
|
||||
|
||||
if let Some((next_index, _)) = self.next_index(keychain) {
|
||||
let temp_lookahead = (target_index + 1)
|
||||
.checked_sub(next_index)
|
||||
.filter(|&index| index > 0);
|
||||
@ -343,53 +546,51 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
|
||||
self.replenish_lookahead(keychain, temp_lookahead);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn replenish_lookahead(&mut self, keychain: &K, lookahead: u32) {
|
||||
let descriptor = self.keychains.get(keychain).expect("keychain must exist");
|
||||
let next_store_index = self.next_store_index(keychain);
|
||||
let next_reveal_index = self.last_revealed.get(keychain).map_or(0, |v| *v + 1);
|
||||
let descriptor_opt = self.keychains_to_descriptors.get(keychain).cloned();
|
||||
if let Some((descriptor_id, descriptor)) = descriptor_opt {
|
||||
let next_store_index = self.next_store_index(descriptor_id);
|
||||
let next_reveal_index = self.last_revealed.get(&descriptor_id).map_or(0, |v| *v + 1);
|
||||
|
||||
for (new_index, new_spk) in
|
||||
SpkIterator::new_with_range(descriptor, next_store_index..next_reveal_index + lookahead)
|
||||
{
|
||||
let _inserted = self
|
||||
.inner
|
||||
.insert_spk((keychain.clone(), new_index), new_spk);
|
||||
for (new_index, new_spk) in SpkIterator::new_with_range(
|
||||
descriptor,
|
||||
next_store_index..next_reveal_index + lookahead,
|
||||
) {
|
||||
let _inserted = self.inner.insert_spk((descriptor_id, new_index), new_spk);
|
||||
debug_assert!(_inserted, "replenish lookahead: must not have existing spk: keychain={:?}, lookahead={}, next_store_index={}, next_reveal_index={}", keychain, lookahead, next_store_index, next_reveal_index);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn next_store_index(&self, keychain: &K) -> u32 {
|
||||
fn next_store_index(&self, descriptor_id: DescriptorId) -> u32 {
|
||||
self.inner()
|
||||
.all_spks()
|
||||
// This range is filtering out the spks with a keychain different than
|
||||
// `keychain`. We don't use filter here as range is more optimized.
|
||||
.range((keychain.clone(), u32::MIN)..(keychain.clone(), u32::MAX))
|
||||
// This range keeps only the spks whose descriptor_id equals `descriptor_id`.
// We use `range` instead of `filter` because it is more efficient.
|
||||
.range((descriptor_id, u32::MIN)..(descriptor_id, u32::MAX))
|
||||
.last()
|
||||
.map_or(0, |((_, index), _)| *index + 1)
|
||||
}
|
||||
|
||||
/// Get an unbounded spk iterator over a given `keychain`.
|
||||
///
|
||||
/// # Panics
|
||||
///
|
||||
/// This will panic if the given `keychain`'s descriptor does not exist.
|
||||
pub fn unbounded_spk_iter(&self, keychain: &K) -> SpkIterator<Descriptor<DescriptorPublicKey>> {
|
||||
SpkIterator::new(
|
||||
self.keychains
|
||||
.get(keychain)
|
||||
.expect("keychain does not exist")
|
||||
.clone(),
|
||||
)
|
||||
/// Get an unbounded spk iterator over a given `keychain`. Returns `None` if the provided
|
||||
/// keychain doesn't exist.
|
||||
pub fn unbounded_spk_iter(
|
||||
&self,
|
||||
keychain: &K,
|
||||
) -> Option<SpkIterator<Descriptor<DescriptorPublicKey>>> {
|
||||
let descriptor = self.keychains_to_descriptors.get(keychain)?.1.clone();
|
||||
Some(SpkIterator::new(descriptor))
|
||||
}
|
||||
|
||||
/// Get unbounded spk iterators for all keychains.
|
||||
pub fn all_unbounded_spk_iters(
|
||||
&self,
|
||||
) -> BTreeMap<K, SpkIterator<Descriptor<DescriptorPublicKey>>> {
|
||||
self.keychains
|
||||
self.keychains_to_descriptors
|
||||
.iter()
|
||||
.map(|(k, descriptor)| (k.clone(), SpkIterator::new(descriptor.clone())))
|
||||
.map(|(k, (_, descriptor))| (k.clone(), SpkIterator::new(descriptor.clone())))
|
||||
.collect()
|
||||
}
|
||||
|
||||
@ -398,17 +599,26 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
|
||||
&self,
|
||||
range: impl RangeBounds<K>,
|
||||
) -> impl DoubleEndedIterator<Item = (&K, u32, &Script)> + Clone {
|
||||
self.keychains.range(range).flat_map(|(keychain, _)| {
|
||||
let start = Bound::Included((keychain.clone(), u32::MIN));
|
||||
let end = match self.last_revealed.get(keychain) {
|
||||
Some(last_revealed) => Bound::Included((keychain.clone(), *last_revealed)),
|
||||
None => Bound::Excluded((keychain.clone(), u32::MIN)),
|
||||
self.keychains_to_descriptors
|
||||
.range(range)
|
||||
.flat_map(|(_, (descriptor_id, _))| {
|
||||
let start = Bound::Included((*descriptor_id, u32::MIN));
|
||||
let end = match self.last_revealed.get(descriptor_id) {
|
||||
Some(last_revealed) => Bound::Included((*descriptor_id, *last_revealed)),
|
||||
None => Bound::Excluded((*descriptor_id, u32::MIN)),
|
||||
};
|
||||
|
||||
self.inner
|
||||
.all_spks()
|
||||
.range((start, end))
|
||||
.map(|((keychain, i), spk)| (keychain, *i, spk.as_script()))
|
||||
.map(|((descriptor_id, i), spk)| {
|
||||
(
|
||||
self.keychain_of_desc_id(descriptor_id)
|
||||
.expect("must have keychain"),
|
||||
*i,
|
||||
spk.as_script(),
|
||||
)
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
@ -423,20 +633,29 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
|
||||
|
||||
/// Iterate over revealed, but unused, spks of all keychains.
|
||||
pub fn unused_spks(&self) -> impl DoubleEndedIterator<Item = (K, u32, &Script)> + Clone {
|
||||
self.keychains.keys().flat_map(|keychain| {
|
||||
self.keychains_to_descriptors.keys().flat_map(|keychain| {
|
||||
self.unused_keychain_spks(keychain)
|
||||
.map(|(i, spk)| (keychain.clone(), i, spk))
|
||||
})
|
||||
}
|
||||
|
||||
/// Iterate over revealed, but unused, spks of the given `keychain`.
|
||||
/// Returns an empty iterator if the provided keychain doesn't exist.
|
||||
pub fn unused_keychain_spks(
|
||||
&self,
|
||||
keychain: &K,
|
||||
) -> impl DoubleEndedIterator<Item = (u32, &Script)> + Clone {
|
||||
let next_i = self.last_revealed.get(keychain).map_or(0, |&i| i + 1);
|
||||
let desc_id = self
|
||||
.keychains_to_descriptors
|
||||
.get(keychain)
|
||||
.map(|(desc_id, _)| *desc_id)
|
||||
// We use a dummy desc id if we can't find the real one in our map. In this way,
|
||||
// if this method was to be called with a non-existent keychain, we would return an
|
||||
// empty iterator
|
||||
.unwrap_or_else(|| DescriptorId::from_byte_array([0; 32]));
|
||||
let next_i = self.last_revealed.get(&desc_id).map_or(0, |&i| i + 1);
|
||||
self.inner
|
||||
.unused_spks((keychain.clone(), u32::MIN)..(keychain.clone(), next_i))
|
||||
.unused_spks((desc_id, u32::MIN)..(desc_id, next_i))
|
||||
.map(|((_, i), spk)| (*i, spk))
|
||||
}
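The dummy descriptor id described in the comment above is what turns a lookup for an unknown keychain into an empty iterator rather than a panic. A tiny sketch of the observable behaviour, using the unit keychain `()` purely as an example:

```rust
use bdk_chain::keychain::KeychainTxOutIndex;

fn main() {
    // No descriptor was ever inserted for `()`, so the iterator is simply empty.
    let index = KeychainTxOutIndex::<()>::new(0);
    assert_eq!(index.unused_keychain_spks(&()).count(), 0);
}
```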
|
||||
|
||||
@ -451,17 +670,15 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
|
||||
///
|
||||
/// Not checking the second field of the tuple may result in address reuse.
|
||||
///
|
||||
/// # Panics
|
||||
///
|
||||
/// Panics if the `keychain` does not exist.
|
||||
pub fn next_index(&self, keychain: &K) -> (u32, bool) {
|
||||
let descriptor = self.keychains.get(keychain).expect("keychain must exist");
|
||||
let last_index = self.last_revealed.get(keychain).cloned();
|
||||
/// Returns None if the provided `keychain` doesn't exist.
|
||||
pub fn next_index(&self, keychain: &K) -> Option<(u32, bool)> {
|
||||
let (descriptor_id, descriptor) = self.keychains_to_descriptors.get(keychain)?;
|
||||
let last_index = self.last_revealed.get(descriptor_id).cloned();
|
||||
|
||||
// we can only get the next index if the wildcard exists.
|
||||
let has_wildcard = descriptor.has_wildcard();
|
||||
|
||||
match last_index {
|
||||
Some(match last_index {
|
||||
// if there is no index, next_index is always 0.
|
||||
None => (0, true),
|
||||
// descriptors without wildcards can only have one index.
|
||||
@ -473,19 +690,27 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
|
||||
Some(index) if index == BIP32_MAX_INDEX => (index, false),
|
||||
// get the next derivation index.
|
||||
Some(index) => (index + 1, true),
|
||||
}
|
||||
})
|
||||
}
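Since `next_index` now returns an `Option`, callers can distinguish a missing keychain from a fresh one instead of hitting a panic. A rough sketch (the `&'static str` keychain and the `"external"`/`"missing"` names are illustrative; the descriptor string is one of the test descriptors from this diff):

```rust
use bdk_chain::bitcoin::secp256k1::Secp256k1;
use bdk_chain::keychain::KeychainTxOutIndex;
use bdk_chain::miniscript::{Descriptor, DescriptorPublicKey};

fn main() {
    let secp = Secp256k1::signing_only();
    let (desc, _) = Descriptor::<DescriptorPublicKey>::parse_descriptor(
        &secp,
        "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/0/*)",
    )
    .expect("descriptor must parse");

    let mut index = KeychainTxOutIndex::<&'static str>::new(0);

    // Unknown keychain: no panic, just `None`.
    assert_eq!(index.next_index(&"missing"), None);

    // Known keychain with nothing revealed yet: index 0, and it is "new".
    let _ = index.insert_descriptor("external", desc);
    assert_eq!(index.next_index(&"external"), Some((0, true)));
}
```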
|
||||
|
||||
/// Get the last derivation index that is revealed for each keychain.
|
||||
///
|
||||
/// Keychains with no revealed indices will not be included in the returned [`BTreeMap`].
|
||||
pub fn last_revealed_indices(&self) -> &BTreeMap<K, u32> {
|
||||
&self.last_revealed
|
||||
pub fn last_revealed_indices(&self) -> BTreeMap<K, u32> {
|
||||
self.last_revealed
|
||||
.iter()
|
||||
.filter_map(|(desc_id, index)| {
|
||||
let keychain = self.keychain_of_desc_id(desc_id)?;
|
||||
Some((keychain.clone(), *index))
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Get the last derivation index revealed for `keychain`.
|
||||
/// Get the last derivation index revealed for `keychain`. Returns None if the keychain doesn't
|
||||
/// exist, or if the keychain doesn't have any revealed scripts.
|
||||
pub fn last_revealed_index(&self, keychain: &K) -> Option<u32> {
|
||||
self.last_revealed.get(keychain).cloned()
|
||||
let descriptor_id = self.keychains_to_descriptors.get(keychain)?.0;
|
||||
self.last_revealed.get(&descriptor_id).cloned()
|
||||
}
|
||||
|
||||
/// Convenience method to call [`Self::reveal_to_target`] on multiple keychains.
|
||||
@ -500,16 +725,77 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
|
||||
let mut spks = BTreeMap::new();
|
||||
|
||||
for (keychain, &index) in keychains {
|
||||
let (new_spks, new_changeset) = self.reveal_to_target(keychain, index);
|
||||
if let Some((new_spks, new_changeset)) = self.reveal_to_target(keychain, index) {
|
||||
if !new_changeset.is_empty() {
|
||||
spks.insert(keychain.clone(), new_spks);
|
||||
changeset.append(new_changeset.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
(spks, changeset)
|
||||
}
|
||||
|
||||
/// Convenience method to call `reveal_to_target` with a descriptor_id instead of a keychain.
|
||||
/// This is useful for revealing spks of descriptors for which we don't have a keychain
|
||||
/// tracked.
|
||||
/// Refer to the `reveal_to_target` documentation for more.
|
||||
///
|
||||
/// Returns None if the provided `descriptor_id` doesn't correspond to a tracked descriptor.
|
||||
fn reveal_to_target_with_id(
|
||||
&mut self,
|
||||
descriptor_id: DescriptorId,
|
||||
target_index: u32,
|
||||
) -> Option<(
|
||||
SpkIterator<Descriptor<DescriptorPublicKey>>,
|
||||
super::ChangeSet<K>,
|
||||
)> {
|
||||
let descriptor = self
|
||||
.descriptor_ids_to_descriptors
|
||||
.get(&descriptor_id)?
|
||||
.clone();
|
||||
let has_wildcard = descriptor.has_wildcard();
|
||||
|
||||
let target_index = if has_wildcard { target_index } else { 0 };
|
||||
let next_reveal_index = self
|
||||
.last_revealed
|
||||
.get(&descriptor_id)
|
||||
.map_or(0, |index| *index + 1);
|
||||
|
||||
debug_assert!(next_reveal_index + self.lookahead >= self.next_store_index(descriptor_id));
|
||||
|
||||
// If the target_index is already revealed, we are done
|
||||
if next_reveal_index > target_index {
|
||||
return Some((
|
||||
SpkIterator::new_with_range(descriptor, next_reveal_index..next_reveal_index),
|
||||
super::ChangeSet::default(),
|
||||
));
|
||||
}
|
||||
|
||||
// We range over the indexes that are not stored and insert their spks in the index.
|
||||
// Indexes from next_reveal_index to next_reveal_index + lookahead are already stored (due
|
||||
// to lookahead), so we only range from next_reveal_index + lookahead to target + lookahead
|
||||
let range = next_reveal_index + self.lookahead..=target_index + self.lookahead;
|
||||
for (new_index, new_spk) in SpkIterator::new_with_range(descriptor.clone(), range) {
|
||||
let _inserted = self.inner.insert_spk((descriptor_id, new_index), new_spk);
|
||||
debug_assert!(_inserted, "must not have existing spk");
|
||||
debug_assert!(
|
||||
has_wildcard || new_index == 0,
|
||||
"non-wildcard descriptors must not iterate past index 0"
|
||||
);
|
||||
}
|
||||
|
||||
let _old_index = self.last_revealed.insert(descriptor_id, target_index);
|
||||
debug_assert!(_old_index < Some(target_index));
|
||||
Some((
|
||||
SpkIterator::new_with_range(descriptor, next_reveal_index..target_index + 1),
|
||||
super::ChangeSet {
|
||||
keychains_added: BTreeMap::new(),
|
||||
last_revealed: core::iter::once((descriptor_id, target_index)).collect(),
|
||||
},
|
||||
))
|
||||
}
|
||||
|
||||
/// Reveals script pubkeys of the `keychain`'s descriptor **up to and including** the
|
||||
/// `target_index`.
|
||||
///
|
||||
@ -521,84 +807,46 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
|
||||
/// [`super::ChangeSet`], which reports updates to the latest revealed index. If no new script
|
||||
/// pubkeys are revealed, then both of these will be empty.
|
||||
///
|
||||
/// # Panics
|
||||
///
|
||||
/// Panics if `keychain` does not exist.
|
||||
/// Returns None if the provided `keychain` doesn't exist.
|
||||
pub fn reveal_to_target(
|
||||
&mut self,
|
||||
keychain: &K,
|
||||
target_index: u32,
|
||||
) -> (
|
||||
) -> Option<(
|
||||
SpkIterator<Descriptor<DescriptorPublicKey>>,
|
||||
super::ChangeSet<K>,
|
||||
) {
|
||||
let descriptor = self.keychains.get(keychain).expect("keychain must exist");
|
||||
let has_wildcard = descriptor.has_wildcard();
|
||||
|
||||
let target_index = if has_wildcard { target_index } else { 0 };
|
||||
let next_reveal_index = self
|
||||
.last_revealed
|
||||
.get(keychain)
|
||||
.map_or(0, |index| *index + 1);
|
||||
|
||||
debug_assert!(next_reveal_index + self.lookahead >= self.next_store_index(keychain));
|
||||
|
||||
// If the target_index is already revealed, we are done
|
||||
if next_reveal_index > target_index {
|
||||
return (
|
||||
SpkIterator::new_with_range(
|
||||
descriptor.clone(),
|
||||
next_reveal_index..next_reveal_index,
|
||||
),
|
||||
super::ChangeSet::default(),
|
||||
);
|
||||
}
|
||||
|
||||
// We range over the indexes that are not stored and insert their spks in the index.
|
||||
// Indexes from next_reveal_index to next_reveal_index + lookahead are already stored (due
|
||||
// to lookahead), so we only range from next_reveal_index + lookahead to target + lookahead
|
||||
let range = next_reveal_index + self.lookahead..=target_index + self.lookahead;
|
||||
for (new_index, new_spk) in SpkIterator::new_with_range(descriptor, range) {
|
||||
let _inserted = self
|
||||
.inner
|
||||
.insert_spk((keychain.clone(), new_index), new_spk);
|
||||
debug_assert!(_inserted, "must not have existing spk");
|
||||
debug_assert!(
|
||||
has_wildcard || new_index == 0,
|
||||
"non-wildcard descriptors must not iterate past index 0"
|
||||
);
|
||||
}
|
||||
|
||||
let _old_index = self.last_revealed.insert(keychain.clone(), target_index);
|
||||
debug_assert!(_old_index < Some(target_index));
|
||||
(
|
||||
SpkIterator::new_with_range(descriptor.clone(), next_reveal_index..target_index + 1),
|
||||
super::ChangeSet(core::iter::once((keychain.clone(), target_index)).collect()),
|
||||
)
|
||||
)> {
|
||||
let descriptor_id = self.keychains_to_descriptors.get(keychain)?.0;
|
||||
self.reveal_to_target_with_id(descriptor_id, target_index)
|
||||
}
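As a usage sketch of the reworked `reveal_to_target`: the returned iterator yields exactly the newly revealed script pubkeys, while the changeset records the new last-revealed index under the descriptor's `DescriptorId` (the keychain names and the target index are illustrative; the descriptor string is a test descriptor from this diff):

```rust
use bdk_chain::bitcoin::secp256k1::Secp256k1;
use bdk_chain::keychain::KeychainTxOutIndex;
use bdk_chain::miniscript::{Descriptor, DescriptorPublicKey};
use bdk_chain::DescriptorExt;

fn main() {
    let secp = Secp256k1::signing_only();
    let (desc, _) = Descriptor::<DescriptorPublicKey>::parse_descriptor(
        &secp,
        "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/0/*)",
    )
    .expect("descriptor must parse");

    let mut index = KeychainTxOutIndex::<&'static str>::new(0);
    let _ = index.insert_descriptor("external", desc.clone());

    // Reveal indices 0..=4: five spks come back, and the changeset is keyed by
    // the descriptor id rather than by the keychain.
    let (revealed, changeset) = index
        .reveal_to_target(&"external", 4)
        .expect("keychain exists");
    assert_eq!(revealed.count(), 5);
    assert_eq!(changeset.last_revealed.get(&desc.descriptor_id()), Some(&4));

    // A keychain that was never inserted yields `None` instead of panicking.
    assert!(index.reveal_to_target(&"missing", 4).is_none());
}
```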
|
||||
|
||||
/// Attempts to reveal the next script pubkey for `keychain`.
|
||||
///
|
||||
/// Returns the derivation index of the revealed script pubkey, the revealed script pubkey and a
|
||||
/// [`super::ChangeSet`] which represents changes in the last revealed index (if any).
|
||||
/// Returns None if the provided keychain doesn't exist.
|
||||
///
|
||||
/// When a new script cannot be revealed, we return the last revealed script and an empty
|
||||
/// [`super::ChangeSet`]. There are two scenarios when a new script pubkey cannot be derived:
|
||||
///
|
||||
/// 1. The descriptor has no wildcard and already has one script revealed.
|
||||
/// 2. The descriptor has already revealed scripts up to the numeric bound.
|
||||
///
|
||||
/// # Panics
|
||||
///
|
||||
/// Panics if the `keychain` does not exist.
|
||||
pub fn reveal_next_spk(&mut self, keychain: &K) -> ((u32, &Script), super::ChangeSet<K>) {
|
||||
let (next_index, _) = self.next_index(keychain);
|
||||
let changeset = self.reveal_to_target(keychain, next_index).1;
|
||||
/// 3. There is no descriptor associated with the given keychain.
|
||||
pub fn reveal_next_spk(
|
||||
&mut self,
|
||||
keychain: &K,
|
||||
) -> Option<((u32, &Script), super::ChangeSet<K>)> {
|
||||
let descriptor_id = self.keychains_to_descriptors.get(keychain)?.0;
|
||||
let (next_index, _) = self.next_index(keychain).expect("We know keychain exists");
|
||||
let changeset = self
|
||||
.reveal_to_target(keychain, next_index)
|
||||
.expect("We know keychain exists")
|
||||
.1;
|
||||
let script = self
|
||||
.inner
|
||||
.spk_at_index(&(keychain.clone(), next_index))
|
||||
.spk_at_index(&(descriptor_id, next_index))
|
||||
.expect("script must already be stored");
|
||||
((next_index, script), changeset)
|
||||
Some(((next_index, script), changeset))
|
||||
}
|
||||
|
||||
/// Gets the next unused script pubkey in the keychain. I.e., the script pubkey with the lowest
|
||||
@ -610,21 +858,22 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
|
||||
/// has used all scripts up to the derivation bounds, then the last derived script pubkey will be
|
||||
/// returned.
|
||||
///
|
||||
/// # Panics
|
||||
///
|
||||
/// Panics if `keychain` has never been added to the index
|
||||
pub fn next_unused_spk(&mut self, keychain: &K) -> ((u32, &Script), super::ChangeSet<K>) {
|
||||
/// Returns None if the provided keychain doesn't exist.
|
||||
pub fn next_unused_spk(
|
||||
&mut self,
|
||||
keychain: &K,
|
||||
) -> Option<((u32, &Script), super::ChangeSet<K>)> {
|
||||
let need_new = self.unused_keychain_spks(keychain).next().is_none();
|
||||
// this rather strange branch is needed because of some lifetime issues
|
||||
if need_new {
|
||||
self.reveal_next_spk(keychain)
|
||||
} else {
|
||||
(
|
||||
Some((
|
||||
self.unused_keychain_spks(keychain)
|
||||
.next()
|
||||
.expect("we already know next exists"),
|
||||
super::ChangeSet::default(),
|
||||
)
|
||||
))
|
||||
}
|
||||
}
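A hedged sketch of the intended call pattern for the new `next_unused_spk`: hand out the lowest revealed-but-unused script pubkey (revealing a fresh one when necessary) and persist the returned changeset before exposing it. The `"external"` keychain, the function name, and the persistence comment are all illustrative:

```rust
use bdk_chain::bitcoin::ScriptBuf;
use bdk_chain::keychain::KeychainTxOutIndex;

// Returns `None` when the keychain has no descriptor associated with it.
fn handout_spk(index: &mut KeychainTxOutIndex<&'static str>) -> Option<ScriptBuf> {
    let ((next_index, spk), changeset) = index.next_unused_spk(&"external")?;
    let spk = spk.to_owned();
    // Persist `changeset` here (e.g. write it to the wallet database) before
    // giving the script out, so a restart cannot hand out the same index twice.
    let _ = (next_index, changeset);
    Some(spk)
}
```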
|
||||
|
||||
@ -643,21 +892,35 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
|
||||
&'a self,
|
||||
range: impl RangeBounds<K> + 'a,
|
||||
) -> impl DoubleEndedIterator<Item = (&'a K, u32, OutPoint)> + 'a {
|
||||
let bounds = Self::map_to_inner_bounds(range);
|
||||
let bounds = self.map_to_inner_bounds(range);
|
||||
self.inner
|
||||
.outputs_in_range(bounds)
|
||||
.map(move |((keychain, i), op)| (keychain, *i, op))
|
||||
.map(move |((desc_id, i), op)| {
|
||||
let keychain = self
|
||||
.keychain_of_desc_id(desc_id)
|
||||
.expect("keychain must exist");
|
||||
(keychain, *i, op)
|
||||
})
|
||||
}
|
||||
|
||||
fn map_to_inner_bounds(bound: impl RangeBounds<K>) -> impl RangeBounds<(K, u32)> {
|
||||
fn map_to_inner_bounds(
|
||||
&self,
|
||||
bound: impl RangeBounds<K>,
|
||||
) -> impl RangeBounds<(DescriptorId, u32)> {
|
||||
let get_desc_id = |keychain| {
|
||||
self.keychains_to_descriptors
|
||||
.get(keychain)
|
||||
.map(|(desc_id, _)| *desc_id)
|
||||
.unwrap_or_else(|| DescriptorId::from_byte_array([0; 32]))
|
||||
};
|
||||
let start = match bound.start_bound() {
|
||||
Bound::Included(keychain) => Bound::Included((keychain.clone(), u32::MIN)),
|
||||
Bound::Excluded(keychain) => Bound::Excluded((keychain.clone(), u32::MAX)),
|
||||
Bound::Included(keychain) => Bound::Included((get_desc_id(keychain), u32::MIN)),
|
||||
Bound::Excluded(keychain) => Bound::Excluded((get_desc_id(keychain), u32::MAX)),
|
||||
Bound::Unbounded => Bound::Unbounded,
|
||||
};
|
||||
let end = match bound.end_bound() {
|
||||
Bound::Included(keychain) => Bound::Included((keychain.clone(), u32::MAX)),
|
||||
Bound::Excluded(keychain) => Bound::Excluded((keychain.clone(), u32::MIN)),
|
||||
Bound::Included(keychain) => Bound::Included((get_desc_id(keychain), u32::MAX)),
|
||||
Bound::Excluded(keychain) => Bound::Excluded((get_desc_id(keychain), u32::MIN)),
|
||||
Bound::Unbounded => Bound::Unbounded,
|
||||
};
|
||||
|
||||
@ -673,7 +936,7 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
|
||||
/// Returns the highest derivation index of each keychain for which [`KeychainTxOutIndex`] has
/// found a [`TxOut`] with its script pubkey.
|
||||
pub fn last_used_indices(&self) -> BTreeMap<K, u32> {
|
||||
self.keychains
|
||||
self.keychains_to_descriptors
|
||||
.iter()
|
||||
.filter_map(|(keychain, _)| {
|
||||
self.last_used_index(keychain)
|
||||
@ -682,9 +945,27 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Applies the derivation changeset to the [`KeychainTxOutIndex`], extending the number of
|
||||
/// derived scripts per keychain, as specified in the `changeset`.
|
||||
/// Applies the derivation changeset to the [`KeychainTxOutIndex`], as specified in the
|
||||
/// [`ChangeSet::append`] documentation:
|
||||
/// - Extends the number of derived scripts per keychain
|
||||
/// - Adds new descriptors introduced
|
||||
/// - If a descriptor is introduced for a keychain that already had a descriptor, overwrites
|
||||
/// the old descriptor
|
||||
pub fn apply_changeset(&mut self, changeset: super::ChangeSet<K>) {
|
||||
let _ = self.reveal_to_target_multi(&changeset.0);
|
||||
let ChangeSet {
|
||||
keychains_added,
|
||||
last_revealed,
|
||||
} = changeset;
|
||||
for (keychain, descriptor) in keychains_added {
|
||||
let _ = self.insert_descriptor(keychain, descriptor);
|
||||
}
|
||||
let last_revealed = last_revealed
|
||||
.into_iter()
|
||||
.filter_map(|(desc_id, index)| {
|
||||
let keychain = self.keychain_of_desc_id(&desc_id)?;
|
||||
Some((keychain.clone(), index))
|
||||
})
|
||||
.collect();
|
||||
let _ = self.reveal_to_target_multi(&last_revealed);
|
||||
}
|
||||
}
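To make the changeset-driven design concrete, a small round-trip sketch: aggregate the changesets an index emits and replay them onto a fresh index with `apply_changeset`, which is essentially what the one-by-one-vs-aggregate test later in this diff verifies. The keychain name and reveal target are illustrative; the descriptor string is a test descriptor from this diff:

```rust
use bdk_chain::bitcoin::secp256k1::Secp256k1;
use bdk_chain::keychain::{ChangeSet, KeychainTxOutIndex};
use bdk_chain::miniscript::{Descriptor, DescriptorPublicKey};
use bdk_chain::Append;

fn main() {
    let secp = Secp256k1::signing_only();
    let (desc, _) = Descriptor::<DescriptorPublicKey>::parse_descriptor(
        &secp,
        "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/0/*)",
    )
    .expect("descriptor must parse");

    // Build an index and accumulate everything it reports, as a persistence
    // backend would.
    let mut original = KeychainTxOutIndex::<&'static str>::new(0);
    let mut changeset: ChangeSet<&'static str> = original.insert_descriptor("external", desc);
    let (_, reveal_changeset) = original
        .reveal_to_target(&"external", 5)
        .expect("keychain exists");
    changeset.append(reveal_changeset);

    // Replaying the aggregate changeset on a fresh index restores both the
    // descriptor assignment and the revealed indices.
    let mut restored = KeychainTxOutIndex::<&'static str>::new(0);
    restored.apply_changeset(changeset);
    assert_eq!(
        restored.last_revealed_indices(),
        original.last_revealed_indices()
    );
}
```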
|
||||
|
@ -44,7 +44,7 @@ pub use miniscript;
|
||||
#[cfg(feature = "miniscript")]
|
||||
mod descriptor_ext;
|
||||
#[cfg(feature = "miniscript")]
|
||||
pub use descriptor_ext::DescriptorExt;
|
||||
pub use descriptor_ext::{DescriptorExt, DescriptorId};
|
||||
#[cfg(feature = "miniscript")]
|
||||
mod spk_iter;
|
||||
#[cfg(feature = "miniscript")]
|
||||
|
@ -158,8 +158,8 @@ mod test {
|
||||
let (external_descriptor,_) = Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/0/*)").unwrap();
|
||||
let (internal_descriptor,_) = Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/1/*)").unwrap();
|
||||
|
||||
txout_index.add_keychain(TestKeychain::External, external_descriptor.clone());
|
||||
txout_index.add_keychain(TestKeychain::Internal, internal_descriptor.clone());
|
||||
let _ = txout_index.insert_descriptor(TestKeychain::External, external_descriptor.clone());
|
||||
let _ = txout_index.insert_descriptor(TestKeychain::Internal, internal_descriptor.clone());
|
||||
|
||||
(txout_index, external_descriptor, internal_descriptor)
|
||||
}
|
||||
@ -258,18 +258,10 @@ mod test {
|
||||
None
|
||||
);
|
||||
}
|
||||
|
||||
// The following dummy traits were created to test if SpkIterator is working properly.
|
||||
#[allow(unused)]
|
||||
trait TestSendStatic: Send + 'static {
|
||||
fn test(&self) -> u32 {
|
||||
20
|
||||
}
|
||||
}
|
||||
|
||||
impl TestSendStatic for SpkIterator<Descriptor<DescriptorPublicKey>> {
|
||||
fn test(&self) -> u32 {
|
||||
20
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn spk_iterator_is_send_and_static() {
|
||||
fn is_send_and_static<A: Send + 'static>() {}
|
||||
is_send_and_static::<SpkIterator<Descriptor<DescriptorPublicKey>>>()
|
||||
}
|
||||
|
@ -1,3 +1,5 @@
|
||||
#![cfg(feature = "miniscript")]
|
||||
|
||||
mod tx_template;
|
||||
#[allow(unused_imports)]
|
||||
pub use tx_template::*;
|
||||
@ -73,3 +75,15 @@ pub fn new_tx(lt: u32) -> bitcoin::Transaction {
|
||||
output: vec![],
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(unused)]
|
||||
pub const DESCRIPTORS: [&str; 7] = [
|
||||
"tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/0/*)",
|
||||
"tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/1/*)",
|
||||
"wpkh([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/1/0/*)",
|
||||
"tr(tprv8ZgxMBicQKsPd3krDUsBAmtnRsK3rb8u5yi1zhQgMhF1tR8MW7xfE4rnrbbsrbPR52e7rKapu6ztw1jXveJSCGHEriUGZV7mCe88duLp5pj/86'/1'/0'/0/*)",
|
||||
"tr(tprv8ZgxMBicQKsPd3krDUsBAmtnRsK3rb8u5yi1zhQgMhF1tR8MW7xfE4rnrbbsrbPR52e7rKapu6ztw1jXveJSCGHEriUGZV7mCe88duLp5pj/86'/1'/0'/1/*)",
|
||||
"wpkh(xprv9s21ZrQH143K4EXURwMHuLS469fFzZyXk7UUpdKfQwhoHcAiYTakpe8pMU2RiEdvrU9McyuE7YDoKcXkoAwEGoK53WBDnKKv2zZbb9BzttX/1/0/*)",
|
||||
// non-wildcard
|
||||
"wpkh([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/1/0)",
|
||||
];
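Since the index (and `ChangeSet::last_revealed`) is now keyed by `DescriptorId`, it may help to see how an id is obtained from one of these test descriptors. A minimal sketch using the `DescriptorExt` trait added in this PR; it assumes `DescriptorId` compares by value, as its use as a map key suggests:

```rust
use bdk_chain::bitcoin::secp256k1::Secp256k1;
use bdk_chain::miniscript::{Descriptor, DescriptorPublicKey};
use bdk_chain::{DescriptorExt, DescriptorId};

fn main() {
    let secp = Secp256k1::signing_only();
    let s = "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/0/*)";

    // The id is derived from the descriptor itself, so parsing the same string
    // twice yields the same `DescriptorId`.
    let (desc, _) = Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, s).unwrap();
    let (desc_again, _) = Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, s).unwrap();
    let id: DescriptorId = desc.descriptor_id();
    assert_eq!(id, desc_again.descriptor_id());
}
```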
|
||||
|
@ -1,3 +1,5 @@
|
||||
#![cfg(feature = "miniscript")]
|
||||
|
||||
use rand::distributions::{Alphanumeric, DistString};
|
||||
use std::collections::HashMap;
|
||||
|
||||
@ -52,7 +54,8 @@ impl TxOutTemplate {
|
||||
pub fn init_graph<'a, A: Anchor + Clone + 'a>(
|
||||
tx_templates: impl IntoIterator<Item = &'a TxTemplate<'a, A>>,
|
||||
) -> (TxGraph<A>, SpkTxOutIndex<u32>, HashMap<&'a str, Txid>) {
|
||||
let (descriptor, _) = Descriptor::parse_descriptor(&Secp256k1::signing_only(), "tr(tprv8ZgxMBicQKsPd3krDUsBAmtnRsK3rb8u5yi1zhQgMhF1tR8MW7xfE4rnrbbsrbPR52e7rKapu6ztw1jXveJSCGHEriUGZV7mCe88duLp5pj/86'/1'/0'/0/*)").unwrap();
|
||||
let (descriptor, _) =
|
||||
Descriptor::parse_descriptor(&Secp256k1::signing_only(), super::DESCRIPTORS[2]).unwrap();
|
||||
let mut graph = TxGraph::<A>::default();
|
||||
let mut spk_index = SpkTxOutIndex::default();
|
||||
(0..10).for_each(|index| {
|
||||
|
@ -1,13 +1,16 @@
|
||||
#![cfg(feature = "miniscript")]
|
||||
|
||||
#[macro_use]
|
||||
mod common;
|
||||
|
||||
use std::{collections::BTreeSet, sync::Arc};
|
||||
|
||||
use crate::common::DESCRIPTORS;
|
||||
use bdk_chain::{
|
||||
indexed_tx_graph::{self, IndexedTxGraph},
|
||||
keychain::{self, Balance, KeychainTxOutIndex},
|
||||
local_chain::LocalChain,
|
||||
tx_graph, ChainPosition, ConfirmationHeightAnchor,
|
||||
tx_graph, ChainPosition, ConfirmationHeightAnchor, DescriptorExt,
|
||||
};
|
||||
use bitcoin::{
|
||||
secp256k1::Secp256k1, Amount, OutPoint, Script, ScriptBuf, Transaction, TxIn, TxOut,
|
||||
@ -23,8 +26,7 @@ use miniscript::Descriptor;
|
||||
/// agnostic.
|
||||
#[test]
|
||||
fn insert_relevant_txs() {
|
||||
const DESCRIPTOR: &str = "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/0/*)";
|
||||
let (descriptor, _) = Descriptor::parse_descriptor(&Secp256k1::signing_only(), DESCRIPTOR)
|
||||
let (descriptor, _) = Descriptor::parse_descriptor(&Secp256k1::signing_only(), DESCRIPTORS[0])
|
||||
.expect("must be valid");
|
||||
let spk_0 = descriptor.at_derivation_index(0).unwrap().script_pubkey();
|
||||
let spk_1 = descriptor.at_derivation_index(9).unwrap().script_pubkey();
|
||||
@ -32,7 +34,7 @@ fn insert_relevant_txs() {
|
||||
let mut graph = IndexedTxGraph::<ConfirmationHeightAnchor, KeychainTxOutIndex<()>>::new(
|
||||
KeychainTxOutIndex::new(10),
|
||||
);
|
||||
graph.index.add_keychain((), descriptor);
|
||||
let _ = graph.index.insert_descriptor((), descriptor.clone());
|
||||
|
||||
let tx_a = Transaction {
|
||||
output: vec![
|
||||
@ -71,7 +73,10 @@ fn insert_relevant_txs() {
|
||||
txs: txs.iter().cloned().map(Arc::new).collect(),
|
||||
..Default::default()
|
||||
},
|
||||
indexer: keychain::ChangeSet([((), 9_u32)].into()),
|
||||
indexer: keychain::ChangeSet {
|
||||
last_revealed: [(descriptor.descriptor_id(), 9_u32)].into(),
|
||||
keychains_added: [].into(),
|
||||
},
|
||||
};
|
||||
|
||||
assert_eq!(
|
||||
@ -79,7 +84,16 @@ fn insert_relevant_txs() {
|
||||
changeset,
|
||||
);
|
||||
|
||||
assert_eq!(graph.initial_changeset(), changeset,);
|
||||
// The initial changeset will also contain info about the keychain we added
|
||||
let initial_changeset = indexed_tx_graph::ChangeSet {
|
||||
graph: changeset.graph,
|
||||
indexer: keychain::ChangeSet {
|
||||
last_revealed: changeset.indexer.last_revealed,
|
||||
keychains_added: [((), descriptor)].into(),
|
||||
},
|
||||
};
|
||||
|
||||
assert_eq!(graph.initial_changeset(), initial_changeset);
|
||||
}
|
||||
|
||||
/// Ensure consistency IndexedTxGraph list_* and balance methods. These methods lists
|
||||
@ -117,15 +131,17 @@ fn test_list_owned_txouts() {
|
||||
|
||||
// Initiate IndexedTxGraph
|
||||
|
||||
let (desc_1, _) = Descriptor::parse_descriptor(&Secp256k1::signing_only(), "tr(tprv8ZgxMBicQKsPd3krDUsBAmtnRsK3rb8u5yi1zhQgMhF1tR8MW7xfE4rnrbbsrbPR52e7rKapu6ztw1jXveJSCGHEriUGZV7mCe88duLp5pj/86'/1'/0'/0/*)").unwrap();
|
||||
let (desc_2, _) = Descriptor::parse_descriptor(&Secp256k1::signing_only(), "tr(tprv8ZgxMBicQKsPd3krDUsBAmtnRsK3rb8u5yi1zhQgMhF1tR8MW7xfE4rnrbbsrbPR52e7rKapu6ztw1jXveJSCGHEriUGZV7mCe88duLp5pj/86'/1'/0'/1/*)").unwrap();
|
||||
let (desc_1, _) =
|
||||
Descriptor::parse_descriptor(&Secp256k1::signing_only(), common::DESCRIPTORS[2]).unwrap();
|
||||
let (desc_2, _) =
|
||||
Descriptor::parse_descriptor(&Secp256k1::signing_only(), common::DESCRIPTORS[3]).unwrap();
|
||||
|
||||
let mut graph = IndexedTxGraph::<ConfirmationHeightAnchor, KeychainTxOutIndex<String>>::new(
|
||||
KeychainTxOutIndex::new(10),
|
||||
);
|
||||
|
||||
graph.index.add_keychain("keychain_1".into(), desc_1);
|
||||
graph.index.add_keychain("keychain_2".into(), desc_2);
|
||||
let _ = graph.index.insert_descriptor("keychain_1".into(), desc_1);
|
||||
let _ = graph.index.insert_descriptor("keychain_2".into(), desc_2);
|
||||
|
||||
// Get trusted and untrusted addresses
|
||||
|
||||
@ -135,14 +151,20 @@ fn test_list_owned_txouts() {
|
||||
{
|
||||
// we need to scope here to take an immutable reference of the graph
|
||||
for _ in 0..10 {
|
||||
let ((_, script), _) = graph.index.reveal_next_spk(&"keychain_1".to_string());
|
||||
let ((_, script), _) = graph
|
||||
.index
|
||||
.reveal_next_spk(&"keychain_1".to_string())
|
||||
.unwrap();
|
||||
// TODO Assert indexes
|
||||
trusted_spks.push(script.to_owned());
|
||||
}
|
||||
}
|
||||
{
|
||||
for _ in 0..10 {
|
||||
let ((_, script), _) = graph.index.reveal_next_spk(&"keychain_2".to_string());
|
||||
let ((_, script), _) = graph
|
||||
.index
|
||||
.reveal_next_spk(&"keychain_2".to_string())
|
||||
.unwrap();
|
||||
untrusted_spks.push(script.to_owned());
|
||||
}
|
||||
}
|
||||
@ -235,26 +257,18 @@ fn test_list_owned_txouts() {
|
||||
.unwrap_or_else(|| panic!("block must exist at {}", height));
|
||||
let txouts = graph
|
||||
.graph()
|
||||
.filter_chain_txouts(
|
||||
&local_chain,
|
||||
chain_tip,
|
||||
graph.index.outpoints().iter().cloned(),
|
||||
)
|
||||
.filter_chain_txouts(&local_chain, chain_tip, graph.index.outpoints())
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let utxos = graph
|
||||
.graph()
|
||||
.filter_chain_unspents(
|
||||
&local_chain,
|
||||
chain_tip,
|
||||
graph.index.outpoints().iter().cloned(),
|
||||
)
|
||||
.filter_chain_unspents(&local_chain, chain_tip, graph.index.outpoints())
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let balance = graph.graph().balance(
|
||||
&local_chain,
|
||||
chain_tip,
|
||||
graph.index.outpoints().iter().cloned(),
|
||||
graph.index.outpoints(),
|
||||
|_, spk: &Script| trusted_spks.contains(&spk.to_owned()),
|
||||
);
|
||||
|
||||
|
@ -5,36 +5,39 @@ mod common;
|
||||
use bdk_chain::{
|
||||
collections::BTreeMap,
|
||||
indexed_tx_graph::Indexer,
|
||||
keychain::{self, KeychainTxOutIndex},
|
||||
Append,
|
||||
keychain::{self, ChangeSet, KeychainTxOutIndex},
|
||||
Append, DescriptorExt, DescriptorId,
|
||||
};
|
||||
|
||||
use bitcoin::{secp256k1::Secp256k1, Amount, OutPoint, ScriptBuf, Transaction, TxOut};
|
||||
use miniscript::{Descriptor, DescriptorPublicKey};
|
||||
|
||||
use crate::common::DESCRIPTORS;
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Ord, PartialOrd)]
|
||||
enum TestKeychain {
|
||||
External,
|
||||
Internal,
|
||||
}
|
||||
|
||||
fn parse_descriptor(descriptor: &str) -> Descriptor<DescriptorPublicKey> {
|
||||
let secp = bdk_chain::bitcoin::secp256k1::Secp256k1::signing_only();
|
||||
Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, descriptor)
|
||||
.unwrap()
|
||||
.0
|
||||
}
|
||||
|
||||
fn init_txout_index(
|
||||
external_descriptor: Descriptor<DescriptorPublicKey>,
|
||||
internal_descriptor: Descriptor<DescriptorPublicKey>,
|
||||
lookahead: u32,
|
||||
) -> (
|
||||
bdk_chain::keychain::KeychainTxOutIndex<TestKeychain>,
|
||||
Descriptor<DescriptorPublicKey>,
|
||||
Descriptor<DescriptorPublicKey>,
|
||||
) {
|
||||
) -> bdk_chain::keychain::KeychainTxOutIndex<TestKeychain> {
|
||||
let mut txout_index = bdk_chain::keychain::KeychainTxOutIndex::<TestKeychain>::new(lookahead);
|
||||
|
||||
let secp = bdk_chain::bitcoin::secp256k1::Secp256k1::signing_only();
|
||||
let (external_descriptor,_) = Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/0/*)").unwrap();
|
||||
let (internal_descriptor,_) = Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/1/*)").unwrap();
|
||||
let _ = txout_index.insert_descriptor(TestKeychain::External, external_descriptor);
|
||||
let _ = txout_index.insert_descriptor(TestKeychain::Internal, internal_descriptor);
|
||||
|
||||
txout_index.add_keychain(TestKeychain::External, external_descriptor.clone());
|
||||
txout_index.add_keychain(TestKeychain::Internal, internal_descriptor.clone());
|
||||
|
||||
(txout_index, external_descriptor, internal_descriptor)
|
||||
txout_index
|
||||
}
|
||||
|
||||
fn spk_at_index(descriptor: &Descriptor<DescriptorPublicKey>, index: u32) -> ScriptBuf {
|
||||
@ -44,29 +47,136 @@ fn spk_at_index(descriptor: &Descriptor<DescriptorPublicKey>, index: u32) -> Scr
|
||||
.script_pubkey()
|
||||
}
|
||||
|
||||
// We create two empty changesets, lhs and rhs, then insert various descriptors with various
|
||||
// last_revealed, append rhs to lhs, and check that the result is consistent with these rules:
|
||||
// - Existing index doesn't update if the new index in `other` is lower than `self`.
|
||||
// - Existing index updates if the new index in `other` is higher than `self`.
|
||||
// - Existing index is unchanged if keychain doesn't exist in `other`.
|
||||
// - New keychain gets added if the keychain is in `other` but not in `self`.
|
||||
#[test]
|
||||
fn append_changesets_check_last_revealed() {
|
||||
let secp = bitcoin::secp256k1::Secp256k1::signing_only();
|
||||
let descriptor_ids: Vec<_> = DESCRIPTORS
|
||||
.iter()
|
||||
.take(4)
|
||||
.map(|d| {
|
||||
Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, d)
|
||||
.unwrap()
|
||||
.0
|
||||
.descriptor_id()
|
||||
})
|
||||
.collect();
|
||||
|
||||
let mut lhs_di = BTreeMap::<DescriptorId, u32>::default();
|
||||
let mut rhs_di = BTreeMap::<DescriptorId, u32>::default();
|
||||
lhs_di.insert(descriptor_ids[0], 7);
|
||||
lhs_di.insert(descriptor_ids[1], 0);
|
||||
lhs_di.insert(descriptor_ids[2], 3);
|
||||
|
||||
rhs_di.insert(descriptor_ids[0], 3); // value less than lhs desc 0
|
||||
rhs_di.insert(descriptor_ids[1], 5); // value more than lhs desc 1
|
||||
rhs_di.insert(descriptor_ids[3], 4); // key doesn't exist in lhs
|
||||
|
||||
let mut lhs = ChangeSet {
|
||||
keychains_added: BTreeMap::<(), _>::new(),
|
||||
last_revealed: lhs_di,
|
||||
};
|
||||
let rhs = ChangeSet {
|
||||
keychains_added: BTreeMap::<(), _>::new(),
|
||||
last_revealed: rhs_di,
|
||||
};
|
||||
lhs.append(rhs);
|
||||
|
||||
// Existing index doesn't update if the new index in `other` is lower than `self`.
|
||||
assert_eq!(lhs.last_revealed.get(&descriptor_ids[0]), Some(&7));
|
||||
// Existing index updates if the new index in `other` is higher than `self`.
|
||||
assert_eq!(lhs.last_revealed.get(&descriptor_ids[1]), Some(&5));
|
||||
// Existing index is unchanged if keychain doesn't exist in `other`.
|
||||
assert_eq!(lhs.last_revealed.get(&descriptor_ids[2]), Some(&3));
|
||||
// New keychain gets added if the keychain is in `other` but not in `self`.
|
||||
assert_eq!(lhs.last_revealed.get(&descriptor_ids[3]), Some(&4));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_apply_changeset_with_different_descriptors_to_same_keychain() {
|
||||
let external_descriptor = parse_descriptor(DESCRIPTORS[0]);
|
||||
let internal_descriptor = parse_descriptor(DESCRIPTORS[1]);
|
||||
let mut txout_index =
|
||||
init_txout_index(external_descriptor.clone(), internal_descriptor.clone(), 0);
|
||||
assert_eq!(
|
||||
txout_index.keychains().collect::<Vec<_>>(),
|
||||
vec![
|
||||
(&TestKeychain::External, &external_descriptor),
|
||||
(&TestKeychain::Internal, &internal_descriptor)
|
||||
]
|
||||
);
|
||||
|
||||
let changeset = ChangeSet {
|
||||
keychains_added: [(TestKeychain::External, internal_descriptor.clone())].into(),
|
||||
last_revealed: [].into(),
|
||||
};
|
||||
txout_index.apply_changeset(changeset);
|
||||
|
||||
assert_eq!(
|
||||
txout_index.keychains().collect::<Vec<_>>(),
|
||||
vec![
|
||||
(&TestKeychain::External, &internal_descriptor),
|
||||
(&TestKeychain::Internal, &internal_descriptor)
|
||||
]
|
||||
);
|
||||
|
||||
let changeset = ChangeSet {
|
||||
keychains_added: [(TestKeychain::Internal, external_descriptor.clone())].into(),
|
||||
last_revealed: [].into(),
|
||||
};
|
||||
txout_index.apply_changeset(changeset);
|
||||
|
||||
assert_eq!(
|
||||
txout_index.keychains().collect::<Vec<_>>(),
|
||||
vec![
|
||||
(&TestKeychain::External, &internal_descriptor),
|
||||
(&TestKeychain::Internal, &external_descriptor)
|
||||
]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_set_all_derivation_indices() {
|
||||
use bdk_chain::indexed_tx_graph::Indexer;
|
||||
|
||||
let (mut txout_index, _, _) = init_txout_index(0);
|
||||
let external_descriptor = parse_descriptor(DESCRIPTORS[0]);
|
||||
let internal_descriptor = parse_descriptor(DESCRIPTORS[1]);
|
||||
let mut txout_index =
|
||||
init_txout_index(external_descriptor.clone(), internal_descriptor.clone(), 0);
|
||||
let derive_to: BTreeMap<_, _> =
|
||||
[(TestKeychain::External, 12), (TestKeychain::Internal, 24)].into();
|
||||
let last_revealed: BTreeMap<_, _> = [
|
||||
(external_descriptor.descriptor_id(), 12),
|
||||
(internal_descriptor.descriptor_id(), 24),
|
||||
]
|
||||
.into();
|
||||
assert_eq!(
|
||||
txout_index.reveal_to_target_multi(&derive_to).1.as_inner(),
|
||||
&derive_to
|
||||
txout_index.reveal_to_target_multi(&derive_to).1,
|
||||
ChangeSet {
|
||||
keychains_added: BTreeMap::new(),
|
||||
last_revealed: last_revealed.clone()
|
||||
}
|
||||
);
|
||||
assert_eq!(txout_index.last_revealed_indices(), &derive_to);
|
||||
assert_eq!(txout_index.last_revealed_indices(), derive_to);
|
||||
assert_eq!(
|
||||
txout_index.reveal_to_target_multi(&derive_to).1,
|
||||
keychain::ChangeSet::default(),
|
||||
"no changes if we set to the same thing"
|
||||
);
|
||||
assert_eq!(txout_index.initial_changeset().as_inner(), &derive_to);
|
||||
assert_eq!(txout_index.initial_changeset().last_revealed, last_revealed);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_lookahead() {
|
||||
let (mut txout_index, external_desc, internal_desc) = init_txout_index(10);
|
||||
let external_descriptor = parse_descriptor(DESCRIPTORS[0]);
|
||||
let internal_descriptor = parse_descriptor(DESCRIPTORS[1]);
|
||||
let mut txout_index =
|
||||
init_txout_index(external_descriptor.clone(), internal_descriptor.clone(), 10);
|
||||
|
||||
// given:
|
||||
// - external lookahead set to 10
|
||||
@ -76,15 +186,16 @@ fn test_lookahead() {
|
||||
// - scripts cached in spk_txout_index should increase correctly
|
||||
// - stored scripts of external keychain should be of expected counts
|
||||
for index in (0..20).skip_while(|i| i % 2 == 1) {
|
||||
let (revealed_spks, revealed_changeset) =
|
||||
txout_index.reveal_to_target(&TestKeychain::External, index);
|
||||
let (revealed_spks, revealed_changeset) = txout_index
|
||||
.reveal_to_target(&TestKeychain::External, index)
|
||||
.unwrap();
|
||||
assert_eq!(
|
||||
revealed_spks.collect::<Vec<_>>(),
|
||||
vec![(index, spk_at_index(&external_desc, index))],
|
||||
vec![(index, spk_at_index(&external_descriptor, index))],
|
||||
);
|
||||
assert_eq!(
|
||||
revealed_changeset.as_inner(),
|
||||
&[(TestKeychain::External, index)].into()
|
||||
&revealed_changeset.last_revealed,
|
||||
&[(external_descriptor.descriptor_id(), index)].into()
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
@ -126,17 +237,18 @@ fn test_lookahead() {
|
||||
// - derivation index is set ahead of current derivation index + lookahead
|
||||
// expect:
|
||||
// - scripts cached in spk_txout_index should increase correctly, a.k.a. no scripts are skipped
|
||||
let (revealed_spks, revealed_changeset) =
|
||||
txout_index.reveal_to_target(&TestKeychain::Internal, 24);
|
||||
let (revealed_spks, revealed_changeset) = txout_index
|
||||
.reveal_to_target(&TestKeychain::Internal, 24)
|
||||
.unwrap();
|
||||
assert_eq!(
|
||||
revealed_spks.collect::<Vec<_>>(),
|
||||
(0..=24)
|
||||
.map(|index| (index, spk_at_index(&internal_desc, index)))
|
||||
.map(|index| (index, spk_at_index(&internal_descriptor, index)))
|
||||
.collect::<Vec<_>>(),
|
||||
);
|
||||
assert_eq!(
|
||||
revealed_changeset.as_inner(),
|
||||
&[(TestKeychain::Internal, 24)].into()
|
||||
&revealed_changeset.last_revealed,
|
||||
&[(internal_descriptor.descriptor_id(), 24)].into()
|
||||
);
|
||||
assert_eq!(
|
||||
txout_index.inner().all_spks().len(),
|
||||
@ -172,14 +284,14 @@ fn test_lookahead() {
|
||||
let tx = Transaction {
|
||||
output: vec![
|
||||
TxOut {
|
||||
script_pubkey: external_desc
|
||||
script_pubkey: external_descriptor
|
||||
.at_derivation_index(external_index)
|
||||
.unwrap()
|
||||
.script_pubkey(),
|
||||
value: Amount::from_sat(10_000),
|
||||
},
|
||||
TxOut {
|
||||
script_pubkey: internal_desc
|
||||
script_pubkey: internal_descriptor
|
||||
.at_derivation_index(internal_index)
|
||||
.unwrap()
|
||||
.script_pubkey(),
|
||||
@ -219,14 +331,17 @@ fn test_lookahead() {
|
||||
// - last used index should change as expected
|
||||
#[test]
|
||||
fn test_scan_with_lookahead() {
|
||||
let (mut txout_index, external_desc, _) = init_txout_index(10);
|
||||
let external_descriptor = parse_descriptor(DESCRIPTORS[0]);
|
||||
let internal_descriptor = parse_descriptor(DESCRIPTORS[1]);
|
||||
let mut txout_index =
|
||||
init_txout_index(external_descriptor.clone(), internal_descriptor.clone(), 10);
|
||||
|
||||
let spks: BTreeMap<u32, ScriptBuf> = [0, 10, 20, 30]
|
||||
.into_iter()
|
||||
.map(|i| {
|
||||
(
|
||||
i,
|
||||
external_desc
|
||||
external_descriptor
|
||||
.at_derivation_index(i)
|
||||
.unwrap()
|
||||
.script_pubkey(),
|
||||
@ -243,8 +358,8 @@ fn test_scan_with_lookahead() {
|
||||
|
||||
let changeset = txout_index.index_txout(op, &txout);
|
||||
assert_eq!(
|
||||
changeset.as_inner(),
|
||||
&[(TestKeychain::External, spk_i)].into()
|
||||
&changeset.last_revealed,
|
||||
&[(external_descriptor.descriptor_id(), spk_i)].into()
|
||||
);
|
||||
assert_eq!(
|
||||
txout_index.last_revealed_index(&TestKeychain::External),
|
||||
@ -257,7 +372,7 @@ fn test_scan_with_lookahead() {
|
||||
}
|
||||
|
||||
// now try with index 41 (lookahead surpassed); we expect the txout to not be indexed
|
||||
let spk_41 = external_desc
|
||||
let spk_41 = external_descriptor
|
||||
.at_derivation_index(41)
|
||||
.unwrap()
|
||||
.script_pubkey();
|
||||
@ -273,11 +388,13 @@ fn test_scan_with_lookahead() {
|
||||
#[test]
|
||||
#[rustfmt::skip]
|
||||
fn test_wildcard_derivations() {
|
||||
let (mut txout_index, external_desc, _) = init_txout_index(0);
|
||||
let external_spk_0 = external_desc.at_derivation_index(0).unwrap().script_pubkey();
|
||||
let external_spk_16 = external_desc.at_derivation_index(16).unwrap().script_pubkey();
|
||||
let external_spk_26 = external_desc.at_derivation_index(26).unwrap().script_pubkey();
|
||||
let external_spk_27 = external_desc.at_derivation_index(27).unwrap().script_pubkey();
|
||||
let external_descriptor = parse_descriptor(DESCRIPTORS[0]);
|
||||
let internal_descriptor = parse_descriptor(DESCRIPTORS[1]);
|
||||
let mut txout_index = init_txout_index(external_descriptor.clone(), internal_descriptor.clone(), 0);
|
||||
let external_spk_0 = external_descriptor.at_derivation_index(0).unwrap().script_pubkey();
|
||||
let external_spk_16 = external_descriptor.at_derivation_index(16).unwrap().script_pubkey();
|
||||
let external_spk_26 = external_descriptor.at_derivation_index(26).unwrap().script_pubkey();
|
||||
let external_spk_27 = external_descriptor.at_derivation_index(27).unwrap().script_pubkey();
|
||||
|
||||
// - nothing is derived
|
||||
// - unused list is also empty
|
||||
@ -285,13 +402,13 @@ fn test_wildcard_derivations() {
|
||||
// - next_derivation_index() == (0, true)
|
||||
// - derive_new() == ((0, <spk>), keychain::ChangeSet)
|
||||
// - next_unused() == ((0, <spk>), keychain::ChangeSet:is_empty())
|
||||
assert_eq!(txout_index.next_index(&TestKeychain::External), (0, true));
|
||||
let (spk, changeset) = txout_index.reveal_next_spk(&TestKeychain::External);
|
||||
assert_eq!(txout_index.next_index(&TestKeychain::External).unwrap(), (0, true));
|
||||
let (spk, changeset) = txout_index.reveal_next_spk(&TestKeychain::External).unwrap();
|
||||
assert_eq!(spk, (0_u32, external_spk_0.as_script()));
|
||||
assert_eq!(changeset.as_inner(), &[(TestKeychain::External, 0)].into());
|
||||
let (spk, changeset) = txout_index.next_unused_spk(&TestKeychain::External);
|
||||
assert_eq!(&changeset.last_revealed, &[(external_descriptor.descriptor_id(), 0)].into());
|
||||
let (spk, changeset) = txout_index.next_unused_spk(&TestKeychain::External).unwrap();
|
||||
assert_eq!(spk, (0_u32, external_spk_0.as_script()));
|
||||
assert_eq!(changeset.as_inner(), &[].into());
|
||||
assert_eq!(&changeset.last_revealed, &[].into());
|
||||
|
||||
// - derived till 25
|
||||
// - used all spks till 15.
|
||||
@ -307,16 +424,16 @@ fn test_wildcard_derivations() {
|
||||
.chain([17, 20, 23])
|
||||
.for_each(|index| assert!(txout_index.mark_used(TestKeychain::External, index)));
|
||||
|
||||
assert_eq!(txout_index.next_index(&TestKeychain::External), (26, true));
|
||||
assert_eq!(txout_index.next_index(&TestKeychain::External).unwrap(), (26, true));
|
||||
|
||||
let (spk, changeset) = txout_index.reveal_next_spk(&TestKeychain::External);
|
||||
let (spk, changeset) = txout_index.reveal_next_spk(&TestKeychain::External).unwrap();
|
||||
assert_eq!(spk, (26, external_spk_26.as_script()));
|
||||
|
||||
assert_eq!(changeset.as_inner(), &[(TestKeychain::External, 26)].into());
|
||||
assert_eq!(&changeset.last_revealed, &[(external_descriptor.descriptor_id(), 26)].into());
|
||||
|
||||
let (spk, changeset) = txout_index.next_unused_spk(&TestKeychain::External);
|
||||
let (spk, changeset) = txout_index.next_unused_spk(&TestKeychain::External).unwrap();
|
||||
assert_eq!(spk, (16, external_spk_16.as_script()));
|
||||
assert_eq!(changeset.as_inner(), &[].into());
|
||||
assert_eq!(&changeset.last_revealed, &[].into());
|
||||
|
||||
// - Use all the derived till 26.
|
||||
// - next_unused() = ((27, <spk>), keychain::ChangeSet)
|
||||
@ -324,9 +441,9 @@ fn test_wildcard_derivations() {
|
||||
txout_index.mark_used(TestKeychain::External, index);
|
||||
});
|
||||
|
||||
let (spk, changeset) = txout_index.next_unused_spk(&TestKeychain::External);
|
||||
let (spk, changeset) = txout_index.next_unused_spk(&TestKeychain::External).unwrap();
|
||||
assert_eq!(spk, (27, external_spk_27.as_script()));
|
||||
assert_eq!(changeset.as_inner(), &[(TestKeychain::External, 27)].into());
|
||||
assert_eq!(&changeset.last_revealed, &[(external_descriptor.descriptor_id(), 27)].into());
|
||||
}
|
||||
|
||||
#[test]
|
||||
@ -334,13 +451,14 @@ fn test_non_wildcard_derivations() {
|
||||
let mut txout_index = KeychainTxOutIndex::<TestKeychain>::new(0);
|
||||
|
||||
let secp = bitcoin::secp256k1::Secp256k1::signing_only();
|
||||
let (no_wildcard_descriptor, _) = Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, "wpkh([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/1/0)").unwrap();
|
||||
let (no_wildcard_descriptor, _) =
|
||||
Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, DESCRIPTORS[6]).unwrap();
|
||||
let external_spk = no_wildcard_descriptor
|
||||
.at_derivation_index(0)
|
||||
.unwrap()
|
||||
.script_pubkey();
|
||||
|
||||
txout_index.add_keychain(TestKeychain::External, no_wildcard_descriptor);
|
||||
let _ = txout_index.insert_descriptor(TestKeychain::External, no_wildcard_descriptor.clone());
|
||||
|
||||
// given:
|
||||
// - `txout_index` with no stored scripts
|
||||
@ -348,14 +466,24 @@ fn test_non_wildcard_derivations() {
|
||||
// - next derivation index should be new
|
||||
// - when we derive a new script, script @ index 0
|
||||
// - when we get the next unused script, script @ index 0
|
||||
assert_eq!(txout_index.next_index(&TestKeychain::External), (0, true));
|
||||
let (spk, changeset) = txout_index.reveal_next_spk(&TestKeychain::External);
|
||||
assert_eq!(
|
||||
txout_index.next_index(&TestKeychain::External).unwrap(),
|
||||
(0, true)
|
||||
);
|
||||
let (spk, changeset) = txout_index
|
||||
.reveal_next_spk(&TestKeychain::External)
|
||||
.unwrap();
|
||||
assert_eq!(spk, (0, external_spk.as_script()));
|
||||
assert_eq!(changeset.as_inner(), &[(TestKeychain::External, 0)].into());
|
||||
assert_eq!(
|
||||
&changeset.last_revealed,
|
||||
&[(no_wildcard_descriptor.descriptor_id(), 0)].into()
|
||||
);
|
||||
|
||||
let (spk, changeset) = txout_index.next_unused_spk(&TestKeychain::External);
|
||||
let (spk, changeset) = txout_index
|
||||
.next_unused_spk(&TestKeychain::External)
|
||||
.unwrap();
|
||||
assert_eq!(spk, (0, external_spk.as_script()));
|
||||
assert_eq!(changeset.as_inner(), &[].into());
|
||||
assert_eq!(&changeset.last_revealed, &[].into());
|
||||
|
||||
// given:
|
||||
// - the non-wildcard descriptor already has a stored and used script
|
||||
@ -363,18 +491,26 @@ fn test_non_wildcard_derivations() {
|
||||
// - next derivation index should not be new
|
||||
// - derive new and next unused should return the old script
|
||||
// - store_up_to should not panic and return empty changeset
|
||||
assert_eq!(txout_index.next_index(&TestKeychain::External), (0, false));
|
||||
assert_eq!(
|
||||
txout_index.next_index(&TestKeychain::External).unwrap(),
|
||||
(0, false)
|
||||
);
|
||||
txout_index.mark_used(TestKeychain::External, 0);
|
||||
|
||||
let (spk, changeset) = txout_index.reveal_next_spk(&TestKeychain::External);
|
||||
let (spk, changeset) = txout_index
|
||||
.reveal_next_spk(&TestKeychain::External)
|
||||
.unwrap();
|
||||
assert_eq!(spk, (0, external_spk.as_script()));
|
||||
assert_eq!(changeset.as_inner(), &[].into());
|
||||
assert_eq!(&changeset.last_revealed, &[].into());
|
||||
|
||||
let (spk, changeset) = txout_index.next_unused_spk(&TestKeychain::External);
|
||||
let (spk, changeset) = txout_index
|
||||
.next_unused_spk(&TestKeychain::External)
|
||||
.unwrap();
|
||||
assert_eq!(spk, (0, external_spk.as_script()));
|
||||
assert_eq!(changeset.as_inner(), &[].into());
|
||||
let (revealed_spks, revealed_changeset) =
|
||||
txout_index.reveal_to_target(&TestKeychain::External, 200);
|
||||
assert_eq!(&changeset.last_revealed, &[].into());
|
||||
let (revealed_spks, revealed_changeset) = txout_index
|
||||
.reveal_to_target(&TestKeychain::External, 200)
|
||||
.unwrap();
|
||||
assert_eq!(revealed_spks.count(), 0);
|
||||
assert!(revealed_changeset.is_empty());
|
||||
|
||||
@ -438,7 +574,13 @@ fn lookahead_to_target() {
|
||||
];
|
||||
|
||||
for t in test_cases {
|
||||
let (mut index, _, _) = init_txout_index(t.lookahead);
|
||||
let external_descriptor = parse_descriptor(DESCRIPTORS[0]);
|
||||
let internal_descriptor = parse_descriptor(DESCRIPTORS[1]);
|
||||
let mut index = init_txout_index(
|
||||
external_descriptor.clone(),
|
||||
internal_descriptor.clone(),
|
||||
t.lookahead,
|
||||
);
|
||||
|
||||
if let Some(last_revealed) = t.external_last_revealed {
|
||||
let _ = index.reveal_to_target(&TestKeychain::External, last_revealed);
|
||||
@ -449,17 +591,19 @@ fn lookahead_to_target() {
|
||||
|
||||
let keychain_test_cases = [
|
||||
(
|
||||
external_descriptor.descriptor_id(),
|
||||
TestKeychain::External,
|
||||
t.external_last_revealed,
|
||||
t.external_target,
|
||||
),
|
||||
(
|
||||
internal_descriptor.descriptor_id(),
|
||||
TestKeychain::Internal,
|
||||
t.internal_last_revealed,
|
||||
t.internal_target,
|
||||
),
|
||||
];
|
||||
for (keychain, last_revealed, target) in keychain_test_cases {
|
||||
for (descriptor_id, keychain, last_revealed, target) in keychain_test_cases {
|
||||
if let Some(target) = target {
|
||||
let original_last_stored_index = match last_revealed {
|
||||
Some(last_revealed) => Some(last_revealed + t.lookahead),
|
||||
@ -475,10 +619,10 @@ fn lookahead_to_target() {
|
||||
let keys = index
|
||||
.inner()
|
||||
.all_spks()
|
||||
.range((keychain.clone(), 0)..=(keychain.clone(), u32::MAX))
|
||||
.map(|(k, _)| k.clone())
|
||||
.range((descriptor_id, 0)..=(descriptor_id, u32::MAX))
|
||||
.map(|(k, _)| *k)
|
||||
.collect::<Vec<_>>();
|
||||
let exp_keys = core::iter::repeat(keychain)
|
||||
let exp_keys = core::iter::repeat(descriptor_id)
|
||||
.zip(0_u32..=exp_last_stored_index)
|
||||
.collect::<Vec<_>>();
|
||||
assert_eq!(keys, exp_keys);
|
||||
@@ -486,3 +630,150 @@ fn lookahead_to_target() {
         }
     }
 }
+
+/// `::index_txout` should still index txouts with spks derived from descriptors without keychains.
+/// This includes properly refilling the lookahead for said descriptors.
+#[test]
+fn index_txout_after_changing_descriptor_under_keychain() {
+    let secp = bdk_chain::bitcoin::secp256k1::Secp256k1::signing_only();
+    let (desc_a, _) = Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, DESCRIPTORS[0])
+        .expect("descriptor 0 must be valid");
+    let (desc_b, _) = Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, DESCRIPTORS[1])
+        .expect("descriptor 1 must be valid");
+    let desc_id_a = desc_a.descriptor_id();
+
+    let mut txout_index = bdk_chain::keychain::KeychainTxOutIndex::<()>::new(10);
+
+    // Introduce `desc_a` under keychain `()` and replace the descriptor.
+    let _ = txout_index.insert_descriptor((), desc_a.clone());
+    let _ = txout_index.insert_descriptor((), desc_b.clone());
+
+    // Loop through spks in intervals of `lookahead` to create outputs with. We should always be
+    // able to index these outputs if `lookahead` is respected.
+    let spk_indices = [9, 19, 29, 39];
+    for i in spk_indices {
+        let spk_at_index = desc_a
+            .at_derivation_index(i)
+            .expect("must derive")
+            .script_pubkey();
+        let index_changeset = txout_index.index_txout(
+            // Use spk derivation index as vout as we just want an unique outpoint.
+            OutPoint::new(h!("mock_tx"), i as _),
+            &TxOut {
+                value: Amount::from_sat(10_000),
+                script_pubkey: spk_at_index,
+            },
+        );
+        assert_eq!(
+            index_changeset,
+            bdk_chain::keychain::ChangeSet {
+                keychains_added: BTreeMap::default(),
+                last_revealed: [(desc_id_a, i)].into(),
+            },
+            "must always increase last active if impl respects lookahead"
+        );
+    }
+}
+
+#[test]
+fn insert_descriptor_no_change() {
+    let secp = Secp256k1::signing_only();
+    let (desc, _) =
+        Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, DESCRIPTORS[0]).unwrap();
+    let mut txout_index = KeychainTxOutIndex::<()>::default();
+    assert_eq!(
+        txout_index.insert_descriptor((), desc.clone()),
+        keychain::ChangeSet {
+            keychains_added: [((), desc.clone())].into(),
+            last_revealed: Default::default()
+        },
+    );
+    assert_eq!(
+        txout_index.insert_descriptor((), desc.clone()),
+        keychain::ChangeSet::default(),
+        "inserting the same descriptor for keychain should return an empty changeset",
+    );
+}
+
+#[test]
+fn applying_changesets_one_by_one_vs_aggregate_must_have_same_result() {
+    let desc = parse_descriptor(DESCRIPTORS[0]);
+    let changesets: &[ChangeSet<TestKeychain>] = &[
+        ChangeSet {
+            keychains_added: [(TestKeychain::Internal, desc.clone())].into(),
+            last_revealed: [].into(),
+        },
+        ChangeSet {
+            keychains_added: [(TestKeychain::External, desc.clone())].into(),
+            last_revealed: [(desc.descriptor_id(), 12)].into(),
+        },
+    ];
+
+    let mut indexer_a = KeychainTxOutIndex::<TestKeychain>::new(0);
+    for changeset in changesets {
+        indexer_a.apply_changeset(changeset.clone());
+    }
+
+    let mut indexer_b = KeychainTxOutIndex::<TestKeychain>::new(0);
+    let aggregate_changesets = changesets
+        .iter()
+        .cloned()
+        .reduce(|mut agg, cs| {
+            agg.append(cs);
+            agg
+        })
+        .expect("must aggregate changesets");
+    indexer_b.apply_changeset(aggregate_changesets);
+
+    assert_eq!(
+        indexer_a.keychains().collect::<Vec<_>>(),
+        indexer_b.keychains().collect::<Vec<_>>()
+    );
+    assert_eq!(
+        indexer_a.spk_at_index(TestKeychain::External, 0),
+        indexer_b.spk_at_index(TestKeychain::External, 0)
+    );
+    assert_eq!(
+        indexer_a.spk_at_index(TestKeychain::Internal, 0),
+        indexer_b.spk_at_index(TestKeychain::Internal, 0)
+    );
+    assert_eq!(
+        indexer_a.last_revealed_indices(),
+        indexer_b.last_revealed_indices()
+    );
+}
+
+// When the same descriptor is associated with various keychains,
+// index methods only return the highest keychain by Ord
+#[test]
+fn test_only_highest_ord_keychain_is_returned() {
+    let desc = parse_descriptor(DESCRIPTORS[0]);
+
+    let mut indexer = KeychainTxOutIndex::<TestKeychain>::new(0);
+    let _ = indexer.insert_descriptor(TestKeychain::Internal, desc.clone());
+    let _ = indexer.insert_descriptor(TestKeychain::External, desc);
+
+    // reveal_next_spk will work with either keychain
+    let spk0: ScriptBuf = indexer
+        .reveal_next_spk(&TestKeychain::External)
+        .unwrap()
+        .0
+        .1
+        .into();
+    let spk1: ScriptBuf = indexer
+        .reveal_next_spk(&TestKeychain::Internal)
+        .unwrap()
+        .0
+        .1
+        .into();
+
+    // index_of_spk will always return External
+    assert_eq!(
+        indexer.index_of_spk(&spk0),
+        Some((TestKeychain::External, 0))
+    );
+    assert_eq!(
+        indexer.index_of_spk(&spk1),
+        Some((TestKeychain::External, 1))
+    );
+}
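Taken together, these tests exercise the new caller flow: `insert_descriptor` returns the resulting changeset, `reveal_to_target` returns an `Option`, and changesets merge via `Append`. A minimal sketch of that flow, assuming the same `TestKeychain`, `DESCRIPTORS` and `parse_descriptor` test helpers as above:

```rust
// Sketch only, not part of the diff. Assumes the `TestKeychain`,
// `DESCRIPTORS` and `parse_descriptor` helpers from the tests above.
use bdk_chain::{keychain::KeychainTxOutIndex, Append};

fn reveal_flow() {
    let mut index = KeychainTxOutIndex::<TestKeychain>::new(10);

    // `insert_descriptor` now reports what changed instead of returning `()`.
    let mut changeset =
        index.insert_descriptor(TestKeychain::External, parse_descriptor(DESCRIPTORS[0]));

    // `reveal_to_target` returns `None` if the keychain has no descriptor.
    if let Some((revealed_spks, reveal_changeset)) =
        index.reveal_to_target(&TestKeychain::External, 5)
    {
        let newly_revealed = revealed_spks.count();
        assert!(newly_revealed > 0);
        changeset.append(reveal_changeset);
    }

    // `changeset` is what a wallet or persistence layer would stage and write.
    assert!(!changeset.is_empty());
}
```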
@@ -1,3 +1,5 @@
+#![cfg(feature = "miniscript")]
+
 use std::ops::{Bound, RangeBounds};
 
 use bdk_chain::{
@@ -1,3 +1,5 @@
+#![cfg(feature = "miniscript")]
+
 #[macro_use]
 mod common;
 use bdk_chain::tx_graph::CalculateFeeError;
@@ -1,3 +1,5 @@
+#![cfg(feature = "miniscript")]
+
 #[macro_use]
 mod common;
 
@@ -212,7 +212,7 @@ fn main() -> anyhow::Result<()> {
         graph.graph().balance(
             &*chain,
             synced_to.block_id(),
-            graph.index.outpoints().iter().cloned(),
+            graph.index.outpoints(),
             |(k, _), _| k == &Keychain::Internal,
         )
     };
@@ -336,7 +336,7 @@ fn main() -> anyhow::Result<()> {
         graph.graph().balance(
             &*chain,
             synced_to.block_id(),
-            graph.index.outpoints().iter().cloned(),
+            graph.index.outpoints(),
            |(k, _), _| k == &Keychain::Internal,
         )
     };
@@ -249,14 +249,20 @@ where
         script_pubkey: address.script_pubkey(),
     }];
 
-    let internal_keychain = if graph.index.keychains().get(&Keychain::Internal).is_some() {
+    let internal_keychain = if graph
+        .index
+        .keychains()
+        .any(|(k, _)| *k == Keychain::Internal)
+    {
         Keychain::Internal
     } else {
         Keychain::External
     };
 
-    let ((change_index, change_script), change_changeset) =
-        graph.index.next_unused_spk(&internal_keychain);
+    let ((change_index, change_script), change_changeset) = graph
+        .index
+        .next_unused_spk(&internal_keychain)
+        .expect("Must exist");
     changeset.append(change_changeset);
 
     // Clone to drop the immutable reference.
@@ -266,8 +272,9 @@ where
         &graph
             .index
             .keychains()
-            .get(&internal_keychain)
+            .find(|(k, _)| *k == &internal_keychain)
             .expect("must exist")
+            .1
             .at_derivation_index(change_index)
             .expect("change_index can't be hardened"),
         &assets,
@@ -284,8 +291,9 @@ where
         min_drain_value: graph
             .index
             .keychains()
-            .get(&internal_keychain)
+            .find(|(k, _)| *k == &internal_keychain)
             .expect("must exist")
+            .1
             .dust_value(),
         ..CoinSelectorOpt::fund_outputs(
             &outputs,
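Because `keychains()` now yields keychain/descriptor pairs from an iterator rather than a map, the `get` calls in the example become `find`, as the hunks above show. A hedged sketch of that lookup as a standalone helper (the `K: Clone + Ord + Debug` bounds and the `bdk_chain::miniscript` re-export are assumptions):

```rust
// Sketch only: mirrors the `.find(...)` pattern in the hunks above.
use bdk_chain::keychain::KeychainTxOutIndex;
use bdk_chain::miniscript::{Descriptor, DescriptorPublicKey};

fn descriptor_for_keychain<K: Clone + Ord + core::fmt::Debug>(
    index: &KeychainTxOutIndex<K>,
    keychain: &K,
) -> Option<Descriptor<DescriptorPublicKey>> {
    index
        .keychains()
        // `keychains()` yields `(&K, &Descriptor<DescriptorPublicKey>)` pairs.
        .find(|(k, _)| *k == keychain)
        .map(|(_, descriptor)| descriptor.clone())
}
```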
@@ -416,7 +424,7 @@ pub fn planned_utxos<A: Anchor, O: ChainOracle, K: Clone + bdk_tmp_plan::CanDeri
     assets: &bdk_tmp_plan::Assets<K>,
 ) -> Result<Vec<PlannedUtxo<K, A>>, O::Error> {
     let chain_tip = chain.get_chain_tip()?;
-    let outpoints = graph.index.outpoints().iter().cloned();
+    let outpoints = graph.index.outpoints();
     graph
         .graph()
         .try_filter_chain_unspents(chain, chain_tip, outpoints)
@@ -428,8 +436,9 @@ pub fn planned_utxos<A: Anchor, O: ChainOracle, K: Clone + bdk_tmp_plan::CanDeri
             let desc = graph
                 .index
                 .keychains()
-                .get(&k)
+                .find(|(keychain, _)| *keychain == &k)
                 .expect("keychain must exist")
+                .1
                 .at_derivation_index(i)
                 .expect("i can't be hardened");
             let plan = bdk_tmp_plan::plan_satisfaction(&desc, assets)?;
@@ -465,7 +474,8 @@ where
                 _ => unreachable!("only these two variants exist in match arm"),
             };
 
-            let ((spk_i, spk), index_changeset) = spk_chooser(index, &Keychain::External);
+            let ((spk_i, spk), index_changeset) =
+                spk_chooser(index, &Keychain::External).expect("Must exist");
             let db = &mut *db.lock().unwrap();
             db.stage_and_commit(C::from((
                 local_chain::ChangeSet::default(),
@@ -517,7 +527,7 @@ where
             let balance = graph.graph().try_balance(
                 chain,
                 chain.get_chain_tip()?,
-                graph.index.outpoints().iter().cloned(),
+                graph.index.outpoints(),
                 |(k, _), _| k == &Keychain::Internal,
             )?;
 
@@ -547,7 +557,7 @@ where
             let graph = &*graph.lock().unwrap();
             let chain = &*chain.lock().unwrap();
             let chain_tip = chain.get_chain_tip()?;
-            let outpoints = graph.index.outpoints().iter().cloned();
+            let outpoints = graph.index.outpoints();
 
             match txout_cmd {
                 TxOutCmd::List {
@@ -695,9 +705,11 @@ where
 
     let mut index = KeychainTxOutIndex::<Keychain>::default();
 
+    // TODO: descriptors are already stored in the db, so we shouldn't re-insert
+    // them in the index here. However, the keymap is not stored in the database.
     let (descriptor, mut keymap) =
         Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, &args.descriptor)?;
-    index.add_keychain(Keychain::External, descriptor);
+    let _ = index.insert_descriptor(Keychain::External, descriptor);
 
     if let Some((internal_descriptor, internal_keymap)) = args
         .change_descriptor
@@ -706,7 +718,7 @@ where
         .transpose()?
     {
         keymap.extend(internal_keymap);
-        index.add_keychain(Keychain::Internal, internal_descriptor);
+        let _ = index.insert_descriptor(Keychain::Internal, internal_descriptor);
     }
 
     let mut db_backend = match Store::<C>::open_or_create_new(db_magic, &args.db_path) {
@@ -238,7 +238,7 @@ fn main() -> anyhow::Result<()> {
             let mut outpoints: Box<dyn Iterator<Item = OutPoint>> = Box::new(core::iter::empty());
 
             if utxos {
-                let init_outpoints = graph.index.outpoints().iter().cloned();
+                let init_outpoints = graph.index.outpoints();
 
                 let utxos = graph
                     .graph()
@@ -277,7 +277,7 @@ fn main() -> anyhow::Result<()> {
             // We want to search for whether the UTXO is spent, and spent by which
            // transaction. We provide the outpoint of the UTXO to
            // `EsploraExt::update_tx_graph_without_keychain`.
-            let init_outpoints = graph.index.outpoints().iter().cloned();
+            let init_outpoints = graph.index.outpoints();
             let utxos = graph
                 .graph()
                 .filter_chain_unspents(&*chain, local_tip.block_id(), init_outpoints)