Merge bitcoindevkit/bdk#1203: Include the descriptor in keychain::Changeset
86711d4f46f467c651238ad3804fdbe1d22a8600 doc(chain): add section for non-recommended K to descriptor assignments (Daniela Brozzoni)
de53d721913537f56281a134270eafd356f908ad test: Only the highest ord keychain is returned (Daniela Brozzoni)
9d8023bf56a693f1cb2ba340ed024c654307c069 fix(chain): introduce keychain-variant-ranking to `KeychainTxOutIndex` (志宇)
6c8748124fd40e0fee37f78ca30457441b13fbcb chore(chain): move `use` in `indexed_tx_graph.rs` so clippy is happy (志宇)
537aa03ae0f8bec4dc799d33738e9bb7977bdac1 chore(chain): update test so clippy does not complain (志宇)
ed117de7a5b1756482b2e6487855b80e97c597ba test(chain): applying changesets one-by-one vs aggregate should be same (志宇)
6a3fb849e86e0bc21086519ae0201b95ddde5bf4 fix(chain): simplify `Append::append` impl for `keychain::ChangeSet` (志宇)
1d294b734dd6f4639075cba271e2b40f437f998f fix: Run tests only if the miniscript feature is enabled, enable it by default (Daniela Brozzoni)
0e3e136f6fa7215f6391dbcc1c4781262111ce64 doc(bdk): Add instructions for manually inserting secret keys in the wallet in Wallet::load (Daniela Brozzoni)
76afccc555feff084867d6f9406e4e74bee938cc fix(wallet): add expected descriptors as signers after creating from wallet::ChangeSet (Steve Myers)
4f05441a00b921efd661da0dff94d9c28e38b70d keychain::ChangeSet includes the descriptor (Daniela Brozzoni)
8ff99f27dfe45643bf401409ca72429a9b812873 ref(chain): Define test descriptors, use them everywhere (Daniela Brozzoni)
b9902936a0d17498eec8866233d89b2882d0af8f ref(chain): move `keychain::ChangeSet` into `txout_index.rs` (志宇)

Pull request description:

  Fixes #1101

  - Moves `keychain::ChangeSet` inside `keychain/txout_index.rs`, as the `ChangeSet` now depends on miniscript
  - Slightly cleans up tests by introducing some constant descriptors
  - The `KeychainTxOutIndex`'s internal `SpkIterator` now uses `DescriptorId` instead of `K`. The `DescriptorId` -> `K` translation is made at the `KeychainTxOutIndex` level.
  - The `keychain::ChangeSet` is now a struct, which includes a map for last revealed indexes, and one for newly added keychains and their descriptors.

  ### Changelog notice

  API changes in bdk:
  - `Wallet::keychains` returns an `impl Iterator` instead of a `BTreeMap`
  - `Wallet::load` no longer takes descriptors, since they are stored in the db
  - `Wallet::new_or_load` checks that the descriptor loaded from the db is the same as the provided one

  API changes in bdk_chain:
  - `ChangeSet` is now a struct, which includes a map for last revealed indexes, and one for keychains and descriptors.
  - `KeychainTxOutIndex::inner` returns a `SpkIterator<(DescriptorId, u32)>`
  - `KeychainTxOutIndex::outpoints` returns a `BTreeSet` instead of a `&BTreeSet`
  - `KeychainTxOutIndex::keychains` returns an `impl Iterator` instead of a `&BTreeMap`
  - `KeychainTxOutIndex::txouts` no longer returns an `ExactSizeIterator`
  - `KeychainTxOutIndex::last_revealed_indices` returns a `BTreeMap` instead of a `&BTreeMap`
  - `KeychainTxOutIndex::add_keychain` has been renamed to `KeychainTxOutIndex::insert_descriptor`, and now it returns a `ChangeSet`
  - `KeychainTxOutIndex::reveal_next_spk` returns `Option`
  - `KeychainTxOutIndex::next_unused_spk` returns `Option`
  - `KeychainTxOutIndex::unbounded_spk_iter` returns `Option`
  - `KeychainTxOutIndex::next_index` returns `Option`
  - `KeychainTxOutIndex::reveal_to_target` returns `Option`
  - `KeychainTxOutIndex::revealed_keychain_spks` returns `Option`
  - `KeychainTxOutIndex::unused_keychain_spks` returns `Option`
  - `KeychainTxOutIndex::last_revealed_index` returns `Option`
  - `KeychainTxOutIndex::keychain_outpoints` returns `Option`
  - `KeychainTxOutIndex::keychain_outpoints_in_range` returns `Option`
  - `KeychainTxOutIndex::last_used_index` returns `None` if the keychain has never been used, or if it doesn't exist

  ### Checklists

  #### All Submissions:

  * [x] I've signed all my commits
  * [x] I followed the [contribution guidelines](https://github.com/bitcoindevkit/bdk/blob/master/CONTRIBUTING.md)
  * [x] I ran `cargo fmt` and `cargo clippy` before committing

  #### New Features:

  * [x] I've added tests for the new feature
  * [x] I've added docs for the new feature

ACKs for top commit:
  evanlinjin:
    ACK 86711d4f46f467c651238ad3804fdbe1d22a8600

Tree-SHA512: 4b1c9a31951f67b18037b7dd9837acbc35823f21de644ab833754b74d20f5373549f81e66965ecd3953ebf4f99644c9fd834812acfa65f9188950f1bda17ab60
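For orientation, here is a minimal standalone sketch of the merge ("append") semantics the new `keychain::ChangeSet` follows, as described above and implemented in the diff below: descriptors from the other changeset overwrite existing entries, while last-revealed indices only ever grow. The type aliases are simplified stand-ins (the real fields hold `miniscript` descriptors and a sha256-based `DescriptorId`); this is an illustration, not the bdk_chain code itself.

```rust
use std::collections::BTreeMap;

// Simplified stand-ins for the real bdk_chain types (illustration only).
type DescriptorId = [u8; 32]; // bdk_chain uses a sha256 hash newtype
type Descriptor = String;     // bdk_chain uses miniscript::Descriptor<DescriptorPublicKey>

struct ChangeSet<K: Ord> {
    // Keychains that have been added, with their descriptor.
    keychains_added: BTreeMap<K, Descriptor>,
    // Last revealed derivation index, keyed by descriptor id.
    last_revealed: BTreeMap<DescriptorId, u32>,
}

impl<K: Ord> ChangeSet<K> {
    // Mirrors the `Append::append` behavior described in the PR: descriptors of
    // `other` overwrite existing entries; revealed indices are monotone (never decrease).
    fn append(&mut self, other: Self) {
        self.keychains_added.extend(other.keychains_added);
        for (desc_id, index) in other.last_revealed {
            let entry = self.last_revealed.entry(desc_id).or_insert(index);
            *entry = (*entry).max(index);
        }
    }
}

fn main() {
    let id = [0u8; 32];
    let mut a = ChangeSet {
        keychains_added: BTreeMap::from([("external", "wpkh(A)".to_string())]),
        last_revealed: BTreeMap::from([(id, 7)]),
    };
    let b = ChangeSet {
        keychains_added: BTreeMap::from([("internal", "wpkh(B)".to_string())]),
        last_revealed: BTreeMap::from([(id, 3)]),
    };
    a.append(b);
    assert_eq!(a.last_revealed[&id], 7); // the revealed index never decreases
    assert_eq!(a.keychains_added.len(), 2);
}
```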
commit fb7ff298a4
@@ -54,6 +54,7 @@ pub mod tx_builder;
pub(crate) mod utils;

pub mod error;

pub use utils::IsDust;

use coin_selection::DefaultCoinSelectionAlgorithm;
@@ -305,6 +306,8 @@ pub enum LoadError {
MissingNetwork,
/// Data loaded from persistence is missing genesis hash.
MissingGenesis,
+ /// Data loaded from persistence is missing descriptor.
+ MissingDescriptor,
}

impl fmt::Display for LoadError {
@@ -317,6 +320,7 @@ impl fmt::Display for LoadError {
}
LoadError::MissingNetwork => write!(f, "loaded data is missing network type"),
LoadError::MissingGenesis => write!(f, "loaded data is missing genesis hash"),
+ LoadError::MissingDescriptor => write!(f, "loaded data is missing descriptor"),
}
}
}
@@ -352,6 +356,13 @@ pub enum NewOrLoadError {
/// The network type loaded from persistence.
got: Option<Network>,
},
+ /// The loaded desccriptor does not match what was provided.
+ LoadedDescriptorDoesNotMatch {
+ /// The descriptor loaded from persistence.
+ got: Option<ExtendedDescriptor>,
+ /// The keychain of the descriptor not matching
+ keychain: KeychainKind,
+ },
}

impl fmt::Display for NewOrLoadError {
@@ -372,6 +383,13 @@ impl fmt::Display for NewOrLoadError {
NewOrLoadError::LoadedNetworkDoesNotMatch { expected, got } => {
write!(f, "loaded network type is not {}, got {:?}", expected, got)
}
+ NewOrLoadError::LoadedDescriptorDoesNotMatch { got, keychain } => {
+ write!(
+ f,
+ "loaded descriptor is different from what was provided, got {:?} for keychain {:?}",
+ got, keychain
+ )
+ }
}
}
}
@@ -499,21 +517,57 @@ impl Wallet {
}

/// Load [`Wallet`] from the given persistence backend.
- pub fn load<E: IntoWalletDescriptor>(
- descriptor: E,
- change_descriptor: Option<E>,
+ ///
+ /// Note that the descriptor secret keys are not persisted to the db; this means that after
+ /// calling this method the [`Wallet`] **won't** know the secret keys, and as such, won't be
+ /// able to sign transactions.
+ ///
+ /// If you wish to use the wallet to sign transactions, you need to add the secret keys
+ /// manually to the [`Wallet`]:
+ ///
+ /// ```rust,no_run
+ /// # use bdk::Wallet;
+ /// # use bdk::signer::{SignersContainer, SignerOrdering};
+ /// # use bdk::descriptor::Descriptor;
+ /// # use bitcoin::key::Secp256k1;
+ /// # use bdk::KeychainKind;
+ /// # use bdk_file_store::Store;
+ /// #
+ /// # fn main() -> Result<(), anyhow::Error> {
+ /// # let temp_dir = tempfile::tempdir().expect("must create tempdir");
+ /// # let file_path = temp_dir.path().join("store.db");
+ /// # let db: Store<bdk::wallet::ChangeSet> = Store::create_new(&[], &file_path).expect("must create db");
+ /// let secp = Secp256k1::new();
+ ///
+ /// let (external_descriptor, external_keymap) = Descriptor::parse_descriptor(&secp, "wpkh(tprv8ZgxMBicQKsPdy6LMhUtFHAgpocR8GC6QmwMSFpZs7h6Eziw3SpThFfczTDh5rW2krkqffa11UpX3XkeTTB2FvzZKWXqPY54Y6Rq4AQ5R8L/84'/1'/0'/0/*)").unwrap();
+ /// let (internal_descriptor, internal_keymap) = Descriptor::parse_descriptor(&secp, "wpkh(tprv8ZgxMBicQKsPdy6LMhUtFHAgpocR8GC6QmwMSFpZs7h6Eziw3SpThFfczTDh5rW2krkqffa11UpX3XkeTTB2FvzZKWXqPY54Y6Rq4AQ5R8L/84'/1'/0'/1/*)").unwrap();
+ ///
+ /// let external_signer_container = SignersContainer::build(external_keymap, &external_descriptor, &secp);
+ /// let internal_signer_container = SignersContainer::build(internal_keymap, &internal_descriptor, &secp);
+ ///
+ /// let mut wallet = Wallet::load(db)?;
+ ///
+ /// external_signer_container.signers().into_iter()
+ ///     .for_each(|s| wallet.add_signer(KeychainKind::External, SignerOrdering::default(), s.clone()));
+ /// internal_signer_container.signers().into_iter()
+ ///     .for_each(|s| wallet.add_signer(KeychainKind::Internal, SignerOrdering::default(), s.clone()));
+ /// # Ok(())
+ /// # }
+ /// ```
+ ///
+ /// Alternatively, you can call [`Wallet::new_or_load`], which will add the private keys of the
+ /// passed-in descriptors to the [`Wallet`].
+ pub fn load(
mut db: impl PersistBackend<ChangeSet> + Send + Sync + 'static,
) -> Result<Self, LoadError> {
let changeset = db
.load_from_persistence()
.map_err(LoadError::Persist)?
.ok_or(LoadError::NotInitialized)?;
- Self::load_from_changeset(descriptor, change_descriptor, db, changeset)
+ Self::load_from_changeset(db, changeset)
}

- fn load_from_changeset<E: IntoWalletDescriptor>(
- descriptor: E,
- change_descriptor: Option<E>,
+ fn load_from_changeset(
db: impl PersistBackend<ChangeSet> + Send + Sync + 'static,
changeset: ChangeSet,
) -> Result<Self, LoadError> {
@@ -522,10 +576,23 @@ impl Wallet {
let chain =
LocalChain::from_changeset(changeset.chain).map_err(|_| LoadError::MissingGenesis)?;
let mut index = KeychainTxOutIndex::<KeychainKind>::default();
+ let descriptor = changeset
+ .indexed_tx_graph
+ .indexer
+ .keychains_added
+ .get(&KeychainKind::External)
+ .ok_or(LoadError::MissingDescriptor)?
+ .clone();
+ let change_descriptor = changeset
+ .indexed_tx_graph
+ .indexer
+ .keychains_added
+ .get(&KeychainKind::Internal)
+ .cloned();

let (signers, change_signers) =
create_signers(&mut index, &secp, descriptor, change_descriptor, network)
- .map_err(LoadError::Descriptor)?;
+ .expect("Can't fail: we passed in valid descriptors, recovered from the changeset");

let mut indexed_graph = IndexedTxGraph::new(index);
indexed_graph.apply_changeset(changeset.indexed_tx_graph);
@@ -562,8 +629,8 @@ impl Wallet {
)
}

- /// Either loads [`Wallet`] from persistence, or initializes it if it does not exist (with a
- /// custom genesis hash).
+ /// Either loads [`Wallet`] from persistence, or initializes it if it does not exist, using the
+ /// provided descriptor, change descriptor, network, and custom genesis hash.
///
/// This method will fail if the loaded [`Wallet`] has different parameters to those provided.
/// This is like [`Wallet::new_or_load`] with an additional `genesis_hash` parameter. This is
@@ -580,25 +647,23 @@ impl Wallet {
.map_err(NewOrLoadError::Persist)?;
match changeset {
Some(changeset) => {
- let wallet =
- Self::load_from_changeset(descriptor, change_descriptor, db, changeset)
- .map_err(|e| match e {
- LoadError::Descriptor(e) => NewOrLoadError::Descriptor(e),
- LoadError::Persist(e) => NewOrLoadError::Persist(e),
- LoadError::NotInitialized => NewOrLoadError::NotInitialized,
- LoadError::MissingNetwork => {
- NewOrLoadError::LoadedNetworkDoesNotMatch {
- expected: network,
- got: None,
- }
- }
- LoadError::MissingGenesis => {
- NewOrLoadError::LoadedGenesisDoesNotMatch {
- expected: genesis_hash,
- got: None,
- }
- }
- })?;
+ let mut wallet = Self::load_from_changeset(db, changeset).map_err(|e| match e {
+ LoadError::Descriptor(e) => NewOrLoadError::Descriptor(e),
+ LoadError::Persist(e) => NewOrLoadError::Persist(e),
+ LoadError::NotInitialized => NewOrLoadError::NotInitialized,
+ LoadError::MissingNetwork => NewOrLoadError::LoadedNetworkDoesNotMatch {
+ expected: network,
+ got: None,
+ },
+ LoadError::MissingGenesis => NewOrLoadError::LoadedGenesisDoesNotMatch {
+ expected: genesis_hash,
+ got: None,
+ },
+ LoadError::MissingDescriptor => NewOrLoadError::LoadedDescriptorDoesNotMatch {
+ got: None,
+ keychain: KeychainKind::External,
+ },
+ })?;
if wallet.network != network {
return Err(NewOrLoadError::LoadedNetworkDoesNotMatch {
expected: network,
@@ -611,6 +676,73 @@ impl Wallet {
got: Some(wallet.chain.genesis_hash()),
});
}

+ let (expected_descriptor, expected_descriptor_keymap) = descriptor
+ .into_wallet_descriptor(&wallet.secp, network)
+ .map_err(NewOrLoadError::Descriptor)?;
+ let wallet_descriptor = wallet.public_descriptor(KeychainKind::External).cloned();
+ if wallet_descriptor != Some(expected_descriptor.clone()) {
+ return Err(NewOrLoadError::LoadedDescriptorDoesNotMatch {
+ got: wallet_descriptor,
+ keychain: KeychainKind::External,
+ });
+ }
+ // if expected descriptor has private keys add them as new signers
+ if !expected_descriptor_keymap.is_empty() {
+ let signer_container = SignersContainer::build(
+ expected_descriptor_keymap,
+ &expected_descriptor,
+ &wallet.secp,
+ );
+ signer_container.signers().into_iter().for_each(|signer| {
+ wallet.add_signer(
+ KeychainKind::External,
+ SignerOrdering::default(),
+ signer.clone(),
+ )
+ });
+ }

+ let expected_change_descriptor = if let Some(c) = change_descriptor {
+ Some(
+ c.into_wallet_descriptor(&wallet.secp, network)
+ .map_err(NewOrLoadError::Descriptor)?,
+ )
+ } else {
+ None
+ };
+ let wallet_change_descriptor =
+ wallet.public_descriptor(KeychainKind::Internal).cloned();

+ match (expected_change_descriptor, wallet_change_descriptor) {
+ (Some((expected_descriptor, expected_keymap)), Some(wallet_descriptor))
+ if wallet_descriptor == expected_descriptor =>
+ {
+ // if expected change descriptor has private keys add them as new signers
+ if !expected_keymap.is_empty() {
+ let signer_container = SignersContainer::build(
+ expected_keymap,
+ &expected_descriptor,
+ &wallet.secp,
+ );
+ signer_container.signers().into_iter().for_each(|signer| {
+ wallet.add_signer(
+ KeychainKind::Internal,
+ SignerOrdering::default(),
+ signer.clone(),
+ )
+ });
+ }
+ }
+ (None, None) => (),
+ (_, wallet_descriptor) => {
+ return Err(NewOrLoadError::LoadedDescriptorDoesNotMatch {
+ got: wallet_descriptor,
+ keychain: KeychainKind::Internal,
+ });
+ }
+ }

Ok(wallet)
}
None => Self::new_with_genesis_hash(
@@ -636,7 +768,7 @@ impl Wallet {
}

/// Iterator over all keychains in this wallet
- pub fn keychains(&self) -> &BTreeMap<KeychainKind, ExtendedDescriptor> {
+ pub fn keychains(&self) -> impl Iterator<Item = (&KeychainKind, &ExtendedDescriptor)> {
self.indexed_graph.index.keychains()
}

@@ -650,7 +782,11 @@ impl Wallet {
/// [BIP32](https://github.com/bitcoin/bips/blob/master/bip-0032.mediawiki) max index.
pub fn peek_address(&self, keychain: KeychainKind, mut index: u32) -> AddressInfo {
let keychain = self.map_keychain(keychain);
- let mut spk_iter = self.indexed_graph.index.unbounded_spk_iter(&keychain);
+ let mut spk_iter = self
+ .indexed_graph
+ .index
+ .unbounded_spk_iter(&keychain)
+ .expect("Must exist (we called map_keychain)");
if !spk_iter.descriptor().has_wildcard() {
index = 0;
}
@@ -677,7 +813,11 @@ impl Wallet {
/// If writing to persistent storage fails.
pub fn reveal_next_address(&mut self, keychain: KeychainKind) -> anyhow::Result<AddressInfo> {
let keychain = self.map_keychain(keychain);
- let ((index, spk), index_changeset) = self.indexed_graph.index.reveal_next_spk(&keychain);
+ let ((index, spk), index_changeset) = self
+ .indexed_graph
+ .index
+ .reveal_next_spk(&keychain)
+ .expect("Must exist (we called map_keychain)");

self.persist
.stage_and_commit(indexed_tx_graph::ChangeSet::from(index_changeset).into())?;
@@ -705,8 +845,11 @@ impl Wallet {
index: u32,
) -> anyhow::Result<impl Iterator<Item = AddressInfo> + '_> {
let keychain = self.map_keychain(keychain);
- let (spk_iter, index_changeset) =
- self.indexed_graph.index.reveal_to_target(&keychain, index);
+ let (spk_iter, index_changeset) = self
+ .indexed_graph
+ .index
+ .reveal_to_target(&keychain, index)
+ .expect("must exist (we called map_keychain)");

self.persist
.stage_and_commit(indexed_tx_graph::ChangeSet::from(index_changeset).into())?;
@@ -729,7 +872,11 @@ impl Wallet {
/// If writing to persistent storage fails.
pub fn next_unused_address(&mut self, keychain: KeychainKind) -> anyhow::Result<AddressInfo> {
let keychain = self.map_keychain(keychain);
- let ((index, spk), index_changeset) = self.indexed_graph.index.next_unused_spk(&keychain);
+ let ((index, spk), index_changeset) = self
+ .indexed_graph
+ .index
+ .next_unused_spk(&keychain)
+ .expect("must exist (we called map_keychain)");

self.persist
.stage_and_commit(indexed_tx_graph::ChangeSet::from(index_changeset).into())?;
@@ -799,7 +946,7 @@ impl Wallet {
.filter_chain_unspents(
&self.chain,
self.chain.tip().block_id(),
- self.indexed_graph.index.outpoints().iter().cloned(),
+ self.indexed_graph.index.outpoints(),
)
.map(|((k, i), full_txo)| new_local_utxo(k, i, full_txo))
}
@@ -813,7 +960,7 @@ impl Wallet {
.filter_chain_txouts(
&self.chain,
self.chain.tip().block_id(),
- self.indexed_graph.index.outpoints().iter().cloned(),
+ self.indexed_graph.index.outpoints(),
)
.map(|((k, i), full_txo)| new_local_utxo(k, i, full_txo))
}
@@ -851,7 +998,11 @@ impl Wallet {
&self,
keychain: KeychainKind,
) -> impl Iterator<Item = (u32, ScriptBuf)> + Clone {
- self.indexed_graph.index.unbounded_spk_iter(&keychain)
+ let keychain = self.map_keychain(keychain);
+ self.indexed_graph
+ .index
+ .unbounded_spk_iter(&keychain)
+ .expect("Must exist (we called map_keychain)")
}

/// Returns the utxo owned by this wallet corresponding to `outpoint` if it exists in the
@@ -1133,7 +1284,7 @@ impl Wallet {
self.indexed_graph.graph().balance(
&self.chain,
self.chain.tip().block_id(),
- self.indexed_graph.index.outpoints().iter().cloned(),
+ self.indexed_graph.index.outpoints(),
|&(k, _), _| k == KeychainKind::Internal,
)
}
@@ -1220,17 +1371,9 @@ impl Wallet {
coin_selection: Cs,
params: TxParams,
) -> Result<Psbt, CreateTxError> {
- let external_descriptor = self
- .indexed_graph
- .index
- .keychains()
- .get(&KeychainKind::External)
- .expect("must exist");
- let internal_descriptor = self
- .indexed_graph
- .index
- .keychains()
- .get(&KeychainKind::Internal);
+ let keychains: BTreeMap<_, _> = self.indexed_graph.index.keychains().collect();
+ let external_descriptor = keychains.get(&KeychainKind::External).expect("must exist");
+ let internal_descriptor = keychains.get(&KeychainKind::Internal);

let external_policy = external_descriptor
.extract_policy(&self.signers, BuildSatisfaction::None, &self.secp)?
@@ -1464,8 +1607,11 @@ impl Wallet {
Some(ref drain_recipient) => drain_recipient.clone(),
None => {
let change_keychain = self.map_keychain(KeychainKind::Internal);
- let ((index, spk), index_changeset) =
- self.indexed_graph.index.next_unused_spk(&change_keychain);
+ let ((index, spk), index_changeset) = self
+ .indexed_graph
+ .index
+ .next_unused_spk(&change_keychain)
+ .expect("Keychain exists (we called map_keychain)");
let spk = spk.into();
self.indexed_graph.index.mark_used(change_keychain, index);
self.persist
@@ -1825,7 +1971,11 @@ impl Wallet {
///
/// This can be used to build a watch-only version of a wallet
pub fn public_descriptor(&self, keychain: KeychainKind) -> Option<&ExtendedDescriptor> {
- self.indexed_graph.index.keychains().get(&keychain)
+ self.indexed_graph
+ .index
+ .keychains()
+ .find(|(k, _)| *k == &keychain)
+ .map(|(_, d)| d)
}

/// Finalize a PSBT, i.e., for each input determine if sufficient data is available to pass
@@ -1876,17 +2026,9 @@ impl Wallet {
.get_utxo_for(n)
.and_then(|txout| self.get_descriptor_for_txout(&txout))
.or_else(|| {
- self.indexed_graph
- .index
- .keychains()
- .iter()
- .find_map(|(_, desc)| {
- desc.derive_from_psbt_input(
- psbt_input,
- psbt.get_utxo_for(n),
- &self.secp,
- )
- })
+ self.indexed_graph.index.keychains().find_map(|(_, desc)| {
+ desc.derive_from_psbt_input(psbt_input, psbt.get_utxo_for(n), &self.secp)
+ })
});

match desc {
@@ -1952,7 +2094,12 @@ impl Wallet {

/// The index of the next address that you would get if you were to ask the wallet for a new address
pub fn next_derivation_index(&self, keychain: KeychainKind) -> u32 {
- self.indexed_graph.index.next_index(&keychain).0
+ let keychain = self.map_keychain(keychain);
+ self.indexed_graph
+ .index
+ .next_index(&keychain)
+ .expect("Keychain must exist (we called map_keychain)")
+ .0
}

/// Informs the wallet that you no longer intend to broadcast a tx that was built from it.
@@ -2119,7 +2266,6 @@ impl Wallet {
if params.add_global_xpubs {
let all_xpubs = self
.keychains()
- .iter()
.flat_map(|(_, desc)| desc.get_extended_keys())
.collect::<Vec<_>>();

@@ -2496,13 +2642,13 @@ fn create_signers<E: IntoWalletDescriptor>(
) -> Result<(Arc<SignersContainer>, Arc<SignersContainer>), crate::descriptor::error::Error> {
let (descriptor, keymap) = into_wallet_descriptor_checked(descriptor, secp, network)?;
let signers = Arc::new(SignersContainer::build(keymap, &descriptor, secp));
- index.add_keychain(KeychainKind::External, descriptor);
+ let _ = index.insert_descriptor(KeychainKind::External, descriptor);

let change_signers = match change_descriptor {
Some(descriptor) => {
let (descriptor, keymap) = into_wallet_descriptor_checked(descriptor, secp, network)?;
let signers = Arc::new(SignersContainer::build(keymap, &descriptor, secp));
- index.add_keychain(KeychainKind::Internal, descriptor);
+ let _ = index.insert_descriptor(KeychainKind::Internal, descriptor);
signers
}
None => Arc::new(SignersContainer::new()),
@@ -1,7 +1,7 @@
use std::str::FromStr;

use assert_matches::assert_matches;
- use bdk::descriptor::calc_checksum;
+ use bdk::descriptor::{calc_checksum, IntoWalletDescriptor};
use bdk::psbt::PsbtUtils;
use bdk::signer::{SignOptions, SignerError};
use bdk::wallet::coin_selection::{self, LargestFirstCoinSelection};
@@ -10,9 +10,11 @@ use bdk::wallet::tx_builder::AddForeignUtxoError;
use bdk::wallet::NewError;
use bdk::wallet::{AddressInfo, Balance, Wallet};
use bdk::KeychainKind;
+ use bdk_chain::collections::BTreeMap;
use bdk_chain::COINBASE_MATURITY;
use bdk_chain::{BlockId, ConfirmationTime};
use bitcoin::hashes::Hash;
+ use bitcoin::key::Secp256k1;
use bitcoin::psbt;
use bitcoin::script::PushBytesBuf;
use bitcoin::sighash::{EcdsaSighashType, TapSighashType};
@@ -84,14 +86,24 @@ fn load_recovers_wallet() {
// recover wallet
{
let db = bdk_file_store::Store::open(DB_MAGIC, &file_path).expect("must recover db");
- let wallet =
- Wallet::load(get_test_tr_single_sig_xprv(), None, db).expect("must recover wallet");
+ let wallet = Wallet::load(db).expect("must recover wallet");
assert_eq!(wallet.network(), Network::Testnet);
- assert_eq!(wallet.spk_index().keychains(), wallet_spk_index.keychains());
+ assert_eq!(
+ wallet.spk_index().keychains().collect::<Vec<_>>(),
+ wallet_spk_index.keychains().collect::<Vec<_>>()
+ );
assert_eq!(
wallet.spk_index().last_revealed_indices(),
wallet_spk_index.last_revealed_indices()
);
+ let secp = Secp256k1::new();
+ assert_eq!(
+ *wallet.get_descriptor_for_keychain(KeychainKind::External),
+ get_test_tr_single_sig_xprv()
+ .into_wallet_descriptor(&secp, wallet.network())
+ .unwrap()
+ .0
+ );
}

// `new` can only be called on empty db
@@ -108,12 +120,12 @@ fn new_or_load() {
let file_path = temp_dir.path().join("store.db");

// init wallet when non-existent
- let wallet_keychains = {
+ let wallet_keychains: BTreeMap<_, _> = {
let db = bdk_file_store::Store::open_or_create_new(DB_MAGIC, &file_path)
.expect("must create db");
let wallet = Wallet::new_or_load(get_test_wpkh(), None, db, Network::Testnet)
.expect("must init wallet");
- wallet.keychains().clone()
+ wallet.keychains().map(|(k, v)| (*k, v.clone())).collect()
};

// wrong network
@@ -162,6 +174,49 @@ fn new_or_load() {
);
}

+ // wrong external descriptor
+ {
+ let exp_descriptor = get_test_tr_single_sig();
+ let got_descriptor = get_test_wpkh()
+ .into_wallet_descriptor(&Secp256k1::new(), Network::Testnet)
+ .unwrap()
+ .0;

+ let db =
+ bdk_file_store::Store::open_or_create_new(DB_MAGIC, &file_path).expect("must open db");
+ let err = Wallet::new_or_load(exp_descriptor, None, db, Network::Testnet)
+ .expect_err("wrong external descriptor");
+ assert!(
+ matches!(
+ err,
+ bdk::wallet::NewOrLoadError::LoadedDescriptorDoesNotMatch { ref got, keychain }
+ if got == &Some(got_descriptor) && keychain == KeychainKind::External
+ ),
+ "err: {}",
+ err,
+ );
+ }

+ // wrong internal descriptor
+ {
+ let exp_descriptor = Some(get_test_tr_single_sig());
+ let got_descriptor = None;

+ let db =
+ bdk_file_store::Store::open_or_create_new(DB_MAGIC, &file_path).expect("must open db");
+ let err = Wallet::new_or_load(get_test_wpkh(), exp_descriptor, db, Network::Testnet)
+ .expect_err("wrong internal descriptor");
+ assert!(
+ matches!(
+ err,
+ bdk::wallet::NewOrLoadError::LoadedDescriptorDoesNotMatch { ref got, keychain }
+ if got == &got_descriptor && keychain == KeychainKind::Internal
+ ),
+ "err: {}",
+ err,
+ );
+ }

// all parameters match
{
let db =
@@ -169,7 +224,10 @@ fn new_or_load() {
let wallet = Wallet::new_or_load(get_test_wpkh(), None, db, Network::Testnet)
.expect("must recover wallet");
assert_eq!(wallet.network(), Network::Testnet);
- assert_eq!(wallet.keychains(), &wallet_keychains);
+ assert!(wallet
+ .keychains()
+ .map(|(k, v)| (*k, v.clone()))
+ .eq(wallet_keychains));
}
}

@@ -181,7 +239,6 @@ fn test_descriptor_checksum() {

let raw_descriptor = wallet
.keychains()
- .iter()
.next()
.unwrap()
.1
@@ -26,6 +26,6 @@ rand = "0.8"
proptest = "1.2.0"

[features]
- default = ["std"]
- std = ["bitcoin/std", "miniscript/std"]
- serde = ["serde_crate", "bitcoin/serde"]
+ default = ["std", "miniscript"]
+ std = ["bitcoin/std", "miniscript?/std"]
+ serde = ["serde_crate", "bitcoin/serde", "miniscript?/serde"]
@@ -1,10 +1,29 @@
- use crate::miniscript::{Descriptor, DescriptorPublicKey};
+ use crate::{
+ alloc::{string::ToString, vec::Vec},
+ miniscript::{Descriptor, DescriptorPublicKey},
+ };
+ use bitcoin::hashes::{hash_newtype, sha256, Hash};

+ hash_newtype! {
+ /// Represents the ID of a descriptor, defined as the sha256 hash of
+ /// the descriptor string, checksum excluded.
+ ///
+ /// This is useful for having a fixed-length unique representation of a descriptor,
+ /// in particular, we use it to persist application state changes related to the
+ /// descriptor without having to re-write the whole descriptor each time.
+ ///
+ pub struct DescriptorId(pub sha256::Hash);
+ }

/// A trait to extend the functionality of a miniscript descriptor.
pub trait DescriptorExt {
/// Returns the minimum value (in satoshis) at which an output is broadcastable.
/// Panics if the descriptor wildcard is hardened.
fn dust_value(&self) -> u64;

+ /// Returns the descriptor id, calculated as the sha256 of the descriptor, checksum not
+ /// included.
+ fn descriptor_id(&self) -> DescriptorId;
}

impl DescriptorExt for Descriptor<DescriptorPublicKey> {
@@ -15,4 +34,11 @@ impl DescriptorExt for Descriptor<DescriptorPublicKey> {
.dust_value()
.to_sat()
}

+ fn descriptor_id(&self) -> DescriptorId {
+ let desc = self.to_string();
+ let desc_without_checksum = desc.split('#').next().expect("Must be here");
+ let descriptor_bytes = <Vec<u8>>::from(desc_without_checksum.as_bytes());
+ DescriptorId(sha256::Hash::hash(&descriptor_bytes))
+ }
}
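As a side note on the hunk above: `DescriptorId` is the sha256 of the descriptor string with its `#checksum` suffix stripped. A minimal standalone sketch of that computation, using only the `bitcoin` crate's re-exported `hashes` module (an illustration, not the actual bdk_chain code):

```rust
use bitcoin::hashes::{sha256, Hash};

// sha256 over the descriptor string with the "#checksum" fragment removed.
fn descriptor_id(descriptor: &str) -> sha256::Hash {
    let without_checksum = descriptor
        .split('#')
        .next()
        .expect("split always yields at least one item");
    sha256::Hash::hash(without_checksum.as_bytes())
}

fn main() {
    // Placeholder descriptor string; any real descriptor works the same way.
    let desc = "wpkh([fingerprint/84'/1'/0']tpub.../0/*)#q4k9w2lz";
    println!("descriptor id: {}", descriptor_id(desc));
}
```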
@@ -4,7 +4,6 @@ use alloc::vec::Vec;
use bitcoin::{Block, OutPoint, Transaction, TxOut, Txid};

use crate::{
- keychain,
tx_graph::{self, TxGraph},
Anchor, AnchorFromBlockPosition, Append, BlockId,
};
@@ -320,8 +319,9 @@ impl<A, IA: Default> From<tx_graph::ChangeSet<A>> for ChangeSet<A, IA> {
}
}

- impl<A, K> From<keychain::ChangeSet<K>> for ChangeSet<A, keychain::ChangeSet<K>> {
- fn from(indexer: keychain::ChangeSet<K>) -> Self {
+ #[cfg(feature = "miniscript")]
+ impl<A, K> From<crate::keychain::ChangeSet<K>> for ChangeSet<A, crate::keychain::ChangeSet<K>> {
+ fn from(indexer: crate::keychain::ChangeSet<K>) -> Self {
Self {
graph: Default::default(),
indexer,
@@ -10,78 +10,12 @@
//!
//! [`SpkTxOutIndex`]: crate::SpkTxOutIndex

- use crate::{collections::BTreeMap, Append};

#[cfg(feature = "miniscript")]
mod txout_index;
use bitcoin::Amount;
#[cfg(feature = "miniscript")]
pub use txout_index::*;

- /// Represents updates to the derivation index of a [`KeychainTxOutIndex`].
- /// It maps each keychain `K` to its last revealed index.
- ///
- /// It can be applied to [`KeychainTxOutIndex`] with [`apply_changeset`]. [`ChangeSet`]s are
- /// monotone in that they will never decrease the revealed derivation index.
- ///
- /// [`KeychainTxOutIndex`]: crate::keychain::KeychainTxOutIndex
- /// [`apply_changeset`]: crate::keychain::KeychainTxOutIndex::apply_changeset
- #[derive(Clone, Debug, PartialEq)]
- #[cfg_attr(
- feature = "serde",
- derive(serde::Deserialize, serde::Serialize),
- serde(
- crate = "serde_crate",
- bound(
- deserialize = "K: Ord + serde::Deserialize<'de>",
- serialize = "K: Ord + serde::Serialize"
- )
- )
- )]
- #[must_use]
- pub struct ChangeSet<K>(pub BTreeMap<K, u32>);
-
- impl<K> ChangeSet<K> {
- /// Get the inner map of the keychain to its new derivation index.
- pub fn as_inner(&self) -> &BTreeMap<K, u32> {
- &self.0
- }
- }
-
- impl<K: Ord> Append for ChangeSet<K> {
- /// Append another [`ChangeSet`] into self.
- ///
- /// If the keychain already exists, increase the index when the other's index > self's index.
- /// If the keychain did not exist, append the new keychain.
- fn append(&mut self, mut other: Self) {
- self.0.iter_mut().for_each(|(key, index)| {
- if let Some(other_index) = other.0.remove(key) {
- *index = other_index.max(*index);
- }
- });
- // We use `extend` instead of `BTreeMap::append` due to performance issues with `append`.
- // Refer to https://github.com/rust-lang/rust/issues/34666#issuecomment-675658420
- self.0.extend(other.0);
- }
-
- /// Returns whether the changeset are empty.
- fn is_empty(&self) -> bool {
- self.0.is_empty()
- }
- }
-
- impl<K> Default for ChangeSet<K> {
- fn default() -> Self {
- Self(Default::default())
- }
- }
-
- impl<K> AsRef<BTreeMap<K, u32>> for ChangeSet<K> {
- fn as_ref(&self) -> &BTreeMap<K, u32> {
- &self.0
- }
- }
-
/// Balance, differentiated into various categories.
#[derive(Debug, PartialEq, Eq, Clone, Default)]
#[cfg_attr(
@@ -137,40 +71,3 @@ impl core::ops::Add for Balance {
}
}
}
-
- #[cfg(test)]
- mod test {
- use super::*;
-
- #[test]
- fn append_keychain_derivation_indices() {
- #[derive(Ord, PartialOrd, Eq, PartialEq, Clone, Debug)]
- enum Keychain {
- One,
- Two,
- Three,
- Four,
- }
- let mut lhs_di = BTreeMap::<Keychain, u32>::default();
- let mut rhs_di = BTreeMap::<Keychain, u32>::default();
- lhs_di.insert(Keychain::One, 7);
- lhs_di.insert(Keychain::Two, 0);
- rhs_di.insert(Keychain::One, 3);
- rhs_di.insert(Keychain::Two, 5);
- lhs_di.insert(Keychain::Three, 3);
- rhs_di.insert(Keychain::Four, 4);
-
- let mut lhs = ChangeSet(lhs_di);
- let rhs = ChangeSet(rhs_di);
- lhs.append(rhs);
-
- // Exiting index doesn't update if the new index in `other` is lower than `self`.
- assert_eq!(lhs.0.get(&Keychain::One), Some(&7));
- // Existing index updates if the new index in `other` is higher than `self`.
- assert_eq!(lhs.0.get(&Keychain::Two), Some(&5));
- // Existing index is unchanged if keychain doesn't exist in `other`.
- assert_eq!(lhs.0.get(&Keychain::Three), Some(&3));
- // New keychain gets added if the keychain is in `other` but not in `self`.
- assert_eq!(lhs.0.get(&Keychain::Four), Some(&4));
- }
- }
@ -3,9 +3,9 @@ use crate::{
|
|||||||
indexed_tx_graph::Indexer,
|
indexed_tx_graph::Indexer,
|
||||||
miniscript::{Descriptor, DescriptorPublicKey},
|
miniscript::{Descriptor, DescriptorPublicKey},
|
||||||
spk_iter::BIP32_MAX_INDEX,
|
spk_iter::BIP32_MAX_INDEX,
|
||||||
SpkIterator, SpkTxOutIndex,
|
DescriptorExt, DescriptorId, SpkIterator, SpkTxOutIndex,
|
||||||
};
|
};
|
||||||
use bitcoin::{Amount, OutPoint, Script, SignedAmount, Transaction, TxOut, Txid};
|
use bitcoin::{hashes::Hash, Amount, OutPoint, Script, SignedAmount, Transaction, TxOut, Txid};
|
||||||
use core::{
|
use core::{
|
||||||
fmt::Debug,
|
fmt::Debug,
|
||||||
ops::{Bound, RangeBounds},
|
ops::{Bound, RangeBounds},
|
||||||
@ -13,6 +13,79 @@ use core::{
|
|||||||
|
|
||||||
use crate::Append;
|
use crate::Append;
|
||||||
|
|
||||||
|
/// Represents updates to the derivation index of a [`KeychainTxOutIndex`].
|
||||||
|
/// It maps each keychain `K` to a descriptor and its last revealed index.
|
||||||
|
///
|
||||||
|
/// It can be applied to [`KeychainTxOutIndex`] with [`apply_changeset`]. [`ChangeSet] are
|
||||||
|
/// monotone in that they will never decrease the revealed derivation index.
|
||||||
|
///
|
||||||
|
/// [`KeychainTxOutIndex`]: crate::keychain::KeychainTxOutIndex
|
||||||
|
/// [`apply_changeset`]: crate::keychain::KeychainTxOutIndex::apply_changeset
|
||||||
|
#[derive(Clone, Debug, PartialEq)]
|
||||||
|
#[cfg_attr(
|
||||||
|
feature = "serde",
|
||||||
|
derive(serde::Deserialize, serde::Serialize),
|
||||||
|
serde(
|
||||||
|
crate = "serde_crate",
|
||||||
|
bound(
|
||||||
|
deserialize = "K: Ord + serde::Deserialize<'de>",
|
||||||
|
serialize = "K: Ord + serde::Serialize"
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)]
|
||||||
|
#[must_use]
|
||||||
|
pub struct ChangeSet<K> {
|
||||||
|
/// Contains the keychains that have been added and their respective descriptor
|
||||||
|
pub keychains_added: BTreeMap<K, Descriptor<DescriptorPublicKey>>,
|
||||||
|
/// Contains for each descriptor_id the last revealed index of derivation
|
||||||
|
pub last_revealed: BTreeMap<DescriptorId, u32>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<K: Ord> Append for ChangeSet<K> {
|
||||||
|
/// Append another [`ChangeSet`] into self.
|
||||||
|
///
|
||||||
|
/// For each keychain in `keychains_added` in the given [`ChangeSet`]:
|
||||||
|
/// If the keychain already exist with a different descriptor, we overwrite the old descriptor.
|
||||||
|
///
|
||||||
|
/// For each `last_revealed` in the given [`ChangeSet`]:
|
||||||
|
/// If the keychain already exists, increase the index when the other's index > self's index.
|
||||||
|
fn append(&mut self, other: Self) {
|
||||||
|
// We use `extend` instead of `BTreeMap::append` due to performance issues with `append`.
|
||||||
|
// Refer to https://github.com/rust-lang/rust/issues/34666#issuecomment-675658420
|
||||||
|
self.keychains_added.extend(other.keychains_added);
|
||||||
|
|
||||||
|
// for `last_revealed`, entries of `other` will take precedence ONLY if it is greater than
|
||||||
|
// what was originally in `self`.
|
||||||
|
for (desc_id, index) in other.last_revealed {
|
||||||
|
use crate::collections::btree_map::Entry;
|
||||||
|
match self.last_revealed.entry(desc_id) {
|
||||||
|
Entry::Vacant(entry) => {
|
||||||
|
entry.insert(index);
|
||||||
|
}
|
||||||
|
Entry::Occupied(mut entry) => {
|
||||||
|
if *entry.get() < index {
|
||||||
|
entry.insert(index);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns whether the changeset are empty.
|
||||||
|
fn is_empty(&self) -> bool {
|
||||||
|
self.last_revealed.is_empty() && self.keychains_added.is_empty()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<K> Default for ChangeSet<K> {
|
||||||
|
fn default() -> Self {
|
||||||
|
Self {
|
||||||
|
last_revealed: BTreeMap::default(),
|
||||||
|
keychains_added: BTreeMap::default(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
const DEFAULT_LOOKAHEAD: u32 = 25;
|
const DEFAULT_LOOKAHEAD: u32 = 25;
|
||||||
|
|
||||||
/// [`KeychainTxOutIndex`] controls how script pubkeys are revealed for multiple keychains, and
|
/// [`KeychainTxOutIndex`] controls how script pubkeys are revealed for multiple keychains, and
|
||||||
@ -54,7 +127,7 @@ const DEFAULT_LOOKAHEAD: u32 = 25;
|
|||||||
///
|
///
|
||||||
/// # Change sets
|
/// # Change sets
|
||||||
///
|
///
|
||||||
/// Methods that can update the last revealed index will return [`super::ChangeSet`] to report
|
/// Methods that can update the last revealed index or add keychains will return [`super::ChangeSet`] to report
|
||||||
/// these changes. This can be persisted for future recovery.
|
/// these changes. This can be persisted for future recovery.
|
||||||
///
|
///
|
||||||
/// ## Synopsis
|
/// ## Synopsis
|
||||||
@ -79,14 +152,43 @@ const DEFAULT_LOOKAHEAD: u32 = 25;
|
|||||||
/// # let secp = bdk_chain::bitcoin::secp256k1::Secp256k1::signing_only();
|
/// # let secp = bdk_chain::bitcoin::secp256k1::Secp256k1::signing_only();
|
||||||
/// # let (external_descriptor,_) = Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/0/*)").unwrap();
|
/// # let (external_descriptor,_) = Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/0/*)").unwrap();
|
||||||
/// # let (internal_descriptor,_) = Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/1/*)").unwrap();
|
/// # let (internal_descriptor,_) = Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/1/*)").unwrap();
|
||||||
/// # let (descriptor_for_user_42, _) = Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/2/*)").unwrap();
|
/// # let (descriptor_42, _) = Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/2/*)").unwrap();
|
||||||
/// txout_index.add_keychain(MyKeychain::External, external_descriptor);
|
/// let _ = txout_index.insert_descriptor(MyKeychain::External, external_descriptor);
|
||||||
/// txout_index.add_keychain(MyKeychain::Internal, internal_descriptor);
|
/// let _ = txout_index.insert_descriptor(MyKeychain::Internal, internal_descriptor);
|
||||||
/// txout_index.add_keychain(MyKeychain::MyAppUser { user_id: 42 }, descriptor_for_user_42);
|
/// let _ = txout_index.insert_descriptor(MyKeychain::MyAppUser { user_id: 42 }, descriptor_42);
|
||||||
///
|
///
|
||||||
/// let new_spk_for_user = txout_index.reveal_next_spk(&MyKeychain::MyAppUser{ user_id: 42 });
|
/// let new_spk_for_user = txout_index.reveal_next_spk(&MyKeychain::MyAppUser{ user_id: 42 });
|
||||||
/// ```
|
/// ```
|
||||||
///
|
///
|
||||||
|
/// # Non-recommend keychain to descriptor assignments
|
||||||
|
///
|
||||||
|
/// A keychain (`K`) is used to identify a descriptor. However, the following keychain to descriptor
|
||||||
|
/// arrangements result in behavior that is harder to reason about and is not recommended.
|
||||||
|
///
|
||||||
|
/// ## Multiple keychains identifying the same descriptor
|
||||||
|
///
|
||||||
|
/// Although a single keychain variant can only identify a single descriptor, multiple keychain
|
||||||
|
/// variants can identify the same descriptor.
|
||||||
|
///
|
||||||
|
/// If multiple keychains identify the same descriptor:
|
||||||
|
/// 1. Methods that take in a keychain (such as [`reveal_next_spk`]) will work normally when any
|
||||||
|
/// keychain (that identifies that descriptor) is passed in.
|
||||||
|
/// 2. Methods that return data which associates with a descriptor (such as [`outpoints`],
|
||||||
|
/// [`txouts`], [`unused_spks`], etc.) the method will return the highest-ranked keychain variant
|
||||||
|
/// that identifies the descriptor. Rank is determined by the [`Ord`] implementation of the keychain
|
||||||
|
/// type.
|
||||||
|
///
|
||||||
|
/// This arrangement is not recommended since some methods will return a single keychain variant
|
||||||
|
/// even though multiple keychain variants identify the same descriptor.
|
||||||
|
///
|
||||||
|
/// ## Reassigning the descriptor of a single keychain
|
||||||
|
///
|
||||||
|
/// Descriptors added to [`KeychainTxOutIndex`] are never removed. However, a keychain that
|
||||||
|
/// identifies a descriptor can be reassigned to identify a different descriptor. This may result in
|
||||||
|
/// a situation where a descriptor has no associated keychain(s), and relevant [`TxOut`]s,
|
||||||
|
/// [`OutPoint`]s and [`Script`]s (of that descriptor) will not be return by [`KeychainTxOutIndex`].
|
||||||
|
/// Therefore, reassigning the descriptor of a single keychain is not recommended.
|
||||||
|
///
|
||||||
/// [`Ord`]: core::cmp::Ord
|
/// [`Ord`]: core::cmp::Ord
|
||||||
/// [`SpkTxOutIndex`]: crate::spk_txout_index::SpkTxOutIndex
|
/// [`SpkTxOutIndex`]: crate::spk_txout_index::SpkTxOutIndex
|
||||||
/// [`Descriptor`]: crate::miniscript::Descriptor
|
/// [`Descriptor`]: crate::miniscript::Descriptor
|
||||||
@@ -99,13 +201,27 @@ const DEFAULT_LOOKAHEAD: u32 = 25;
 /// [`new`]: KeychainTxOutIndex::new
 /// [`unbounded_spk_iter`]: KeychainTxOutIndex::unbounded_spk_iter
 /// [`all_unbounded_spk_iters`]: KeychainTxOutIndex::all_unbounded_spk_iters
+/// [`outpoints`]: KeychainTxOutIndex::outpoints
+/// [`txouts`]: KeychainTxOutIndex::txouts
+/// [`unused_spks`]: KeychainTxOutIndex::unused_spks
 #[derive(Clone, Debug)]
 pub struct KeychainTxOutIndex<K> {
-    inner: SpkTxOutIndex<(K, u32)>,
-    // descriptors of each keychain
-    keychains: BTreeMap<K, Descriptor<DescriptorPublicKey>>,
+    inner: SpkTxOutIndex<(DescriptorId, u32)>,
+    // keychain -> (descriptor, descriptor id) map
+    keychains_to_descriptors: BTreeMap<K, (DescriptorId, Descriptor<DescriptorPublicKey>)>,
+    // descriptor id -> keychain set
+    // Because different keychains can have the same descriptor, we rank keychains by `Ord` so
+    // that the first keychain variant (according to `Ord`) has the highest rank. When associated
+    // data (such as spks, outpoints) are returned with a keychain, we return the highest-ranked
+    // keychain with it.
+    descriptor_ids_to_keychain_set: HashMap<DescriptorId, BTreeSet<K>>,
+    // descriptor_id -> descriptor map
+    // This is a "monotone" map, meaning that its size keeps growing, i.e., we never delete
+    // descriptors from it. This is useful for revealing spks for descriptors that don't have
+    // keychains associated.
+    descriptor_ids_to_descriptors: BTreeMap<DescriptorId, Descriptor<DescriptorPublicKey>>,
     // last revealed indexes
-    last_revealed: BTreeMap<K, u32>,
+    last_revealed: BTreeMap<DescriptorId, u32>,
     // lookahead settings for each keychain
     lookahead: u32,
 }
@@ -121,7 +237,13 @@ impl<K: Clone + Ord + Debug> Indexer for KeychainTxOutIndex<K> {
 
     fn index_txout(&mut self, outpoint: OutPoint, txout: &TxOut) -> Self::ChangeSet {
         match self.inner.scan_txout(outpoint, txout).cloned() {
-            Some((keychain, index)) => self.reveal_to_target(&keychain, index).1,
+            Some((descriptor_id, index)) => {
+                // We want to reveal spks for descriptors that aren't tracked by any keychain, and
+                // so we call reveal with descriptor_id
+                let (_, changeset) = self.reveal_to_target_with_id(descriptor_id, index)
+                    .expect("descriptors are added in a monotone manner, there cannot be a descriptor id with no corresponding descriptor");
+                changeset
+            }
             None => super::ChangeSet::default(),
         }
     }
@@ -135,7 +257,13 @@ impl<K: Clone + Ord + Debug> Indexer for KeychainTxOutIndex<K> {
     }
 
     fn initial_changeset(&self) -> Self::ChangeSet {
-        super::ChangeSet(self.last_revealed.clone())
+        super::ChangeSet {
+            keychains_added: self
+                .keychains()
+                .map(|(k, v)| (k.clone(), v.clone()))
+                .collect(),
+            last_revealed: self.last_revealed.clone(),
+        }
     }
 
     fn apply_changeset(&mut self, changeset: Self::ChangeSet) {
@@ -161,7 +289,9 @@ impl<K> KeychainTxOutIndex<K> {
     pub fn new(lookahead: u32) -> Self {
         Self {
             inner: SpkTxOutIndex::default(),
-            keychains: BTreeMap::new(),
+            keychains_to_descriptors: BTreeMap::new(),
+            descriptor_ids_to_keychain_set: HashMap::new(),
+            descriptor_ids_to_descriptors: BTreeMap::new(),
             last_revealed: BTreeMap::new(),
             lookahead,
         }
@@ -170,26 +300,37 @@ impl<K> KeychainTxOutIndex<K> {
 
 /// Methods that are *re-exposed* from the internal [`SpkTxOutIndex`].
 impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
+    /// Get the highest-ranked keychain that is currently associated with the given `desc_id`.
+    fn keychain_of_desc_id(&self, desc_id: &DescriptorId) -> Option<&K> {
+        let keychains = self.descriptor_ids_to_keychain_set.get(desc_id)?;
+        keychains.iter().next()
+    }
+
     /// Return a reference to the internal [`SpkTxOutIndex`].
     ///
     /// **WARNING:** The internal index will contain lookahead spks. Refer to
     /// [struct-level docs](KeychainTxOutIndex) for more about `lookahead`.
-    pub fn inner(&self) -> &SpkTxOutIndex<(K, u32)> {
+    pub fn inner(&self) -> &SpkTxOutIndex<(DescriptorId, u32)> {
         &self.inner
     }
 
-    /// Get a reference to the set of indexed outpoints.
-    pub fn outpoints(&self) -> &BTreeSet<((K, u32), OutPoint)> {
-        self.inner.outpoints()
+    /// Get the set of indexed outpoints, corresponding to tracked keychains.
+    pub fn outpoints(&self) -> impl DoubleEndedIterator<Item = ((K, u32), OutPoint)> + '_ {
+        self.inner
+            .outpoints()
+            .iter()
+            .filter_map(|((desc_id, index), op)| {
+                let keychain = self.keychain_of_desc_id(desc_id)?;
+                Some(((keychain.clone(), *index), *op))
+            })
     }
 
     /// Iterate over known txouts that spend to tracked script pubkeys.
-    pub fn txouts(
-        &self,
-    ) -> impl DoubleEndedIterator<Item = (K, u32, OutPoint, &TxOut)> + ExactSizeIterator {
-        self.inner
-            .txouts()
-            .map(|((k, i), op, txo)| (k.clone(), *i, op, txo))
+    pub fn txouts(&self) -> impl DoubleEndedIterator<Item = (K, u32, OutPoint, &TxOut)> + '_ {
+        self.inner.txouts().filter_map(|((desc_id, i), op, txo)| {
+            let keychain = self.keychain_of_desc_id(desc_id)?;
+            Some((keychain.clone(), *i, op, txo))
+        })
     }
 
     /// Finds all txouts on a transaction that has previously been scanned and indexed.
@@ -199,32 +340,39 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
     ) -> impl DoubleEndedIterator<Item = (K, u32, OutPoint, &TxOut)> {
         self.inner
             .txouts_in_tx(txid)
-            .map(|((k, i), op, txo)| (k.clone(), *i, op, txo))
+            .filter_map(|((desc_id, i), op, txo)| {
+                let keychain = self.keychain_of_desc_id(desc_id)?;
+                Some((keychain.clone(), *i, op, txo))
+            })
     }
 
-    /// Return the [`TxOut`] of `outpoint` if it has been indexed.
+    /// Return the [`TxOut`] of `outpoint` if it has been indexed, and if it corresponds to a
+    /// tracked keychain.
     ///
     /// The associated keychain and keychain index of the txout's spk is also returned.
     ///
     /// This calls [`SpkTxOutIndex::txout`] internally.
     pub fn txout(&self, outpoint: OutPoint) -> Option<(K, u32, &TxOut)> {
-        self.inner
-            .txout(outpoint)
-            .map(|((k, i), txo)| (k.clone(), *i, txo))
+        let ((descriptor_id, index), txo) = self.inner.txout(outpoint)?;
+        let keychain = self.keychain_of_desc_id(descriptor_id)?;
+        Some((keychain.clone(), *index, txo))
     }
 
     /// Return the script that exists under the given `keychain`'s `index`.
     ///
     /// This calls [`SpkTxOutIndex::spk_at_index`] internally.
     pub fn spk_at_index(&self, keychain: K, index: u32) -> Option<&Script> {
-        self.inner.spk_at_index(&(keychain, index))
+        let descriptor_id = self.keychains_to_descriptors.get(&keychain)?.0;
+        self.inner.spk_at_index(&(descriptor_id, index))
     }
 
     /// Returns the keychain and keychain index associated with the spk.
     ///
     /// This calls [`SpkTxOutIndex::index_of_spk`] internally.
     pub fn index_of_spk(&self, script: &Script) -> Option<(K, u32)> {
-        self.inner.index_of_spk(script).cloned()
+        let (desc_id, last_index) = self.inner.index_of_spk(script)?;
+        let keychain = self.keychain_of_desc_id(desc_id)?;
+        Some((keychain.clone(), *last_index))
     }
 
     /// Returns whether the spk under the `keychain`'s `index` has been used.
@@ -234,7 +382,11 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
     ///
     /// This calls [`SpkTxOutIndex::is_used`] internally.
     pub fn is_used(&self, keychain: K, index: u32) -> bool {
-        self.inner.is_used(&(keychain, index))
+        let descriptor_id = self.keychains_to_descriptors.get(&keychain).map(|k| k.0);
+        match descriptor_id {
+            Some(descriptor_id) => self.inner.is_used(&(descriptor_id, index)),
+            None => false,
+        }
     }
 
     /// Marks the script pubkey at `index` as used even though the tracker hasn't seen an output
@@ -242,7 +394,9 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
     ///
     /// This only has an effect when the `index` had been added to `self` already and was unused.
     ///
-    /// Returns whether the `index` was initially present as `unused`.
+    /// Returns whether the spk under the given `keychain` and `index` is successfully
+    /// marked as used. Returns false either when there is no descriptor under the given
+    /// keychain, or when the spk is already marked as used.
     ///
     /// This is useful when you want to reserve a script pubkey for something but don't want to add
     /// the transaction output using it to the index yet. Other callers will consider `index` on
@@ -252,7 +406,11 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
     ///
     /// [`unmark_used`]: Self::unmark_used
     pub fn mark_used(&mut self, keychain: K, index: u32) -> bool {
-        self.inner.mark_used(&(keychain, index))
+        let descriptor_id = self.keychains_to_descriptors.get(&keychain).map(|k| k.0);
+        match descriptor_id {
+            Some(descriptor_id) => self.inner.mark_used(&(descriptor_id, index)),
+            None => false,
+        }
     }
 
     /// Undoes the effect of [`mark_used`]. Returns whether the `index` is inserted back into
@@ -265,7 +423,11 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
     ///
     /// [`mark_used`]: Self::mark_used
     pub fn unmark_used(&mut self, keychain: K, index: u32) -> bool {
-        self.inner.unmark_used(&(keychain, index))
+        let descriptor_id = self.keychains_to_descriptors.get(&keychain).map(|k| k.0);
+        match descriptor_id {
+            Some(descriptor_id) => self.inner.unmark_used(&(descriptor_id, index)),
+            None => false,
+        }
     }
 
     /// Computes the total value transfer effect `tx` has on the script pubkeys belonging to the
@@ -279,7 +441,7 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
         range: impl RangeBounds<K>,
     ) -> (Amount, Amount) {
         self.inner
-            .sent_and_received(tx, Self::map_to_inner_bounds(range))
+            .sent_and_received(tx, self.map_to_inner_bounds(range))
     }
 
     /// Computes the net value that this transaction gives to the script pubkeys in the index and
@@ -290,34 +452,76 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
     ///
     /// [`sent_and_received`]: Self::sent_and_received
     pub fn net_value(&self, tx: &Transaction, range: impl RangeBounds<K>) -> SignedAmount {
-        self.inner.net_value(tx, Self::map_to_inner_bounds(range))
+        self.inner.net_value(tx, self.map_to_inner_bounds(range))
     }
 }
 
 impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
-    /// Return a reference to the internal map of keychain to descriptors.
-    pub fn keychains(&self) -> &BTreeMap<K, Descriptor<DescriptorPublicKey>> {
-        &self.keychains
+    /// Return the map of the keychain to descriptors.
+    pub fn keychains(
+        &self,
+    ) -> impl DoubleEndedIterator<Item = (&K, &Descriptor<DescriptorPublicKey>)> + ExactSizeIterator + '_
+    {
+        self.keychains_to_descriptors
+            .iter()
+            .map(|(k, (_, d))| (k, d))
     }
 
-    /// Add a keychain to the tracker's `txout_index` with a descriptor to derive addresses.
+    /// Insert a descriptor with a keychain associated to it.
     ///
-    /// Adding a keychain means you will be able to derive new script pubkeys under that keychain
+    /// Adding a descriptor means you will be able to derive new script pubkeys under it
     /// and the txout index will discover transaction outputs with those script pubkeys.
     ///
-    /// # Panics
-    ///
-    /// This will panic if a different `descriptor` is introduced to the same `keychain`.
-    pub fn add_keychain(&mut self, keychain: K, descriptor: Descriptor<DescriptorPublicKey>) {
-        let old_descriptor = &*self
-            .keychains
-            .entry(keychain.clone())
-            .or_insert_with(|| descriptor.clone());
-        assert_eq!(
-            &descriptor, old_descriptor,
-            "keychain already contains a different descriptor"
-        );
+    /// When trying to add a keychain that already existed under a different descriptor, or a descriptor
+    /// that already existed with a different keychain, the old keychain (or descriptor) will be
+    /// overwritten.
+    pub fn insert_descriptor(
+        &mut self,
+        keychain: K,
+        descriptor: Descriptor<DescriptorPublicKey>,
+    ) -> super::ChangeSet<K> {
+        let mut changeset = super::ChangeSet::<K>::default();
+        let desc_id = descriptor.descriptor_id();
+
+        let old_desc = self
+            .keychains_to_descriptors
+            .insert(keychain.clone(), (desc_id, descriptor.clone()));
+
+        if let Some((old_desc_id, _)) = old_desc {
+            // nothing needs to be done if caller reinserted the same descriptor under the same
+            // keychain
+            if old_desc_id == desc_id {
+                return changeset;
+            }
+            // we should remove old descriptor that is associated with this keychain as the index
+            // is designed to track one descriptor per keychain (however different keychains can
+            // share the same descriptor)
+            let _is_keychain_removed = self
+                .descriptor_ids_to_keychain_set
+                .get_mut(&old_desc_id)
+                .expect("we must have already inserted this descriptor")
+                .remove(&keychain);
+            debug_assert!(_is_keychain_removed);
+        }
+
+        self.descriptor_ids_to_keychain_set
+            .entry(desc_id)
+            .or_default()
+            .insert(keychain.clone());
+        self.descriptor_ids_to_descriptors
+            .insert(desc_id, descriptor.clone());
         self.replenish_lookahead(&keychain, self.lookahead);
+
+        changeset
+            .keychains_added
+            .insert(keychain.clone(), descriptor);
+        changeset
+    }
+
+    /// Gets the descriptor associated with the keychain. Returns `None` if the keychain doesn't
+    /// have a descriptor associated with it.
+    pub fn get_descriptor(&self, keychain: &K) -> Option<&Descriptor<DescriptorPublicKey>> {
+        self.keychains_to_descriptors.get(keychain).map(|(_, d)| d)
     }
 
     /// Get the lookahead setting.
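A minimal sketch of how the new `insert_descriptor`/`get_descriptor` pair behaves, assuming `desc_a` and `desc_b` are two distinct valid descriptors; the `demo` wrapper and the "wallet" keychain string are hypothetical:

    use bdk_chain::keychain::KeychainTxOutIndex;
    use bdk_chain::miniscript::{Descriptor, DescriptorPublicKey};

    fn demo(desc_a: Descriptor<DescriptorPublicKey>, desc_b: Descriptor<DescriptorPublicKey>) {
        let mut index = KeychainTxOutIndex::<&'static str>::new(10);

        // Inserting reports the addition through the returned changeset.
        let changeset = index.insert_descriptor("wallet", desc_a.clone());
        assert_eq!(changeset.keychains_added.get(&"wallet"), Some(&desc_a));

        // Reassigning the keychain overwrites the old association (not recommended; see the
        // struct-level docs), while the old descriptor itself is never removed from the index.
        let _ = index.insert_descriptor("wallet", desc_b.clone());
        assert_eq!(index.get_descriptor(&"wallet"), Some(&desc_b));
    }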
@@ -333,63 +537,60 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
     ///
     /// This does not change the global `lookahead` setting.
     pub fn lookahead_to_target(&mut self, keychain: &K, target_index: u32) {
-        let (next_index, _) = self.next_index(keychain);
-
-        let temp_lookahead = (target_index + 1)
-            .checked_sub(next_index)
-            .filter(|&index| index > 0);
-
-        if let Some(temp_lookahead) = temp_lookahead {
-            self.replenish_lookahead(keychain, temp_lookahead);
-        }
+        if let Some((next_index, _)) = self.next_index(keychain) {
+            let temp_lookahead = (target_index + 1)
+                .checked_sub(next_index)
+                .filter(|&index| index > 0);
+
+            if let Some(temp_lookahead) = temp_lookahead {
+                self.replenish_lookahead(keychain, temp_lookahead);
+            }
+        }
     }
 
     fn replenish_lookahead(&mut self, keychain: &K, lookahead: u32) {
-        let descriptor = self.keychains.get(keychain).expect("keychain must exist");
-        let next_store_index = self.next_store_index(keychain);
-        let next_reveal_index = self.last_revealed.get(keychain).map_or(0, |v| *v + 1);
-
-        for (new_index, new_spk) in
-            SpkIterator::new_with_range(descriptor, next_store_index..next_reveal_index + lookahead)
-        {
-            let _inserted = self
-                .inner
-                .insert_spk((keychain.clone(), new_index), new_spk);
-            debug_assert!(_inserted, "replenish lookahead: must not have existing spk: keychain={:?}, lookahead={}, next_store_index={}, next_reveal_index={}", keychain, lookahead, next_store_index, next_reveal_index);
-        }
+        let descriptor_opt = self.keychains_to_descriptors.get(keychain).cloned();
+        if let Some((descriptor_id, descriptor)) = descriptor_opt {
+            let next_store_index = self.next_store_index(descriptor_id);
+            let next_reveal_index = self.last_revealed.get(&descriptor_id).map_or(0, |v| *v + 1);
+
+            for (new_index, new_spk) in SpkIterator::new_with_range(
+                descriptor,
+                next_store_index..next_reveal_index + lookahead,
+            ) {
+                let _inserted = self.inner.insert_spk((descriptor_id, new_index), new_spk);
+                debug_assert!(_inserted, "replenish lookahead: must not have existing spk: keychain={:?}, lookahead={}, next_store_index={}, next_reveal_index={}", keychain, lookahead, next_store_index, next_reveal_index);
+            }
+        }
     }
 
-    fn next_store_index(&self, keychain: &K) -> u32 {
+    fn next_store_index(&self, descriptor_id: DescriptorId) -> u32 {
         self.inner()
             .all_spks()
-            // This range is filtering out the spks with a keychain different than
-            // `keychain`. We don't use filter here as range is more optimized.
-            .range((keychain.clone(), u32::MIN)..(keychain.clone(), u32::MAX))
+            // This range is keeping only the spks with descriptor_id equal to
+            // `descriptor_id`. We don't use filter here as range is more optimized.
+            .range((descriptor_id, u32::MIN)..(descriptor_id, u32::MAX))
             .last()
             .map_or(0, |((_, index), _)| *index + 1)
     }
 
-    /// Get an unbounded spk iterator over a given `keychain`.
-    ///
-    /// # Panics
-    ///
-    /// This will panic if the given `keychain`'s descriptor does not exist.
-    pub fn unbounded_spk_iter(&self, keychain: &K) -> SpkIterator<Descriptor<DescriptorPublicKey>> {
-        SpkIterator::new(
-            self.keychains
-                .get(keychain)
-                .expect("keychain does not exist")
-                .clone(),
-        )
+    /// Get an unbounded spk iterator over a given `keychain`. Returns `None` if the provided
+    /// keychain doesn't exist
+    pub fn unbounded_spk_iter(
+        &self,
+        keychain: &K,
+    ) -> Option<SpkIterator<Descriptor<DescriptorPublicKey>>> {
+        let descriptor = self.keychains_to_descriptors.get(keychain)?.1.clone();
+        Some(SpkIterator::new(descriptor))
     }
 
     /// Get unbounded spk iterators for all keychains.
     pub fn all_unbounded_spk_iters(
         &self,
     ) -> BTreeMap<K, SpkIterator<Descriptor<DescriptorPublicKey>>> {
-        self.keychains
+        self.keychains_to_descriptors
             .iter()
-            .map(|(k, descriptor)| (k.clone(), SpkIterator::new(descriptor.clone())))
+            .map(|(k, (_, descriptor))| (k.clone(), SpkIterator::new(descriptor.clone())))
             .collect()
     }
 
@@ -398,18 +599,27 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
         &self,
         range: impl RangeBounds<K>,
     ) -> impl DoubleEndedIterator<Item = (&K, u32, &Script)> + Clone {
-        self.keychains.range(range).flat_map(|(keychain, _)| {
-            let start = Bound::Included((keychain.clone(), u32::MIN));
-            let end = match self.last_revealed.get(keychain) {
-                Some(last_revealed) => Bound::Included((keychain.clone(), *last_revealed)),
-                None => Bound::Excluded((keychain.clone(), u32::MIN)),
-            };
-
-            self.inner
-                .all_spks()
-                .range((start, end))
-                .map(|((keychain, i), spk)| (keychain, *i, spk.as_script()))
-        })
+        self.keychains_to_descriptors
+            .range(range)
+            .flat_map(|(_, (descriptor_id, _))| {
+                let start = Bound::Included((*descriptor_id, u32::MIN));
+                let end = match self.last_revealed.get(descriptor_id) {
+                    Some(last_revealed) => Bound::Included((*descriptor_id, *last_revealed)),
+                    None => Bound::Excluded((*descriptor_id, u32::MIN)),
+                };
+
+                self.inner
+                    .all_spks()
+                    .range((start, end))
+                    .map(|((descriptor_id, i), spk)| {
+                        (
+                            self.keychain_of_desc_id(descriptor_id)
+                                .expect("must have keychain"),
+                            *i,
+                            spk.as_script(),
+                        )
+                    })
+            })
     }
 
     /// Iterate over revealed spks of the given `keychain`.
@@ -423,20 +633,29 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
 
     /// Iterate over revealed, but unused, spks of all keychains.
     pub fn unused_spks(&self) -> impl DoubleEndedIterator<Item = (K, u32, &Script)> + Clone {
-        self.keychains.keys().flat_map(|keychain| {
+        self.keychains_to_descriptors.keys().flat_map(|keychain| {
             self.unused_keychain_spks(keychain)
                 .map(|(i, spk)| (keychain.clone(), i, spk))
         })
     }
 
     /// Iterate over revealed, but unused, spks of the given `keychain`.
+    /// Returns an empty iterator if the provided keychain doesn't exist.
     pub fn unused_keychain_spks(
         &self,
         keychain: &K,
     ) -> impl DoubleEndedIterator<Item = (u32, &Script)> + Clone {
-        let next_i = self.last_revealed.get(keychain).map_or(0, |&i| i + 1);
+        let desc_id = self
+            .keychains_to_descriptors
+            .get(keychain)
+            .map(|(desc_id, _)| *desc_id)
+            // We use a dummy desc id if we can't find the real one in our map. In this way,
+            // if this method was to be called with a non-existent keychain, we would return an
+            // empty iterator
+            .unwrap_or_else(|| DescriptorId::from_byte_array([0; 32]));
+        let next_i = self.last_revealed.get(&desc_id).map_or(0, |&i| i + 1);
         self.inner
-            .unused_spks((keychain.clone(), u32::MIN)..(keychain.clone(), next_i))
+            .unused_spks((desc_id, u32::MIN)..(desc_id, next_i))
             .map(|((_, i), spk)| (*i, spk))
     }
 
@@ -451,17 +670,15 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
     ///
     /// Not checking the second field of the tuple may result in address reuse.
     ///
-    /// # Panics
-    ///
-    /// Panics if the `keychain` does not exist.
-    pub fn next_index(&self, keychain: &K) -> (u32, bool) {
-        let descriptor = self.keychains.get(keychain).expect("keychain must exist");
-        let last_index = self.last_revealed.get(keychain).cloned();
+    /// Returns None if the provided `keychain` doesn't exist.
+    pub fn next_index(&self, keychain: &K) -> Option<(u32, bool)> {
+        let (descriptor_id, descriptor) = self.keychains_to_descriptors.get(keychain)?;
+        let last_index = self.last_revealed.get(descriptor_id).cloned();
 
         // we can only get the next index if the wildcard exists.
         let has_wildcard = descriptor.has_wildcard();
 
-        match last_index {
+        Some(match last_index {
             // if there is no index, next_index is always 0.
             None => (0, true),
             // descriptors without wildcards can only have one index.
@@ -473,19 +690,27 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
             Some(index) if index == BIP32_MAX_INDEX => (index, false),
             // get the next derivation index.
             Some(index) => (index + 1, true),
-        }
+        })
     }
 
     /// Get the last derivation index that is revealed for each keychain.
     ///
     /// Keychains with no revealed indices will not be included in the returned [`BTreeMap`].
-    pub fn last_revealed_indices(&self) -> &BTreeMap<K, u32> {
-        &self.last_revealed
+    pub fn last_revealed_indices(&self) -> BTreeMap<K, u32> {
+        self.last_revealed
+            .iter()
+            .filter_map(|(desc_id, index)| {
+                let keychain = self.keychain_of_desc_id(desc_id)?;
+                Some((keychain.clone(), *index))
+            })
+            .collect()
     }
 
-    /// Get the last derivation index revealed for `keychain`.
+    /// Get the last derivation index revealed for `keychain`. Returns None if the keychain doesn't
+    /// exist, or if the keychain doesn't have any revealed scripts.
     pub fn last_revealed_index(&self, keychain: &K) -> Option<u32> {
-        self.last_revealed.get(keychain).cloned()
+        let descriptor_id = self.keychains_to_descriptors.get(keychain)?.0;
+        self.last_revealed.get(&descriptor_id).cloned()
     }
 
     /// Convenience method to call [`Self::reveal_to_target`] on multiple keychains.
@@ -500,16 +725,77 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
         let mut spks = BTreeMap::new();
 
         for (keychain, &index) in keychains {
-            let (new_spks, new_changeset) = self.reveal_to_target(keychain, index);
-            if !new_changeset.is_empty() {
-                spks.insert(keychain.clone(), new_spks);
-                changeset.append(new_changeset.clone());
+            if let Some((new_spks, new_changeset)) = self.reveal_to_target(keychain, index) {
+                if !new_changeset.is_empty() {
+                    spks.insert(keychain.clone(), new_spks);
+                    changeset.append(new_changeset.clone());
+                }
             }
         }
 
         (spks, changeset)
     }
+
+    /// Convenience method to call `reveal_to_target` with a descriptor_id instead of a keychain.
+    /// This is useful for revealing spks of descriptors for which we don't have a keychain
+    /// tracked.
+    /// Refer to the `reveal_to_target` documentation for more.
+    ///
+    /// Returns None if the provided `descriptor_id` doesn't correspond to a tracked descriptor.
+    fn reveal_to_target_with_id(
+        &mut self,
+        descriptor_id: DescriptorId,
+        target_index: u32,
+    ) -> Option<(
+        SpkIterator<Descriptor<DescriptorPublicKey>>,
+        super::ChangeSet<K>,
+    )> {
+        let descriptor = self
+            .descriptor_ids_to_descriptors
+            .get(&descriptor_id)?
+            .clone();
+        let has_wildcard = descriptor.has_wildcard();
+
+        let target_index = if has_wildcard { target_index } else { 0 };
+        let next_reveal_index = self
+            .last_revealed
+            .get(&descriptor_id)
+            .map_or(0, |index| *index + 1);
+
+        debug_assert!(next_reveal_index + self.lookahead >= self.next_store_index(descriptor_id));
+
+        // If the target_index is already revealed, we are done
+        if next_reveal_index > target_index {
+            return Some((
+                SpkIterator::new_with_range(descriptor, next_reveal_index..next_reveal_index),
+                super::ChangeSet::default(),
+            ));
+        }
+
+        // We range over the indexes that are not stored and insert their spks in the index.
+        // Indexes from next_reveal_index to next_reveal_index + lookahead are already stored (due
+        // to lookahead), so we only range from next_reveal_index + lookahead to target + lookahead
+        let range = next_reveal_index + self.lookahead..=target_index + self.lookahead;
+        for (new_index, new_spk) in SpkIterator::new_with_range(descriptor.clone(), range) {
+            let _inserted = self.inner.insert_spk((descriptor_id, new_index), new_spk);
+            debug_assert!(_inserted, "must not have existing spk");
+            debug_assert!(
+                has_wildcard || new_index == 0,
+                "non-wildcard descriptors must not iterate past index 0"
+            );
+        }
+
+        let _old_index = self.last_revealed.insert(descriptor_id, target_index);
+        debug_assert!(_old_index < Some(target_index));
+        Some((
+            SpkIterator::new_with_range(descriptor, next_reveal_index..target_index + 1),
+            super::ChangeSet {
+                keychains_added: BTreeMap::new(),
+                last_revealed: core::iter::once((descriptor_id, target_index)).collect(),
+            },
+        ))
+    }
 
     /// Reveals script pubkeys of the `keychain`'s descriptor **up to and including** the
     /// `target_index`.
     ///
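A hedged sketch of the Option-returning `reveal_to_target` that delegates to the helper above; `desc` is assumed to be a valid wildcard descriptor, and the `demo_reveal` wrapper and keychain names are hypothetical:

    use bdk_chain::keychain::KeychainTxOutIndex;
    use bdk_chain::miniscript::{Descriptor, DescriptorPublicKey};
    use bdk_chain::DescriptorExt;

    fn demo_reveal(desc: Descriptor<DescriptorPublicKey>) {
        let mut index = KeychainTxOutIndex::<&'static str>::new(3);
        let _ = index.insert_descriptor("external", desc.clone());

        // Reveal script pubkeys up to and including derivation index 5.
        let (revealed, changeset) = index
            .reveal_to_target(&"external", 5)
            .expect("keychain was just inserted");
        assert_eq!(revealed.map(|(i, _)| i).collect::<Vec<_>>(), vec![0, 1, 2, 3, 4, 5]);
        assert_eq!(changeset.last_revealed.get(&desc.descriptor_id()), Some(&5));

        // Unknown keychains now yield None instead of panicking.
        assert!(index.reveal_to_target(&"internal", 5).is_none());
    }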
@@ -521,84 +807,46 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
     /// [`super::ChangeSet`], which reports updates to the latest revealed index. If no new script
     /// pubkeys are revealed, then both of these will be empty.
     ///
-    /// # Panics
-    ///
-    /// Panics if `keychain` does not exist.
+    /// Returns None if the provided `keychain` doesn't exist.
     pub fn reveal_to_target(
         &mut self,
         keychain: &K,
         target_index: u32,
-    ) -> (
+    ) -> Option<(
         SpkIterator<Descriptor<DescriptorPublicKey>>,
         super::ChangeSet<K>,
-    ) {
-        let descriptor = self.keychains.get(keychain).expect("keychain must exist");
-        let has_wildcard = descriptor.has_wildcard();
-
-        let target_index = if has_wildcard { target_index } else { 0 };
-        let next_reveal_index = self
-            .last_revealed
-            .get(keychain)
-            .map_or(0, |index| *index + 1);
-
-        debug_assert!(next_reveal_index + self.lookahead >= self.next_store_index(keychain));
-
-        // If the target_index is already revealed, we are done
-        if next_reveal_index > target_index {
-            return (
-                SpkIterator::new_with_range(
-                    descriptor.clone(),
-                    next_reveal_index..next_reveal_index,
-                ),
-                super::ChangeSet::default(),
-            );
-        }
-
-        // We range over the indexes that are not stored and insert their spks in the index.
-        // Indexes from next_reveal_index to next_reveal_index + lookahead are already stored (due
-        // to lookahead), so we only range from next_reveal_index + lookahead to target + lookahead
-        let range = next_reveal_index + self.lookahead..=target_index + self.lookahead;
-        for (new_index, new_spk) in SpkIterator::new_with_range(descriptor, range) {
-            let _inserted = self
-                .inner
-                .insert_spk((keychain.clone(), new_index), new_spk);
-            debug_assert!(_inserted, "must not have existing spk");
-            debug_assert!(
-                has_wildcard || new_index == 0,
-                "non-wildcard descriptors must not iterate past index 0"
-            );
-        }
-
-        let _old_index = self.last_revealed.insert(keychain.clone(), target_index);
-        debug_assert!(_old_index < Some(target_index));
-        (
-            SpkIterator::new_with_range(descriptor.clone(), next_reveal_index..target_index + 1),
-            super::ChangeSet(core::iter::once((keychain.clone(), target_index)).collect()),
-        )
+    )> {
+        let descriptor_id = self.keychains_to_descriptors.get(keychain)?.0;
+        self.reveal_to_target_with_id(descriptor_id, target_index)
     }
 
     /// Attempts to reveal the next script pubkey for `keychain`.
     ///
     /// Returns the derivation index of the revealed script pubkey, the revealed script pubkey and a
     /// [`super::ChangeSet`] which represents changes in the last revealed index (if any).
+    /// Returns None if the provided keychain doesn't exist.
     ///
     /// When a new script cannot be revealed, we return the last revealed script and an empty
     /// [`super::ChangeSet`]. There are two scenarios when a new script pubkey cannot be derived:
     ///
     /// 1. The descriptor has no wildcard and already has one script revealed.
     /// 2. The descriptor has already revealed scripts up to the numeric bound.
-    ///
-    /// # Panics
-    ///
-    /// Panics if the `keychain` does not exist.
-    pub fn reveal_next_spk(&mut self, keychain: &K) -> ((u32, &Script), super::ChangeSet<K>) {
-        let (next_index, _) = self.next_index(keychain);
-        let changeset = self.reveal_to_target(keychain, next_index).1;
+    /// 3. There is no descriptor associated with the given keychain.
+    pub fn reveal_next_spk(
+        &mut self,
+        keychain: &K,
+    ) -> Option<((u32, &Script), super::ChangeSet<K>)> {
+        let descriptor_id = self.keychains_to_descriptors.get(keychain)?.0;
+        let (next_index, _) = self.next_index(keychain).expect("We know keychain exists");
+        let changeset = self
+            .reveal_to_target(keychain, next_index)
+            .expect("We know keychain exists")
+            .1;
         let script = self
             .inner
-            .spk_at_index(&(keychain.clone(), next_index))
+            .spk_at_index(&(descriptor_id, next_index))
             .expect("script must already be stored");
-        ((next_index, script), changeset)
+        Some(((next_index, script), changeset))
     }
 
     /// Gets the next unused script pubkey in the keychain. I.e., the script pubkey with the lowest
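A short sketch of the Option-based reveal flow described above; the keychain names and the `demo` wrapper are hypothetical, and `desc` is assumed to be a valid descriptor:

    use bdk_chain::keychain::KeychainTxOutIndex;
    use bdk_chain::miniscript::{Descriptor, DescriptorPublicKey};

    fn demo(desc: Descriptor<DescriptorPublicKey>) {
        let mut index = KeychainTxOutIndex::<&'static str>::new(10);
        let _ = index.insert_descriptor("external", desc);

        // A tracked keychain yields the next derivation index and script pubkey.
        if let Some(((next_index, spk), _changeset)) = index.reveal_next_spk(&"external") {
            println!("revealed index {}: {:?}", next_index, spk);
        }

        // Untracked keychains no longer panic; callers handle None instead.
        assert!(index.next_unused_spk(&"internal").is_none());
        assert!(index.next_index(&"internal").is_none());
    }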
@@ -610,21 +858,22 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
     /// has used all scripts up to the derivation bounds, then the last derived script pubkey will be
     /// returned.
     ///
-    /// # Panics
-    ///
-    /// Panics if `keychain` has never been added to the index
-    pub fn next_unused_spk(&mut self, keychain: &K) -> ((u32, &Script), super::ChangeSet<K>) {
+    /// Returns None if the provided keychain doesn't exist.
+    pub fn next_unused_spk(
+        &mut self,
+        keychain: &K,
+    ) -> Option<((u32, &Script), super::ChangeSet<K>)> {
         let need_new = self.unused_keychain_spks(keychain).next().is_none();
         // this rather strange branch is needed because of some lifetime issues
         if need_new {
             self.reveal_next_spk(keychain)
         } else {
-            (
+            Some((
                 self.unused_keychain_spks(keychain)
                     .next()
                     .expect("we already know next exists"),
                 super::ChangeSet::default(),
-            )
+            ))
         }
     }
 
@@ -643,21 +892,35 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
         &'a self,
         range: impl RangeBounds<K> + 'a,
     ) -> impl DoubleEndedIterator<Item = (&'a K, u32, OutPoint)> + 'a {
-        let bounds = Self::map_to_inner_bounds(range);
+        let bounds = self.map_to_inner_bounds(range);
         self.inner
             .outputs_in_range(bounds)
-            .map(move |((keychain, i), op)| (keychain, *i, op))
+            .map(move |((desc_id, i), op)| {
+                let keychain = self
+                    .keychain_of_desc_id(desc_id)
+                    .expect("keychain must exist");
+                (keychain, *i, op)
+            })
     }
 
-    fn map_to_inner_bounds(bound: impl RangeBounds<K>) -> impl RangeBounds<(K, u32)> {
+    fn map_to_inner_bounds(
+        &self,
+        bound: impl RangeBounds<K>,
+    ) -> impl RangeBounds<(DescriptorId, u32)> {
+        let get_desc_id = |keychain| {
+            self.keychains_to_descriptors
+                .get(keychain)
+                .map(|(desc_id, _)| *desc_id)
+                .unwrap_or_else(|| DescriptorId::from_byte_array([0; 32]))
+        };
         let start = match bound.start_bound() {
-            Bound::Included(keychain) => Bound::Included((keychain.clone(), u32::MIN)),
-            Bound::Excluded(keychain) => Bound::Excluded((keychain.clone(), u32::MAX)),
+            Bound::Included(keychain) => Bound::Included((get_desc_id(keychain), u32::MIN)),
+            Bound::Excluded(keychain) => Bound::Excluded((get_desc_id(keychain), u32::MAX)),
             Bound::Unbounded => Bound::Unbounded,
         };
         let end = match bound.end_bound() {
-            Bound::Included(keychain) => Bound::Included((keychain.clone(), u32::MAX)),
-            Bound::Excluded(keychain) => Bound::Excluded((keychain.clone(), u32::MIN)),
+            Bound::Included(keychain) => Bound::Included((get_desc_id(keychain), u32::MAX)),
+            Bound::Excluded(keychain) => Bound::Excluded((get_desc_id(keychain), u32::MIN)),
             Bound::Unbounded => Bound::Unbounded,
         };
 
@@ -673,7 +936,7 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
     /// Returns the highest derivation index of each keychain that [`KeychainTxOutIndex`] has found
     /// a [`TxOut`] with it's script pubkey.
     pub fn last_used_indices(&self) -> BTreeMap<K, u32> {
-        self.keychains
+        self.keychains_to_descriptors
             .iter()
             .filter_map(|(keychain, _)| {
                 self.last_used_index(keychain)
@@ -682,9 +945,27 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
             .collect()
     }
 
-    /// Applies the derivation changeset to the [`KeychainTxOutIndex`], extending the number of
-    /// derived scripts per keychain, as specified in the `changeset`.
+    /// Applies the derivation changeset to the [`KeychainTxOutIndex`], as specified in the
+    /// [`ChangeSet::append`] documentation:
+    /// - Extends the number of derived scripts per keychain
+    /// - Adds new descriptors introduced
+    /// - If a descriptor is introduced for a keychain that already had a descriptor, overwrites
+    ///   the old descriptor
     pub fn apply_changeset(&mut self, changeset: super::ChangeSet<K>) {
-        let _ = self.reveal_to_target_multi(&changeset.0);
+        let ChangeSet {
+            keychains_added,
+            last_revealed,
+        } = changeset;
+        for (keychain, descriptor) in keychains_added {
+            let _ = self.insert_descriptor(keychain, descriptor);
+        }
+        let last_revealed = last_revealed
+            .into_iter()
+            .filter_map(|(desc_id, index)| {
+                let keychain = self.keychain_of_desc_id(&desc_id)?;
+                Some((keychain.clone(), index))
+            })
+            .collect();
+        let _ = self.reveal_to_target_multi(&last_revealed);
     }
 }
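Because the changeset now carries descriptors, an index can be rebuilt from its own changeset alone. A hedged sketch of that round trip, assuming `desc` is a valid wildcard descriptor and the "external" keychain name is hypothetical:

    use bdk_chain::indexed_tx_graph::Indexer;
    use bdk_chain::keychain::KeychainTxOutIndex;
    use bdk_chain::miniscript::{Descriptor, DescriptorPublicKey};

    fn demo_roundtrip(desc: Descriptor<DescriptorPublicKey>) {
        let mut index = KeychainTxOutIndex::<&'static str>::new(10);
        let _ = index.insert_descriptor("external", desc);
        let _ = index.reveal_to_target(&"external", 4);

        // Persist `initial_changeset()` (keychains_added + last_revealed), then later
        // rebuild a fresh index from it with `apply_changeset`.
        let changeset = index.initial_changeset();
        let mut recovered = KeychainTxOutIndex::<&'static str>::new(10);
        recovered.apply_changeset(changeset);
        assert_eq!(recovered.last_revealed_index(&"external"), Some(4));
    }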
@@ -44,7 +44,7 @@ pub use miniscript;
 #[cfg(feature = "miniscript")]
 mod descriptor_ext;
 #[cfg(feature = "miniscript")]
-pub use descriptor_ext::DescriptorExt;
+pub use descriptor_ext::{DescriptorExt, DescriptorId};
 #[cfg(feature = "miniscript")]
 mod spk_iter;
 #[cfg(feature = "miniscript")]
@@ -158,8 +158,8 @@ mod test {
         let (external_descriptor,_) = Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/0/*)").unwrap();
         let (internal_descriptor,_) = Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/1/*)").unwrap();
 
-        txout_index.add_keychain(TestKeychain::External, external_descriptor.clone());
-        txout_index.add_keychain(TestKeychain::Internal, internal_descriptor.clone());
+        let _ = txout_index.insert_descriptor(TestKeychain::External, external_descriptor.clone());
+        let _ = txout_index.insert_descriptor(TestKeychain::Internal, internal_descriptor.clone());
 
         (txout_index, external_descriptor, internal_descriptor)
     }
@@ -258,18 +258,10 @@ mod test {
             None
         );
     }
-    // The following dummy traits were created to test if SpkIterator is working properly.
-    #[allow(unused)]
-    trait TestSendStatic: Send + 'static {
-        fn test(&self) -> u32 {
-            20
-        }
-    }
-
-    impl TestSendStatic for SpkIterator<Descriptor<DescriptorPublicKey>> {
-        fn test(&self) -> u32 {
-            20
-        }
-    }
+
+    #[test]
+    fn spk_iterator_is_send_and_static() {
+        fn is_send_and_static<A: Send + 'static>() {}
+        is_send_and_static::<SpkIterator<Descriptor<DescriptorPublicKey>>>()
+    }
 }
@@ -1,3 +1,5 @@
+#![cfg(feature = "miniscript")]
+
 mod tx_template;
 #[allow(unused_imports)]
 pub use tx_template::*;
@@ -73,3 +75,15 @@ pub fn new_tx(lt: u32) -> bitcoin::Transaction {
         output: vec![],
     }
 }
+
+#[allow(unused)]
+pub const DESCRIPTORS: [&str; 7] = [
+    "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/0/*)",
+    "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/1/*)",
+    "wpkh([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/1/0/*)",
+    "tr(tprv8ZgxMBicQKsPd3krDUsBAmtnRsK3rb8u5yi1zhQgMhF1tR8MW7xfE4rnrbbsrbPR52e7rKapu6ztw1jXveJSCGHEriUGZV7mCe88duLp5pj/86'/1'/0'/0/*)",
+    "tr(tprv8ZgxMBicQKsPd3krDUsBAmtnRsK3rb8u5yi1zhQgMhF1tR8MW7xfE4rnrbbsrbPR52e7rKapu6ztw1jXveJSCGHEriUGZV7mCe88duLp5pj/86'/1'/0'/1/*)",
+    "wpkh(xprv9s21ZrQH143K4EXURwMHuLS469fFzZyXk7UUpdKfQwhoHcAiYTakpe8pMU2RiEdvrU9McyuE7YDoKcXkoAwEGoK53WBDnKKv2zZbb9BzttX/1/0/*)",
+    // non-wildcard
+    "wpkh([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/1/0)",
+];
@@ -1,3 +1,5 @@
+#![cfg(feature = "miniscript")]
+
 use rand::distributions::{Alphanumeric, DistString};
 use std::collections::HashMap;
 
@@ -52,7 +54,8 @@ impl TxOutTemplate {
 pub fn init_graph<'a, A: Anchor + Clone + 'a>(
     tx_templates: impl IntoIterator<Item = &'a TxTemplate<'a, A>>,
 ) -> (TxGraph<A>, SpkTxOutIndex<u32>, HashMap<&'a str, Txid>) {
-    let (descriptor, _) = Descriptor::parse_descriptor(&Secp256k1::signing_only(), "tr(tprv8ZgxMBicQKsPd3krDUsBAmtnRsK3rb8u5yi1zhQgMhF1tR8MW7xfE4rnrbbsrbPR52e7rKapu6ztw1jXveJSCGHEriUGZV7mCe88duLp5pj/86'/1'/0'/0/*)").unwrap();
+    let (descriptor, _) =
+        Descriptor::parse_descriptor(&Secp256k1::signing_only(), super::DESCRIPTORS[2]).unwrap();
     let mut graph = TxGraph::<A>::default();
     let mut spk_index = SpkTxOutIndex::default();
     (0..10).for_each(|index| {
@@ -1,13 +1,16 @@
+#![cfg(feature = "miniscript")]
+
 #[macro_use]
 mod common;
 
 use std::{collections::BTreeSet, sync::Arc};
 
+use crate::common::DESCRIPTORS;
 use bdk_chain::{
     indexed_tx_graph::{self, IndexedTxGraph},
     keychain::{self, Balance, KeychainTxOutIndex},
     local_chain::LocalChain,
-    tx_graph, ChainPosition, ConfirmationHeightAnchor,
+    tx_graph, ChainPosition, ConfirmationHeightAnchor, DescriptorExt,
 };
 use bitcoin::{
     secp256k1::Secp256k1, Amount, OutPoint, Script, ScriptBuf, Transaction, TxIn, TxOut,
@@ -23,8 +26,7 @@ use miniscript::Descriptor;
 /// agnostic.
 #[test]
 fn insert_relevant_txs() {
-    const DESCRIPTOR: &str = "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/0/*)";
-    let (descriptor, _) = Descriptor::parse_descriptor(&Secp256k1::signing_only(), DESCRIPTOR)
+    let (descriptor, _) = Descriptor::parse_descriptor(&Secp256k1::signing_only(), DESCRIPTORS[0])
         .expect("must be valid");
     let spk_0 = descriptor.at_derivation_index(0).unwrap().script_pubkey();
     let spk_1 = descriptor.at_derivation_index(9).unwrap().script_pubkey();
@@ -32,7 +34,7 @@ fn insert_relevant_txs() {
     let mut graph = IndexedTxGraph::<ConfirmationHeightAnchor, KeychainTxOutIndex<()>>::new(
         KeychainTxOutIndex::new(10),
     );
-    graph.index.add_keychain((), descriptor);
+    let _ = graph.index.insert_descriptor((), descriptor.clone());
 
     let tx_a = Transaction {
         output: vec![
@@ -71,7 +73,10 @@ fn insert_relevant_txs() {
             txs: txs.iter().cloned().map(Arc::new).collect(),
             ..Default::default()
         },
-        indexer: keychain::ChangeSet([((), 9_u32)].into()),
+        indexer: keychain::ChangeSet {
+            last_revealed: [(descriptor.descriptor_id(), 9_u32)].into(),
+            keychains_added: [].into(),
+        },
     };
 
     assert_eq!(
@@ -79,7 +84,16 @@ fn insert_relevant_txs() {
         changeset,
     );
 
-    assert_eq!(graph.initial_changeset(), changeset,);
+    // The initial changeset will also contain info about the keychain we added
+    let initial_changeset = indexed_tx_graph::ChangeSet {
+        graph: changeset.graph,
+        indexer: keychain::ChangeSet {
+            last_revealed: changeset.indexer.last_revealed,
+            keychains_added: [((), descriptor)].into(),
+        },
+    };
+
+    assert_eq!(graph.initial_changeset(), initial_changeset);
 }
 
 /// Ensure consistency IndexedTxGraph list_* and balance methods. These methods lists
@ -117,15 +131,17 @@ fn test_list_owned_txouts() {
|
|||||||
|
|
||||||
// Initiate IndexedTxGraph
|
// Initiate IndexedTxGraph
|
||||||
|
|
||||||
let (desc_1, _) = Descriptor::parse_descriptor(&Secp256k1::signing_only(), "tr(tprv8ZgxMBicQKsPd3krDUsBAmtnRsK3rb8u5yi1zhQgMhF1tR8MW7xfE4rnrbbsrbPR52e7rKapu6ztw1jXveJSCGHEriUGZV7mCe88duLp5pj/86'/1'/0'/0/*)").unwrap();
|
let (desc_1, _) =
|
||||||
let (desc_2, _) = Descriptor::parse_descriptor(&Secp256k1::signing_only(), "tr(tprv8ZgxMBicQKsPd3krDUsBAmtnRsK3rb8u5yi1zhQgMhF1tR8MW7xfE4rnrbbsrbPR52e7rKapu6ztw1jXveJSCGHEriUGZV7mCe88duLp5pj/86'/1'/0'/1/*)").unwrap();
|
Descriptor::parse_descriptor(&Secp256k1::signing_only(), common::DESCRIPTORS[2]).unwrap();
|
||||||
|
let (desc_2, _) =
|
||||||
|
Descriptor::parse_descriptor(&Secp256k1::signing_only(), common::DESCRIPTORS[3]).unwrap();
|
||||||
|
|
||||||
let mut graph = IndexedTxGraph::<ConfirmationHeightAnchor, KeychainTxOutIndex<String>>::new(
|
let mut graph = IndexedTxGraph::<ConfirmationHeightAnchor, KeychainTxOutIndex<String>>::new(
|
||||||
KeychainTxOutIndex::new(10),
|
KeychainTxOutIndex::new(10),
|
||||||
);
|
);
|
||||||
|
|
||||||
graph.index.add_keychain("keychain_1".into(), desc_1);
|
let _ = graph.index.insert_descriptor("keychain_1".into(), desc_1);
|
||||||
graph.index.add_keychain("keychain_2".into(), desc_2);
|
let _ = graph.index.insert_descriptor("keychain_2".into(), desc_2);
|
||||||
|
|
||||||
// Get trusted and untrusted addresses
|
// Get trusted and untrusted addresses
|
||||||
|
|
||||||
@ -135,14 +151,20 @@ fn test_list_owned_txouts() {
|
|||||||
{
|
{
|
||||||
// we need to scope here to take immutanble reference of the graph
|
// we need to scope here to take immutanble reference of the graph
|
||||||
for _ in 0..10 {
|
for _ in 0..10 {
|
||||||
let ((_, script), _) = graph.index.reveal_next_spk(&"keychain_1".to_string());
|
let ((_, script), _) = graph
|
||||||
|
.index
|
||||||
|
.reveal_next_spk(&"keychain_1".to_string())
|
||||||
|
.unwrap();
|
||||||
// TODO Assert indexes
|
// TODO Assert indexes
|
||||||
trusted_spks.push(script.to_owned());
|
trusted_spks.push(script.to_owned());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
{
|
{
|
||||||
for _ in 0..10 {
|
for _ in 0..10 {
|
||||||
let ((_, script), _) = graph.index.reveal_next_spk(&"keychain_2".to_string());
|
let ((_, script), _) = graph
|
||||||
|
.index
|
||||||
|
.reveal_next_spk(&"keychain_2".to_string())
|
||||||
|
.unwrap();
|
||||||
untrusted_spks.push(script.to_owned());
|
untrusted_spks.push(script.to_owned());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -235,26 +257,18 @@ fn test_list_owned_txouts() {
|
|||||||
.unwrap_or_else(|| panic!("block must exist at {}", height));
|
.unwrap_or_else(|| panic!("block must exist at {}", height));
|
||||||
let txouts = graph
|
let txouts = graph
|
||||||
.graph()
|
.graph()
|
||||||
.filter_chain_txouts(
|
.filter_chain_txouts(&local_chain, chain_tip, graph.index.outpoints())
|
||||||
&local_chain,
|
|
||||||
chain_tip,
|
|
||||||
graph.index.outpoints().iter().cloned(),
|
|
||||||
)
|
|
||||||
.collect::<Vec<_>>();
|
.collect::<Vec<_>>();
|
||||||
|
|
||||||
let utxos = graph
|
let utxos = graph
|
||||||
.graph()
|
.graph()
|
||||||
.filter_chain_unspents(
|
.filter_chain_unspents(&local_chain, chain_tip, graph.index.outpoints())
|
||||||
&local_chain,
|
|
||||||
chain_tip,
|
|
||||||
graph.index.outpoints().iter().cloned(),
|
|
||||||
)
|
|
||||||
.collect::<Vec<_>>();
|
.collect::<Vec<_>>();
|
||||||
|
|
||||||
let balance = graph.graph().balance(
|
let balance = graph.graph().balance(
|
||||||
&local_chain,
|
&local_chain,
|
||||||
chain_tip,
|
chain_tip,
|
||||||
graph.index.outpoints().iter().cloned(),
|
graph.index.outpoints(),
|
||||||
|_, spk: &Script| trusted_spks.contains(&spk.to_owned()),
|
|_, spk: &Script| trusted_spks.contains(&spk.to_owned()),
|
||||||
);
|
);
|
||||||
|
|
||||||
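The hunks above show the shape of the reworked `keychain::ChangeSet`: it is now a plain struct with a `keychains_added` map and a `last_revealed` map keyed by descriptor ID. Below is a minimal sketch of how a consumer might build and merge two such changesets; it assumes the `bdk_chain` API exactly as it appears in this diff (`keychain::ChangeSet`, `Append`, `DescriptorExt::descriptor_id`), and the `"external"` label plus the descriptor string (reused from the old test code) are only placeholders.

```rust
use bdk_chain::{keychain, Append, DescriptorExt};
use miniscript::{Descriptor, DescriptorPublicKey};

fn main() {
    let secp = bdk_chain::bitcoin::secp256k1::Secp256k1::signing_only();
    // Descriptor string reused from the old test code above; any valid descriptor works.
    let (desc, _) = Descriptor::<DescriptorPublicKey>::parse_descriptor(
        &secp,
        "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/0/*)",
    )
    .expect("descriptor must parse");

    // The changeset now carries the descriptor itself (`keychains_added`) alongside
    // the last revealed index per descriptor (`last_revealed`).
    let mut lhs = keychain::ChangeSet {
        keychains_added: [("external", desc.clone())].into(),
        last_revealed: [(desc.descriptor_id(), 5)].into(),
    };
    let rhs = keychain::ChangeSet {
        keychains_added: [("external", desc.clone())].into(),
        last_revealed: [(desc.descriptor_id(), 9)].into(),
    };

    // `Append` keeps the higher derivation index for a descriptor seen on both sides.
    lhs.append(rhs);
    assert_eq!(lhs.last_revealed.get(&desc.descriptor_id()), Some(&9));
}
```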
@@ -5,36 +5,39 @@ mod common;
 use bdk_chain::{
     collections::BTreeMap,
     indexed_tx_graph::Indexer,
-    keychain::{self, KeychainTxOutIndex},
-    Append,
+    keychain::{self, ChangeSet, KeychainTxOutIndex},
+    Append, DescriptorExt, DescriptorId,
 };

 use bitcoin::{secp256k1::Secp256k1, Amount, OutPoint, ScriptBuf, Transaction, TxOut};
 use miniscript::{Descriptor, DescriptorPublicKey};

+use crate::common::DESCRIPTORS;

 #[derive(Clone, Debug, PartialEq, Eq, Ord, PartialOrd)]
 enum TestKeychain {
     External,
     Internal,
 }

+fn parse_descriptor(descriptor: &str) -> Descriptor<DescriptorPublicKey> {
+    let secp = bdk_chain::bitcoin::secp256k1::Secp256k1::signing_only();
+    Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, descriptor)
+        .unwrap()
+        .0
+}
+
 fn init_txout_index(
+    external_descriptor: Descriptor<DescriptorPublicKey>,
+    internal_descriptor: Descriptor<DescriptorPublicKey>,
     lookahead: u32,
-) -> (
-    bdk_chain::keychain::KeychainTxOutIndex<TestKeychain>,
-    Descriptor<DescriptorPublicKey>,
-    Descriptor<DescriptorPublicKey>,
-) {
+) -> bdk_chain::keychain::KeychainTxOutIndex<TestKeychain> {
     let mut txout_index = bdk_chain::keychain::KeychainTxOutIndex::<TestKeychain>::new(lookahead);

-    let secp = bdk_chain::bitcoin::secp256k1::Secp256k1::signing_only();
-    let (external_descriptor,_) = Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/0/*)").unwrap();
-    let (internal_descriptor,_) = Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/1/*)").unwrap();
-
-    txout_index.add_keychain(TestKeychain::External, external_descriptor.clone());
-    txout_index.add_keychain(TestKeychain::Internal, internal_descriptor.clone());
+    let _ = txout_index.insert_descriptor(TestKeychain::External, external_descriptor);
+    let _ = txout_index.insert_descriptor(TestKeychain::Internal, internal_descriptor);

-    (txout_index, external_descriptor, internal_descriptor)
+    txout_index
 }

 fn spk_at_index(descriptor: &Descriptor<DescriptorPublicKey>, index: u32) -> ScriptBuf {
@@ -44,29 +47,136 @@ fn spk_at_index(descriptor: &Descriptor<DescriptorPublicKey>, index: u32) -> Scr
         .script_pubkey()
 }

+// We create two empty changesets lhs and rhs, we then insert various descriptors with various
+// last_revealed, append rhs to lhs, and check that the result is consistent with these rules:
+// - Existing index doesn't update if the new index in `other` is lower than `self`.
+// - Existing index updates if the new index in `other` is higher than `self`.
+// - Existing index is unchanged if keychain doesn't exist in `other`.
+// - New keychain gets added if the keychain is in `other` but not in `self`.
+#[test]
+fn append_changesets_check_last_revealed() {
+    let secp = bitcoin::secp256k1::Secp256k1::signing_only();
+    let descriptor_ids: Vec<_> = DESCRIPTORS
+        .iter()
+        .take(4)
+        .map(|d| {
+            Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, d)
+                .unwrap()
+                .0
+                .descriptor_id()
+        })
+        .collect();
+
+    let mut lhs_di = BTreeMap::<DescriptorId, u32>::default();
+    let mut rhs_di = BTreeMap::<DescriptorId, u32>::default();
+    lhs_di.insert(descriptor_ids[0], 7);
+    lhs_di.insert(descriptor_ids[1], 0);
+    lhs_di.insert(descriptor_ids[2], 3);
+
+    rhs_di.insert(descriptor_ids[0], 3); // value less than lhs desc 0
+    rhs_di.insert(descriptor_ids[1], 5); // value more than lhs desc 1
+    lhs_di.insert(descriptor_ids[3], 4); // key doesn't exist in lhs
+
+    let mut lhs = ChangeSet {
+        keychains_added: BTreeMap::<(), _>::new(),
+        last_revealed: lhs_di,
+    };
+    let rhs = ChangeSet {
+        keychains_added: BTreeMap::<(), _>::new(),
+        last_revealed: rhs_di,
+    };
+    lhs.append(rhs);
+
+    // Existing index doesn't update if the new index in `other` is lower than `self`.
+    assert_eq!(lhs.last_revealed.get(&descriptor_ids[0]), Some(&7));
+    // Existing index updates if the new index in `other` is higher than `self`.
+    assert_eq!(lhs.last_revealed.get(&descriptor_ids[1]), Some(&5));
+    // Existing index is unchanged if keychain doesn't exist in `other`.
+    assert_eq!(lhs.last_revealed.get(&descriptor_ids[2]), Some(&3));
+    // New keychain gets added if the keychain is in `other` but not in `self`.
+    assert_eq!(lhs.last_revealed.get(&descriptor_ids[3]), Some(&4));
+}
+
+#[test]
+fn test_apply_changeset_with_different_descriptors_to_same_keychain() {
+    let external_descriptor = parse_descriptor(DESCRIPTORS[0]);
+    let internal_descriptor = parse_descriptor(DESCRIPTORS[1]);
+    let mut txout_index =
+        init_txout_index(external_descriptor.clone(), internal_descriptor.clone(), 0);
+    assert_eq!(
+        txout_index.keychains().collect::<Vec<_>>(),
+        vec![
+            (&TestKeychain::External, &external_descriptor),
+            (&TestKeychain::Internal, &internal_descriptor)
+        ]
+    );
+
+    let changeset = ChangeSet {
+        keychains_added: [(TestKeychain::External, internal_descriptor.clone())].into(),
+        last_revealed: [].into(),
+    };
+    txout_index.apply_changeset(changeset);
+
+    assert_eq!(
+        txout_index.keychains().collect::<Vec<_>>(),
+        vec![
+            (&TestKeychain::External, &internal_descriptor),
+            (&TestKeychain::Internal, &internal_descriptor)
+        ]
+    );
+
+    let changeset = ChangeSet {
+        keychains_added: [(TestKeychain::Internal, external_descriptor.clone())].into(),
+        last_revealed: [].into(),
+    };
+    txout_index.apply_changeset(changeset);
+
+    assert_eq!(
+        txout_index.keychains().collect::<Vec<_>>(),
+        vec![
+            (&TestKeychain::External, &internal_descriptor),
+            (&TestKeychain::Internal, &external_descriptor)
+        ]
+    );
+}

 #[test]
 fn test_set_all_derivation_indices() {
     use bdk_chain::indexed_tx_graph::Indexer;

-    let (mut txout_index, _, _) = init_txout_index(0);
+    let external_descriptor = parse_descriptor(DESCRIPTORS[0]);
+    let internal_descriptor = parse_descriptor(DESCRIPTORS[1]);
+    let mut txout_index =
+        init_txout_index(external_descriptor.clone(), internal_descriptor.clone(), 0);
     let derive_to: BTreeMap<_, _> =
         [(TestKeychain::External, 12), (TestKeychain::Internal, 24)].into();
+    let last_revealed: BTreeMap<_, _> = [
+        (external_descriptor.descriptor_id(), 12),
+        (internal_descriptor.descriptor_id(), 24),
+    ]
+    .into();
     assert_eq!(
-        txout_index.reveal_to_target_multi(&derive_to).1.as_inner(),
-        &derive_to
+        txout_index.reveal_to_target_multi(&derive_to).1,
+        ChangeSet {
+            keychains_added: BTreeMap::new(),
+            last_revealed: last_revealed.clone()
+        }
     );
-    assert_eq!(txout_index.last_revealed_indices(), &derive_to);
+    assert_eq!(txout_index.last_revealed_indices(), derive_to);
     assert_eq!(
         txout_index.reveal_to_target_multi(&derive_to).1,
         keychain::ChangeSet::default(),
         "no changes if we set to the same thing"
     );
-    assert_eq!(txout_index.initial_changeset().as_inner(), &derive_to);
+    assert_eq!(txout_index.initial_changeset().last_revealed, last_revealed);
 }

 #[test]
 fn test_lookahead() {
-    let (mut txout_index, external_desc, internal_desc) = init_txout_index(10);
+    let external_descriptor = parse_descriptor(DESCRIPTORS[0]);
+    let internal_descriptor = parse_descriptor(DESCRIPTORS[1]);
+    let mut txout_index =
+        init_txout_index(external_descriptor.clone(), internal_descriptor.clone(), 10);

     // given:
     // - external lookahead set to 10
@@ -76,15 +186,16 @@ fn test_lookahead() {
     // - scripts cached in spk_txout_index should increase correctly
     // - stored scripts of external keychain should be of expected counts
     for index in (0..20).skip_while(|i| i % 2 == 1) {
-        let (revealed_spks, revealed_changeset) =
-            txout_index.reveal_to_target(&TestKeychain::External, index);
+        let (revealed_spks, revealed_changeset) = txout_index
+            .reveal_to_target(&TestKeychain::External, index)
+            .unwrap();
         assert_eq!(
             revealed_spks.collect::<Vec<_>>(),
-            vec![(index, spk_at_index(&external_desc, index))],
+            vec![(index, spk_at_index(&external_descriptor, index))],
         );
         assert_eq!(
-            revealed_changeset.as_inner(),
-            &[(TestKeychain::External, index)].into()
+            &revealed_changeset.last_revealed,
+            &[(external_descriptor.descriptor_id(), index)].into()
         );

         assert_eq!(
@@ -126,17 +237,18 @@ fn test_lookahead() {
     // - derivation index is set ahead of current derivation index + lookahead
     // expect:
     // - scripts cached in spk_txout_index should increase correctly, a.k.a. no scripts are skipped
-    let (revealed_spks, revealed_changeset) =
-        txout_index.reveal_to_target(&TestKeychain::Internal, 24);
+    let (revealed_spks, revealed_changeset) = txout_index
+        .reveal_to_target(&TestKeychain::Internal, 24)
+        .unwrap();
     assert_eq!(
         revealed_spks.collect::<Vec<_>>(),
         (0..=24)
-            .map(|index| (index, spk_at_index(&internal_desc, index)))
+            .map(|index| (index, spk_at_index(&internal_descriptor, index)))
             .collect::<Vec<_>>(),
     );
     assert_eq!(
-        revealed_changeset.as_inner(),
-        &[(TestKeychain::Internal, 24)].into()
+        &revealed_changeset.last_revealed,
+        &[(internal_descriptor.descriptor_id(), 24)].into()
     );
     assert_eq!(
         txout_index.inner().all_spks().len(),
@@ -172,14 +284,14 @@ fn test_lookahead() {
     let tx = Transaction {
         output: vec![
             TxOut {
-                script_pubkey: external_desc
+                script_pubkey: external_descriptor
                     .at_derivation_index(external_index)
                     .unwrap()
                     .script_pubkey(),
                 value: Amount::from_sat(10_000),
             },
             TxOut {
-                script_pubkey: internal_desc
+                script_pubkey: internal_descriptor
                     .at_derivation_index(internal_index)
                     .unwrap()
                     .script_pubkey(),
@@ -219,14 +331,17 @@ fn test_lookahead() {
 // - last used index should change as expected
 #[test]
 fn test_scan_with_lookahead() {
-    let (mut txout_index, external_desc, _) = init_txout_index(10);
+    let external_descriptor = parse_descriptor(DESCRIPTORS[0]);
+    let internal_descriptor = parse_descriptor(DESCRIPTORS[1]);
+    let mut txout_index =
+        init_txout_index(external_descriptor.clone(), internal_descriptor.clone(), 10);

     let spks: BTreeMap<u32, ScriptBuf> = [0, 10, 20, 30]
         .into_iter()
         .map(|i| {
             (
                 i,
-                external_desc
+                external_descriptor
                     .at_derivation_index(i)
                     .unwrap()
                     .script_pubkey(),
@@ -243,8 +358,8 @@ fn test_scan_with_lookahead() {

         let changeset = txout_index.index_txout(op, &txout);
         assert_eq!(
-            changeset.as_inner(),
-            &[(TestKeychain::External, spk_i)].into()
+            &changeset.last_revealed,
+            &[(external_descriptor.descriptor_id(), spk_i)].into()
         );
         assert_eq!(
             txout_index.last_revealed_index(&TestKeychain::External),
@@ -257,7 +372,7 @@ fn test_scan_with_lookahead() {
     }

     // now try with index 41 (lookahead surpassed), we expect that the txout to not be indexed
-    let spk_41 = external_desc
+    let spk_41 = external_descriptor
         .at_derivation_index(41)
         .unwrap()
         .script_pubkey();
@@ -273,11 +388,13 @@ fn test_scan_with_lookahead() {
 #[test]
 #[rustfmt::skip]
 fn test_wildcard_derivations() {
-    let (mut txout_index, external_desc, _) = init_txout_index(0);
-    let external_spk_0 = external_desc.at_derivation_index(0).unwrap().script_pubkey();
-    let external_spk_16 = external_desc.at_derivation_index(16).unwrap().script_pubkey();
-    let external_spk_26 = external_desc.at_derivation_index(26).unwrap().script_pubkey();
-    let external_spk_27 = external_desc.at_derivation_index(27).unwrap().script_pubkey();
+    let external_descriptor = parse_descriptor(DESCRIPTORS[0]);
+    let internal_descriptor = parse_descriptor(DESCRIPTORS[1]);
+    let mut txout_index = init_txout_index(external_descriptor.clone(), internal_descriptor.clone(), 0);
+    let external_spk_0 = external_descriptor.at_derivation_index(0).unwrap().script_pubkey();
+    let external_spk_16 = external_descriptor.at_derivation_index(16).unwrap().script_pubkey();
+    let external_spk_26 = external_descriptor.at_derivation_index(26).unwrap().script_pubkey();
+    let external_spk_27 = external_descriptor.at_derivation_index(27).unwrap().script_pubkey();

     // - nothing is derived
     // - unused list is also empty
@@ -285,13 +402,13 @@ fn test_wildcard_derivations() {
     // - next_derivation_index() == (0, true)
     // - derive_new() == ((0, <spk>), keychain::ChangeSet)
     // - next_unused() == ((0, <spk>), keychain::ChangeSet:is_empty())
-    assert_eq!(txout_index.next_index(&TestKeychain::External), (0, true));
-    let (spk, changeset) = txout_index.reveal_next_spk(&TestKeychain::External);
+    assert_eq!(txout_index.next_index(&TestKeychain::External).unwrap(), (0, true));
+    let (spk, changeset) = txout_index.reveal_next_spk(&TestKeychain::External).unwrap();
     assert_eq!(spk, (0_u32, external_spk_0.as_script()));
-    assert_eq!(changeset.as_inner(), &[(TestKeychain::External, 0)].into());
-    let (spk, changeset) = txout_index.next_unused_spk(&TestKeychain::External);
+    assert_eq!(&changeset.last_revealed, &[(external_descriptor.descriptor_id(), 0)].into());
+    let (spk, changeset) = txout_index.next_unused_spk(&TestKeychain::External).unwrap();
     assert_eq!(spk, (0_u32, external_spk_0.as_script()));
-    assert_eq!(changeset.as_inner(), &[].into());
+    assert_eq!(&changeset.last_revealed, &[].into());

     // - derived till 25
     // - used all spks till 15.
@@ -307,16 +424,16 @@ fn test_wildcard_derivations() {
         .chain([17, 20, 23])
         .for_each(|index| assert!(txout_index.mark_used(TestKeychain::External, index)));

-    assert_eq!(txout_index.next_index(&TestKeychain::External), (26, true));
+    assert_eq!(txout_index.next_index(&TestKeychain::External).unwrap(), (26, true));

-    let (spk, changeset) = txout_index.reveal_next_spk(&TestKeychain::External);
+    let (spk, changeset) = txout_index.reveal_next_spk(&TestKeychain::External).unwrap();
     assert_eq!(spk, (26, external_spk_26.as_script()));

-    assert_eq!(changeset.as_inner(), &[(TestKeychain::External, 26)].into());
+    assert_eq!(&changeset.last_revealed, &[(external_descriptor.descriptor_id(), 26)].into());

-    let (spk, changeset) = txout_index.next_unused_spk(&TestKeychain::External);
+    let (spk, changeset) = txout_index.next_unused_spk(&TestKeychain::External).unwrap();
     assert_eq!(spk, (16, external_spk_16.as_script()));
-    assert_eq!(changeset.as_inner(), &[].into());
+    assert_eq!(&changeset.last_revealed, &[].into());

     // - Use all the derived till 26.
     // - next_unused() = ((27, <spk>), keychain::ChangeSet)
@@ -324,9 +441,9 @@ fn test_wildcard_derivations() {
         txout_index.mark_used(TestKeychain::External, index);
     });

-    let (spk, changeset) = txout_index.next_unused_spk(&TestKeychain::External);
+    let (spk, changeset) = txout_index.next_unused_spk(&TestKeychain::External).unwrap();
     assert_eq!(spk, (27, external_spk_27.as_script()));
-    assert_eq!(changeset.as_inner(), &[(TestKeychain::External, 27)].into());
+    assert_eq!(&changeset.last_revealed, &[(external_descriptor.descriptor_id(), 27)].into());
 }

 #[test]
@@ -334,13 +451,14 @@ fn test_non_wildcard_derivations() {
     let mut txout_index = KeychainTxOutIndex::<TestKeychain>::new(0);

     let secp = bitcoin::secp256k1::Secp256k1::signing_only();
-    let (no_wildcard_descriptor, _) = Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, "wpkh([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/1/0)").unwrap();
+    let (no_wildcard_descriptor, _) =
+        Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, DESCRIPTORS[6]).unwrap();
     let external_spk = no_wildcard_descriptor
         .at_derivation_index(0)
         .unwrap()
         .script_pubkey();

-    txout_index.add_keychain(TestKeychain::External, no_wildcard_descriptor);
+    let _ = txout_index.insert_descriptor(TestKeychain::External, no_wildcard_descriptor.clone());

     // given:
     // - `txout_index` with no stored scripts
@@ -348,14 +466,24 @@ fn test_non_wildcard_derivations() {
     // - next derivation index should be new
     // - when we derive a new script, script @ index 0
     // - when we get the next unused script, script @ index 0
-    assert_eq!(txout_index.next_index(&TestKeychain::External), (0, true));
-    let (spk, changeset) = txout_index.reveal_next_spk(&TestKeychain::External);
+    assert_eq!(
+        txout_index.next_index(&TestKeychain::External).unwrap(),
+        (0, true)
+    );
+    let (spk, changeset) = txout_index
+        .reveal_next_spk(&TestKeychain::External)
+        .unwrap();
     assert_eq!(spk, (0, external_spk.as_script()));
-    assert_eq!(changeset.as_inner(), &[(TestKeychain::External, 0)].into());
+    assert_eq!(
+        &changeset.last_revealed,
+        &[(no_wildcard_descriptor.descriptor_id(), 0)].into()
+    );

-    let (spk, changeset) = txout_index.next_unused_spk(&TestKeychain::External);
+    let (spk, changeset) = txout_index
+        .next_unused_spk(&TestKeychain::External)
+        .unwrap();
     assert_eq!(spk, (0, external_spk.as_script()));
-    assert_eq!(changeset.as_inner(), &[].into());
+    assert_eq!(&changeset.last_revealed, &[].into());

     // given:
     // - the non-wildcard descriptor already has a stored and used script
@@ -363,18 +491,26 @@ fn test_non_wildcard_derivations() {
     // - next derivation index should not be new
     // - derive new and next unused should return the old script
     // - store_up_to should not panic and return empty changeset
-    assert_eq!(txout_index.next_index(&TestKeychain::External), (0, false));
+    assert_eq!(
+        txout_index.next_index(&TestKeychain::External).unwrap(),
+        (0, false)
+    );
     txout_index.mark_used(TestKeychain::External, 0);

-    let (spk, changeset) = txout_index.reveal_next_spk(&TestKeychain::External);
+    let (spk, changeset) = txout_index
+        .reveal_next_spk(&TestKeychain::External)
+        .unwrap();
     assert_eq!(spk, (0, external_spk.as_script()));
-    assert_eq!(changeset.as_inner(), &[].into());
+    assert_eq!(&changeset.last_revealed, &[].into());

-    let (spk, changeset) = txout_index.next_unused_spk(&TestKeychain::External);
+    let (spk, changeset) = txout_index
+        .next_unused_spk(&TestKeychain::External)
+        .unwrap();
     assert_eq!(spk, (0, external_spk.as_script()));
-    assert_eq!(changeset.as_inner(), &[].into());
-    let (revealed_spks, revealed_changeset) =
-        txout_index.reveal_to_target(&TestKeychain::External, 200);
+    assert_eq!(&changeset.last_revealed, &[].into());
+    let (revealed_spks, revealed_changeset) = txout_index
+        .reveal_to_target(&TestKeychain::External, 200)
+        .unwrap();
     assert_eq!(revealed_spks.count(), 0);
     assert!(revealed_changeset.is_empty());

@@ -438,7 +574,13 @@ fn lookahead_to_target() {
     ];

     for t in test_cases {
-        let (mut index, _, _) = init_txout_index(t.lookahead);
+        let external_descriptor = parse_descriptor(DESCRIPTORS[0]);
+        let internal_descriptor = parse_descriptor(DESCRIPTORS[1]);
+        let mut index = init_txout_index(
+            external_descriptor.clone(),
+            internal_descriptor.clone(),
+            t.lookahead,
+        );

         if let Some(last_revealed) = t.external_last_revealed {
             let _ = index.reveal_to_target(&TestKeychain::External, last_revealed);
@@ -449,17 +591,19 @@ fn lookahead_to_target() {

         let keychain_test_cases = [
             (
+                external_descriptor.descriptor_id(),
                 TestKeychain::External,
                 t.external_last_revealed,
                 t.external_target,
             ),
             (
+                internal_descriptor.descriptor_id(),
                 TestKeychain::Internal,
                 t.internal_last_revealed,
                 t.internal_target,
             ),
         ];
-        for (keychain, last_revealed, target) in keychain_test_cases {
+        for (descriptor_id, keychain, last_revealed, target) in keychain_test_cases {
             if let Some(target) = target {
                 let original_last_stored_index = match last_revealed {
                     Some(last_revealed) => Some(last_revealed + t.lookahead),
@@ -475,10 +619,10 @@ fn lookahead_to_target() {
                 let keys = index
                     .inner()
                     .all_spks()
-                    .range((keychain.clone(), 0)..=(keychain.clone(), u32::MAX))
-                    .map(|(k, _)| k.clone())
+                    .range((descriptor_id, 0)..=(descriptor_id, u32::MAX))
+                    .map(|(k, _)| *k)
                     .collect::<Vec<_>>();
-                let exp_keys = core::iter::repeat(keychain)
+                let exp_keys = core::iter::repeat(descriptor_id)
                     .zip(0_u32..=exp_last_stored_index)
                     .collect::<Vec<_>>();
                 assert_eq!(keys, exp_keys);
@@ -486,3 +630,150 @@ fn lookahead_to_target() {
             }
         }
     }

+/// `::index_txout` should still index txouts with spks derived from descriptors without keychains.
+/// This includes properly refilling the lookahead for said descriptors.
+#[test]
+fn index_txout_after_changing_descriptor_under_keychain() {
+    let secp = bdk_chain::bitcoin::secp256k1::Secp256k1::signing_only();
+    let (desc_a, _) = Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, DESCRIPTORS[0])
+        .expect("descriptor 0 must be valid");
+    let (desc_b, _) = Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, DESCRIPTORS[1])
+        .expect("descriptor 1 must be valid");
+    let desc_id_a = desc_a.descriptor_id();
+
+    let mut txout_index = bdk_chain::keychain::KeychainTxOutIndex::<()>::new(10);
+
+    // Introduce `desc_a` under keychain `()` and replace the descriptor.
+    let _ = txout_index.insert_descriptor((), desc_a.clone());
+    let _ = txout_index.insert_descriptor((), desc_b.clone());
+
+    // Loop through spks in intervals of `lookahead` to create outputs with. We should always be
+    // able to index these outputs if `lookahead` is respected.
+    let spk_indices = [9, 19, 29, 39];
+    for i in spk_indices {
+        let spk_at_index = desc_a
+            .at_derivation_index(i)
+            .expect("must derive")
+            .script_pubkey();
+        let index_changeset = txout_index.index_txout(
+            // Use spk derivation index as vout as we just want an unique outpoint.
+            OutPoint::new(h!("mock_tx"), i as _),
+            &TxOut {
+                value: Amount::from_sat(10_000),
+                script_pubkey: spk_at_index,
+            },
+        );
+        assert_eq!(
+            index_changeset,
+            bdk_chain::keychain::ChangeSet {
+                keychains_added: BTreeMap::default(),
+                last_revealed: [(desc_id_a, i)].into(),
+            },
+            "must always increase last active if impl respects lookahead"
+        );
+    }
+}
+
+#[test]
+fn insert_descriptor_no_change() {
+    let secp = Secp256k1::signing_only();
+    let (desc, _) =
+        Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, DESCRIPTORS[0]).unwrap();
+    let mut txout_index = KeychainTxOutIndex::<()>::default();
+    assert_eq!(
+        txout_index.insert_descriptor((), desc.clone()),
+        keychain::ChangeSet {
+            keychains_added: [((), desc.clone())].into(),
+            last_revealed: Default::default()
+        },
+    );
+    assert_eq!(
+        txout_index.insert_descriptor((), desc.clone()),
+        keychain::ChangeSet::default(),
+        "inserting the same descriptor for keychain should return an empty changeset",
+    );
+}
+
+#[test]
+fn applying_changesets_one_by_one_vs_aggregate_must_have_same_result() {
+    let desc = parse_descriptor(DESCRIPTORS[0]);
+    let changesets: &[ChangeSet<TestKeychain>] = &[
+        ChangeSet {
+            keychains_added: [(TestKeychain::Internal, desc.clone())].into(),
+            last_revealed: [].into(),
+        },
+        ChangeSet {
+            keychains_added: [(TestKeychain::External, desc.clone())].into(),
+            last_revealed: [(desc.descriptor_id(), 12)].into(),
+        },
+    ];
+
+    let mut indexer_a = KeychainTxOutIndex::<TestKeychain>::new(0);
+    for changeset in changesets {
+        indexer_a.apply_changeset(changeset.clone());
+    }
+
+    let mut indexer_b = KeychainTxOutIndex::<TestKeychain>::new(0);
+    let aggregate_changesets = changesets
+        .iter()
+        .cloned()
+        .reduce(|mut agg, cs| {
+            agg.append(cs);
+            agg
+        })
+        .expect("must aggregate changesets");
+    indexer_b.apply_changeset(aggregate_changesets);
+
+    assert_eq!(
+        indexer_a.keychains().collect::<Vec<_>>(),
+        indexer_b.keychains().collect::<Vec<_>>()
+    );
+    assert_eq!(
+        indexer_a.spk_at_index(TestKeychain::External, 0),
+        indexer_b.spk_at_index(TestKeychain::External, 0)
+    );
+    assert_eq!(
+        indexer_a.spk_at_index(TestKeychain::Internal, 0),
+        indexer_b.spk_at_index(TestKeychain::Internal, 0)
+    );
+    assert_eq!(
+        indexer_a.last_revealed_indices(),
+        indexer_b.last_revealed_indices()
+    );
+}
+
+// When the same descriptor is associated with various keychains,
+// index methods only return the highest keychain by Ord
+#[test]
+fn test_only_highest_ord_keychain_is_returned() {
+    let desc = parse_descriptor(DESCRIPTORS[0]);
+
+    let mut indexer = KeychainTxOutIndex::<TestKeychain>::new(0);
+    let _ = indexer.insert_descriptor(TestKeychain::Internal, desc.clone());
+    let _ = indexer.insert_descriptor(TestKeychain::External, desc);
+
+    // reveal_next_spk will work with either keychain
+    let spk0: ScriptBuf = indexer
+        .reveal_next_spk(&TestKeychain::External)
+        .unwrap()
+        .0
+        .1
+        .into();
+    let spk1: ScriptBuf = indexer
+        .reveal_next_spk(&TestKeychain::Internal)
+        .unwrap()
+        .0
+        .1
+        .into();
+
+    // index_of_spk will always return External
+    assert_eq!(
+        indexer.index_of_spk(&spk0),
+        Some((TestKeychain::External, 0))
+    );
+    assert_eq!(
+        indexer.index_of_spk(&spk1),
+        Some((TestKeychain::External, 1))
+    );
+}
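The test changes above exercise the two headline `KeychainTxOutIndex` API changes: `add_keychain` is replaced by `insert_descriptor`, which reports what it did as a changeset, and the reveal-type methods now return `Option` with `last_revealed` keyed by descriptor ID. A rough caller-side sketch follows, assuming those signatures exactly as shown in the diff; the `"external"`/`"missing"` labels and the descriptor string (again from the old test code) are placeholders.

```rust
use bdk_chain::{keychain::KeychainTxOutIndex, DescriptorExt};
use miniscript::{Descriptor, DescriptorPublicKey};

fn main() {
    let secp = bdk_chain::bitcoin::secp256k1::Secp256k1::signing_only();
    // Same test descriptor as above; only used to have something to insert.
    let (desc, _) = Descriptor::<DescriptorPublicKey>::parse_descriptor(
        &secp,
        "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/0/*)",
    )
    .expect("descriptor must parse");

    let mut index = KeychainTxOutIndex::<&'static str>::new(10);

    // `insert_descriptor` (formerly `add_keychain`) reports the addition as a changeset.
    let changeset = index.insert_descriptor("external", desc.clone());
    assert!(changeset.keychains_added.contains_key("external"));

    // Reveal-type methods now return `Option`: unknown keychains yield `None`...
    assert!(index.reveal_next_spk(&"missing").is_none());

    // ...and known keychains yield the spk plus a changeset keyed by descriptor id.
    let ((revealed_index, _spk), changeset) =
        index.reveal_next_spk(&"external").expect("keychain exists");
    assert_eq!(revealed_index, 0);
    assert_eq!(changeset.last_revealed.get(&desc.descriptor_id()), Some(&0));
}
```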
@@ -1,3 +1,5 @@
+#![cfg(feature = "miniscript")]
+
 use std::ops::{Bound, RangeBounds};

 use bdk_chain::{
@@ -1,3 +1,5 @@
+#![cfg(feature = "miniscript")]
+
 #[macro_use]
 mod common;
 use bdk_chain::tx_graph::CalculateFeeError;
@@ -1,3 +1,5 @@
+#![cfg(feature = "miniscript")]
+
 #[macro_use]
 mod common;

@@ -212,7 +212,7 @@ fn main() -> anyhow::Result<()> {
         graph.graph().balance(
             &*chain,
             synced_to.block_id(),
-            graph.index.outpoints().iter().cloned(),
+            graph.index.outpoints(),
             |(k, _), _| k == &Keychain::Internal,
         )
     };
@@ -336,7 +336,7 @@ fn main() -> anyhow::Result<()> {
         graph.graph().balance(
             &*chain,
             synced_to.block_id(),
-            graph.index.outpoints().iter().cloned(),
+            graph.index.outpoints(),
             |(k, _), _| k == &Keychain::Internal,
         )
     };
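Both hunks above make the same mechanical change: `KeychainTxOutIndex::outpoints` now returns an owned collection, so it can be passed straight to `balance` without the old `.iter().cloned()` dance. Below is a hedged sketch of the resulting call shape; `trusted_balance` is a hypothetical helper (not part of the example binaries) and the generic bounds are assumptions based on how the diff uses these types.

```rust
use bdk_chain::{
    indexed_tx_graph::IndexedTxGraph, keychain::KeychainTxOutIndex, local_chain::LocalChain,
    Balance, BlockId, ConfirmationHeightAnchor,
};

// Hypothetical helper: demonstrates that the owned set from `outpoints()` is handed
// straight to `balance`, with the keychain-based trust predicate unchanged.
fn trusted_balance<K: Clone + Ord + core::fmt::Debug>(
    graph: &IndexedTxGraph<ConfirmationHeightAnchor, KeychainTxOutIndex<K>>,
    chain: &LocalChain,
    chain_tip: BlockId,
    is_trusted: impl Fn(&K) -> bool,
) -> Balance {
    graph.graph().balance(
        chain,
        chain_tip,
        graph.index.outpoints(), // previously: graph.index.outpoints().iter().cloned()
        |(keychain, _), _spk| is_trusted(keychain),
    )
}
```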
@@ -249,14 +249,20 @@ where
         script_pubkey: address.script_pubkey(),
     }];

-    let internal_keychain = if graph.index.keychains().get(&Keychain::Internal).is_some() {
+    let internal_keychain = if graph
+        .index
+        .keychains()
+        .any(|(k, _)| *k == Keychain::Internal)
+    {
         Keychain::Internal
     } else {
         Keychain::External
     };

-    let ((change_index, change_script), change_changeset) =
-        graph.index.next_unused_spk(&internal_keychain);
+    let ((change_index, change_script), change_changeset) = graph
+        .index
+        .next_unused_spk(&internal_keychain)
+        .expect("Must exist");
     changeset.append(change_changeset);

     // Clone to drop the immutable reference.
@@ -266,8 +272,9 @@ where
         &graph
             .index
             .keychains()
-            .get(&internal_keychain)
+            .find(|(k, _)| *k == &internal_keychain)
             .expect("must exist")
+            .1
             .at_derivation_index(change_index)
             .expect("change_index can't be hardened"),
         &assets,
@@ -284,8 +291,9 @@ where
         min_drain_value: graph
             .index
             .keychains()
-            .get(&internal_keychain)
+            .find(|(k, _)| *k == &internal_keychain)
             .expect("must exist")
+            .1
             .dust_value(),
         ..CoinSelectorOpt::fund_outputs(
             &outputs,
@@ -416,7 +424,7 @@ pub fn planned_utxos<A: Anchor, O: ChainOracle, K: Clone + bdk_tmp_plan::CanDeri
     assets: &bdk_tmp_plan::Assets<K>,
 ) -> Result<Vec<PlannedUtxo<K, A>>, O::Error> {
     let chain_tip = chain.get_chain_tip()?;
-    let outpoints = graph.index.outpoints().iter().cloned();
+    let outpoints = graph.index.outpoints();
     graph
         .graph()
         .try_filter_chain_unspents(chain, chain_tip, outpoints)
@@ -428,8 +436,9 @@ pub fn planned_utxos<A: Anchor, O: ChainOracle, K: Clone + bdk_tmp_plan::CanDeri
             let desc = graph
                 .index
                 .keychains()
-                .get(&k)
+                .find(|(keychain, _)| *keychain == &k)
                 .expect("keychain must exist")
+                .1
                 .at_derivation_index(i)
                 .expect("i can't be hardened");
             let plan = bdk_tmp_plan::plan_satisfaction(&desc, assets)?;
@@ -465,7 +474,8 @@ where
         _ => unreachable!("only these two variants exist in match arm"),
     };

-    let ((spk_i, spk), index_changeset) = spk_chooser(index, &Keychain::External);
+    let ((spk_i, spk), index_changeset) =
+        spk_chooser(index, &Keychain::External).expect("Must exist");
     let db = &mut *db.lock().unwrap();
     db.stage_and_commit(C::from((
         local_chain::ChangeSet::default(),
@@ -517,7 +527,7 @@ where
     let balance = graph.graph().try_balance(
         chain,
         chain.get_chain_tip()?,
-        graph.index.outpoints().iter().cloned(),
+        graph.index.outpoints(),
         |(k, _), _| k == &Keychain::Internal,
     )?;

@@ -547,7 +557,7 @@ where
     let graph = &*graph.lock().unwrap();
     let chain = &*chain.lock().unwrap();
     let chain_tip = chain.get_chain_tip()?;
-    let outpoints = graph.index.outpoints().iter().cloned();
+    let outpoints = graph.index.outpoints();

     match txout_cmd {
         TxOutCmd::List {
@@ -695,9 +705,11 @@ where

     let mut index = KeychainTxOutIndex::<Keychain>::default();

+    // TODO: descriptors are already stored in the db, so we shouldn't re-insert
+    // them in the index here. However, the keymap is not stored in the database.
     let (descriptor, mut keymap) =
         Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, &args.descriptor)?;
-    index.add_keychain(Keychain::External, descriptor);
+    let _ = index.insert_descriptor(Keychain::External, descriptor);

     if let Some((internal_descriptor, internal_keymap)) = args
         .change_descriptor
@@ -706,7 +718,7 @@ where
         .transpose()?
     {
         keymap.extend(internal_keymap);
-        index.add_keychain(Keychain::Internal, internal_descriptor);
+        let _ = index.insert_descriptor(Keychain::Internal, internal_descriptor);
     }

     let mut db_backend = match Store::<C>::open_or_create_new(db_magic, &args.db_path) {
@@ -238,7 +238,7 @@ fn main() -> anyhow::Result<()> {
     let mut outpoints: Box<dyn Iterator<Item = OutPoint>> = Box::new(core::iter::empty());

     if utxos {
-        let init_outpoints = graph.index.outpoints().iter().cloned();
+        let init_outpoints = graph.index.outpoints();

         let utxos = graph
             .graph()
@@ -277,7 +277,7 @@ fn main() -> anyhow::Result<()> {
         // We want to search for whether the UTXO is spent, and spent by which
         // transaction. We provide the outpoint of the UTXO to
         // `EsploraExt::update_tx_graph_without_keychain`.
-        let init_outpoints = graph.index.outpoints().iter().cloned();
+        let init_outpoints = graph.index.outpoints();
         let utxos = graph
             .graph()
             .filter_chain_unspents(&*chain, local_tip.block_id(), init_outpoints)
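The example-code hunks above repeatedly swap `keychains().get(&k)` for `keychains().find(...).expect(...).1`, since `keychains()` now returns an iterator of `(&K, &Descriptor)` pairs rather than a `&BTreeMap`. A small sketch of that lookup pattern; `lookup_descriptor` is a hypothetical helper, not part of the bdk_chain API, and the bounds on `K` are assumptions.

```rust
use bdk_chain::keychain::KeychainTxOutIndex;
use miniscript::{Descriptor, DescriptorPublicKey};

// Hypothetical helper mirroring the `.keychains().find(..).expect(..).1` pattern above.
fn lookup_descriptor<'a, K: Clone + Ord + core::fmt::Debug>(
    index: &'a KeychainTxOutIndex<K>,
    keychain: &K,
) -> Option<&'a Descriptor<DescriptorPublicKey>> {
    index
        .keychains()
        .find(|(k, _)| *k == keychain)
        .map(|(_, descriptor)| descriptor)
}
```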