refactor(keychain): Fix KeychainTxOutIndex range queries
The underlying SpkTxOutIndex should not use DescriptorIds as its index keys, because that loses the ordering relationship between the spks that is needed for queries on subranges of keychains to work. Along with that, we now enforce a strict 1-to-1 relationship between descriptors and keychains; violating it now results in an error from insert_descriptor. In general I try to make the translation layer between the SpkTxOutIndex and the KeychainTxOutIndex thinner. The ergonomics of this will be improved in the next commit. The test from the previous commit passes.
parent 3b2ff0cc95
commit bc2a8be979
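As a rough usage sketch of how the reworked API is meant to be called (the keychain enum, the descriptor arguments, and the import paths below are illustrative assumptions; only the `insert_descriptor` error behaviour and the `(K, u32)`-keyed range queries come from the diff that follows):

use bdk_chain::keychain::{InsertDescriptorError, KeychainTxOutIndex};
use bdk_chain::miniscript::{Descriptor, DescriptorPublicKey};

#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
enum Keychain {
    External,
    Internal,
}

// `external` and `internal` are two distinct, already-parsed descriptors.
fn demo(
    external: Descriptor<DescriptorPublicKey>,
    internal: Descriptor<DescriptorPublicKey>,
) -> Result<(), InsertDescriptorError<Keychain>> {
    let mut index = KeychainTxOutIndex::<Keychain>::new(25);

    // One keychain <-> one descriptor; both calls return a ChangeSet on success.
    let _ = index.insert_descriptor(Keychain::External, external)?;
    let _ = index.insert_descriptor(Keychain::Internal, internal.clone())?;

    // Re-assigning a keychain to a different descriptor is rejected instead of
    // silently overwriting the old assignment.
    match index.insert_descriptor(Keychain::External, internal) {
        Err(InsertDescriptorError::KeychainAlreadyAssigned { keychain, .. }) => {
            assert_eq!(keychain, Keychain::External);
        }
        _ => panic!("expected a KeychainAlreadyAssigned error"),
    }

    // The inner SpkTxOutIndex is now keyed by (Keychain, u32), so queries over a
    // subrange of keychains preserve keychain/index order.
    for ((keychain, i), spk) in index.revealed_spks(Keychain::External..=Keychain::External) {
        println!("{keychain:?}/{i}: {spk}");
    }
    Ok(())
}

Keying the inner SpkTxOutIndex by (K, u32) keeps script pubkeys sorted by keychain first and derivation index second, which is what lets revealed_spks(range) and keychain_outpoints_in_range(range) operate on keychain subranges without translating through descriptor ids.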
@@ -1 +1,2 @@
 msrv="1.63.0"
+type-complexity-threshold = 275
@@ -5,7 +5,8 @@ use crate::{
     spk_iter::BIP32_MAX_INDEX,
     DescriptorExt, DescriptorId, SpkIterator, SpkTxOutIndex,
 };
-use bitcoin::{hashes::Hash, Amount, OutPoint, Script, SignedAmount, Transaction, TxOut, Txid};
+use alloc::{borrow::ToOwned, vec::Vec};
+use bitcoin::{Amount, OutPoint, Script, ScriptBuf, SignedAmount, Transaction, TxOut, Txid};
 use core::{
     fmt::Debug,
     ops::{Bound, RangeBounds},
@@ -50,9 +51,17 @@ impl<K: Ord> Append for ChangeSet<K> {
     /// For each `last_revealed` in the given [`ChangeSet`]:
     /// If the keychain already exists, increase the index when the other's index > self's index.
     fn append(&mut self, other: Self) {
-        // We use `extend` instead of `BTreeMap::append` due to performance issues with `append`.
-        // Refer to https://github.com/rust-lang/rust/issues/34666#issuecomment-675658420
-        self.keychains_added.extend(other.keychains_added);
+        for (new_keychain, new_descriptor) in other.keychains_added {
+            if !self.keychains_added.contains_key(&new_keychain)
+                // FIXME: very inefficient
+                && self
+                    .keychains_added
+                    .values()
+                    .all(|descriptor| descriptor != &new_descriptor)
+            {
+                self.keychains_added.insert(new_keychain, new_descriptor);
+            }
+        }

         // for `last_revealed`, entries of `other` will take precedence ONLY if it is greater than
         // what was originally in `self`.
@@ -127,7 +136,7 @@ const DEFAULT_LOOKAHEAD: u32 = 25;
 ///
 /// # Change sets
 ///
-/// Methods that can update the last revealed index or add keychains will return [`super::ChangeSet`] to report
+/// Methods that can update the last revealed index or add keychains will return [`ChangeSet`] to report
 /// these changes. This can be persisted for future recovery.
 ///
 /// ## Synopsis
@@ -206,22 +215,18 @@ const DEFAULT_LOOKAHEAD: u32 = 25;
 /// [`unused_spks`]: KeychainTxOutIndex::unused_spks
 #[derive(Clone, Debug)]
 pub struct KeychainTxOutIndex<K> {
-    inner: SpkTxOutIndex<(DescriptorId, u32)>,
+    inner: SpkTxOutIndex<(K, u32)>,
     // keychain -> (descriptor, descriptor id) map
-    keychains_to_descriptors: BTreeMap<K, (DescriptorId, Descriptor<DescriptorPublicKey>)>,
-    // descriptor id -> keychain set
-    // Because different keychains can have the same descriptor, we rank keychains by `Ord` so that
-    // that the first keychain variant (according to `Ord`) has the highest rank. When associated
-    // data (such as spks, outpoints) are returned with a keychain, we return the highest-ranked
-    // keychain with it.
-    descriptor_ids_to_keychain_set: HashMap<DescriptorId, BTreeSet<K>>,
+    keychains_to_descriptor_ids: BTreeMap<K, DescriptorId>,
+    // descriptor id -> keychain map
+    descriptor_ids_to_keychains: BTreeMap<DescriptorId, K>,
     // descriptor_id -> descriptor map
     // This is a "monotone" map, meaning that its size keeps growing, i.e., we never delete
     // descriptors from it. This is useful for revealing spks for descriptors that don't have
     // keychains associated.
     descriptor_ids_to_descriptors: BTreeMap<DescriptorId, Descriptor<DescriptorPublicKey>>,
     // last revealed indexes
-    last_revealed: BTreeMap<DescriptorId, u32>,
+    last_revealed: HashMap<DescriptorId, u32>,
     // lookahead settings for each keychain
     lookahead: u32,
 }
@@ -233,23 +238,26 @@ impl<K> Default for KeychainTxOutIndex<K> {
     }
 }

 impl<K: Clone + Ord + Debug> Indexer for KeychainTxOutIndex<K> {
-    type ChangeSet = super::ChangeSet<K>;
+    type ChangeSet = ChangeSet<K>;

     fn index_txout(&mut self, outpoint: OutPoint, txout: &TxOut) -> Self::ChangeSet {
-        match self.inner.scan_txout(outpoint, txout).cloned() {
-            Some((descriptor_id, index)) => {
-                // We want to reveal spks for descriptors that aren't tracked by any keychain, and
-                // so we call reveal with descriptor_id
-                let (_, changeset) = self.reveal_to_target_with_id(descriptor_id, index)
-                    .expect("descriptors are added in a monotone manner, there cannot be a descriptor id with no corresponding descriptor");
-                changeset
+        let mut changeset = ChangeSet::default();
+        if let Some((keychain, index)) = self.inner.scan_txout(outpoint, txout) {
+            let did = self
+                .keychains_to_descriptor_ids
+                .get(keychain)
+                .expect("invariant");
+            if self.last_revealed.get(did) < Some(index) {
+                self.last_revealed.insert(*did, *index);
+                changeset.last_revealed.insert(*did, *index);
+                self.replenish_lookahead_did(*did);
             }
-            None => super::ChangeSet::default(),
         }
+        changeset
     }

     fn index_tx(&mut self, tx: &bitcoin::Transaction) -> Self::ChangeSet {
-        let mut changeset = super::ChangeSet::<K>::default();
+        let mut changeset = ChangeSet::<K>::default();
         for (op, txout) in tx.output.iter().enumerate() {
             changeset.append(self.index_txout(OutPoint::new(tx.compute_txid(), op as u32), txout));
         }
@@ -257,12 +265,12 @@ impl<K: Clone + Ord + Debug> Indexer for KeychainTxOutIndex<K> {
     }

     fn initial_changeset(&self) -> Self::ChangeSet {
-        super::ChangeSet {
+        ChangeSet {
             keychains_added: self
                 .keychains()
                 .map(|(k, v)| (k.clone(), v.clone()))
                 .collect(),
-            last_revealed: self.last_revealed.clone(),
+            last_revealed: self.last_revealed.clone().into_iter().collect(),
         }
     }

@@ -289,10 +297,10 @@ impl<K> KeychainTxOutIndex<K> {
     pub fn new(lookahead: u32) -> Self {
         Self {
             inner: SpkTxOutIndex::default(),
-            keychains_to_descriptors: BTreeMap::new(),
-            descriptor_ids_to_keychain_set: HashMap::new(),
+            keychains_to_descriptor_ids: BTreeMap::new(),
             descriptor_ids_to_descriptors: BTreeMap::new(),
-            last_revealed: BTreeMap::new(),
+            descriptor_ids_to_keychains: Default::default(),
+            last_revealed: Default::default(),
             lookahead,
         }
     }
@@ -300,50 +308,30 @@ impl<K> KeychainTxOutIndex<K> {

 /// Methods that are *re-exposed* from the internal [`SpkTxOutIndex`].
 impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
-    /// Get the highest-ranked keychain that is currently associated with the given `desc_id`.
-    fn keychain_of_desc_id(&self, desc_id: &DescriptorId) -> Option<&K> {
-        let keychains = self.descriptor_ids_to_keychain_set.get(desc_id)?;
-        keychains.iter().next()
-    }
-
     /// Return a reference to the internal [`SpkTxOutIndex`].
     ///
     /// **WARNING:** The internal index will contain lookahead spks. Refer to
     /// [struct-level docs](KeychainTxOutIndex) for more about `lookahead`.
-    pub fn inner(&self) -> &SpkTxOutIndex<(DescriptorId, u32)> {
+    pub fn inner(&self) -> &SpkTxOutIndex<(K, u32)> {
         &self.inner
     }

     /// Get the set of indexed outpoints, corresponding to tracked keychains.
-    pub fn outpoints(&self) -> impl DoubleEndedIterator<Item = ((K, u32), OutPoint)> + '_ {
-        self.inner
-            .outpoints()
-            .iter()
-            .filter_map(|((desc_id, index), op)| {
-                let keychain = self.keychain_of_desc_id(desc_id)?;
-                Some(((keychain.clone(), *index), *op))
-            })
+    pub fn outpoints(&self) -> &BTreeSet<((K, u32), OutPoint)> {
+        self.inner.outpoints()
     }

     /// Iterate over known txouts that spend to tracked script pubkeys.
-    pub fn txouts(&self) -> impl DoubleEndedIterator<Item = (K, u32, OutPoint, &TxOut)> + '_ {
-        self.inner.txouts().filter_map(|((desc_id, i), op, txo)| {
-            let keychain = self.keychain_of_desc_id(desc_id)?;
-            Some((keychain.clone(), *i, op, txo))
-        })
+    pub fn txouts(&self) -> impl DoubleEndedIterator<Item = (&(K, u32), OutPoint, &TxOut)> + '_ {
+        self.inner.txouts()
     }

     /// Finds all txouts on a transaction that has previously been scanned and indexed.
     pub fn txouts_in_tx(
         &self,
         txid: Txid,
-    ) -> impl DoubleEndedIterator<Item = (K, u32, OutPoint, &TxOut)> {
-        self.inner
-            .txouts_in_tx(txid)
-            .filter_map(|((desc_id, i), op, txo)| {
-                let keychain = self.keychain_of_desc_id(desc_id)?;
-                Some((keychain.clone(), *i, op, txo))
-            })
+    ) -> impl DoubleEndedIterator<Item = (&(K, u32), OutPoint, &TxOut)> {
+        self.inner.txouts_in_tx(txid)
     }

     /// Return the [`TxOut`] of `outpoint` if it has been indexed, and if it corresponds to a
@@ -352,27 +340,22 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
     /// The associated keychain and keychain index of the txout's spk is also returned.
     ///
     /// This calls [`SpkTxOutIndex::txout`] internally.
-    pub fn txout(&self, outpoint: OutPoint) -> Option<(K, u32, &TxOut)> {
-        let ((descriptor_id, index), txo) = self.inner.txout(outpoint)?;
-        let keychain = self.keychain_of_desc_id(descriptor_id)?;
-        Some((keychain.clone(), *index, txo))
+    pub fn txout(&self, outpoint: OutPoint) -> Option<(&(K, u32), &TxOut)> {
+        self.inner.txout(outpoint)
     }

     /// Return the script that exists under the given `keychain`'s `index`.
     ///
     /// This calls [`SpkTxOutIndex::spk_at_index`] internally.
     pub fn spk_at_index(&self, keychain: K, index: u32) -> Option<&Script> {
-        let descriptor_id = self.keychains_to_descriptors.get(&keychain)?.0;
-        self.inner.spk_at_index(&(descriptor_id, index))
+        self.inner.spk_at_index(&(keychain.clone(), index))
     }

     /// Returns the keychain and keychain index associated with the spk.
     ///
     /// This calls [`SpkTxOutIndex::index_of_spk`] internally.
-    pub fn index_of_spk(&self, script: &Script) -> Option<(K, u32)> {
-        let (desc_id, last_index) = self.inner.index_of_spk(script)?;
-        let keychain = self.keychain_of_desc_id(desc_id)?;
-        Some((keychain.clone(), *last_index))
+    pub fn index_of_spk(&self, script: &Script) -> Option<&(K, u32)> {
+        self.inner.index_of_spk(script)
     }

     /// Returns whether the spk under the `keychain`'s `index` has been used.
@@ -382,11 +365,7 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
     ///
     /// This calls [`SpkTxOutIndex::is_used`] internally.
     pub fn is_used(&self, keychain: K, index: u32) -> bool {
-        let descriptor_id = self.keychains_to_descriptors.get(&keychain).map(|k| k.0);
-        match descriptor_id {
-            Some(descriptor_id) => self.inner.is_used(&(descriptor_id, index)),
-            None => false,
-        }
+        self.inner.is_used(&(keychain, index))
     }

     /// Marks the script pubkey at `index` as used even though the tracker hasn't seen an output
@@ -406,11 +385,7 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
     ///
     /// [`unmark_used`]: Self::unmark_used
     pub fn mark_used(&mut self, keychain: K, index: u32) -> bool {
-        let descriptor_id = self.keychains_to_descriptors.get(&keychain).map(|k| k.0);
-        match descriptor_id {
-            Some(descriptor_id) => self.inner.mark_used(&(descriptor_id, index)),
-            None => false,
-        }
+        self.inner.mark_used(&(keychain, index))
     }

     /// Undoes the effect of [`mark_used`]. Returns whether the `index` is inserted back into
@@ -423,11 +398,7 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
     ///
     /// [`mark_used`]: Self::mark_used
     pub fn unmark_used(&mut self, keychain: K, index: u32) -> bool {
-        let descriptor_id = self.keychains_to_descriptors.get(&keychain).map(|k| k.0);
-        match descriptor_id {
-            Some(descriptor_id) => self.inner.unmark_used(&(descriptor_id, index)),
-            None => false,
-        }
+        self.inner.unmark_used(&(keychain, index))
     }

     /// Computes the total value transfer effect `tx` has on the script pubkeys belonging to the
@@ -462,9 +433,14 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
         &self,
     ) -> impl DoubleEndedIterator<Item = (&K, &Descriptor<DescriptorPublicKey>)> + ExactSizeIterator + '_
     {
-        self.keychains_to_descriptors
-            .iter()
-            .map(|(k, (_, d))| (k, d))
+        self.keychains_to_descriptor_ids.iter().map(|(k, did)| {
+            (
+                k,
+                self.descriptor_ids_to_descriptors
+                    .get(did)
+                    .expect("invariant"),
+            )
+        })
     }

     /// Insert a descriptor with a keychain associated to it.
@@ -472,56 +448,67 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
     /// Adding a descriptor means you will be able to derive new script pubkeys under it
     /// and the txout index will discover transaction outputs with those script pubkeys.
     ///
-    /// When trying to add a keychain that already existed under a different descriptor, or a descriptor
-    /// that already existed with a different keychain, the old keychain (or descriptor) will be
-    /// overwritten.
+    /// keychain <-> descriptor is a one-to-one mapping -- in `--release` this method will ignore calls that try to
+    /// associate a keychain with the descriptor of another keychain or to re-assign the keychain to
+    /// new descriptor. If `debug_assertions` are enabled then it will panic.
     pub fn insert_descriptor(
         &mut self,
         keychain: K,
         descriptor: Descriptor<DescriptorPublicKey>,
-    ) -> super::ChangeSet<K> {
-        let mut changeset = super::ChangeSet::<K>::default();
+    ) -> Result<ChangeSet<K>, InsertDescriptorError<K>> {
+        let mut changeset = ChangeSet::<K>::default();
         let desc_id = descriptor.descriptor_id();
-
-        let old_desc = self
-            .keychains_to_descriptors
-            .insert(keychain.clone(), (desc_id, descriptor.clone()));
-
-        if let Some((old_desc_id, _)) = old_desc {
-            // nothing needs to be done if caller reinsterted the same descriptor under the same
-            // keychain
-            if old_desc_id == desc_id {
-                return changeset;
-            }
-            // we should remove old descriptor that is associated with this keychain as the index
-            // is designed to track one descriptor per keychain (however different keychains can
-            // share the same descriptor)
-            let _is_keychain_removed = self
-                .descriptor_ids_to_keychain_set
-                .get_mut(&old_desc_id)
-                .expect("we must have already inserted this descriptor")
-                .remove(&keychain);
-            debug_assert!(_is_keychain_removed);
+        if !self.keychains_to_descriptor_ids.contains_key(&keychain)
+            && !self.descriptor_ids_to_keychains.contains_key(&desc_id)
+        {
+            self.descriptor_ids_to_descriptors
+                .insert(desc_id, descriptor.clone());
+            self.keychains_to_descriptor_ids
+                .insert(keychain.clone(), desc_id);
+            self.descriptor_ids_to_keychains
+                .insert(desc_id, keychain.clone());
+            self.replenish_lookahead(&keychain, self.lookahead);
+            changeset
+                .keychains_added
+                .insert(keychain.clone(), descriptor);
+        } else {
+            if let Some(existing_desc_id) = self.keychains_to_descriptor_ids.get(&keychain) {
+                let descriptor = self
+                    .descriptor_ids_to_descriptors
+                    .get(existing_desc_id)
+                    .expect("invariant");
+                if *existing_desc_id != desc_id {
+                    return Err(InsertDescriptorError::KeychainAlreadyAssigned {
+                        existing_assignment: descriptor.clone(),
+                        keychain,
+                    });
+                }
+            }
+
+            if let Some(existing_keychain) = self.descriptor_ids_to_keychains.get(&desc_id) {
+                let descriptor = self
+                    .descriptor_ids_to_descriptors
+                    .get(&desc_id)
+                    .expect("invariant")
+                    .clone();
+
+                if *existing_keychain != keychain {
+                    return Err(InsertDescriptorError::DescriptorAlreadyAssigned {
+                        existing_assignment: existing_keychain.clone(),
+                        descriptor,
+                    });
+                }
+            }
         }

-        self.descriptor_ids_to_keychain_set
-            .entry(desc_id)
-            .or_default()
-            .insert(keychain.clone());
-        self.descriptor_ids_to_descriptors
-            .insert(desc_id, descriptor.clone());
-        self.replenish_lookahead(&keychain, self.lookahead);
-
-        changeset
-            .keychains_added
-            .insert(keychain.clone(), descriptor);
-        changeset
+        Ok(changeset)
     }

     /// Gets the descriptor associated with the keychain. Returns `None` if the keychain doesn't
     /// have a descriptor associated with it.
     pub fn get_descriptor(&self, keychain: &K) -> Option<&Descriptor<DescriptorPublicKey>> {
-        self.keychains_to_descriptors.get(keychain).map(|(_, d)| d)
+        let did = self.keychains_to_descriptor_ids.get(keychain)?;
+        self.descriptor_ids_to_descriptors.get(did)
     }

     /// Get the lookahead setting.
@@ -548,39 +535,44 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
         }
     }

-    fn replenish_lookahead(&mut self, keychain: &K, lookahead: u32) {
-        let descriptor_opt = self.keychains_to_descriptors.get(keychain).cloned();
-        if let Some((descriptor_id, descriptor)) = descriptor_opt {
-            let next_store_index = self.next_store_index(descriptor_id);
-            let next_reveal_index = self.last_revealed.get(&descriptor_id).map_or(0, |v| *v + 1);
+    fn replenish_lookahead_did(&mut self, did: DescriptorId) {
+        if let Some(keychain) = self.descriptor_ids_to_keychains.get(&did).cloned() {
+            self.replenish_lookahead(&keychain, self.lookahead);
+        }
+    }
+
+    fn replenish_lookahead(&mut self, keychain: &K, lookahead: u32) {
+        if let Some(did) = self.keychains_to_descriptor_ids.get(keychain) {
+            let descriptor = self
+                .descriptor_ids_to_descriptors
+                .get(did)
+                .expect("invariant");
+            let next_store_index = self
+                .inner
+                .all_spks()
+                .range(&(keychain.clone(), u32::MIN)..=&(keychain.clone(), u32::MAX))
+                .last()
+                .map_or(0, |((_, index), _)| *index + 1);
+            let next_reveal_index = self.last_revealed.get(did).map_or(0, |v| *v + 1);
             for (new_index, new_spk) in SpkIterator::new_with_range(
                 descriptor,
                 next_store_index..next_reveal_index + lookahead,
             ) {
-                let _inserted = self.inner.insert_spk((descriptor_id, new_index), new_spk);
+                let _inserted = self
+                    .inner
+                    .insert_spk((keychain.clone(), new_index), new_spk);
                 debug_assert!(_inserted, "replenish lookahead: must not have existing spk: keychain={:?}, lookahead={}, next_store_index={}, next_reveal_index={}", keychain, lookahead, next_store_index, next_reveal_index);
             }
         }
     }

-    fn next_store_index(&self, descriptor_id: DescriptorId) -> u32 {
-        self.inner()
-            .all_spks()
-            // This range is keeping only the spks with descriptor_id equal to
-            // `descriptor_id`. We don't use filter here as range is more optimized.
-            .range((descriptor_id, u32::MIN)..(descriptor_id, u32::MAX))
-            .last()
-            .map_or(0, |((_, index), _)| *index + 1)
-    }
-
     /// Get an unbounded spk iterator over a given `keychain`. Returns `None` if the provided
     /// keychain doesn't exist
     pub fn unbounded_spk_iter(
         &self,
         keychain: &K,
     ) -> Option<SpkIterator<Descriptor<DescriptorPublicKey>>> {
-        let descriptor = self.keychains_to_descriptors.get(keychain)?.1.clone();
+        let descriptor = self.get_descriptor(keychain)?.clone();
         Some(SpkIterator::new(descriptor))
     }

@@ -588,9 +580,19 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
     pub fn all_unbounded_spk_iters(
         &self,
     ) -> BTreeMap<K, SpkIterator<Descriptor<DescriptorPublicKey>>> {
-        self.keychains_to_descriptors
+        self.keychains_to_descriptor_ids
             .iter()
-            .map(|(k, (_, descriptor))| (k.clone(), SpkIterator::new(descriptor.clone())))
+            .map(|(k, did)| {
+                (
+                    k.clone(),
+                    SpkIterator::new(
+                        self.descriptor_ids_to_descriptors
+                            .get(did)
+                            .expect("invariant")
+                            .clone(),
+                    ),
+                )
+            })
             .collect()
     }

@@ -598,45 +600,65 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
     pub fn revealed_spks(
         &self,
         range: impl RangeBounds<K>,
-    ) -> impl DoubleEndedIterator<Item = (&K, u32, &Script)> + Clone {
-        self.keychains_to_descriptors
-            .range(range)
-            .flat_map(|(_, (descriptor_id, _))| {
-                let start = Bound::Included((*descriptor_id, u32::MIN));
-                let end = match self.last_revealed.get(descriptor_id) {
-                    Some(last_revealed) => Bound::Included((*descriptor_id, *last_revealed)),
-                    None => Bound::Excluded((*descriptor_id, u32::MIN)),
-                };
-
-                self.inner
-                    .all_spks()
-                    .range((start, end))
-                    .map(|((descriptor_id, i), spk)| {
-                        (
-                            self.keychain_of_desc_id(descriptor_id)
-                                .expect("must have keychain"),
-                            *i,
-                            spk.as_script(),
-                        )
-                    })
-            })
+    ) -> impl Iterator<Item = (&(K, u32), &Script)> {
+        let start = range.start_bound();
+        let end = range.end_bound();
+        let mut iter_last_revealed = self
+            .keychains_to_descriptor_ids
+            .range((start, end))
+            .map(|(k, did)| (k, self.last_revealed.get(did).cloned()));
+        let mut iter_spks = self
+            .inner
+            .all_spks()
+            .range(self.map_to_inner_bounds((start, end)));
+        let mut current_keychain = iter_last_revealed.next();
+        // The reason we need a tricky algorithm is because of the "lookahead" feature which means
+        // that some of the spks in the SpkTxoutIndex will not have been revealed yet. So we need to
+        // filter out those spks that are above the last_revealed for that keychain. To do this we
+        // iterate through the last_revealed for each keychain and the spks for each keychain in
+        // tandem. This minimizes BTreeMap queries.
+        core::iter::from_fn(move || loop {
+            let (path, spk) = iter_spks.next()?;
+            let (keychain, index) = path;
+            // We need to find the last revealed that matches the current spk we are considering so
+            // we skip ahead.
+            while current_keychain?.0 < keychain {
+                current_keychain = iter_last_revealed.next();
+            }
+            let (current_keychain, last_revealed) = current_keychain?;
+
+            if current_keychain == keychain && Some(*index) <= last_revealed {
+                break Some((path, spk.as_script()));
+            }
+        })
     }

-    /// Iterate over revealed spks of the given `keychain`.
+    /// Iterate over revealed spks of the given `keychain` with ascending indices.
+    ///
+    /// This is a double ended iterator so you can easily reverse it to get an iterator where
+    /// the script pubkeys that were most recently revealed are first.
     pub fn revealed_keychain_spks<'a>(
         &'a self,
         keychain: &'a K,
     ) -> impl DoubleEndedIterator<Item = (u32, &Script)> + 'a {
-        self.revealed_spks(keychain..=keychain)
-            .map(|(_, i, spk)| (i, spk))
+        let end = self
+            .last_revealed_index(keychain)
+            .map(|v| v + 1)
+            .unwrap_or(0);
+        self.inner
+            .all_spks()
+            .range((keychain.clone(), 0)..(keychain.clone(), end))
+            .map(|((_, index), spk)| (*index, spk.as_script()))
     }

     /// Iterate over revealed, but unused, spks of all keychains.
-    pub fn unused_spks(&self) -> impl DoubleEndedIterator<Item = (K, u32, &Script)> + Clone {
-        self.keychains_to_descriptors.keys().flat_map(|keychain| {
-            self.unused_keychain_spks(keychain)
-                .map(|(i, spk)| (keychain.clone(), i, spk))
-        })
+    pub fn unused_spks(&self) -> impl DoubleEndedIterator<Item = ((K, u32), &Script)> + Clone {
+        self.keychains_to_descriptor_ids
+            .keys()
+            .flat_map(|keychain| {
+                self.unused_keychain_spks(keychain)
+                    .map(|(i, spk)| ((keychain.clone(), i), spk))
+            })
     }

     /// Iterate over revealed, but unused, spks of the given `keychain`.
|
|||||||
&self,
|
&self,
|
||||||
keychain: &K,
|
keychain: &K,
|
||||||
) -> impl DoubleEndedIterator<Item = (u32, &Script)> + Clone {
|
) -> impl DoubleEndedIterator<Item = (u32, &Script)> + Clone {
|
||||||
let desc_id = self
|
let end = match self.keychains_to_descriptor_ids.get(keychain) {
|
||||||
.keychains_to_descriptors
|
Some(did) => self.last_revealed.get(did).map(|v| *v + 1).unwrap_or(0),
|
||||||
.get(keychain)
|
None => 0,
|
||||||
.map(|(desc_id, _)| *desc_id)
|
};
|
||||||
// We use a dummy desc id if we can't find the real one in our map. In this way,
|
|
||||||
// if this method was to be called with a non-existent keychain, we would return an
|
|
||||||
// empty iterator
|
|
||||||
.unwrap_or_else(|| DescriptorId::from_byte_array([0; 32]));
|
|
||||||
let next_i = self.last_revealed.get(&desc_id).map_or(0, |&i| i + 1);
|
|
||||||
self.inner
|
self.inner
|
||||||
.unused_spks((desc_id, u32::MIN)..(desc_id, next_i))
|
.unused_spks((keychain.clone(), 0)..(keychain.clone(), end))
|
||||||
.map(|((_, i), spk)| (*i, spk))
|
.map(|((_, i), spk)| (*i, spk))
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -672,8 +690,12 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
     ///
     /// Returns None if the provided `keychain` doesn't exist.
     pub fn next_index(&self, keychain: &K) -> Option<(u32, bool)> {
-        let (descriptor_id, descriptor) = self.keychains_to_descriptors.get(keychain)?;
-        let last_index = self.last_revealed.get(descriptor_id).cloned();
+        let did = self.keychains_to_descriptor_ids.get(keychain)?;
+        let last_index = self.last_revealed.get(did).cloned();
+        let descriptor = self
+            .descriptor_ids_to_descriptors
+            .get(did)
+            .expect("invariant");

         // we can only get the next index if the wildcard exists.
         let has_wildcard = descriptor.has_wildcard();
@@ -700,7 +722,7 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
         self.last_revealed
             .iter()
             .filter_map(|(desc_id, index)| {
-                let keychain = self.keychain_of_desc_id(desc_id)?;
+                let keychain = self.descriptor_ids_to_keychains.get(desc_id)?;
                 Some((keychain.clone(), *index))
             })
             .collect()
@@ -709,91 +731,21 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
     /// Get the last derivation index revealed for `keychain`. Returns None if the keychain doesn't
     /// exist, or if the keychain doesn't have any revealed scripts.
     pub fn last_revealed_index(&self, keychain: &K) -> Option<u32> {
-        let descriptor_id = self.keychains_to_descriptors.get(keychain)?.0;
-        self.last_revealed.get(&descriptor_id).cloned()
+        let descriptor_id = self.keychains_to_descriptor_ids.get(keychain)?;
+        self.last_revealed.get(descriptor_id).cloned()
     }

     /// Convenience method to call [`Self::reveal_to_target`] on multiple keychains.
-    pub fn reveal_to_target_multi(
-        &mut self,
-        keychains: &BTreeMap<K, u32>,
-    ) -> (
-        BTreeMap<K, SpkIterator<Descriptor<DescriptorPublicKey>>>,
-        super::ChangeSet<K>,
-    ) {
-        let mut changeset = super::ChangeSet::default();
-        let mut spks = BTreeMap::new();
-
+    pub fn reveal_to_target_multi(&mut self, keychains: &BTreeMap<K, u32>) -> ChangeSet<K> {
+        let mut changeset = ChangeSet::default();
         for (keychain, &index) in keychains {
-            if let Some((new_spks, new_changeset)) = self.reveal_to_target(keychain, index) {
-                if !new_changeset.is_empty() {
-                    spks.insert(keychain.clone(), new_spks);
-                    changeset.append(new_changeset.clone());
-                }
+            if let Some((_, new_changeset)) = self.reveal_to_target(keychain, index) {
+                changeset.append(new_changeset);
             }
         }

-        (spks, changeset)
-    }
-
-    /// Convenience method to call `reveal_to_target` with a descriptor_id instead of a keychain.
-    /// This is useful for revealing spks of descriptors for which we don't have a keychain
-    /// tracked.
-    /// Refer to the `reveal_to_target` documentation for more.
-    ///
-    /// Returns None if the provided `descriptor_id` doesn't correspond to a tracked descriptor.
-    fn reveal_to_target_with_id(
-        &mut self,
-        descriptor_id: DescriptorId,
-        target_index: u32,
-    ) -> Option<(
-        SpkIterator<Descriptor<DescriptorPublicKey>>,
-        super::ChangeSet<K>,
-    )> {
-        let descriptor = self
-            .descriptor_ids_to_descriptors
-            .get(&descriptor_id)?
-            .clone();
-        let has_wildcard = descriptor.has_wildcard();
-
-        let target_index = if has_wildcard { target_index } else { 0 };
-        let next_reveal_index = self
-            .last_revealed
-            .get(&descriptor_id)
-            .map_or(0, |index| *index + 1);
-
-        debug_assert!(next_reveal_index + self.lookahead >= self.next_store_index(descriptor_id));
-
-        // If the target_index is already revealed, we are done
-        if next_reveal_index > target_index {
-            return Some((
-                SpkIterator::new_with_range(descriptor, next_reveal_index..next_reveal_index),
-                super::ChangeSet::default(),
-            ));
-        }
-
-        // We range over the indexes that are not stored and insert their spks in the index.
-        // Indexes from next_reveal_index to next_reveal_index + lookahead are already stored (due
-        // to lookahead), so we only range from next_reveal_index + lookahead to target + lookahead
-        let range = next_reveal_index + self.lookahead..=target_index + self.lookahead;
-        for (new_index, new_spk) in SpkIterator::new_with_range(descriptor.clone(), range) {
-            let _inserted = self.inner.insert_spk((descriptor_id, new_index), new_spk);
-            debug_assert!(_inserted, "must not have existing spk");
-            debug_assert!(
-                has_wildcard || new_index == 0,
-                "non-wildcard descriptors must not iterate past index 0"
-            );
-        }
-
-        let _old_index = self.last_revealed.insert(descriptor_id, target_index);
-        debug_assert!(_old_index < Some(target_index));
-        Some((
-            SpkIterator::new_with_range(descriptor, next_reveal_index..target_index + 1),
-            super::ChangeSet {
-                keychains_added: BTreeMap::new(),
-                last_revealed: core::iter::once((descriptor_id, target_index)).collect(),
-            },
-        ))
+        changeset
     }

     /// Reveals script pubkeys of the `keychain`'s descriptor **up to and including** the
@@ -803,8 +755,8 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
     /// the `target_index` is in the hardened index range), this method will make a best-effort and
     /// reveal up to the last possible index.
     ///
-    /// This returns an iterator of newly revealed indices (alongside their scripts) and a
-    /// [`super::ChangeSet`], which reports updates to the latest revealed index. If no new script
+    /// This returns list of newly revealed indices (alongside their scripts) and a
+    /// [`ChangeSet`], which reports updates to the latest revealed index. If no new script
     /// pubkeys are revealed, then both of these will be empty.
     ///
     /// Returns None if the provided `keychain` doesn't exist.
@@ -812,40 +764,57 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
         &mut self,
         keychain: &K,
         target_index: u32,
-    ) -> Option<(
-        SpkIterator<Descriptor<DescriptorPublicKey>>,
-        super::ChangeSet<K>,
-    )> {
-        let descriptor_id = self.keychains_to_descriptors.get(keychain)?.0;
-        self.reveal_to_target_with_id(descriptor_id, target_index)
+    ) -> Option<(Vec<(u32, ScriptBuf)>, ChangeSet<K>)> {
+        let mut changeset = ChangeSet::default();
+        let mut spks: Vec<(u32, ScriptBuf)> = vec![];
+        while let Some((i, new)) = self.next_index(keychain) {
+            if !new || i > target_index {
+                break;
+            }
+            match self.reveal_next_spk(keychain) {
+                Some(((i, spk), change)) => {
+                    spks.push((i, spk.to_owned()));
+                    changeset.append(change);
+                }
+                None => break,
+            }
+        }
+
+        Some((spks, changeset))
     }

     /// Attempts to reveal the next script pubkey for `keychain`.
     ///
     /// Returns the derivation index of the revealed script pubkey, the revealed script pubkey and a
-    /// [`super::ChangeSet`] which represents changes in the last revealed index (if any).
+    /// [`ChangeSet`] which represents changes in the last revealed index (if any).
     /// Returns None if the provided keychain doesn't exist.
     ///
     /// When a new script cannot be revealed, we return the last revealed script and an empty
-    /// [`super::ChangeSet`]. There are two scenarios when a new script pubkey cannot be derived:
+    /// [`ChangeSet`]. There are two scenarios when a new script pubkey cannot be derived:
     ///
     /// 1. The descriptor has no wildcard and already has one script revealed.
     /// 2. The descriptor has already revealed scripts up to the numeric bound.
     /// 3. There is no descriptor associated with the given keychain.
-    pub fn reveal_next_spk(
-        &mut self,
-        keychain: &K,
-    ) -> Option<((u32, &Script), super::ChangeSet<K>)> {
-        let descriptor_id = self.keychains_to_descriptors.get(keychain)?.0;
-        let (next_index, _) = self.next_index(keychain).expect("We know keychain exists");
-        let changeset = self
-            .reveal_to_target(keychain, next_index)
-            .expect("We know keychain exists")
-            .1;
+    pub fn reveal_next_spk(&mut self, keychain: &K) -> Option<((u32, &Script), ChangeSet<K>)> {
+        let (next_index, new) = self.next_index(keychain)?;
+        let mut changeset = ChangeSet::default();
+        if new {
+            let did = self.keychains_to_descriptor_ids.get(keychain)?;
+            let descriptor = self.descriptor_ids_to_descriptors.get(did)?;
+            let spk = descriptor
+                .at_derivation_index(next_index)
+                .expect("already checked index is not too high")
+                .script_pubkey();
+            let _ = self.inner.insert_spk((keychain.clone(), next_index), spk);
+            self.last_revealed.insert(*did, next_index);
+            changeset.last_revealed.insert(*did, next_index);
+            self.replenish_lookahead(keychain, self.lookahead);
+        }
         let script = self
             .inner
-            .spk_at_index(&(descriptor_id, next_index))
-            .expect("script must already be stored");
+            .spk_at_index(&(keychain.clone(), next_index))
+            .expect("we just inserted it");
         Some(((next_index, script), changeset))
     }

@@ -859,10 +828,7 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
     /// returned.
     ///
     /// Returns None if the provided keychain doesn't exist.
-    pub fn next_unused_spk(
-        &mut self,
-        keychain: &K,
-    ) -> Option<((u32, &Script), super::ChangeSet<K>)> {
+    pub fn next_unused_spk(&mut self, keychain: &K) -> Option<((u32, &Script), ChangeSet<K>)> {
         let need_new = self.unused_keychain_spks(keychain).next().is_none();
         // this rather strange branch is needed because of some lifetime issues
         if need_new {
@@ -872,7 +838,7 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
                 self.unused_keychain_spks(keychain)
                     .next()
                     .expect("we already know next exists"),
-                super::ChangeSet::default(),
+                ChangeSet::default(),
             ))
         }
     }
@@ -884,43 +850,26 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
         keychain: &'a K,
     ) -> impl DoubleEndedIterator<Item = (u32, OutPoint)> + 'a {
         self.keychain_outpoints_in_range(keychain..=keychain)
-            .map(move |(_, i, op)| (i, op))
+            .map(|((_, i), op)| (*i, op))
     }

     /// Iterate over [`OutPoint`]s that have script pubkeys derived from keychains in `range`.
     pub fn keychain_outpoints_in_range<'a>(
         &'a self,
         range: impl RangeBounds<K> + 'a,
-    ) -> impl DoubleEndedIterator<Item = (&'a K, u32, OutPoint)> + 'a {
-        let bounds = self.map_to_inner_bounds(range);
-        self.inner
-            .outputs_in_range(bounds)
-            .map(move |((desc_id, i), op)| {
-                let keychain = self
-                    .keychain_of_desc_id(desc_id)
-                    .expect("keychain must exist");
-                (keychain, *i, op)
-            })
+    ) -> impl DoubleEndedIterator<Item = (&(K, u32), OutPoint)> + 'a {
+        self.inner.outputs_in_range(self.map_to_inner_bounds(range))
     }

-    fn map_to_inner_bounds(
-        &self,
-        bound: impl RangeBounds<K>,
-    ) -> impl RangeBounds<(DescriptorId, u32)> {
-        let get_desc_id = |keychain| {
-            self.keychains_to_descriptors
-                .get(keychain)
-                .map(|(desc_id, _)| *desc_id)
-                .unwrap_or_else(|| DescriptorId::from_byte_array([0; 32]))
-        };
+    fn map_to_inner_bounds(&self, bound: impl RangeBounds<K>) -> impl RangeBounds<(K, u32)> {
         let start = match bound.start_bound() {
-            Bound::Included(keychain) => Bound::Included((get_desc_id(keychain), u32::MIN)),
-            Bound::Excluded(keychain) => Bound::Excluded((get_desc_id(keychain), u32::MAX)),
+            Bound::Included(keychain) => Bound::Included((keychain.clone(), u32::MIN)),
+            Bound::Excluded(keychain) => Bound::Excluded((keychain.clone(), u32::MAX)),
             Bound::Unbounded => Bound::Unbounded,
         };
         let end = match bound.end_bound() {
-            Bound::Included(keychain) => Bound::Included((get_desc_id(keychain), u32::MAX)),
-            Bound::Excluded(keychain) => Bound::Excluded((get_desc_id(keychain), u32::MIN)),
+            Bound::Included(keychain) => Bound::Included((keychain.clone(), u32::MAX)),
+            Bound::Excluded(keychain) => Bound::Excluded((keychain.clone(), u32::MIN)),
             Bound::Unbounded => Bound::Unbounded,
         };

@@ -936,7 +885,7 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
     /// Returns the highest derivation index of each keychain that [`KeychainTxOutIndex`] has found
     /// a [`TxOut`] with it's script pubkey.
     pub fn last_used_indices(&self) -> BTreeMap<K, u32> {
-        self.keychains_to_descriptors
+        self.keychains_to_descriptor_ids
             .iter()
             .filter_map(|(keychain, _)| {
                 self.last_used_index(keychain)
@@ -951,7 +900,7 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
     /// - Adds new descriptors introduced
     /// - If a descriptor is introduced for a keychain that already had a descriptor, overwrites
    ///   the old descriptor
-    pub fn apply_changeset(&mut self, changeset: super::ChangeSet<K>) {
+    pub fn apply_changeset(&mut self, changeset: ChangeSet<K>) {
         let ChangeSet {
             keychains_added,
             last_revealed,
@@ -959,13 +908,61 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
         for (keychain, descriptor) in keychains_added {
             let _ = self.insert_descriptor(keychain, descriptor);
         }
-        let last_revealed = last_revealed
-            .into_iter()
-            .filter_map(|(desc_id, index)| {
-                let keychain = self.keychain_of_desc_id(&desc_id)?;
-                Some((keychain.clone(), index))
-            })
-            .collect();
-        let _ = self.reveal_to_target_multi(&last_revealed);
+
+        for (&desc_id, &index) in &last_revealed {
+            let v = self.last_revealed.entry(desc_id).or_default();
+            *v = index.max(*v);
+        }
+        for did in last_revealed.keys() {
+            self.replenish_lookahead_did(*did);
+        }
     }
 }
+
+#[derive(Clone, Debug, PartialEq)]
+/// Error returned from [`KeychainTxOutIndex::insert_descriptor`]
+pub enum InsertDescriptorError<K> {
+    /// The descriptor has already been assigned to a keychain so you can't assign it to another
+    DescriptorAlreadyAssigned {
+        /// The descriptor you have attempted to reassign
+        descriptor: Descriptor<DescriptorPublicKey>,
+        /// The keychain that the descriptor is already assigned to
+        existing_assignment: K,
+    },
+    /// The keychain is already assigned to a descriptor so you can't reassign it
+    KeychainAlreadyAssigned {
+        /// The keychain that you have attempted to reassign
+        keychain: K,
+        /// The descriptor that the keychain is already assigned to
+        existing_assignment: Descriptor<DescriptorPublicKey>,
+    },
+}
+
+impl<K: core::fmt::Debug> core::fmt::Display for InsertDescriptorError<K> {
+    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+        match self {
+            InsertDescriptorError::DescriptorAlreadyAssigned {
+                existing_assignment: existing,
+                descriptor,
+            } => {
+                write!(
+                    f,
+                    "attempt to re-assign descriptor {descriptor:?} already assigned to {existing:?}"
+                )
+            }
+            InsertDescriptorError::KeychainAlreadyAssigned {
+                existing_assignment: existing,
+                keychain,
+            } => {
+                write!(
+                    f,
+                    "attempt to re-assign keychain {keychain:?} already assigned to {existing:?}"
+                )
+            }
+        }
+    }
+}
+
+#[cfg(feature = "std")]
+impl<K: core::fmt::Debug> std::error::Error for InsertDescriptorError<K> {}
@@ -166,7 +166,7 @@ impl SyncRequest {
         self.chain_spks(
             index
                 .revealed_spks(spk_range)
-                .map(|(_, _, spk)| spk.to_owned())
+                .map(|(_, spk)| spk.to_owned())
                 .collect::<Vec<_>>(),
         )
     }
@@ -52,7 +52,7 @@ impl<I> Default for SpkTxOutIndex<I> {
     }
 }

-impl<I: Clone + Ord> Indexer for SpkTxOutIndex<I> {
+impl<I: Clone + Ord + core::fmt::Debug> Indexer for SpkTxOutIndex<I> {
     type ChangeSet = ();

     fn index_txout(&mut self, outpoint: OutPoint, txout: &TxOut) -> Self::ChangeSet {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<I: Clone + Ord> SpkTxOutIndex<I> {
|
impl<I: Clone + Ord + core::fmt::Debug> SpkTxOutIndex<I> {
|
||||||
/// Scans a transaction's outputs for matching script pubkeys.
|
/// Scans a transaction's outputs for matching script pubkeys.
|
||||||
///
|
///
|
||||||
/// Typically, this is used in two situations:
|
/// Typically, this is used in two situations:
|
||||||
|
@@ -10,7 +10,7 @@ use bdk_chain::{
     indexed_tx_graph::{self, IndexedTxGraph},
     keychain::{self, Balance, KeychainTxOutIndex},
     local_chain::LocalChain,
-    tx_graph, ChainPosition, ConfirmationHeightAnchor, DescriptorExt,
+    tx_graph, Append, ChainPosition, ConfirmationHeightAnchor, DescriptorExt,
 };
 use bitcoin::{
     secp256k1::Secp256k1, Amount, OutPoint, Script, ScriptBuf, Transaction, TxIn, TxOut,
|
|||||||
KeychainTxOutIndex::new(10),
|
KeychainTxOutIndex::new(10),
|
||||||
);
|
);
|
||||||
|
|
||||||
let _ = graph.index.insert_descriptor("keychain_1".into(), desc_1);
|
assert!(!graph
|
||||||
let _ = graph.index.insert_descriptor("keychain_2".into(), desc_2);
|
.index
|
||||||
|
.insert_descriptor("keychain_1".into(), desc_1)
|
||||||
|
.unwrap()
|
||||||
|
.is_empty());
|
||||||
|
assert!(!graph
|
||||||
|
.index
|
||||||
|
.insert_descriptor("keychain_2".into(), desc_2)
|
||||||
|
.unwrap()
|
||||||
|
.is_empty());
|
||||||
|
|
||||||
// Get trusted and untrusted addresses
|
// Get trusted and untrusted addresses
|
||||||
|
|
||||||
@ -257,18 +265,26 @@ fn test_list_owned_txouts() {
|
|||||||
.unwrap_or_else(|| panic!("block must exist at {}", height));
|
.unwrap_or_else(|| panic!("block must exist at {}", height));
|
||||||
let txouts = graph
|
let txouts = graph
|
||||||
.graph()
|
.graph()
|
||||||
.filter_chain_txouts(&local_chain, chain_tip, graph.index.outpoints())
|
.filter_chain_txouts(
|
||||||
|
&local_chain,
|
||||||
|
chain_tip,
|
||||||
|
graph.index.outpoints().iter().cloned(),
|
||||||
|
)
|
||||||
.collect::<Vec<_>>();
|
.collect::<Vec<_>>();
|
||||||
|
|
||||||
let utxos = graph
|
let utxos = graph
|
||||||
.graph()
|
.graph()
|
||||||
.filter_chain_unspents(&local_chain, chain_tip, graph.index.outpoints())
|
.filter_chain_unspents(
|
||||||
|
&local_chain,
|
||||||
|
chain_tip,
|
||||||
|
graph.index.outpoints().iter().cloned(),
|
||||||
|
)
|
||||||
.collect::<Vec<_>>();
|
.collect::<Vec<_>>();
|
||||||
|
|
||||||
let balance = graph.graph().balance(
|
let balance = graph.graph().balance(
|
||||||
&local_chain,
|
&local_chain,
|
||||||
chain_tip,
|
chain_tip,
|
||||||
graph.index.outpoints(),
|
graph.index.outpoints().iter().cloned(),
|
||||||
|_, spk: &Script| trusted_spks.contains(&spk.to_owned()),
|
|_, spk: &Script| trusted_spks.contains(&spk.to_owned()),
|
||||||
);
|
);
|
||||||
|
|
||||||
|
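This pattern repeats throughout the diff: `outpoints()` no longer hands out an owning iterator, so every site that feeds outpoints into `filter_chain_txouts`, `filter_chain_unspents` or `balance` now borrows the stored set and clones the items. A hedged sketch of the migration, assuming the stored entries are `((keychain, derivation_index), OutPoint)` pairs, which is what the destructuring elsewhere in this diff suggests:

    // Before: the indexed outpoints could be passed by value.
    // let utxos = graph.graph().filter_chain_unspents(&chain, tip, graph.index.outpoints());

    // After: borrow the indexed outpoints and clone each entry.
    let utxos = graph
        .graph()
        .filter_chain_unspents(&chain, tip, graph.index.outpoints().iter().cloned())
        .collect::<Vec<_>>();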
@@ -98,7 +98,7 @@ fn append_changesets_check_last_revealed() {
 }
 
 #[test]
-fn test_apply_changeset_with_different_descriptors_to_same_keychain() {
+fn when_apply_contradictory_changesets_they_are_ignored() {
     let external_descriptor = parse_descriptor(DESCRIPTORS[0]);
     let internal_descriptor = parse_descriptor(DESCRIPTORS[1]);
     let mut txout_index =
@@ -120,7 +120,7 @@ fn test_apply_changeset_with_different_descriptors_to_same_keychain() {
     assert_eq!(
         txout_index.keychains().collect::<Vec<_>>(),
         vec![
-            (&TestKeychain::External, &internal_descriptor),
+            (&TestKeychain::External, &external_descriptor),
             (&TestKeychain::Internal, &internal_descriptor)
         ]
     );
@@ -134,8 +134,8 @@ fn test_apply_changeset_with_different_descriptors_to_same_keychain() {
     assert_eq!(
         txout_index.keychains().collect::<Vec<_>>(),
         vec![
-            (&TestKeychain::External, &internal_descriptor),
-            (&TestKeychain::Internal, &external_descriptor)
+            (&TestKeychain::External, &external_descriptor),
+            (&TestKeychain::Internal, &internal_descriptor)
         ]
     );
 }
@@ -156,7 +156,7 @@ fn test_set_all_derivation_indices() {
     ]
     .into();
     assert_eq!(
-        txout_index.reveal_to_target_multi(&derive_to).1,
+        txout_index.reveal_to_target_multi(&derive_to),
         ChangeSet {
             keychains_added: BTreeMap::new(),
             last_revealed: last_revealed.clone()
@@ -164,7 +164,7 @@ fn test_set_all_derivation_indices() {
     );
     assert_eq!(txout_index.last_revealed_indices(), derive_to);
     assert_eq!(
-        txout_index.reveal_to_target_multi(&derive_to).1,
+        txout_index.reveal_to_target_multi(&derive_to),
        keychain::ChangeSet::default(),
         "no changes if we set to the same thing"
     );
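`reveal_to_target_multi` no longer returns a `(spks, changeset)` tuple; only the changeset comes back, which is why the `.1` projections disappear here and in the wallet and example hunks further down. A caller-side sketch mirroring the example code in this diff:

    // Before: let (_, keychain_changeset) = graph.index.reveal_to_target_multi(&keychain_update);

    // After: only the changeset is returned; fold it into the wider changeset as before.
    let keychain_changeset = graph.index.reveal_to_target_multi(&keychain_update);
    indexed_tx_graph_changeset.append(keychain_changeset.into());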
@@ -190,7 +190,7 @@ fn test_lookahead() {
             .reveal_to_target(&TestKeychain::External, index)
             .unwrap();
         assert_eq!(
-            revealed_spks.collect::<Vec<_>>(),
+            revealed_spks,
             vec![(index, spk_at_index(&external_descriptor, index))],
         );
         assert_eq!(
@@ -241,7 +241,7 @@ fn test_lookahead() {
         .reveal_to_target(&TestKeychain::Internal, 24)
         .unwrap();
     assert_eq!(
-        revealed_spks.collect::<Vec<_>>(),
+        revealed_spks,
         (0..=24)
             .map(|index| (index, spk_at_index(&internal_descriptor, index)))
             .collect::<Vec<_>>(),
@@ -511,7 +511,7 @@ fn test_non_wildcard_derivations() {
     let (revealed_spks, revealed_changeset) = txout_index
         .reveal_to_target(&TestKeychain::External, 200)
         .unwrap();
-    assert_eq!(revealed_spks.count(), 0);
+    assert_eq!(revealed_spks.len(), 0);
     assert!(revealed_changeset.is_empty());
 
     // we check that spks_of_keychain returns a SpkIterator with just one element
|
|||||||
|
|
||||||
let keychain_test_cases = [
|
let keychain_test_cases = [
|
||||||
(
|
(
|
||||||
external_descriptor.descriptor_id(),
|
|
||||||
TestKeychain::External,
|
TestKeychain::External,
|
||||||
t.external_last_revealed,
|
t.external_last_revealed,
|
||||||
t.external_target,
|
t.external_target,
|
||||||
),
|
),
|
||||||
(
|
(
|
||||||
internal_descriptor.descriptor_id(),
|
|
||||||
TestKeychain::Internal,
|
TestKeychain::Internal,
|
||||||
t.internal_last_revealed,
|
t.internal_last_revealed,
|
||||||
t.internal_target,
|
t.internal_target,
|
||||||
),
|
),
|
||||||
];
|
];
|
||||||
for (descriptor_id, keychain, last_revealed, target) in keychain_test_cases {
|
for (keychain, last_revealed, target) in keychain_test_cases {
|
||||||
if let Some(target) = target {
|
if let Some(target) = target {
|
||||||
let original_last_stored_index = match last_revealed {
|
let original_last_stored_index = match last_revealed {
|
||||||
Some(last_revealed) => Some(last_revealed + t.lookahead),
|
Some(last_revealed) => Some(last_revealed + t.lookahead),
|
||||||
@ -619,10 +617,10 @@ fn lookahead_to_target() {
|
|||||||
let keys = index
|
let keys = index
|
||||||
.inner()
|
.inner()
|
||||||
.all_spks()
|
.all_spks()
|
||||||
.range((descriptor_id, 0)..=(descriptor_id, u32::MAX))
|
.range((keychain.clone(), 0)..=(keychain.clone(), u32::MAX))
|
||||||
.map(|(k, _)| *k)
|
.map(|(k, _)| k.clone())
|
||||||
.collect::<Vec<_>>();
|
.collect::<Vec<_>>();
|
||||||
let exp_keys = core::iter::repeat(descriptor_id)
|
let exp_keys = core::iter::repeat(keychain)
|
||||||
.zip(0_u32..=exp_last_stored_index)
|
.zip(0_u32..=exp_last_stored_index)
|
||||||
.collect::<Vec<_>>();
|
.collect::<Vec<_>>();
|
||||||
assert_eq!(keys, exp_keys);
|
assert_eq!(keys, exp_keys);
|
||||||
@ -631,50 +629,6 @@ fn lookahead_to_target() {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// `::index_txout` should still index txouts with spks derived from descriptors without keychains.
|
|
||||||
/// This includes properly refilling the lookahead for said descriptors.
|
|
||||||
#[test]
|
|
||||||
fn index_txout_after_changing_descriptor_under_keychain() {
|
|
||||||
let secp = bdk_chain::bitcoin::secp256k1::Secp256k1::signing_only();
|
|
||||||
let (desc_a, _) = Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, DESCRIPTORS[0])
|
|
||||||
.expect("descriptor 0 must be valid");
|
|
||||||
let (desc_b, _) = Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, DESCRIPTORS[1])
|
|
||||||
.expect("descriptor 1 must be valid");
|
|
||||||
let desc_id_a = desc_a.descriptor_id();
|
|
||||||
|
|
||||||
let mut txout_index = bdk_chain::keychain::KeychainTxOutIndex::<()>::new(10);
|
|
||||||
|
|
||||||
// Introduce `desc_a` under keychain `()` and replace the descriptor.
|
|
||||||
let _ = txout_index.insert_descriptor((), desc_a.clone());
|
|
||||||
let _ = txout_index.insert_descriptor((), desc_b.clone());
|
|
||||||
|
|
||||||
// Loop through spks in intervals of `lookahead` to create outputs with. We should always be
|
|
||||||
// able to index these outputs if `lookahead` is respected.
|
|
||||||
let spk_indices = [9, 19, 29, 39];
|
|
||||||
for i in spk_indices {
|
|
||||||
let spk_at_index = desc_a
|
|
||||||
.at_derivation_index(i)
|
|
||||||
.expect("must derive")
|
|
||||||
.script_pubkey();
|
|
||||||
let index_changeset = txout_index.index_txout(
|
|
||||||
// Use spk derivation index as vout as we just want an unique outpoint.
|
|
||||||
OutPoint::new(h!("mock_tx"), i as _),
|
|
||||||
&TxOut {
|
|
||||||
value: Amount::from_sat(10_000),
|
|
||||||
script_pubkey: spk_at_index,
|
|
||||||
},
|
|
||||||
);
|
|
||||||
assert_eq!(
|
|
||||||
index_changeset,
|
|
||||||
bdk_chain::keychain::ChangeSet {
|
|
||||||
keychains_added: BTreeMap::default(),
|
|
||||||
last_revealed: [(desc_id_a, i)].into(),
|
|
||||||
},
|
|
||||||
"must always increase last active if impl respects lookahead"
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn insert_descriptor_no_change() {
|
fn insert_descriptor_no_change() {
|
||||||
let secp = Secp256k1::signing_only();
|
let secp = Secp256k1::signing_only();
|
||||||
@@ -683,19 +637,20 @@ fn insert_descriptor_no_change() {
     let mut txout_index = KeychainTxOutIndex::<()>::default();
     assert_eq!(
         txout_index.insert_descriptor((), desc.clone()),
-        keychain::ChangeSet {
+        Ok(keychain::ChangeSet {
             keychains_added: [((), desc.clone())].into(),
             last_revealed: Default::default()
-        },
+        }),
     );
     assert_eq!(
         txout_index.insert_descriptor((), desc.clone()),
-        keychain::ChangeSet::default(),
+        Ok(keychain::ChangeSet::default()),
         "inserting the same descriptor for keychain should return an empty changeset",
     );
 }
 
 #[test]
+#[cfg(not(debug_assertions))]
 fn applying_changesets_one_by_one_vs_aggregate_must_have_same_result() {
     let desc = parse_descriptor(DESCRIPTORS[0]);
     let changesets: &[ChangeSet<TestKeychain>] = &[
@@ -743,39 +698,25 @@ fn applying_changesets_one_by_one_vs_aggregate_must_have_same_result() {
     );
 }
 
-// When the same descriptor is associated with various keychains,
-// index methods only return the highest keychain by Ord
 #[test]
-fn test_only_highest_ord_keychain_is_returned() {
+fn assigning_same_descriptor_to_multiple_keychains_should_error() {
     let desc = parse_descriptor(DESCRIPTORS[0]);
 
     let mut indexer = KeychainTxOutIndex::<TestKeychain>::new(0);
     let _ = indexer.insert_descriptor(TestKeychain::Internal, desc.clone());
-    let _ = indexer.insert_descriptor(TestKeychain::External, desc);
-
-    // reveal_next_spk will work with either keychain
-    let spk0: ScriptBuf = indexer
-        .reveal_next_spk(&TestKeychain::External)
-        .unwrap()
-        .0
-        .1
-        .into();
-    let spk1: ScriptBuf = indexer
-        .reveal_next_spk(&TestKeychain::Internal)
-        .unwrap()
-        .0
-        .1
-        .into();
-
-    // index_of_spk will always return External
-    assert_eq!(
-        indexer.index_of_spk(&spk0),
-        Some((TestKeychain::External, 0))
-    );
-    assert_eq!(
-        indexer.index_of_spk(&spk1),
-        Some((TestKeychain::External, 1))
-    );
+    assert!(indexer
+        .insert_descriptor(TestKeychain::External, desc)
+        .is_err())
+}
+
+#[test]
+fn reassigning_keychain_to_a_new_descriptor_should_error() {
+    let desc1 = parse_descriptor(DESCRIPTORS[0]);
+    let desc2 = parse_descriptor(DESCRIPTORS[1]);
+    let mut indexer = KeychainTxOutIndex::<TestKeychain>::new(0);
+    let _ = indexer.insert_descriptor(TestKeychain::Internal, desc1);
+    assert!(indexer
+        .insert_descriptor(TestKeychain::Internal, desc2)
+        .is_err());
 }
 
 #[test]
@@ -786,27 +727,29 @@ fn when_querying_over_a_range_of_keychains_the_utxos_should_show_up() {
     for (i, descriptor) in DESCRIPTORS.iter().enumerate() {
         let descriptor = parse_descriptor(descriptor);
         let _ = indexer.insert_descriptor(i, descriptor.clone());
-        indexer.reveal_next_spk(&i);
+        if i != 4 {
+            // skip one in the middle to see if uncovers any bugs
+            indexer.reveal_next_spk(&i);
+        }
         tx.output.push(TxOut {
             script_pubkey: descriptor.at_derivation_index(0).unwrap().script_pubkey(),
             value: Amount::from_sat(10_000),
         });
     }
 
-    let _ = indexer.index_tx(&tx);
-    assert_eq!(indexer.outpoints().count(), DESCRIPTORS.len());
+    let n_spks = DESCRIPTORS.len() - /*we skipped one*/ 1;
 
-    assert_eq!(
-        indexer.revealed_spks(0..DESCRIPTORS.len()).count(),
-        DESCRIPTORS.len()
-    );
+    let _ = indexer.index_tx(&tx);
+    assert_eq!(indexer.outpoints().len(), n_spks);
+    assert_eq!(indexer.revealed_spks(0..DESCRIPTORS.len()).count(), n_spks);
     assert_eq!(indexer.revealed_spks(1..4).count(), 4 - 1);
     assert_eq!(
         indexer.net_value(&tx, 0..DESCRIPTORS.len()).to_sat(),
-        (10_000 * DESCRIPTORS.len()) as i64
+        (10_000 * n_spks) as i64
     );
     assert_eq!(
-        indexer.net_value(&tx, 3..5).to_sat(),
-        (10_000 * (5 - 3)) as i64
+        indexer.net_value(&tx, 3..6).to_sat(),
+        (10_000 * (6 - 3 - /*the skipped one*/ 1)) as i64
     );
 }
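This test exercises the behaviour the commit message describes: because keychains (plain `usize` indices in this test) are what orders the index, `revealed_spks` and `net_value` accept ordinary Rust ranges over keychains and only count entries whose keychain falls inside the range. A small usage sketch following the assertions above:

    // Spks revealed for keychains 1, 2 and 3 only.
    let middle_count = indexer.revealed_spks(1..4).count();

    // Net value contributed by outputs owned by keychains 3, 4 and 5.
    let tail_value = indexer.net_value(&tx, 3..6).to_sat();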
@@ -23,7 +23,6 @@ pub enum Error {
     HardenedDerivationXpub,
     /// The descriptor contains multipath keys
     MultiPath,
-
     /// Error thrown while working with [`keys`](crate::keys)
     Key(crate::keys::KeyError),
     /// Error while extracting and manipulating policies
@@ -772,7 +772,7 @@ impl Wallet {
         keychain: KeychainKind,
         index: u32,
     ) -> anyhow::Result<impl Iterator<Item = AddressInfo> + '_> {
-        let (spk_iter, index_changeset) = self
+        let (spks, index_changeset) = self
             .indexed_graph
             .index
             .reveal_to_target(&keychain, index)
@@ -781,7 +781,7 @@ impl Wallet {
         self.persist
             .stage_and_commit(indexed_tx_graph::ChangeSet::from(index_changeset).into())?;
 
-        Ok(spk_iter.map(move |(index, spk)| AddressInfo {
+        Ok(spks.into_iter().map(move |(index, spk)| AddressInfo {
             index,
             address: Address::from_script(&spk, self.network).expect("must have address form"),
             keychain,
@@ -861,7 +861,7 @@ impl Wallet {
     ///
     /// Will only return `Some(_)` if the wallet has given out the spk.
     pub fn derivation_of_spk(&self, spk: &Script) -> Option<(KeychainKind, u32)> {
-        self.indexed_graph.index.index_of_spk(spk)
+        self.indexed_graph.index.index_of_spk(spk).cloned()
     }
 
     /// Return the list of unspent outputs of this wallet
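`index_of_spk` now returns a borrowed `(keychain, index)` pair, which is why this method appends `.cloned()` and why the later hunks destructure with `Some(&(keychain, index))` or dereference with `*keychain`. A sketch of the two caller styles, using the wallet's `KeychainKind` as in the hunks around it:

    // Copy the pair out when an owned value is needed.
    let derivation: Option<(KeychainKind, u32)> = index.index_of_spk(&spk).cloned();

    // Or destructure by reference when the values are only inspected in place.
    if let Some(&(keychain, child)) = index.index_of_spk(&spk) {
        eprintln!("spk belongs to {:?} at derivation index {}", keychain, child);
    }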
@@ -871,7 +871,7 @@ impl Wallet {
             .filter_chain_unspents(
                 &self.chain,
                 self.chain.tip().block_id(),
-                self.indexed_graph.index.outpoints(),
+                self.indexed_graph.index.outpoints().iter().cloned(),
             )
             .map(|((k, i), full_txo)| new_local_utxo(k, i, full_txo))
     }
@@ -885,7 +885,7 @@ impl Wallet {
             .filter_chain_txouts(
                 &self.chain,
                 self.chain.tip().block_id(),
-                self.indexed_graph.index.outpoints(),
+                self.indexed_graph.index.outpoints().iter().cloned(),
             )
             .map(|((k, i), full_txo)| new_local_utxo(k, i, full_txo))
     }
@@ -932,7 +932,7 @@ impl Wallet {
     /// Returns the utxo owned by this wallet corresponding to `outpoint` if it exists in the
     /// wallet's database.
     pub fn get_utxo(&self, op: OutPoint) -> Option<LocalOutput> {
-        let (keychain, index, _) = self.indexed_graph.index.txout(op)?;
+        let (&(keychain, index), _) = self.indexed_graph.index.txout(op)?;
         self.indexed_graph
             .graph()
             .filter_chain_unspents(
@@ -1207,7 +1207,7 @@ impl Wallet {
         self.indexed_graph.graph().balance(
             &self.chain,
             self.chain.tip().block_id(),
-            self.indexed_graph.index.outpoints(),
+            self.indexed_graph.index.outpoints().iter().cloned(),
             |&(k, _), _| k == KeychainKind::Internal,
         )
     }
@@ -1699,7 +1699,7 @@ impl Wallet {
             .into();
 
         let weighted_utxo = match txout_index.index_of_spk(&txout.script_pubkey) {
-            Some((keychain, derivation_index)) => {
+            Some(&(keychain, derivation_index)) => {
                 let satisfaction_weight = self
                     .get_descriptor_for_keychain(keychain)
                     .max_weight_to_satisfy()
@@ -1744,7 +1744,7 @@ impl Wallet {
         for (index, txout) in tx.output.iter().enumerate() {
             let change_keychain = KeychainKind::Internal;
             match txout_index.index_of_spk(&txout.script_pubkey) {
-                Some((keychain, _)) if keychain == change_keychain => {
+                Some((keychain, _)) if *keychain == change_keychain => {
                     change_index = Some(index)
                 }
                 _ => {}
@@ -2015,13 +2015,13 @@ impl Wallet {
             if let Some((keychain, index)) = txout_index.index_of_spk(&txout.script_pubkey) {
                 // NOTE: unmark_used will **not** make something unused if it has actually been used
                 // by a tx in the tracker. It only removes the superficial marking.
-                txout_index.unmark_used(keychain, index);
+                txout_index.unmark_used(*keychain, *index);
             }
         }
     }
 
     fn get_descriptor_for_txout(&self, txout: &TxOut) -> Option<DerivedDescriptor> {
-        let (keychain, child) = self
+        let &(keychain, child) = self
             .indexed_graph
             .index
             .index_of_spk(&txout.script_pubkey)?;
@@ -2237,7 +2237,7 @@ impl Wallet {
     ) -> Result<psbt::Input, CreateTxError> {
         // Try to find the prev_script in our db to figure out if this is internal or external,
         // and the derivation index
-        let (keychain, child) = self
+        let &(keychain, child) = self
             .indexed_graph
             .index
             .index_of_spk(&utxo.txout.script_pubkey)
@@ -2285,7 +2285,7 @@ impl Wallet {
 
         // Try to figure out the keychain and derivation for every input and output
         for (is_input, index, out) in utxos.into_iter() {
-            if let Some((keychain, child)) =
+            if let Some(&(keychain, child)) =
                 self.indexed_graph.index.index_of_spk(&out.script_pubkey)
             {
                 let desc = self.get_descriptor_for_keychain(keychain);
@@ -2331,7 +2331,7 @@ impl Wallet {
             None => ChangeSet::default(),
         };
 
-        let (_, index_changeset) = self
+        let index_changeset = self
             .indexed_graph
             .index
             .reveal_to_target_multi(&update.last_active_indices);
@@ -2536,17 +2536,27 @@ fn create_signers<E: IntoWalletDescriptor>(
 ) -> Result<(Arc<SignersContainer>, Arc<SignersContainer>), DescriptorError> {
     let descriptor = into_wallet_descriptor_checked(descriptor, secp, network)?;
     let change_descriptor = into_wallet_descriptor_checked(change_descriptor, secp, network)?;
-    if descriptor.0 == change_descriptor.0 {
-        return Err(DescriptorError::ExternalAndInternalAreTheSame);
-    }
-
     let (descriptor, keymap) = descriptor;
     let signers = Arc::new(SignersContainer::build(keymap, &descriptor, secp));
-    let _ = index.insert_descriptor(KeychainKind::External, descriptor);
+    let _ = index
+        .insert_descriptor(KeychainKind::External, descriptor)
+        .expect("this is the first descriptor we're inserting");
 
     let (descriptor, keymap) = change_descriptor;
     let change_signers = Arc::new(SignersContainer::build(keymap, &descriptor, secp));
-    let _ = index.insert_descriptor(KeychainKind::Internal, descriptor);
+    let _ = index
+        .insert_descriptor(KeychainKind::Internal, descriptor)
+        .map_err(|e| {
+            use bdk_chain::keychain::InsertDescriptorError;
+            match e {
+                InsertDescriptorError::DescriptorAlreadyAssigned { .. } => {
+                    crate::descriptor::error::Error::ExternalAndInternalAreTheSame
+                }
+                InsertDescriptorError::KeychainAlreadyAssigned { .. } => {
+                    unreachable!("this is the first time we're assigning internal")
+                }
+            }
+        })?;
 
     Ok((signers, change_signers))
 }
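The duplicate-descriptor check that used to live in `create_signers` is now delegated to `insert_descriptor`: assigning the change descriptor fails with `InsertDescriptorError::DescriptorAlreadyAssigned` when it equals the external one, and that error is translated back into the wallet's existing `ExternalAndInternalAreTheSame` variant. A hedged sketch of handling the new error outside the wallet, with variant fields elided with `..` as in the hunk above:

    use bdk_chain::keychain::InsertDescriptorError;

    match index.insert_descriptor(KeychainKind::Internal, change_descriptor) {
        Ok(_changeset) => { /* persist the changeset as usual */ }
        Err(InsertDescriptorError::DescriptorAlreadyAssigned { .. }) => {
            // the same descriptor is already bound to another keychain
        }
        Err(InsertDescriptorError::KeychainAlreadyAssigned { .. }) => {
            // this keychain is already bound to a different descriptor
        }
    }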
@@ -212,7 +212,7 @@ fn main() -> anyhow::Result<()> {
         graph.graph().balance(
             &*chain,
             synced_to.block_id(),
-            graph.index.outpoints(),
+            graph.index.outpoints().iter().cloned(),
             |(k, _), _| k == &Keychain::Internal,
         )
     };
@@ -336,7 +336,7 @@ fn main() -> anyhow::Result<()> {
         graph.graph().balance(
             &*chain,
             synced_to.block_id(),
-            graph.index.outpoints(),
+            graph.index.outpoints().iter().cloned(),
             |(k, _), _| k == &Keychain::Internal,
         )
     };
@@ -427,7 +427,7 @@ pub fn planned_utxos<A: Anchor, O: ChainOracle, K: Clone + bdk_tmp_plan::CanDeri
     let outpoints = graph.index.outpoints();
     graph
         .graph()
-        .try_filter_chain_unspents(chain, chain_tip, outpoints)
+        .try_filter_chain_unspents(chain, chain_tip, outpoints.iter().cloned())
         .filter_map(|r| -> Option<Result<PlannedUtxo<K, A>, _>> {
             let (k, i, full_txo) = match r {
                 Err(err) => return Some(Err(err)),
@@ -527,7 +527,7 @@ where
     let balance = graph.graph().try_balance(
         chain,
         chain.get_chain_tip()?,
-        graph.index.outpoints(),
+        graph.index.outpoints().iter().cloned(),
         |(k, _), _| k == &Keychain::Internal,
     )?;
 
@@ -568,7 +568,7 @@ where
         } => {
             let txouts = graph
                 .graph()
-                .try_filter_chain_txouts(chain, chain_tip, outpoints)
+                .try_filter_chain_txouts(chain, chain_tip, outpoints.iter().cloned())
                 .filter(|r| match r {
                     Ok((_, full_txo)) => match (spent, unspent) {
                         (true, false) => full_txo.spent_by.is_some(),
@@ -228,9 +228,9 @@ fn main() -> anyhow::Result<()> {
         let all_spks = graph
             .index
             .revealed_spks(..)
-            .map(|(k, i, spk)| (k.to_owned(), i, spk.to_owned()))
+            .map(|(index, spk)| (index.to_owned(), spk.to_owned()))
             .collect::<Vec<_>>();
-        request = request.chain_spks(all_spks.into_iter().map(|(k, spk_i, spk)| {
+        request = request.chain_spks(all_spks.into_iter().map(|((k, spk_i), spk)| {
             eprint!("Scanning {}: {}", k, spk_i);
             spk
         }));
@@ -239,10 +239,10 @@ fn main() -> anyhow::Result<()> {
         let unused_spks = graph
             .index
             .unused_spks()
-            .map(|(k, i, spk)| (k, i, spk.to_owned()))
+            .map(|(index, spk)| (index.to_owned(), spk.to_owned()))
             .collect::<Vec<_>>();
         request =
-            request.chain_spks(unused_spks.into_iter().map(move |(k, spk_i, spk)| {
+            request.chain_spks(unused_spks.into_iter().map(move |((k, spk_i), spk)| {
                 eprint!(
                     "Checking if address {} {}:{} has been used",
                     Address::from_script(&spk, args.network).unwrap(),
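The closure signatures change because `revealed_spks` and `unused_spks` now yield `((keychain, derivation_index), spk)` pairs instead of flat three-element tuples. A sketch of iterating the new shape, with the `Keychain` enum and its `Display` use taken from the example code around this hunk:

    for ((k, i), _spk) in graph.index.unused_spks() {
        eprintln!("unused spk for keychain {} at derivation index {}", k, i);
    }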
@@ -258,7 +258,11 @@ fn main() -> anyhow::Result<()> {
 
         let utxos = graph
             .graph()
-            .filter_chain_unspents(&*chain, chain_tip.block_id(), init_outpoints)
+            .filter_chain_unspents(
+                &*chain,
+                chain_tip.block_id(),
+                init_outpoints.iter().cloned(),
+            )
             .map(|(_, utxo)| utxo)
             .collect::<Vec<_>>();
         request = request.chain_outpoints(utxos.into_iter().map(|utxo| {
@@ -338,7 +342,7 @@ fn main() -> anyhow::Result<()> {
     let mut indexed_tx_graph_changeset =
         indexed_tx_graph::ChangeSet::<ConfirmationHeightAnchor, _>::default();
     if let Some(keychain_update) = keychain_update {
-        let (_, keychain_changeset) = graph.index.reveal_to_target_multi(&keychain_update);
+        let keychain_changeset = graph.index.reveal_to_target_multi(&keychain_update);
         indexed_tx_graph_changeset.append(keychain_changeset.into());
     }
     indexed_tx_graph_changeset.append(graph.apply_update(graph_update));
@@ -204,7 +204,7 @@ fn main() -> anyhow::Result<()> {
     // addresses derived so we need to derive up to last active addresses the scan found
     // before adding the transactions.
     (chain.apply_update(update.chain_update)?, {
-        let (_, index_changeset) = graph
+        let index_changeset = graph
            .index
             .reveal_to_target_multi(&update.last_active_indices);
         let mut indexed_tx_graph_changeset = graph.apply_update(update.graph_update);
@@ -245,7 +245,7 @@ fn main() -> anyhow::Result<()> {
         let all_spks = graph
             .index
             .revealed_spks(..)
-            .map(|(k, i, spk)| (k.to_owned(), i, spk.to_owned()))
+            .map(|((k, i), spk)| (k.to_owned(), *i, spk.to_owned()))
             .collect::<Vec<_>>();
         request = request.chain_spks(all_spks.into_iter().map(|(k, i, spk)| {
             eprint!("scanning {}:{}", k, i);
@@ -258,10 +258,10 @@ fn main() -> anyhow::Result<()> {
         let unused_spks = graph
             .index
             .unused_spks()
-            .map(|(k, i, spk)| (k, i, spk.to_owned()))
+            .map(|(index, spk)| (index.to_owned(), spk.to_owned()))
             .collect::<Vec<_>>();
         request =
-            request.chain_spks(unused_spks.into_iter().map(move |(k, i, spk)| {
+            request.chain_spks(unused_spks.into_iter().map(move |((k, i), spk)| {
                 eprint!(
                     "Checking if address {} {}:{} has been used",
                     Address::from_script(&spk, args.network).unwrap(),
@@ -280,7 +280,11 @@ fn main() -> anyhow::Result<()> {
         let init_outpoints = graph.index.outpoints();
         let utxos = graph
             .graph()
-            .filter_chain_unspents(&*chain, local_tip.block_id(), init_outpoints)
+            .filter_chain_unspents(
+                &*chain,
+                local_tip.block_id(),
+                init_outpoints.iter().cloned(),
+            )
             .map(|(_, utxo)| utxo)
             .collect::<Vec<_>>();
         request = request.chain_outpoints(