Merge bitcoindevkit/bdk#1128: feat: add bdk_sqlite crate implementing PersistBackend

475c5024ecc5d288f35fabb8fab2b62dbf67653b feat(sqlite): add bdk_sqlite crate implementing PersistBackend backed by a SQLite database (Steve Myers)
b8aa76cd050bc3c2eac776ecbd9b4e2ab3e2590e feat(wallet): use the new `CombinedChangeSet` of `bdk_persist` (志宇)
0958ff56b20c1325a7da5d5f21e7ac836c601f1f feat(persist): introduce `CombinedChangeSet` (志宇)
54942a902d3203ee59731f35d2a1c402ac626878 ci: bump build_docs rust version to nightly-2024-05-12 (Steve Myers)
d975a48e7c519a703441e74058321021957b177f docs: update README MSRV pinning to match CI (Steve Myers)

Pull request description:

  ### Description

  Add "bdk_sqlite_store" crate implementing `PersistBackend` backed by a SQLite database.

  ### Notes to the reviewers

  In addition to adding a SQLite-based `PersistBackend`, this PR also:

  * adds `CombinedChangeSet` to `bdk_persist` and updates the `wallet` crate to use it.
  * updates `wallet/tests` to also use the new SQLite store crate.
  * updates `example-crates/wallet_esplora_async` to use the new SQLite store crate (a usage sketch follows below).
  * fixes out-of-date README instructions for the MSRV.
  * bumps the CI `build_docs` job to Rust `nightly-2024-05-12`.
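
  A minimal usage sketch of the new store driving a `Wallet`, loosely based on the updated `wallet_esplora_async` example in this PR (the descriptors and database path are placeholders):

  ```rust
  use bdk_sqlite::{rusqlite::Connection, Store};
  use bdk_wallet::{bitcoin::Network, KeychainKind, Wallet};

  fn main() -> anyhow::Result<()> {
      // Test descriptors copied from the example crate; replace with your own.
      let external_descriptor = "wpkh(tprv8ZgxMBicQKsPdy6LMhUtFHAgpocR8GC6QmwMSFpZs7h6Eziw3SpThFfczTDh5rW2krkqffa11UpX3XkeTTB2FvzZKWXqPY54Y6Rq4AQ5R8L/84'/1'/0'/0/*)";
      let internal_descriptor = "wpkh(tprv8ZgxMBicQKsPdy6LMhUtFHAgpocR8GC6QmwMSFpZs7h6Eziw3SpThFfczTDh5rW2krkqffa11UpX3XkeTTB2FvzZKWXqPY54Y6Rq4AQ5R8L/84'/1'/0'/1/*)";

      // `Store::new` takes a rusqlite `Connection` and runs the schema migrations.
      let conn = Connection::open("bdk-example.sqlite")?;
      let db = Store::new(conn)?;

      // The wallet persists its `ChangeSet` (now a `CombinedChangeSet`) through the store.
      let mut wallet = Wallet::new_or_load(
          external_descriptor,
          Some(internal_descriptor),
          db,
          Network::Signet,
      )?;
      let address = wallet.next_unused_address(KeychainKind::External)?;
      println!("next unused address: {}", address.address);
      Ok(())
  }
  ```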

  ### Changelog notice

  Changed

  - Update `Wallet` to use `CombinedChangeSet` for persistence.

  Added

  - Add `CombinedChangeSet` to the `bdk_persist` crate.
  - Add `bdk_sqlite` crate implementing SQLite-based `PersistBackend` storage for `CombinedChangeSet`.

  ### Checklists

  #### All Submissions:

  * [x] I've signed all my commits
  * [x] I followed the [contribution guidelines](https://github.com/bitcoindevkit/bdk/blob/master/CONTRIBUTING.md)
  * [x] I ran `cargo fmt` and `cargo clippy` before committing

  #### New Features:

  * [x] I've added tests for the new feature
  * [x] I've added docs for the new feature

ACKs for top commit:
  evanlinjin:
    ACK 475c5024ecc5d288f35fabb8fab2b62dbf67653b

Tree-SHA512: 72565127994fbfff34d7359e1d36d6f13e552995bb7ea61dd38ba5329b0d15e500fa106214a83fd870f1017dc659ff1bb8fc71bc6aad0a793730ec7f23179d8c
志宇 2024-05-23 22:33:05 +08:00
commit 2eea0f4e90
24 changed files with 1626 additions and 584 deletions


@ -10,15 +10,13 @@ jobs:
- name: Checkout sources
uses: actions/checkout@v2
- name: Set default toolchain
run: rustup default nightly-2022-12-14
run: rustup default nightly-2024-05-12
- name: Set profile
run: rustup set profile minimal
- name: Update toolchain
run: rustup update
- name: Rust Cache
uses: Swatinem/rust-cache@v2.2.1
- name: Pin dependencies for MSRV
run: cargo update -p home --precise "0.5.5"
- name: Build docs
run: cargo doc --no-deps
env:

.gitignore

@ -7,3 +7,4 @@ Cargo.lock
# Example persisted files.
*.db
*.sqlite*


@ -4,6 +4,7 @@ members = [
"crates/wallet",
"crates/chain",
"crates/file_store",
"crates/sqlite",
"crates/electrum",
"crates/esplora",
"crates/bitcoind_rpc",


@ -68,15 +68,9 @@ This library should compile with any combination of features with Rust 1.63.0.
To build with the MSRV you will need to pin dependencies as follows:
```shell
# zip 0.6.3 has MSRV 1.64.0
cargo update -p zip --precise "0.6.2"
# time 0.3.21 has MSRV 1.65.0
cargo update -p zstd-sys --precise "2.0.8+zstd.1.5.5"
cargo update -p time --precise "0.3.20"
# jobserver 0.1.27 has MSRV 1.66.0
cargo update -p jobserver --precise "0.1.26"
# home 0.5.9 has MSRV 1.70.0
cargo update -p home --precise "0.5.5"
# proptest 1.4.0 has MSRV 1.65.0
cargo update -p proptest --precise "1.2.0"
```


@ -19,7 +19,7 @@ bitcoincore-rpc = { version = "0.18" }
bdk_chain = { path = "../chain", version = "0.14", default-features = false }
[dev-dependencies]
bdk_testenv = { path = "../testenv", default_features = false }
bdk_testenv = { path = "../testenv", default-features = false }
[features]
default = ["std"]


@ -58,9 +58,6 @@ extern crate alloc;
#[cfg(feature = "serde")]
pub extern crate serde_crate as serde;
#[cfg(feature = "bincode")]
extern crate bincode;
#[cfg(feature = "std")]
#[macro_use]
extern crate std;


@ -22,7 +22,7 @@ bitcoin = { version = "0.31.0", optional = true, default-features = false }
miniscript = { version = "11.0.0", optional = true, default-features = false }
[dev-dependencies]
bdk_testenv = { path = "../testenv", default_features = false }
bdk_testenv = { path = "../testenv", default-features = false }
tokio = { version = "1", features = ["rt", "rt-multi-thread", "macros"] }
[features]


@ -1,10 +1,7 @@
# BDK File Store
This is a simple append-only flat file implementation of
[`PersistBackend`](bdk_persist::PersistBackend).
This is a simple append-only flat file implementation of [`PersistBackend`](bdk_persist::PersistBackend).
The main structure is [`Store`](crate::Store), which can be used with [`bdk`]'s
`Wallet` to persist wallet data into a flat file.
The main structure is [`Store`] which works with any [`bdk_chain`] based changesets to persist data into a flat file.
[`bdk`]: https://docs.rs/bdk/latest
[`bdk_persist`]: https://docs.rs/bdk_persist/latest
[`bdk_chain`]:https://docs.rs/bdk_chain/latest/bdk_chain/


@ -17,6 +17,6 @@ anyhow = { version = "1", default-features = false }
bdk_chain = { path = "../chain", version = "0.14.0", default-features = false }
[features]
default = ["bdk_chain/std"]
default = ["bdk_chain/std", "miniscript"]
serde = ["bdk_chain/serde"]
miniscript = ["bdk_chain/miniscript"]


@ -1,3 +1,5 @@
# BDK Persist
This crate is home to the [`PersistBackend`](crate::PersistBackend) trait which defines the behavior of a database to perform the task of persisting changes made to BDK data structures. The [`Persist`](crate::Persist) type provides a convenient wrapper around a `PersistBackend` that allows staging changes before committing them.
This crate is home to the [`PersistBackend`] trait which defines the behavior of a database to perform the task of persisting changes made to BDK data structures.
The [`Persist`] type provides a convenient wrapper around a [`PersistBackend`] that allows staging changes before committing them.
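
For context, a minimal sketch of what a backend has to provide (the in-memory type here is hypothetical and only for illustration; the two trait methods are the same ones the new SQLite `Store` implements later in this PR):

```rust
use bdk_chain::Append;
use bdk_persist::PersistBackend;

/// Hypothetical in-memory backend, for illustration only.
struct MemoryBackend<C>(Option<C>);

impl<C> PersistBackend<C> for MemoryBackend<C>
where
    C: Append + Clone,
{
    fn write_changes(&mut self, changeset: &C) -> anyhow::Result<()> {
        // Aggregate each written changeset into whatever is already "persisted".
        match &mut self.0 {
            Some(aggregate) => aggregate.append(changeset.clone()),
            None => self.0 = Some(changeset.clone()),
        }
        Ok(())
    }

    fn load_from_persistence(&mut self) -> anyhow::Result<Option<C>> {
        Ok(self.0.clone())
    }
}
```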


@ -0,0 +1,73 @@
#![cfg(feature = "miniscript")]
use bdk_chain::{bitcoin::Network, indexed_tx_graph, keychain, local_chain, Anchor, Append};
/// Changes from a combination of [`bdk_chain`] structures.
#[derive(Debug, Clone, PartialEq)]
#[cfg_attr(
feature = "serde",
derive(bdk_chain::serde::Deserialize, bdk_chain::serde::Serialize),
serde(
crate = "bdk_chain::serde",
bound(
deserialize = "A: Ord + bdk_chain::serde::Deserialize<'de>, K: Ord + bdk_chain::serde::Deserialize<'de>",
serialize = "A: Ord + bdk_chain::serde::Serialize, K: Ord + bdk_chain::serde::Serialize",
),
)
)]
pub struct CombinedChangeSet<K, A> {
/// Changes to the [`LocalChain`](local_chain::LocalChain).
pub chain: local_chain::ChangeSet,
/// Changes to [`IndexedTxGraph`](indexed_tx_graph::IndexedTxGraph).
pub indexed_tx_graph: indexed_tx_graph::ChangeSet<A, keychain::ChangeSet<K>>,
/// Stores the network type of the transaction data.
pub network: Option<Network>,
}
impl<K, A> Default for CombinedChangeSet<K, A> {
fn default() -> Self {
Self {
chain: Default::default(),
indexed_tx_graph: Default::default(),
network: None,
}
}
}
impl<K: Ord, A: Anchor> Append for CombinedChangeSet<K, A> {
fn append(&mut self, other: Self) {
Append::append(&mut self.chain, other.chain);
Append::append(&mut self.indexed_tx_graph, other.indexed_tx_graph);
if other.network.is_some() {
debug_assert!(
self.network.is_none() || self.network == other.network,
"network type must either be just introduced or remain the same"
);
self.network = other.network;
}
}
fn is_empty(&self) -> bool {
self.chain.is_empty() && self.indexed_tx_graph.is_empty() && self.network.is_none()
}
}
impl<K, A> From<local_chain::ChangeSet> for CombinedChangeSet<K, A> {
fn from(chain: local_chain::ChangeSet) -> Self {
Self {
chain,
..Default::default()
}
}
}
impl<K, A> From<indexed_tx_graph::ChangeSet<A, keychain::ChangeSet<K>>>
for CombinedChangeSet<K, A>
{
fn from(indexed_tx_graph: indexed_tx_graph::ChangeSet<A, keychain::ChangeSet<K>>) -> Self {
Self {
indexed_tx_graph,
..Default::default()
}
}
}
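
A small sketch of the `Append` semantics defined above (hypothetical values; `KeychainKind` and `ConfirmationTimeHeightAnchor` stand in for `K` and `A`, as they do for the wallet's `ChangeSet` later in this PR):

```rust
use bdk_chain::{bitcoin::Network, local_chain, Append, ConfirmationTimeHeightAnchor};
use bdk_persist::CombinedChangeSet;
use bdk_wallet::KeychainKind;

fn main() {
    // The first changeset introduces the network.
    let mut aggregate: CombinedChangeSet<KeychainKind, ConfirmationTimeHeightAnchor> =
        CombinedChangeSet {
            network: Some(Network::Signet),
            ..Default::default()
        };

    // A later changeset only touches the local chain; appending it keeps the network.
    let later: CombinedChangeSet<KeychainKind, ConfirmationTimeHeightAnchor> =
        local_chain::ChangeSet::default().into();
    aggregate.append(later);

    assert_eq!(aggregate.network, Some(Network::Signet));
    assert!(aggregate.chain.is_empty());
}
```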


@ -1,5 +1,8 @@
#![doc = include_str!("../README.md")]
#![no_std]
#![warn(missing_docs)]
mod changeset;
mod persist;
pub use changeset::*;
pub use persist::*;

crates/sqlite/Cargo.toml

@ -0,0 +1,19 @@
[package]
name = "bdk_sqlite"
version = "0.1.0"
edition = "2021"
license = "MIT OR Apache-2.0"
repository = "https://github.com/bitcoindevkit/bdk"
documentation = "https://docs.rs/bdk_sqlite"
description = "A simple SQLite based implementation of Persist for Bitcoin Dev Kit."
keywords = ["bitcoin", "persist", "persistence", "bdk", "sqlite"]
authors = ["Bitcoin Dev Kit Developers"]
readme = "README.md"
[dependencies]
anyhow = { version = "1", default-features = false }
bdk_chain = { path = "../chain", version = "0.14.0", features = ["serde", "miniscript"] }
bdk_persist = { path = "../persist", version = "0.2.0", features = ["serde"] }
rusqlite = { version = "0.31.0", features = ["bundled"] }
serde = { version = "1", features = ["derive"] }
serde_json = "1"

crates/sqlite/README.md

@ -0,0 +1,8 @@
# BDK SQLite
This is a simple [SQLite] relational database schema backed implementation of [`PersistBackend`](bdk_persist::PersistBackend).
The main structure is `Store` which persists [`bdk_persist`] `CombinedChangeSet` data into a SQLite database file.
[`bdk_persist`]:https://docs.rs/bdk_persist/latest/bdk_persist/
[SQLite]: https://www.sqlite.org/index.html
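
A sketch of using the store directly through `PersistBackend` (in-memory connection; `KeychainKind` and `ConfirmationTimeHeightAnchor` match the wallet's `ChangeSet` parameters; an empty changeset is used only to keep the example self-contained):

```rust
use bdk_chain::ConfirmationTimeHeightAnchor;
use bdk_persist::{CombinedChangeSet, PersistBackend};
use bdk_sqlite::{rusqlite::Connection, Store};
use bdk_wallet::KeychainKind;

type ChangeSet = CombinedChangeSet<KeychainKind, ConfirmationTimeHeightAnchor>;

fn main() -> anyhow::Result<()> {
    // An in-memory database; `Store::new` applies the schema migrations.
    let conn = Connection::open_in_memory()?;
    let mut store: Store<KeychainKind, ConfirmationTimeHeightAnchor> = Store::new(conn)?;

    // Writing an empty changeset is a no-op; a wallet normally stages real changes.
    store.write_changes(&ChangeSet::default())?;

    // Reading back aggregates everything persisted so far (`None` for an empty database).
    let loaded: Option<ChangeSet> = store.load_from_persistence()?;
    assert!(loaded.is_none());
    Ok(())
}
```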


@ -0,0 +1,69 @@
-- schema version control
CREATE TABLE version
(
version INTEGER
) STRICT;
INSERT INTO version
VALUES (1);
-- network is the valid network for all other table data
CREATE TABLE network
(
name TEXT UNIQUE NOT NULL
) STRICT;
-- keychain is the json serialized keychain structure as JSONB,
-- descriptor is the complete descriptor string,
-- descriptor_id is a sha256::Hash id of the descriptor string w/o the checksum,
-- last revealed index is a u32
CREATE TABLE keychain
(
keychain BLOB PRIMARY KEY NOT NULL,
descriptor TEXT NOT NULL,
descriptor_id BLOB NOT NULL,
last_revealed INTEGER
) STRICT;
-- hash is block hash hex string,
-- block height is a u32,
CREATE TABLE block
(
hash TEXT PRIMARY KEY NOT NULL,
height INTEGER NOT NULL
) STRICT;
-- txid is transaction hash hex string (reversed)
-- whole_tx is a consensus encoded transaction,
-- last seen is a u64 unix epoch seconds
CREATE TABLE tx
(
txid TEXT PRIMARY KEY NOT NULL,
whole_tx BLOB,
last_seen INTEGER
) STRICT;
-- Outpoint txid hash hex string (reversed)
-- Outpoint vout
-- TxOut value as SATs
-- TxOut script consensus encoded
CREATE TABLE txout
(
txid TEXT NOT NULL,
vout INTEGER NOT NULL,
value INTEGER NOT NULL,
script BLOB NOT NULL,
PRIMARY KEY (txid, vout)
) STRICT;
-- join table between anchor and tx
-- block hash hex string
-- anchor is a json serialized Anchor structure as JSONB,
-- txid is transaction hash hex string (reversed)
CREATE TABLE anchor_tx
(
block_hash TEXT NOT NULL,
anchor BLOB NOT NULL,
txid TEXT NOT NULL REFERENCES tx (txid),
UNIQUE (anchor, txid),
FOREIGN KEY (block_hash) REFERENCES block(hash)
) STRICT;

crates/sqlite/src/lib.rs

@ -0,0 +1,34 @@
#![doc = include_str!("../README.md")]
// only enables the `doc_cfg` feature when the `docsrs` configuration attribute is defined
#![cfg_attr(docsrs, feature(doc_cfg))]
mod schema;
mod store;
use bdk_chain::bitcoin::Network;
pub use rusqlite;
pub use store::Store;
/// Error that occurs while reading or writing change sets with the SQLite database.
#[derive(Debug)]
pub enum Error {
/// Invalid network, cannot change the one already stored in the database.
Network { expected: Network, given: Network },
/// SQLite error.
Sqlite(rusqlite::Error),
}
impl core::fmt::Display for Error {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
match self {
Self::Network { expected, given } => write!(
f,
"network error trying to read or write change set, expected {}, given {}",
expected, given
),
Self::Sqlite(e) => write!(f, "sqlite error reading or writing changeset: {}", e),
}
}
}
impl std::error::Error for Error {}


@ -0,0 +1,96 @@
use crate::Store;
use rusqlite::{named_params, Connection, Error};
const SCHEMA_0: &str = include_str!("../schema/schema_0.sql");
const MIGRATIONS: &[&str] = &[SCHEMA_0];
/// Schema migration related functions.
impl<K, A> Store<K, A> {
/// Migrate sqlite db schema to latest version.
pub(crate) fn migrate(conn: &mut Connection) -> Result<(), Error> {
let stmts = &MIGRATIONS
.iter()
.flat_map(|stmt| {
// remove comment lines
let s = stmt
.split('\n')
.filter(|l| !l.starts_with("--") && !l.is_empty())
.collect::<Vec<_>>()
.join(" ");
// split into statements
s.split(';')
// remove extra spaces
.map(|s| {
s.trim()
.split(' ')
.filter(|s| !s.is_empty())
.collect::<Vec<_>>()
.join(" ")
})
.collect::<Vec<_>>()
})
// remove empty statements
.filter(|s| !s.is_empty())
.collect::<Vec<String>>();
let version = Self::get_schema_version(conn)?;
let stmts = &stmts[(version as usize)..];
// begin transaction, all migration statements and new schema version commit or rollback
let tx = conn.transaction()?;
// execute every statement and return `Some` new schema version
// if execution fails, return `Error::Rusqlite`
// if no statements executed returns `None`
let new_version = stmts
.iter()
.enumerate()
.map(|version_stmt| {
tx.execute(version_stmt.1.as_str(), [])
// map result value to next migration version
.map(|_| version_stmt.0 as i32 + version + 1)
})
.last()
.transpose()?;
// if `Some` new statement version, set new schema version
if let Some(version) = new_version {
Self::set_schema_version(&tx, version)?;
}
// commit transaction
tx.commit()?;
Ok(())
}
fn get_schema_version(conn: &Connection) -> rusqlite::Result<i32> {
let statement = conn.prepare_cached("SELECT version FROM version");
match statement {
Err(Error::SqliteFailure(e, Some(msg))) => {
if msg == "no such table: version" {
Ok(0)
} else {
Err(Error::SqliteFailure(e, Some(msg)))
}
}
Ok(mut stmt) => {
let mut rows = stmt.query([])?;
match rows.next()? {
Some(row) => {
let version: i32 = row.get(0)?;
Ok(version)
}
None => Ok(0),
}
}
_ => Ok(0),
}
}
fn set_schema_version(conn: &Connection, version: i32) -> rusqlite::Result<usize> {
conn.execute(
"UPDATE version SET version=:version",
named_params! {":version": version},
)
}
}

crates/sqlite/src/store.rs

@ -0,0 +1,779 @@
use bdk_chain::bitcoin::consensus::{deserialize, serialize};
use bdk_chain::bitcoin::hashes::Hash;
use bdk_chain::bitcoin::{Amount, Network, OutPoint, ScriptBuf, Transaction, TxOut};
use bdk_chain::bitcoin::{BlockHash, Txid};
use bdk_chain::miniscript::descriptor::{Descriptor, DescriptorPublicKey};
use rusqlite::{named_params, Connection};
use serde::{Deserialize, Serialize};
use std::collections::{BTreeMap, BTreeSet};
use std::fmt::Debug;
use std::marker::PhantomData;
use std::str::FromStr;
use std::sync::{Arc, Mutex};
use crate::Error;
use bdk_chain::{
indexed_tx_graph, keychain, local_chain, tx_graph, Anchor, Append, DescriptorExt, DescriptorId,
};
use bdk_persist::CombinedChangeSet;
/// Persists data into a relational schema based [SQLite] database file.
///
/// The changesets loaded or stored represent changes to keychain and blockchain data.
///
/// [SQLite]: https://www.sqlite.org/index.html
pub struct Store<K, A> {
// A rusqlite connection to the SQLite database. Uses a Mutex for thread safety.
conn: Mutex<Connection>,
keychain_marker: PhantomData<K>,
anchor_marker: PhantomData<A>,
}
impl<K, A> Debug for Store<K, A> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
Debug::fmt(&self.conn, f)
}
}
impl<K, A> Store<K, A>
where
K: Ord + for<'de> Deserialize<'de> + Serialize + Send,
A: Anchor + for<'de> Deserialize<'de> + Serialize + Send,
{
/// Creates a new store from a [`Connection`].
pub fn new(mut conn: Connection) -> Result<Self, rusqlite::Error> {
Self::migrate(&mut conn)?;
Ok(Self {
conn: Mutex::new(conn),
keychain_marker: Default::default(),
anchor_marker: Default::default(),
})
}
pub(crate) fn db_transaction(&mut self) -> Result<rusqlite::Transaction, Error> {
let connection = self.conn.get_mut().expect("unlocked connection mutex");
connection.transaction().map_err(Error::Sqlite)
}
}
impl<K, A, C> bdk_persist::PersistBackend<C> for Store<K, A>
where
K: Ord + for<'de> Deserialize<'de> + Serialize + Send,
A: Anchor + for<'de> Deserialize<'de> + Serialize + Send,
C: Clone + From<CombinedChangeSet<K, A>> + Into<CombinedChangeSet<K, A>>,
{
fn write_changes(&mut self, changeset: &C) -> anyhow::Result<()> {
self.write(&changeset.clone().into())
.map_err(|e| anyhow::anyhow!(e).context("unable to write changes to sqlite database"))
}
fn load_from_persistence(&mut self) -> anyhow::Result<Option<C>> {
self.read()
.map(|c| c.map(Into::into))
.map_err(|e| anyhow::anyhow!(e).context("unable to read changes from sqlite database"))
}
}
/// Network table related functions.
impl<K, A> Store<K, A> {
/// Insert [`Network`] for which all other tables data is valid.
///
/// Error if trying to insert different network value.
fn insert_network(
current_network: &Option<Network>,
db_transaction: &rusqlite::Transaction,
network_changeset: &Option<Network>,
) -> Result<(), Error> {
if let Some(network) = network_changeset {
match current_network {
// if no network change do nothing
Some(current_network) if current_network == network => Ok(()),
// if new network not the same as current, error
Some(current_network) => Err(Error::Network {
expected: *current_network,
given: *network,
}),
// insert network if none exists
None => {
let insert_network_stmt = &mut db_transaction
.prepare_cached("INSERT INTO network (name) VALUES (:name)")
.expect("insert network statement");
let name = network.to_string();
insert_network_stmt
.execute(named_params! {":name": name })
.map_err(Error::Sqlite)?;
Ok(())
}
}
} else {
Ok(())
}
}
/// Select the valid [`Network`] for this database, or `None` if not set.
fn select_network(db_transaction: &rusqlite::Transaction) -> Result<Option<Network>, Error> {
let mut select_network_stmt = db_transaction
.prepare_cached("SELECT name FROM network WHERE rowid = 1")
.expect("select network statement");
let network = select_network_stmt
.query_row([], |row| {
let network = row.get_unwrap::<usize, String>(0);
let network = Network::from_str(network.as_str()).expect("valid network");
Ok(network)
})
.map_err(Error::Sqlite);
match network {
Ok(network) => Ok(Some(network)),
Err(Error::Sqlite(rusqlite::Error::QueryReturnedNoRows)) => Ok(None),
Err(e) => Err(e),
}
}
}
/// Block table related functions.
impl<K, A> Store<K, A> {
/// Insert or delete local chain blocks.
///
/// Error if trying to insert existing block hash.
fn insert_or_delete_blocks(
db_transaction: &rusqlite::Transaction,
chain_changeset: &local_chain::ChangeSet,
) -> Result<(), Error> {
for (height, hash) in chain_changeset.iter() {
match hash {
// add new hash at height
Some(hash) => {
let insert_block_stmt = &mut db_transaction
.prepare_cached("INSERT INTO block (hash, height) VALUES (:hash, :height)")
.expect("insert block statement");
let hash = hash.to_string();
insert_block_stmt
.execute(named_params! {":hash": hash, ":height": height })
.map_err(Error::Sqlite)?;
}
// delete block at height
None => {
let delete_block_stmt = &mut db_transaction
.prepare_cached("DELETE FROM block WHERE height IS :height")
.expect("delete block statement");
delete_block_stmt
.execute(named_params! {":height": height })
.map_err(Error::Sqlite)?;
}
}
}
Ok(())
}
/// Select all blocks.
fn select_blocks(
db_transaction: &rusqlite::Transaction,
) -> Result<BTreeMap<u32, Option<BlockHash>>, Error> {
let mut select_blocks_stmt = db_transaction
.prepare_cached("SELECT height, hash FROM block")
.expect("select blocks statement");
let blocks = select_blocks_stmt
.query_map([], |row| {
let height = row.get_unwrap::<usize, u32>(0);
let hash = row.get_unwrap::<usize, String>(1);
let hash = Some(BlockHash::from_str(hash.as_str()).expect("block hash"));
Ok((height, hash))
})
.map_err(Error::Sqlite)?;
blocks
.into_iter()
.map(|row| row.map_err(Error::Sqlite))
.collect()
}
}
/// Keychain table related functions.
///
/// The keychain objects are stored as [`JSONB`] data.
/// [`JSONB`]: https://sqlite.org/json1.html#jsonb
impl<K, A> Store<K, A>
where
K: Ord + for<'de> Deserialize<'de> + Serialize + Send,
A: Anchor + Send,
{
/// Insert keychain with descriptor and last active index.
///
/// If keychain exists only update last active index.
fn insert_keychains(
db_transaction: &rusqlite::Transaction,
tx_graph_changeset: &indexed_tx_graph::ChangeSet<A, keychain::ChangeSet<K>>,
) -> Result<(), Error> {
let keychain_changeset = &tx_graph_changeset.indexer;
for (keychain, descriptor) in keychain_changeset.keychains_added.iter() {
let insert_keychain_stmt = &mut db_transaction
.prepare_cached("INSERT INTO keychain (keychain, descriptor, descriptor_id) VALUES (jsonb(:keychain), :descriptor, :descriptor_id)")
.expect("insert keychain statement");
let keychain_json = serde_json::to_string(keychain).expect("keychain json");
let descriptor_id = descriptor.descriptor_id().to_byte_array();
let descriptor = descriptor.to_string();
insert_keychain_stmt.execute(named_params! {":keychain": keychain_json, ":descriptor": descriptor, ":descriptor_id": descriptor_id })
.map_err(Error::Sqlite)?;
}
Ok(())
}
/// Update descriptor last revealed index.
fn update_last_revealed(
db_transaction: &rusqlite::Transaction,
tx_graph_changeset: &indexed_tx_graph::ChangeSet<A, keychain::ChangeSet<K>>,
) -> Result<(), Error> {
let keychain_changeset = &tx_graph_changeset.indexer;
for (descriptor_id, last_revealed) in keychain_changeset.last_revealed.iter() {
let update_last_revealed_stmt = &mut db_transaction
.prepare_cached(
"UPDATE keychain SET last_revealed = :last_revealed
WHERE descriptor_id = :descriptor_id",
)
.expect("update last revealed statement");
let descriptor_id = descriptor_id.to_byte_array();
update_last_revealed_stmt.execute(named_params! {":descriptor_id": descriptor_id, ":last_revealed": * last_revealed })
.map_err(Error::Sqlite)?;
}
Ok(())
}
/// Select keychains added.
fn select_keychains(
db_transaction: &rusqlite::Transaction,
) -> Result<BTreeMap<K, Descriptor<DescriptorPublicKey>>, Error> {
let mut select_keychains_added_stmt = db_transaction
.prepare_cached("SELECT json(keychain), descriptor FROM keychain")
.expect("select keychains statement");
let keychains = select_keychains_added_stmt
.query_map([], |row| {
let keychain = row.get_unwrap::<usize, String>(0);
let keychain = serde_json::from_str::<K>(keychain.as_str()).expect("keychain");
let descriptor = row.get_unwrap::<usize, String>(1);
let descriptor = Descriptor::from_str(descriptor.as_str()).expect("descriptor");
Ok((keychain, descriptor))
})
.map_err(Error::Sqlite)?;
keychains
.into_iter()
.map(|row| row.map_err(Error::Sqlite))
.collect()
}
/// Select descriptor last revealed indexes.
fn select_last_revealed(
db_transaction: &rusqlite::Transaction,
) -> Result<BTreeMap<DescriptorId, u32>, Error> {
let mut select_last_revealed_stmt = db_transaction
.prepare_cached(
"SELECT descriptor, last_revealed FROM keychain WHERE last_revealed IS NOT NULL",
)
.expect("select last revealed statement");
let last_revealed = select_last_revealed_stmt
.query_map([], |row| {
let descriptor = row.get_unwrap::<usize, String>(0);
let descriptor = Descriptor::from_str(descriptor.as_str()).expect("descriptor");
let descriptor_id = descriptor.descriptor_id();
let last_revealed = row.get_unwrap::<usize, u32>(1);
Ok((descriptor_id, last_revealed))
})
.map_err(Error::Sqlite)?;
last_revealed
.into_iter()
.map(|row| row.map_err(Error::Sqlite))
.collect()
}
}
/// Tx (transaction) and txout (transaction output) table related functions.
impl<K, A> Store<K, A> {
/// Insert transactions.
///
/// Error if trying to insert existing txid.
fn insert_txs(
db_transaction: &rusqlite::Transaction,
tx_graph_changeset: &indexed_tx_graph::ChangeSet<A, keychain::ChangeSet<K>>,
) -> Result<(), Error> {
for tx in tx_graph_changeset.graph.txs.iter() {
let insert_tx_stmt = &mut db_transaction
.prepare_cached("INSERT INTO tx (txid, whole_tx) VALUES (:txid, :whole_tx) ON CONFLICT (txid) DO UPDATE SET whole_tx = :whole_tx WHERE txid = :txid")
.expect("insert or update tx whole_tx statement");
let txid = tx.txid().to_string();
let whole_tx = serialize(&tx);
insert_tx_stmt
.execute(named_params! {":txid": txid, ":whole_tx": whole_tx })
.map_err(Error::Sqlite)?;
}
Ok(())
}
/// Select all transactions.
fn select_txs(
db_transaction: &rusqlite::Transaction,
) -> Result<BTreeSet<Arc<Transaction>>, Error> {
let mut select_tx_stmt = db_transaction
.prepare_cached("SELECT whole_tx FROM tx WHERE whole_tx IS NOT NULL")
.expect("select tx statement");
let txs = select_tx_stmt
.query_map([], |row| {
let whole_tx = row.get_unwrap::<usize, Vec<u8>>(0);
let whole_tx: Transaction = deserialize(&whole_tx).expect("transaction");
Ok(Arc::new(whole_tx))
})
.map_err(Error::Sqlite)?;
txs.into_iter()
.map(|row| row.map_err(Error::Sqlite))
.collect()
}
/// Select all transactions with last_seen values.
fn select_last_seen(
db_transaction: &rusqlite::Transaction,
) -> Result<BTreeMap<Txid, u64>, Error> {
// load tx last_seen
let mut select_last_seen_stmt = db_transaction
.prepare_cached("SELECT txid, last_seen FROM tx WHERE last_seen IS NOT NULL")
.expect("select tx last seen statement");
let last_seen = select_last_seen_stmt
.query_map([], |row| {
let txid = row.get_unwrap::<usize, String>(0);
let txid = Txid::from_str(&txid).expect("txid");
let last_seen = row.get_unwrap::<usize, u64>(1);
Ok((txid, last_seen))
})
.map_err(Error::Sqlite)?;
last_seen
.into_iter()
.map(|row| row.map_err(Error::Sqlite))
.collect()
}
/// Insert txouts.
///
/// Error if trying to insert existing outpoint.
fn insert_txouts(
db_transaction: &rusqlite::Transaction,
tx_graph_changeset: &indexed_tx_graph::ChangeSet<A, keychain::ChangeSet<K>>,
) -> Result<(), Error> {
for txout in tx_graph_changeset.graph.txouts.iter() {
let insert_txout_stmt = &mut db_transaction
.prepare_cached("INSERT INTO txout (txid, vout, value, script) VALUES (:txid, :vout, :value, :script)")
.expect("insert txout statement");
let txid = txout.0.txid.to_string();
let vout = txout.0.vout;
let value = txout.1.value.to_sat();
let script = txout.1.script_pubkey.as_bytes();
insert_txout_stmt.execute(named_params! {":txid": txid, ":vout": vout, ":value": value, ":script": script })
.map_err(Error::Sqlite)?;
}
Ok(())
}
/// Select all transaction outputs.
fn select_txouts(
db_transaction: &rusqlite::Transaction,
) -> Result<BTreeMap<OutPoint, TxOut>, Error> {
// load tx outs
let mut select_txout_stmt = db_transaction
.prepare_cached("SELECT txid, vout, value, script FROM txout")
.expect("select txout statement");
let txouts = select_txout_stmt
.query_map([], |row| {
let txid = row.get_unwrap::<usize, String>(0);
let txid = Txid::from_str(&txid).expect("txid");
let vout = row.get_unwrap::<usize, u32>(1);
let outpoint = OutPoint::new(txid, vout);
let value = row.get_unwrap::<usize, u64>(2);
let script_pubkey = row.get_unwrap::<usize, Vec<u8>>(3);
let script_pubkey = ScriptBuf::from_bytes(script_pubkey);
let txout = TxOut {
value: Amount::from_sat(value),
script_pubkey,
};
Ok((outpoint, txout))
})
.map_err(Error::Sqlite)?;
txouts
.into_iter()
.map(|row| row.map_err(Error::Sqlite))
.collect()
}
/// Update transaction last seen times.
fn update_last_seen(
db_transaction: &rusqlite::Transaction,
tx_graph_changeset: &indexed_tx_graph::ChangeSet<A, keychain::ChangeSet<K>>,
) -> Result<(), Error> {
for tx_last_seen in tx_graph_changeset.graph.last_seen.iter() {
let insert_or_update_tx_stmt = &mut db_transaction
.prepare_cached("INSERT INTO tx (txid, last_seen) VALUES (:txid, :last_seen) ON CONFLICT (txid) DO UPDATE SET last_seen = :last_seen WHERE txid = :txid")
.expect("insert or update tx last_seen statement");
let txid = tx_last_seen.0.to_string();
let last_seen = *tx_last_seen.1;
insert_or_update_tx_stmt
.execute(named_params! {":txid": txid, ":last_seen": last_seen })
.map_err(Error::Sqlite)?;
}
Ok(())
}
}
/// Anchor table related functions.
impl<K, A> Store<K, A>
where
K: Ord + for<'de> Deserialize<'de> + Serialize + Send,
A: Anchor + for<'de> Deserialize<'de> + Serialize + Send,
{
/// Insert anchors.
fn insert_anchors(
db_transaction: &rusqlite::Transaction,
tx_graph_changeset: &indexed_tx_graph::ChangeSet<A, keychain::ChangeSet<K>>,
) -> Result<(), Error> {
// serde_json::to_string
for anchor in tx_graph_changeset.graph.anchors.iter() {
let insert_anchor_stmt = &mut db_transaction
.prepare_cached("INSERT INTO anchor_tx (block_hash, anchor, txid) VALUES (:block_hash, jsonb(:anchor), :txid)")
.expect("insert anchor statement");
let block_hash = anchor.0.anchor_block().hash.to_string();
let anchor_json = serde_json::to_string(&anchor.0).expect("anchor json");
let txid = anchor.1.to_string();
insert_anchor_stmt.execute(named_params! {":block_hash": block_hash, ":anchor": anchor_json, ":txid": txid })
.map_err(Error::Sqlite)?;
}
Ok(())
}
/// Select all anchors.
fn select_anchors(
db_transaction: &rusqlite::Transaction,
) -> Result<BTreeSet<(A, Txid)>, Error> {
// serde_json::from_str
let mut select_anchor_stmt = db_transaction
.prepare_cached("SELECT block_hash, json(anchor), txid FROM anchor_tx")
.expect("select anchor statement");
let anchors = select_anchor_stmt
.query_map([], |row| {
let hash = row.get_unwrap::<usize, String>(0);
let hash = BlockHash::from_str(hash.as_str()).expect("block hash");
let anchor = row.get_unwrap::<usize, String>(1);
let anchor: A = serde_json::from_str(anchor.as_str()).expect("anchor");
// double check anchor blob block hash matches
assert_eq!(hash, anchor.anchor_block().hash);
let txid = row.get_unwrap::<usize, String>(2);
let txid = Txid::from_str(&txid).expect("txid");
Ok((anchor, txid))
})
.map_err(Error::Sqlite)?;
anchors
.into_iter()
.map(|row| row.map_err(Error::Sqlite))
.collect()
}
}
/// Functions to read and write all [`ChangeSet`] data.
impl<K, A> Store<K, A>
where
K: Ord + for<'de> Deserialize<'de> + Serialize + Send,
A: Anchor + for<'de> Deserialize<'de> + Serialize + Send,
{
fn write(&mut self, changeset: &CombinedChangeSet<K, A>) -> Result<(), Error> {
// no need to write anything if changeset is empty
if changeset.is_empty() {
return Ok(());
}
let db_transaction = self.db_transaction()?;
let network_changeset = &changeset.network;
let current_network = Self::select_network(&db_transaction)?;
Self::insert_network(&current_network, &db_transaction, network_changeset)?;
let chain_changeset = &changeset.chain;
Self::insert_or_delete_blocks(&db_transaction, chain_changeset)?;
let tx_graph_changeset = &changeset.indexed_tx_graph;
Self::insert_keychains(&db_transaction, tx_graph_changeset)?;
Self::update_last_revealed(&db_transaction, tx_graph_changeset)?;
Self::insert_txs(&db_transaction, tx_graph_changeset)?;
Self::insert_txouts(&db_transaction, tx_graph_changeset)?;
Self::insert_anchors(&db_transaction, tx_graph_changeset)?;
Self::update_last_seen(&db_transaction, tx_graph_changeset)?;
db_transaction.commit().map_err(Error::Sqlite)
}
fn read(&mut self) -> Result<Option<CombinedChangeSet<K, A>>, Error> {
let db_transaction = self.db_transaction()?;
let network = Self::select_network(&db_transaction)?;
let chain = Self::select_blocks(&db_transaction)?;
let keychains_added = Self::select_keychains(&db_transaction)?;
let last_revealed = Self::select_last_revealed(&db_transaction)?;
let txs = Self::select_txs(&db_transaction)?;
let last_seen = Self::select_last_seen(&db_transaction)?;
let txouts = Self::select_txouts(&db_transaction)?;
let anchors = Self::select_anchors(&db_transaction)?;
let graph: tx_graph::ChangeSet<A> = tx_graph::ChangeSet {
txs,
txouts,
anchors,
last_seen,
};
let indexer: keychain::ChangeSet<K> = keychain::ChangeSet {
keychains_added,
last_revealed,
};
let indexed_tx_graph: indexed_tx_graph::ChangeSet<A, keychain::ChangeSet<K>> =
indexed_tx_graph::ChangeSet { graph, indexer };
if network.is_none() && chain.is_empty() && indexed_tx_graph.is_empty() {
Ok(None)
} else {
Ok(Some(CombinedChangeSet {
chain,
indexed_tx_graph,
network,
}))
}
}
}
#[cfg(test)]
mod test {
use super::*;
use crate::store::Append;
use bdk_chain::bitcoin::consensus::encode::deserialize;
use bdk_chain::bitcoin::constants::genesis_block;
use bdk_chain::bitcoin::hashes::hex::FromHex;
use bdk_chain::bitcoin::transaction::Transaction;
use bdk_chain::bitcoin::Network::Testnet;
use bdk_chain::bitcoin::{secp256k1, BlockHash, OutPoint};
use bdk_chain::miniscript::Descriptor;
use bdk_chain::{
indexed_tx_graph, keychain, tx_graph, BlockId, ConfirmationHeightAnchor,
ConfirmationTimeHeightAnchor, DescriptorExt,
};
use bdk_persist::PersistBackend;
use std::str::FromStr;
use std::sync::Arc;
#[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Hash, Debug, Serialize, Deserialize)]
enum Keychain {
External { account: u32, name: String },
Internal { account: u32, name: String },
}
#[test]
fn insert_and_load_aggregate_changesets_with_confirmation_time_height_anchor(
) -> anyhow::Result<()> {
let (test_changesets, agg_test_changesets) =
create_test_changesets(&|height, time, hash| ConfirmationTimeHeightAnchor {
confirmation_height: height,
confirmation_time: time,
anchor_block: (height, hash).into(),
});
let conn = Connection::open_in_memory().expect("in memory connection");
let mut store = Store::<Keychain, ConfirmationTimeHeightAnchor>::new(conn)
.expect("create new memory db store");
test_changesets.iter().for_each(|changeset| {
store.write_changes(changeset).expect("write changeset");
});
let agg_changeset = store.load_from_persistence().expect("aggregated changeset");
assert_eq!(agg_changeset, Some(agg_test_changesets));
Ok(())
}
#[test]
fn insert_and_load_aggregate_changesets_with_confirmation_height_anchor() -> anyhow::Result<()>
{
let (test_changesets, agg_test_changesets) =
create_test_changesets(&|height, _time, hash| ConfirmationHeightAnchor {
confirmation_height: height,
anchor_block: (height, hash).into(),
});
let conn = Connection::open_in_memory().expect("in memory connection");
let mut store = Store::<Keychain, ConfirmationHeightAnchor>::new(conn)
.expect("create new memory db store");
test_changesets.iter().for_each(|changeset| {
store.write_changes(changeset).expect("write changeset");
});
let agg_changeset = store.load_from_persistence().expect("aggregated changeset");
assert_eq!(agg_changeset, Some(agg_test_changesets));
Ok(())
}
#[test]
fn insert_and_load_aggregate_changesets_with_blockid_anchor() -> anyhow::Result<()> {
let (test_changesets, agg_test_changesets) =
create_test_changesets(&|height, _time, hash| BlockId { height, hash });
let conn = Connection::open_in_memory().expect("in memory connection");
let mut store = Store::<Keychain, BlockId>::new(conn).expect("create new memory db store");
test_changesets.iter().for_each(|changeset| {
store.write_changes(changeset).expect("write changeset");
});
let agg_changeset = store.load_from_persistence().expect("aggregated changeset");
assert_eq!(agg_changeset, Some(agg_test_changesets));
Ok(())
}
fn create_test_changesets<A: Anchor + Copy>(
anchor_fn: &dyn Fn(u32, u64, BlockHash) -> A,
) -> (
Vec<CombinedChangeSet<Keychain, A>>,
CombinedChangeSet<Keychain, A>,
) {
let secp = &secp256k1::Secp256k1::signing_only();
let network_changeset = Some(Testnet);
let block_hash_0: BlockHash = genesis_block(Testnet).block_hash();
let block_hash_1 =
BlockHash::from_str("00000000b873e79784647a6c82962c70d228557d24a747ea4d1b8bbe878e1206")
.unwrap();
let block_hash_2 =
BlockHash::from_str("000000006c02c8ea6e4ff69651f7fcde348fb9d557a06e6957b65552002a7820")
.unwrap();
let block_changeset = [
(0, Some(block_hash_0)),
(1, Some(block_hash_1)),
(2, Some(block_hash_2)),
]
.into();
let ext_keychain = Keychain::External {
account: 0,
name: "ext test".to_string(),
};
let (ext_desc, _ext_keymap) = Descriptor::parse_descriptor(secp, "wpkh(tprv8ZgxMBicQKsPcx5nBGsR63Pe8KnRUqmbJNENAfGftF3yuXoMMoVJJcYeUw5eVkm9WBPjWYt6HMWYJNesB5HaNVBaFc1M6dRjWSYnmewUMYy/0/*)").unwrap();
let ext_desc_id = ext_desc.descriptor_id();
let int_keychain = Keychain::Internal {
account: 0,
name: "int test".to_string(),
};
let (int_desc, _int_keymap) = Descriptor::parse_descriptor(secp, "wpkh(tprv8ZgxMBicQKsPcx5nBGsR63Pe8KnRUqmbJNENAfGftF3yuXoMMoVJJcYeUw5eVkm9WBPjWYt6HMWYJNesB5HaNVBaFc1M6dRjWSYnmewUMYy/1/*)").unwrap();
let int_desc_id = int_desc.descriptor_id();
let tx0_hex = Vec::<u8>::from_hex("01000000010000000000000000000000000000000000000000000000000000000000000000ffffffff4d04ffff001d0104455468652054696d65732030332f4a616e2f32303039204368616e63656c6c6f72206f6e206272696e6b206f66207365636f6e64206261696c6f757420666f722062616e6b73ffffffff0100f2052a01000000434104678afdb0fe5548271967f1a67130b7105cd6a828e03909a67962e0ea1f61deb649f6bc3f4cef38c4f35504e51ec112de5c384df7ba0b8d578a4c702b6bf11d5fac00000000").unwrap();
let tx0: Arc<Transaction> = Arc::new(deserialize(tx0_hex.as_slice()).unwrap());
let tx1_hex = Vec::<u8>::from_hex("010000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff025151feffffff0200f2052a010000001600149243f727dd5343293eb83174324019ec16c2630f0000000000000000776a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf94c4fecc7daa2490047304402205e423a8754336ca99dbe16509b877ef1bf98d008836c725005b3c787c41ebe46022047246e4467ad7cc7f1ad98662afcaf14c115e0095a227c7b05c5182591c23e7e01000120000000000000000000000000000000000000000000000000000000000000000000000000").unwrap();
let tx1: Arc<Transaction> = Arc::new(deserialize(tx1_hex.as_slice()).unwrap());
let tx2_hex = Vec::<u8>::from_hex("01000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0e0432e7494d010e062f503253482fffffffff0100f2052a010000002321038a7f6ef1c8ca0c588aa53fa860128077c9e6c11e6830f4d7ee4e763a56b7718fac00000000").unwrap();
let tx2: Arc<Transaction> = Arc::new(deserialize(tx2_hex.as_slice()).unwrap());
let outpoint0_0 = OutPoint::new(tx0.txid(), 0);
let txout0_0 = tx0.output.first().unwrap().clone();
let outpoint1_0 = OutPoint::new(tx1.txid(), 0);
let txout1_0 = tx1.output.first().unwrap().clone();
let anchor1 = anchor_fn(1, 1296667328, block_hash_1);
let anchor2 = anchor_fn(2, 1296688946, block_hash_2);
let tx_graph_changeset = tx_graph::ChangeSet::<A> {
txs: [tx0.clone(), tx1.clone()].into(),
txouts: [(outpoint0_0, txout0_0), (outpoint1_0, txout1_0)].into(),
anchors: [(anchor1, tx0.txid()), (anchor1, tx1.txid())].into(),
last_seen: [
(tx0.txid(), 1598918400),
(tx1.txid(), 1598919121),
(tx2.txid(), 1608919121),
]
.into(),
};
let keychain_changeset = keychain::ChangeSet {
keychains_added: [(ext_keychain, ext_desc), (int_keychain, int_desc)].into(),
last_revealed: [(ext_desc_id, 124), (int_desc_id, 421)].into(),
};
let graph_changeset: indexed_tx_graph::ChangeSet<A, keychain::ChangeSet<Keychain>> =
indexed_tx_graph::ChangeSet {
graph: tx_graph_changeset,
indexer: keychain_changeset,
};
// test changesets to write to db
let mut changesets = Vec::new();
changesets.push(CombinedChangeSet {
chain: block_changeset,
indexed_tx_graph: graph_changeset,
network: network_changeset,
});
// create changeset that sets the whole tx2 and updates its last_seen, where before there was only the txid and last_seen
let tx_graph_changeset2 = tx_graph::ChangeSet::<A> {
txs: [tx2.clone()].into(),
txouts: BTreeMap::default(),
anchors: BTreeSet::default(),
last_seen: [(tx2.txid(), 1708919121)].into(),
};
let graph_changeset2: indexed_tx_graph::ChangeSet<A, keychain::ChangeSet<Keychain>> =
indexed_tx_graph::ChangeSet {
graph: tx_graph_changeset2,
indexer: keychain::ChangeSet::default(),
};
changesets.push(CombinedChangeSet {
chain: local_chain::ChangeSet::default(),
indexed_tx_graph: graph_changeset2,
network: None,
});
// create changeset that adds a new anchor2 for tx0 and tx1
let tx_graph_changeset3 = tx_graph::ChangeSet::<A> {
txs: BTreeSet::default(),
txouts: BTreeMap::default(),
anchors: [(anchor2, tx0.txid()), (anchor2, tx1.txid())].into(),
last_seen: BTreeMap::default(),
};
let graph_changeset3: indexed_tx_graph::ChangeSet<A, keychain::ChangeSet<Keychain>> =
indexed_tx_graph::ChangeSet {
graph: tx_graph_changeset3,
indexer: keychain::ChangeSet::default(),
};
changesets.push(CombinedChangeSet {
chain: local_chain::ChangeSet::default(),
indexed_tx_graph: graph_changeset3,
network: None,
});
// aggregated test changesets
let agg_test_changesets =
changesets
.iter()
.fold(CombinedChangeSet::<Keychain, A>::default(), |mut i, cs| {
i.append(cs.clone());
i
});
(changesets, agg_test_changesets)
}
}


@ -20,7 +20,7 @@ bitcoin = { version = "0.31.0", features = ["serde", "base64", "rand-std"], defa
serde = { version = "^1.0", features = ["derive"] }
serde_json = { version = "^1.0" }
bdk_chain = { path = "../chain", version = "0.14.0", features = ["miniscript", "serde"], default-features = false }
bdk_persist = { path = "../persist", version = "0.2.0" }
bdk_persist = { path = "../persist", version = "0.2.0", features = ["miniscript", "serde"], default-features = false }
# Optional dependencies
bip39 = { version = "2.0", optional = true }
@ -45,6 +45,7 @@ dev-getrandom-wasm = ["getrandom/js"]
lazy_static = "1.4"
assert_matches = "1.5.0"
tempfile = "3"
bdk_sqlite = { path = "../sqlite" }
bdk_file_store = { path = "../file_store" }
anyhow = "1"


@ -22,7 +22,7 @@ use alloc::{
pub use bdk_chain::keychain::Balance;
use bdk_chain::{
indexed_tx_graph,
keychain::{self, KeychainTxOutIndex},
keychain::KeychainTxOutIndex,
local_chain::{
self, ApplyHeaderError, CannotConnectError, CheckPoint, CheckPointIter, LocalChain,
},
@ -134,72 +134,7 @@ impl From<SyncResult> for Update {
}
/// The changes made to a wallet by applying an [`Update`].
#[derive(Debug, Clone, PartialEq, serde::Serialize, serde::Deserialize, Default)]
pub struct ChangeSet {
/// Changes to the [`LocalChain`].
///
/// [`LocalChain`]: local_chain::LocalChain
pub chain: local_chain::ChangeSet,
/// Changes to [`IndexedTxGraph`].
///
/// [`IndexedTxGraph`]: bdk_chain::indexed_tx_graph::IndexedTxGraph
pub indexed_tx_graph: indexed_tx_graph::ChangeSet<
ConfirmationTimeHeightAnchor,
keychain::ChangeSet<KeychainKind>,
>,
/// Stores the network type of the wallet.
pub network: Option<Network>,
}
impl Append for ChangeSet {
fn append(&mut self, other: Self) {
Append::append(&mut self.chain, other.chain);
Append::append(&mut self.indexed_tx_graph, other.indexed_tx_graph);
if other.network.is_some() {
debug_assert!(
self.network.is_none() || self.network == other.network,
"network type must be consistent"
);
self.network = other.network;
}
}
fn is_empty(&self) -> bool {
self.chain.is_empty() && self.indexed_tx_graph.is_empty()
}
}
impl From<local_chain::ChangeSet> for ChangeSet {
fn from(chain: local_chain::ChangeSet) -> Self {
Self {
chain,
..Default::default()
}
}
}
impl
From<
indexed_tx_graph::ChangeSet<
ConfirmationTimeHeightAnchor,
keychain::ChangeSet<KeychainKind>,
>,
> for ChangeSet
{
fn from(
indexed_tx_graph: indexed_tx_graph::ChangeSet<
ConfirmationTimeHeightAnchor,
keychain::ChangeSet<KeychainKind>,
>,
) -> Self {
Self {
indexed_tx_graph,
..Default::default()
}
}
}
pub type ChangeSet = bdk_persist::CombinedChangeSet<KeychainKind, ConfirmationTimeHeightAnchor>;
/// A derived address and the index it was found at.
/// For convenience this automatically derefs to `Address`
@ -531,12 +466,13 @@ impl Wallet {
/// # use bdk_wallet::descriptor::Descriptor;
/// # use bitcoin::key::Secp256k1;
/// # use bdk_wallet::KeychainKind;
/// # use bdk_file_store::Store;
/// # use bdk_sqlite::{Store, rusqlite::Connection};
/// #
/// # fn main() -> Result<(), anyhow::Error> {
/// # let temp_dir = tempfile::tempdir().expect("must create tempdir");
/// # let file_path = temp_dir.path().join("store.db");
/// # let db: Store<bdk_wallet::wallet::ChangeSet> = Store::create_new(&[], &file_path).expect("must create db");
/// # let conn = Connection::open(file_path).expect("must open connection");
/// # let db = Store::new(conn).expect("must create db");
/// let secp = Secp256k1::new();
///
/// let (external_descriptor, external_keymap) = Descriptor::parse_descriptor(&secp, "wpkh(tprv8ZgxMBicQKsPdy6LMhUtFHAgpocR8GC6QmwMSFpZs7h6Eziw3SpThFfczTDh5rW2krkqffa11UpX3XkeTTB2FvzZKWXqPY54Y6Rq4AQ5R8L/84'/1'/0'/0/*)").unwrap();


@ -1,9 +1,12 @@
use std::path::Path;
use std::str::FromStr;
use assert_matches::assert_matches;
use bdk_chain::collections::BTreeMap;
use bdk_chain::COINBASE_MATURITY;
use bdk_chain::{BlockId, ConfirmationTime};
use bdk_persist::PersistBackend;
use bdk_sqlite::rusqlite::Connection;
use bdk_wallet::descriptor::{calc_checksum, IntoWalletDescriptor};
use bdk_wallet::psbt::PsbtUtils;
use bdk_wallet::signer::{SignOptions, SignerError};
@ -69,13 +72,19 @@ const P2WPKH_FAKE_WITNESS_SIZE: usize = 106;
const DB_MAGIC: &[u8] = &[0x21, 0x24, 0x48];
#[test]
fn load_recovers_wallet() {
fn load_recovers_wallet() -> anyhow::Result<()> {
fn run<B, FN, FR>(filename: &str, create_new: FN, recover: FR) -> anyhow::Result<()>
where
B: PersistBackend<bdk_wallet::wallet::ChangeSet> + Send + Sync + 'static,
FN: Fn(&Path) -> anyhow::Result<B>,
FR: Fn(&Path) -> anyhow::Result<B>,
{
let temp_dir = tempfile::tempdir().expect("must create tempdir");
let file_path = temp_dir.path().join("store.db");
let file_path = temp_dir.path().join(filename);
// create new wallet
let wallet_spk_index = {
let db = bdk_file_store::Store::create_new(DB_MAGIC, &file_path).expect("must create db");
let db = create_new(&file_path).expect("must create db");
let mut wallet = Wallet::new(get_test_tr_single_sig_xprv(), None, db, Network::Testnet)
.expect("must init wallet");
@ -85,7 +94,7 @@ fn load_recovers_wallet() {
// recover wallet
{
let db = bdk_file_store::Store::open(DB_MAGIC, &file_path).expect("must recover db");
let db = recover(&file_path).expect("must recover db");
let wallet = Wallet::load(db).expect("must recover wallet");
assert_eq!(wallet.network(), Network::Testnet);
assert_eq!(
@ -108,21 +117,41 @@ fn load_recovers_wallet() {
// `new` can only be called on empty db
{
let db = bdk_file_store::Store::open(DB_MAGIC, &file_path).expect("must recover db");
let db = recover(&file_path).expect("must recover db");
let result = Wallet::new(get_test_tr_single_sig_xprv(), None, db, Network::Testnet);
assert!(matches!(result, Err(NewError::NonEmptyDatabase)));
}
Ok(())
}
run(
"store.db",
|path| Ok(bdk_file_store::Store::create_new(DB_MAGIC, path)?),
|path| Ok(bdk_file_store::Store::open(DB_MAGIC, path)?),
)?;
run(
"store.sqlite",
|path| Ok(bdk_sqlite::Store::new(Connection::open(path)?)?),
|path| Ok(bdk_sqlite::Store::new(Connection::open(path)?)?),
)?;
Ok(())
}
#[test]
fn new_or_load() {
fn new_or_load() -> anyhow::Result<()> {
fn run<B, F>(filename: &str, new_or_load: F) -> anyhow::Result<()>
where
B: PersistBackend<bdk_wallet::wallet::ChangeSet> + Send + Sync + 'static,
F: Fn(&Path) -> anyhow::Result<B>,
{
let temp_dir = tempfile::tempdir().expect("must create tempdir");
let file_path = temp_dir.path().join("store.db");
let file_path = temp_dir.path().join(filename);
// init wallet when non-existent
let wallet_keychains: BTreeMap<_, _> = {
let db = bdk_file_store::Store::open_or_create_new(DB_MAGIC, &file_path)
.expect("must create db");
let db = new_or_load(&file_path).expect("must create db");
let wallet = Wallet::new_or_load(get_test_wpkh(), None, db, Network::Testnet)
.expect("must init wallet");
wallet.keychains().map(|(k, v)| (*k, v.clone())).collect()
@ -130,8 +159,7 @@ fn new_or_load() {
// wrong network
{
let db =
bdk_file_store::Store::open_or_create_new(DB_MAGIC, &file_path).expect("must open db");
let db = new_or_load(&file_path).expect("must create db");
let err = Wallet::new_or_load(get_test_wpkh(), None, db, Network::Bitcoin)
.expect_err("wrong network");
assert!(
@ -153,8 +181,7 @@ fn new_or_load() {
let got_blockhash =
bitcoin::blockdata::constants::genesis_block(Network::Testnet).block_hash();
let db =
bdk_file_store::Store::open_or_create_new(DB_MAGIC, &file_path).expect("must open db");
let db = new_or_load(&file_path).expect("must open db");
let err = Wallet::new_or_load_with_genesis_hash(
get_test_wpkh(),
None,
@ -182,8 +209,7 @@ fn new_or_load() {
.unwrap()
.0;
let db =
bdk_file_store::Store::open_or_create_new(DB_MAGIC, &file_path).expect("must open db");
let db = new_or_load(&file_path).expect("must open db");
let err = Wallet::new_or_load(exp_descriptor, None, db, Network::Testnet)
.expect_err("wrong external descriptor");
assert!(
@ -202,8 +228,7 @@ fn new_or_load() {
let exp_descriptor = Some(get_test_tr_single_sig());
let got_descriptor = None;
let db =
bdk_file_store::Store::open_or_create_new(DB_MAGIC, &file_path).expect("must open db");
let db = new_or_load(&file_path).expect("must open db");
let err = Wallet::new_or_load(get_test_wpkh(), exp_descriptor, db, Network::Testnet)
.expect_err("wrong internal descriptor");
assert!(
@ -219,8 +244,7 @@ fn new_or_load() {
// all parameters match
{
let db =
bdk_file_store::Store::open_or_create_new(DB_MAGIC, &file_path).expect("must open db");
let db = new_or_load(&file_path).expect("must open db");
let wallet = Wallet::new_or_load(get_test_wpkh(), None, db, Network::Testnet)
.expect("must recover wallet");
assert_eq!(wallet.network(), Network::Testnet);
@ -229,6 +253,17 @@ fn new_or_load() {
.map(|(k, v)| (*k, v.clone()))
.eq(wallet_keychains));
}
Ok(())
}
run("store.db", |path| {
Ok(bdk_file_store::Store::open_or_create_new(DB_MAGIC, path)?)
})?;
run("store.sqlite", |path| {
Ok(bdk_sqlite::Store::new(Connection::open(path)?)?)
})?;
Ok(())
}
#[test]


@ -8,6 +8,6 @@ edition = "2021"
[dependencies]
bdk_wallet = { path = "../../crates/wallet" }
bdk_esplora = { path = "../../crates/esplora", features = ["async-https"] }
bdk_file_store = { path = "../../crates/file_store" }
bdk_sqlite = { path = "../../crates/sqlite" }
tokio = { version = "1", features = ["rt", "rt-multi-thread", "macros"] }
anyhow = "1"


@ -1,22 +1,22 @@
use std::{collections::BTreeSet, io::Write, str::FromStr};
use bdk_esplora::{esplora_client, EsploraAsyncExt};
use bdk_file_store::Store;
use bdk_wallet::{
bitcoin::{Address, Amount, Network, Script},
KeychainKind, SignOptions, Wallet,
};
const DB_MAGIC: &str = "bdk_wallet_esplora_async_example";
use bdk_sqlite::{rusqlite::Connection, Store};
const SEND_AMOUNT: Amount = Amount::from_sat(5000);
const STOP_GAP: usize = 50;
const PARALLEL_REQUESTS: usize = 5;
#[tokio::main]
async fn main() -> Result<(), anyhow::Error> {
let db_path = std::env::temp_dir().join("bdk-esplora-async-example");
let db =
Store::<bdk_wallet::wallet::ChangeSet>::open_or_create_new(DB_MAGIC.as_bytes(), db_path)?;
let db_path = "bdk-esplora-async-example.sqlite";
let conn = Connection::open(db_path)?;
let db = Store::new(conn)?;
let external_descriptor = "wpkh(tprv8ZgxMBicQKsPdy6LMhUtFHAgpocR8GC6QmwMSFpZs7h6Eziw3SpThFfczTDh5rW2krkqffa11UpX3XkeTTB2FvzZKWXqPY54Y6Rq4AQ5R8L/84'/1'/0'/0/*)";
let internal_descriptor = "wpkh(tprv8ZgxMBicQKsPdy6LMhUtFHAgpocR8GC6QmwMSFpZs7h6Eziw3SpThFfczTDh5rW2krkqffa11UpX3XkeTTB2FvzZKWXqPY54Y6Rq4AQ5R8L/84'/1'/0'/1/*)";
@ -24,7 +24,7 @@ async fn main() -> Result<(), anyhow::Error> {
external_descriptor,
Some(internal_descriptor),
db,
Network::Testnet,
Network::Signet,
)?;
let address = wallet.next_unused_address(KeychainKind::External)?;
@ -34,8 +34,7 @@ async fn main() -> Result<(), anyhow::Error> {
println!("Wallet balance before syncing: {} sats", balance.total());
print!("Syncing...");
let client =
esplora_client::Builder::new("https://blockstream.info/testnet/api").build_async()?;
let client = esplora_client::Builder::new("http://signet.bitcoindevkit.net").build_async()?;
fn generate_inspect(kind: KeychainKind) -> impl FnMut(u32, &Script) + Send + Sync + 'static {
let mut once = Some(());
@ -91,7 +90,7 @@ async fn main() -> Result<(), anyhow::Error> {
}
let faucet_address = Address::from_str("mkHS9ne12qx9pS9VojpwU5xtRd4T7X7ZUt")?
.require_network(Network::Testnet)?;
.require_network(Network::Signet)?;
let mut tx_builder = wallet.build_tx();
tx_builder


@ -305,341 +305,341 @@ where
}?
}
#[cfg(all(test, feature = "miniscript"))]
mod test {
use bitcoin::secp256k1::Secp256k1;
use crate::coin_select::{evaluate_cs::evaluate, ExcessStrategyKind};
use super::{
coin_select_bnb,
evaluate_cs::{Evaluation, EvaluationError},
tester::Tester,
CoinSelector, CoinSelectorOpt, Vec, WeightedValue,
};
fn tester() -> Tester {
const DESC_STR: &str = "tr(xprv9uBuvtdjghkz8D1qzsSXS9Vs64mqrUnXqzNccj2xcvnCHPpXKYE1U2Gbh9CDHk8UPyF2VuXpVkDA7fk5ZP4Hd9KnhUmTscKmhee9Dp5sBMK)";
Tester::new(&Secp256k1::default(), DESC_STR)
}
fn evaluate_bnb(
initial_selector: CoinSelector,
max_tries: usize,
) -> Result<Evaluation, EvaluationError> {
evaluate(initial_selector, |cs| {
coin_select_bnb(max_tries, cs.clone()).map_or(false, |new_cs| {
*cs = new_cs;
true
})
})
}
#[test]
fn not_enough_coins() {
let t = tester();
let candidates: Vec<WeightedValue> = vec![
t.gen_candidate(0, 100_000).into(),
t.gen_candidate(1, 100_000).into(),
];
let opts = t.gen_opts(200_000);
let selector = CoinSelector::new(&candidates, &opts);
assert!(!coin_select_bnb(10_000, selector).is_some());
}
#[test]
fn exactly_enough_coins_preselected() {
let t = tester();
let candidates: Vec<WeightedValue> = vec![
t.gen_candidate(0, 100_000).into(), // to preselect
t.gen_candidate(1, 100_000).into(), // to preselect
t.gen_candidate(2, 100_000).into(),
];
let opts = CoinSelectorOpt {
target_feerate: 0.0,
..t.gen_opts(200_000)
};
let selector = {
let mut selector = CoinSelector::new(&candidates, &opts);
selector.select(0); // preselect
selector.select(1); // preselect
selector
};
let evaluation = evaluate_bnb(selector, 10_000).expect("eval failed");
println!("{}", evaluation);
assert_eq!(evaluation.solution.selected, (0..=1).collect());
assert_eq!(evaluation.solution.excess_strategies.len(), 1);
assert_eq!(
evaluation.feerate_offset(ExcessStrategyKind::ToFee).floor(),
0.0
);
}
/// `cost_of_change` acts as the upper-bound in Bnb; we check whether these boundaries are
/// enforced in code
#[test]
fn cost_of_change() {
let t = tester();
let candidates: Vec<WeightedValue> = vec![
t.gen_candidate(0, 200_000).into(),
t.gen_candidate(1, 200_000).into(),
t.gen_candidate(2, 200_000).into(),
];
// lowest and highest possible `recipient_value` opts for derived `drain_waste`, assuming
// that we want 2 candidates selected
let (lowest_opts, highest_opts) = {
let opts = t.gen_opts(0);
let fee_from_inputs =
(candidates[0].weight as f32 * opts.target_feerate).ceil() as u64 * 2;
let fee_from_template =
((opts.base_weight + 2) as f32 * opts.target_feerate).ceil() as u64;
let lowest_opts = CoinSelectorOpt {
target_value: Some(
400_000 - fee_from_inputs - fee_from_template - opts.drain_waste() as u64,
),
..opts
};
let highest_opts = CoinSelectorOpt {
target_value: Some(400_000 - fee_from_inputs - fee_from_template),
..opts
};
(lowest_opts, highest_opts)
};
// test lowest possible target we can select
let lowest_eval = evaluate_bnb(CoinSelector::new(&candidates, &lowest_opts), 10_000);
assert!(lowest_eval.is_ok());
let lowest_eval = lowest_eval.unwrap();
println!("LB {}", lowest_eval);
assert_eq!(lowest_eval.solution.selected.len(), 2);
assert_eq!(lowest_eval.solution.excess_strategies.len(), 1);
assert_eq!(
lowest_eval
.feerate_offset(ExcessStrategyKind::ToFee)
.floor(),
0.0
);
// test the highest possible target we can select
let highest_eval = evaluate_bnb(CoinSelector::new(&candidates, &highest_opts), 10_000);
assert!(highest_eval.is_ok());
let highest_eval = highest_eval.unwrap();
println!("UB {}", highest_eval);
assert_eq!(highest_eval.solution.selected.len(), 2);
assert_eq!(highest_eval.solution.excess_strategies.len(), 1);
assert_eq!(
highest_eval
.feerate_offset(ExcessStrategyKind::ToFee)
.floor(),
0.0
);
// test lower out of bounds
let loob_opts = CoinSelectorOpt {
target_value: lowest_opts.target_value.map(|v| v - 1),
..lowest_opts
};
let loob_eval = evaluate_bnb(CoinSelector::new(&candidates, &loob_opts), 10_000);
assert!(loob_eval.is_err());
println!("Lower OOB: {}", loob_eval.unwrap_err());
// test upper out of bounds
let uoob_opts = CoinSelectorOpt {
target_value: highest_opts.target_value.map(|v| v + 1),
..highest_opts
};
let uoob_eval = evaluate_bnb(CoinSelector::new(&candidates, &uoob_opts), 10_000);
assert!(uoob_eval.is_err());
println!("Upper OOB: {}", uoob_eval.unwrap_err());
}
#[test]
fn try_select() {
let t = tester();
let candidates: Vec<WeightedValue> = vec![
t.gen_candidate(0, 300_000).into(),
t.gen_candidate(1, 300_000).into(),
t.gen_candidate(2, 300_000).into(),
t.gen_candidate(3, 200_000).into(),
t.gen_candidate(4, 200_000).into(),
];
let make_opts = |v: u64| -> CoinSelectorOpt {
CoinSelectorOpt {
target_feerate: 0.0,
..t.gen_opts(v)
}
};
let test_cases = vec![
(make_opts(100_000), false, 0),
(make_opts(200_000), true, 1),
(make_opts(300_000), true, 1),
(make_opts(500_000), true, 2),
(make_opts(1_000_000), true, 4),
(make_opts(1_200_000), false, 0),
(make_opts(1_300_000), true, 5),
(make_opts(1_400_000), false, 0),
];
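// Rough intuition for the expected outcomes (a sketch leaning on the `cost_of_change` upper
// bound described above, not derived from the implementation): with a zero feerate the excess
// over the target must stay within that bound, so only targets that some subset of
// {3x 300_000, 2x 200_000} can match (almost) exactly succeed; 1_200_000 has no such subset
// sum, and 1_400_000 exceeds the 1_300_000 total.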
for (opts, expect_solution, expect_selected) in test_cases {
let res = evaluate_bnb(CoinSelector::new(&candidates, &opts), 10_000);
assert_eq!(res.is_ok(), expect_solution);
match res {
Ok(eval) => {
println!("{}", eval);
assert_eq!(eval.feerate_offset(ExcessStrategyKind::ToFee), 0.0);
assert_eq!(eval.solution.selected.len(), expect_selected as _);
}
Err(err) => println!("expected failure: {}", err),
}
}
}
#[test]
fn early_bailout_optimization() {
let t = tester();
// target: 300_000
// candidates: 2x of 125_000, 1000x of 100_000, 1x of 50_000
// expected solution: 2x 125_000, 1x 50_000
// set bnb max tries: 1100, should succeed
let candidates = {
let mut candidates: Vec<WeightedValue> = vec![
t.gen_candidate(0, 125_000).into(),
t.gen_candidate(1, 125_000).into(),
t.gen_candidate(2, 50_000).into(),
];
(3..3 + 1000_u32)
.for_each(|index| candidates.push(t.gen_candidate(index, 100_000).into()));
candidates
};
let opts = CoinSelectorOpt {
target_feerate: 0.0,
..t.gen_opts(300_000)
};
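// A sketch of why 1_100 tries should be enough (an assumption about the early-bailout
// behaviour, not taken from the implementation): with a zero feerate, 2x 125_000 + 1x 50_000
// hits the 300_000 target exactly, so BnB can stop as soon as it finds that perfect match
// instead of exploring the 1_000 filler candidates.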
let result = evaluate_bnb(CoinSelector::new(&candidates, &opts), 1100);
assert!(result.is_ok());
let eval = result.unwrap();
println!("{}", eval);
assert_eq!(eval.solution.selected, (0..=2).collect());
}
#[test]
fn should_exhaust_iteration() {
static MAX_TRIES: usize = 1000;
let t = tester();
let candidates = (0..MAX_TRIES + 1)
.map(|index| t.gen_candidate(index as _, 10_000).into())
.collect::<Vec<WeightedValue>>();
let opts = t.gen_opts(10_001 * MAX_TRIES as u64);
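// The target is 10_001 * 1_000 = 10_001_000 sats, while any 1_000 of the 1_001 candidates only
// sum to 10_000_000; a solution therefore needs every candidate, which BnB is not expected to
// reach within MAX_TRIES rounds, so the iteration limit should be exhausted.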
let result = evaluate_bnb(CoinSelector::new(&candidates, &opts), MAX_TRIES);
assert!(result.is_err());
println!("error as expected: {}", result.unwrap_err());
}
/// Solution should have fee >= min_absolute_fee (or no solution at all)
#[test]
fn min_absolute_fee() {
let t = tester();
let candidates = {
let mut candidates = Vec::new();
t.gen_weighted_values(&mut candidates, 5, 10_000);
t.gen_weighted_values(&mut candidates, 5, 20_000);
t.gen_weighted_values(&mut candidates, 5, 30_000);
t.gen_weighted_values(&mut candidates, 10, 10_300);
t.gen_weighted_values(&mut candidates, 10, 10_500);
t.gen_weighted_values(&mut candidates, 10, 10_700);
t.gen_weighted_values(&mut candidates, 10, 10_900);
t.gen_weighted_values(&mut candidates, 10, 11_000);
t.gen_weighted_values(&mut candidates, 10, 12_000);
t.gen_weighted_values(&mut candidates, 10, 13_000);
candidates
};
let mut opts = CoinSelectorOpt {
min_absolute_fee: 1,
..t.gen_opts(100_000)
};
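// Sweep the absolute-fee floor upwards in 31-sat steps (a step size picked by this test);
// per the doc comment above, each returned solution's ToFee strategy must pay at least
// `min_absolute_fee`, and otherwise BnB may legitimately report no solution.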
(1..=120_u64).for_each(|fee_factor| {
opts.min_absolute_fee = fee_factor * 31;
let result = evaluate_bnb(CoinSelector::new(&candidates, &opts), 21_000);
match result {
Ok(result) => {
println!("Solution {}", result);
let fee = result.solution.excess_strategies[&ExcessStrategyKind::ToFee].fee;
assert!(fee >= opts.min_absolute_fee);
assert_eq!(result.solution.excess_strategies.len(), 1);
}
Err(err) => {
println!("No Solution: {}", err);
}
}
});
}
/// For a decreasing feerate (long-term feerate lower than the effective feerate) we should
/// select fewer inputs; for an increasing feerate (long-term feerate higher than the effective
/// feerate) we should select more.
#[test]
fn feerate_difference() {
let t = tester();
let candidates = {
let mut candidates = Vec::new();
t.gen_weighted_values(&mut candidates, 10, 2_000);
t.gen_weighted_values(&mut candidates, 10, 5_000);
t.gen_weighted_values(&mut candidates, 10, 20_000);
candidates
};
let decreasing_feerate_opts = CoinSelectorOpt {
target_feerate: 1.25,
long_term_feerate: Some(0.25),
..t.gen_opts(100_000)
};
let increasing_feerate_opts = CoinSelectorOpt {
target_feerate: 0.25,
long_term_feerate: Some(1.25),
..t.gen_opts(100_000)
};
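// Intuition (a sketch of the usual waste-metric argument, not taken from the implementation):
// when the current feerate is high relative to the long-term feerate, extra inputs are
// expensive to spend now and cheaper later, so a smaller selection wastes less; when the
// current feerate is low, consolidating more inputs now is the cheaper choice.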
let decreasing_res = evaluate_bnb(
CoinSelector::new(&candidates, &decreasing_feerate_opts),
21_000,
)
.expect("no result");
let decreasing_len = decreasing_res.solution.selected.len();
let increasing_res = evaluate_bnb(
CoinSelector::new(&candidates, &increasing_feerate_opts),
21_000,
)
.expect("no result");
let increasing_len = increasing_res.solution.selected.len();
println!("decreasing_len: {}", decreasing_len);
println!("increasing_len: {}", increasing_len);
assert!(decreasing_len < increasing_len);
}
/// TODO: UNIMPLEMENTED TESTS:
/// * Excess strategies:
/// * We should always have `ExcessStrategy::ToFee`.
/// * We should only have `ExcessStrategy::ToRecipient` when `max_extra_target > 0`.
/// * We should only have `ExcessStrategy::ToDrain` when `drain_value >= min_drain_value`.
/// * Fuzz
/// * Solution feerate should never be lower than target feerate
/// * Solution fee should never be lower than `min_absolute_fee`.
/// * Preselected should always remain selected
fn _todo() {}
}