From c069b0fb4171aa11e980e8e070982d3046e07110 Mon Sep 17 00:00:00 2001 From: LLFourn Date: Thu, 2 Mar 2023 16:23:06 +1100 Subject: [PATCH] =?UTF-8?q?Move=20everything=20else=20over=20=F0=9F=8E=89?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This completes the move of things from https://github.com/LLFourn/bdk_core_staging --- Cargo.toml | 11 +- crates/bdk/src/descriptor/policy.rs | 2 +- crates/bdk/src/wallet/export.rs | 8 +- crates/electrum/Cargo.toml | 4 +- crates/esplora/Cargo.toml | 16 + crates/esplora/README.md | 3 + crates/esplora/src/lib.rs | 303 ++++++++ crates/file_store/Cargo.toml | 5 +- crates/file_store/tests/test_file_store.rs | 6 +- .../.gitignore | 1 + .../Cargo.toml | 9 + .../README.md | 6 + .../src/main.rs | 248 +++++++ .../.gitignore | 3 + .../Cargo.toml | 11 + .../src/main.rs | 241 ++++++ .../keychain_tracker_example_cli/.gitignore | 1 + .../keychain_tracker_example_cli/Cargo.toml | 16 + .../keychain_tracker_example_cli/README.md | 1 + .../keychain_tracker_example_cli/src/lib.rs | 688 ++++++++++++++++++ nursery/README.md | 5 + nursery/coin_select/Cargo.toml | 11 + nursery/coin_select/src/bnb.rs | 651 +++++++++++++++++ nursery/coin_select/src/coin_selector.rs | 617 ++++++++++++++++ nursery/coin_select/src/lib.rs | 33 + nursery/tmp_plan/Cargo.toml | 13 + nursery/tmp_plan/README.md | 3 + nursery/tmp_plan/bdk_tmp_plan/Cargo.toml | 13 + nursery/tmp_plan/bdk_tmp_plan/README.md | 3 + nursery/tmp_plan/bdk_tmp_plan/src/lib.rs | 436 +++++++++++ .../tmp_plan/bdk_tmp_plan/src/plan_impls.rs | 323 ++++++++ .../tmp_plan/bdk_tmp_plan/src/requirements.rs | 218 ++++++ nursery/tmp_plan/bdk_tmp_plan/src/template.rs | 76 ++ nursery/tmp_plan/src/lib.rs | 436 +++++++++++ nursery/tmp_plan/src/plan_impls.rs | 323 ++++++++ nursery/tmp_plan/src/requirements.rs | 218 ++++++ nursery/tmp_plan/src/template.rs | 76 ++ 37 files changed, 5023 insertions(+), 15 deletions(-) create mode 100644 crates/esplora/Cargo.toml create mode 100644 crates/esplora/README.md create mode 100644 crates/esplora/src/lib.rs create mode 100644 example-crates/keychain_tracker_electrum_example/.gitignore create mode 100644 example-crates/keychain_tracker_electrum_example/Cargo.toml create mode 100644 example-crates/keychain_tracker_electrum_example/README.md create mode 100644 example-crates/keychain_tracker_electrum_example/src/main.rs create mode 100644 example-crates/keychain_tracker_esplora_example/.gitignore create mode 100644 example-crates/keychain_tracker_esplora_example/Cargo.toml create mode 100644 example-crates/keychain_tracker_esplora_example/src/main.rs create mode 100644 example-crates/keychain_tracker_example_cli/.gitignore create mode 100644 example-crates/keychain_tracker_example_cli/Cargo.toml create mode 100644 example-crates/keychain_tracker_example_cli/README.md create mode 100644 example-crates/keychain_tracker_example_cli/src/lib.rs create mode 100644 nursery/README.md create mode 100644 nursery/coin_select/Cargo.toml create mode 100644 nursery/coin_select/src/bnb.rs create mode 100644 nursery/coin_select/src/coin_selector.rs create mode 100644 nursery/coin_select/src/lib.rs create mode 100644 nursery/tmp_plan/Cargo.toml create mode 100644 nursery/tmp_plan/README.md create mode 100644 nursery/tmp_plan/bdk_tmp_plan/Cargo.toml create mode 100644 nursery/tmp_plan/bdk_tmp_plan/README.md create mode 100644 nursery/tmp_plan/bdk_tmp_plan/src/lib.rs create mode 100644 nursery/tmp_plan/bdk_tmp_plan/src/plan_impls.rs create mode 100644 
nursery/tmp_plan/bdk_tmp_plan/src/requirements.rs create mode 100644 nursery/tmp_plan/bdk_tmp_plan/src/template.rs create mode 100644 nursery/tmp_plan/src/lib.rs create mode 100644 nursery/tmp_plan/src/plan_impls.rs create mode 100644 nursery/tmp_plan/src/requirements.rs create mode 100644 nursery/tmp_plan/src/template.rs diff --git a/Cargo.toml b/Cargo.toml index e8a93b6a..7f97bf6a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,11 +1,16 @@ [workspace] members = [ "crates/bdk", - "crates/bdk_chain", - "crates/bdk_file_store", - "crates/bdk_electrum", + "crates/chain", + "crates/file_store", + "crates/electrum", "example-crates/esplora-wallet", "example-crates/electrum-wallet", + "example-crates/keychain_tracker_electrum_example", + "example-crates/keychain_tracker_esplora_example", + "example-crates/keychain_tracker_example_cli", + "nursery/tmp_plan", + "nursery/coin_select" ] [workspace.package] diff --git a/crates/bdk/src/descriptor/policy.rs b/crates/bdk/src/descriptor/policy.rs index 96889ffa..af3e4a3b 100644 --- a/crates/bdk/src/descriptor/policy.rs +++ b/crates/bdk/src/descriptor/policy.rs @@ -32,7 +32,7 @@ //! //! let signers = Arc::new(SignersContainer::build(key_map, &extended_desc, &secp)); //! let policy = extended_desc.extract_policy(&signers, BuildSatisfaction::None, &secp)?; -//! println!("policy: {}", serde_json::to_string(&policy)?); +//! println!("policy: {}", serde_json::to_string(&policy).unwrap()); //! # Ok::<(), bdk::Error>(()) //! ``` diff --git a/crates/bdk/src/wallet/export.rs b/crates/bdk/src/wallet/export.rs index 3ec43d2b..90563844 100644 --- a/crates/bdk/src/wallet/export.rs +++ b/crates/bdk/src/wallet/export.rs @@ -34,7 +34,7 @@ //! import.change_descriptor().as_ref(), //! Network::Testnet, //! )?; -//! # Ok::<_, bdk::Error>(()) +//! # Ok::<_, Box>(()) //! ``` //! //! ### Export a `Wallet` @@ -47,12 +47,10 @@ //! Some("wpkh([c258d2e4/84h/1h/0h]tpubDD3ynpHgJQW8VvWRzQ5WFDCrs4jqVFGHB3vLC3r49XHJSqP8bHKdK4AriuUKLccK68zfzowx7YhmDN8SiSkgCDENUFx9qVw65YyqM78vyVe/1/*)"), //! Network::Testnet, //! )?; -//! let export = FullyNodedExport::export_wallet(&wallet, "exported wallet", true) -//! .map_err(ToString::to_string) -//! .map_err(bdk::Error::Generic)?; +//! let export = FullyNodedExport::export_wallet(&wallet, "exported wallet", true).unwrap(); //! //! println!("Exported: {}", export.to_string()); -//! # Ok::<_, bdk::Error>(()) +//! # Ok::<_, Box>(()) //! ``` use core::str::FromStr; diff --git a/crates/electrum/Cargo.toml b/crates/electrum/Cargo.toml index 315e8619..a0f70c7e 100644 --- a/crates/electrum/Cargo.toml +++ b/crates/electrum/Cargo.toml @@ -5,12 +5,12 @@ edition = "2021" homepage = "https://bitcoindevkit.org" repository = "https://github.com/LLFourn/bdk_core_staging" documentation = "https://docs.rs/bdk_electrum" -description = "BDK Electrum client library for updating the keychain tracker." 
+description = "Fetch data from electrum in the form BDK accepts" license = "MIT OR Apache-2.0" readme = "README.md" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] -bdk_chain = { path = "../bdk_chain", version = "0.3", features = ["serde", "miniscript"] } +bdk_chain = { path = "../chain", version = "0.3", features = ["serde", "miniscript"] } electrum-client = { version = "0.12" } diff --git a/crates/esplora/Cargo.toml b/crates/esplora/Cargo.toml new file mode 100644 index 00000000..680ed3c5 --- /dev/null +++ b/crates/esplora/Cargo.toml @@ -0,0 +1,16 @@ +[package] +name = "bdk_esplora" +version = "0.1.0" +edition = "2021" +homepage = "https://bitcoindevkit.org" +repository = "https://github.com/LLFourn/bdk_core_staging" +documentation = "https://docs.rs/bdk_esplora" +description = "Fetch data from esplora in the form that accepts" +license = "MIT OR Apache-2.0" +readme = "README.md" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +bdk_chain = { path = "../chain", version = "0.3", features = ["serde", "miniscript"] } +esplora-client = { version = "0.3" } diff --git a/crates/esplora/README.md b/crates/esplora/README.md new file mode 100644 index 00000000..637a7d52 --- /dev/null +++ b/crates/esplora/README.md @@ -0,0 +1,3 @@ +# BDK Esplora + +BDK Esplora client library for updating the `bdk_chain` structures. diff --git a/crates/esplora/src/lib.rs b/crates/esplora/src/lib.rs new file mode 100644 index 00000000..a8eae786 --- /dev/null +++ b/crates/esplora/src/lib.rs @@ -0,0 +1,303 @@ +//! This crate is used for updating structures of [`bdk_chain`] with data from an esplora server. +//! +//! The star of the show is the [`EsploraExt::scan`] method which scans for relevant +//! blockchain data (via esplora) and outputs a [`KeychainScan`]. + +use bdk_chain::{ + bitcoin::{BlockHash, OutPoint, Script, Txid}, + chain_graph::ChainGraph, + keychain::KeychainScan, + sparse_chain, BlockId, ConfirmationTime, +}; +use esplora_client::{OutputStatus, TxStatus}; +use std::collections::BTreeMap; + +pub use esplora_client; +use esplora_client::Error; + +/// Trait to extend [`esplora_client::BlockingClient`] functionality. +/// +/// Refer to [crate-level documentation] for more. +/// +/// [crate-level documentation]: crate +pub trait EsploraExt { + /// Scan the blockchain (via esplora) for the data specified and returns a [`KeychainScan`]. + /// + /// - `local_chain`: the most recent block hashes present locally + /// - `keychain_spks`: keychains that we want to scan transactions for + /// - `txids`: transactions that we want updated [`ChainPosition`]s for + /// - `outpoints`: transactions associated with these outpoints (residing, spending) that we + /// want to included in the update + /// + /// The scan for each keychain stops after a gap of `stop_gap` script pubkeys with no associated + /// transactions. `parallel_requests` specifies the max number of HTTP requests to make in + /// parallel. + /// + /// [`ChainPosition`]: bdk_chain::sparse_chain::ChainPosition + fn scan( + &self, + local_chain: &BTreeMap, + keychain_spks: BTreeMap>, + txids: impl IntoIterator, + outpoints: impl IntoIterator, + stop_gap: usize, + parallel_requests: usize, + ) -> Result, Error>; + + /// Convenience method to call [`scan`] without requiring a keychain. 
+ /// + /// [`scan`]: EsploraExt::scan + fn scan_without_keychain( + &self, + local_chain: &BTreeMap, + misc_spks: impl IntoIterator, + txids: impl IntoIterator, + outpoints: impl IntoIterator, + parallel_requests: usize, + ) -> Result, Error> { + let wallet_scan = self.scan( + local_chain, + [( + (), + misc_spks + .into_iter() + .enumerate() + .map(|(i, spk)| (i as u32, spk)), + )] + .into(), + txids, + outpoints, + usize::MAX, + parallel_requests, + )?; + + Ok(wallet_scan.update) + } +} + +impl EsploraExt for esplora_client::BlockingClient { + fn scan( + &self, + local_chain: &BTreeMap, + keychain_spks: BTreeMap>, + txids: impl IntoIterator, + outpoints: impl IntoIterator, + stop_gap: usize, + parallel_requests: usize, + ) -> Result, Error> { + let parallel_requests = parallel_requests.max(1); + let mut scan = KeychainScan::default(); + let update = &mut scan.update; + let last_active_indices = &mut scan.last_active_indices; + + for (&height, &original_hash) in local_chain.iter().rev() { + let update_block_id = BlockId { + height, + hash: self.get_block_hash(height)?, + }; + let _ = update + .insert_checkpoint(update_block_id) + .expect("cannot repeat height here"); + if update_block_id.hash == original_hash { + break; + } + } + let tip_at_start = BlockId { + height: self.get_height()?, + hash: self.get_tip_hash()?, + }; + if let Err(failure) = update.insert_checkpoint(tip_at_start) { + match failure { + sparse_chain::InsertCheckpointError::HashNotMatching { .. } => { + // there has been a re-org before we started scanning. We haven't consumed any iterators so it's safe to recursively call. + return EsploraExt::scan( + self, + local_chain, + keychain_spks, + txids, + outpoints, + stop_gap, + parallel_requests, + ); + } + } + } + + for (keychain, spks) in keychain_spks { + let mut spks = spks.into_iter(); + let mut last_active_index = None; + let mut empty_scripts = 0; + + loop { + let handles = (0..parallel_requests) + .filter_map( + |_| -> Option< + std::thread::JoinHandle), _>>, + > { + let (index, script) = spks.next()?; + let client = self.clone(); + Some(std::thread::spawn(move || { + let mut related_txs = client.scripthash_txs(&script, None)?; + + let n_confirmed = + related_txs.iter().filter(|tx| tx.status.confirmed).count(); + // esplora pages on 25 confirmed transactions. If there's 25 or more we + // keep requesting to see if there's more. + if n_confirmed >= 25 { + loop { + let new_related_txs = client.scripthash_txs( + &script, + Some(related_txs.last().unwrap().txid), + )?; + let n = new_related_txs.len(); + related_txs.extend(new_related_txs); + // we've reached the end + if n < 25 { + break; + } + } + } + + Result::<_, esplora_client::Error>::Ok((index, related_txs)) + })) + }, + ) + .collect::>(); + + let n_handles = handles.len(); + + for handle in handles { + let (index, related_txs) = handle.join().unwrap()?; // TODO: don't unwrap + if related_txs.is_empty() { + empty_scripts += 1; + } else { + last_active_index = Some(index); + empty_scripts = 0; + } + for tx in related_txs { + let confirmation_time = + map_confirmation_time(&tx.status, tip_at_start.height); + + if let Err(failure) = update.insert_tx(tx.to_tx(), confirmation_time) { + use bdk_chain::{ + chain_graph::InsertTxError, sparse_chain::InsertTxError::*, + }; + match failure { + InsertTxError::Chain(TxTooHigh { .. }) => { + unreachable!("chain position already checked earlier") + } + InsertTxError::Chain(TxMovedUnexpectedly { .. }) + | InsertTxError::UnresolvableConflict(_) => { + /* implies reorg during scan. 
We deal with that below */ + } + } + } + } + } + + if n_handles == 0 || empty_scripts >= stop_gap { + break; + } + } + + if let Some(last_active_index) = last_active_index { + last_active_indices.insert(keychain, last_active_index); + } + } + + for txid in txids.into_iter() { + let (tx, tx_status) = match (self.get_tx(&txid)?, self.get_tx_status(&txid)?) { + (Some(tx), Some(tx_status)) => (tx, tx_status), + _ => continue, + }; + + let confirmation_time = map_confirmation_time(&tx_status, tip_at_start.height); + + if let Err(failure) = update.insert_tx(tx, confirmation_time) { + use bdk_chain::{chain_graph::InsertTxError, sparse_chain::InsertTxError::*}; + match failure { + InsertTxError::Chain(TxTooHigh { .. }) => { + unreachable!("chain position already checked earlier") + } + InsertTxError::Chain(TxMovedUnexpectedly { .. }) + | InsertTxError::UnresolvableConflict(_) => { + /* implies reorg during scan. We deal with that below */ + } + } + } + } + + for op in outpoints.into_iter() { + let mut op_txs = Vec::with_capacity(2); + if let (Some(tx), Some(tx_status)) = + (self.get_tx(&op.txid)?, self.get_tx_status(&op.txid)?) + { + op_txs.push((tx, tx_status)); + if let Some(OutputStatus { + txid: Some(txid), + status: Some(spend_status), + .. + }) = self.get_output_status(&op.txid, op.vout as _)? + { + if let Some(spend_tx) = self.get_tx(&txid)? { + op_txs.push((spend_tx, spend_status)); + } + } + } + + for (tx, status) in op_txs { + let confirmation_time = map_confirmation_time(&status, tip_at_start.height); + + if let Err(failure) = update.insert_tx(tx, confirmation_time) { + use bdk_chain::{chain_graph::InsertTxError, sparse_chain::InsertTxError::*}; + match failure { + InsertTxError::Chain(TxTooHigh { .. }) => { + unreachable!("chain position already checked earlier") + } + InsertTxError::Chain(TxMovedUnexpectedly { .. }) + | InsertTxError::UnresolvableConflict(_) => { + /* implies reorg during scan. We deal with that below */ + } + } + } + } + } + + let reorg_occurred = { + if let Some(checkpoint) = update.chain().latest_checkpoint() { + self.get_block_hash(checkpoint.height)? != checkpoint.hash + } else { + false + } + }; + + if reorg_occurred { + // A reorg occurred so lets find out where all the txids we found are in the chain now. 
+ // XXX: collect required because of weird type naming issues + let txids_found = update + .chain() + .txids() + .map(|(_, txid)| *txid) + .collect::>(); + scan.update = EsploraExt::scan_without_keychain( + self, + local_chain, + [], + txids_found, + [], + parallel_requests, + )?; + } + + Ok(scan) + } +} + +fn map_confirmation_time(tx_status: &TxStatus, height_at_start: u32) -> ConfirmationTime { + match (tx_status.block_time, tx_status.block_height) { + (Some(time), Some(height)) if height <= height_at_start => { + ConfirmationTime::Confirmed { height, time } + } + _ => ConfirmationTime::Unconfirmed, + } +} diff --git a/crates/file_store/Cargo.toml b/crates/file_store/Cargo.toml index 1670cc09..2a535825 100644 --- a/crates/file_store/Cargo.toml +++ b/crates/file_store/Cargo.toml @@ -5,6 +5,9 @@ edition = "2021" license = "MIT OR Apache-2.0" [dependencies] -bdk_chain = { path = "../bdk_chain", version = "0.3", features = [ "serde", "miniscript" ] } +bdk_chain = { path = "../chain", version = "0.3", features = [ "serde", "miniscript" ] } bincode = { version = "2.0.0-rc.2", features = [ "serde" ] } serde = { version = "1", features = ["derive"] } + +[dev-dependencies] +tempfile = "3" diff --git a/crates/file_store/tests/test_file_store.rs b/crates/file_store/tests/test_file_store.rs index 5230c097..5842c3fd 100644 --- a/crates/file_store/tests/test_file_store.rs +++ b/crates/file_store/tests/test_file_store.rs @@ -89,16 +89,16 @@ fn new_fails_if_file_is_too_short() { #[test] fn new_fails_if_magic_bytes_are_invalid() { - let invalid_magic_mnemonic = "ldkfs0000000"; + let invalid_magic_bytes = "ldkfs0000000"; let path = TempPath::new(); path.open() - .write_all(invalid_magic_mnemonic.as_bytes()) + .write_all(invalid_magic_bytes.as_bytes()) .expect("should write"); match KeychainStore::::new(path.open()) { Err(FileError::InvalidMagicBytes(b)) => { - assert_eq!(b, invalid_magic_mnemonic.as_bytes()) + assert_eq!(b, invalid_magic_bytes.as_bytes()) } unexpected => panic!("unexpected result: {:?}", unexpected), }; diff --git a/example-crates/keychain_tracker_electrum_example/.gitignore b/example-crates/keychain_tracker_electrum_example/.gitignore new file mode 100644 index 00000000..ea8c4bf7 --- /dev/null +++ b/example-crates/keychain_tracker_electrum_example/.gitignore @@ -0,0 +1 @@ +/target diff --git a/example-crates/keychain_tracker_electrum_example/Cargo.toml b/example-crates/keychain_tracker_electrum_example/Cargo.toml new file mode 100644 index 00000000..4eceaa70 --- /dev/null +++ b/example-crates/keychain_tracker_electrum_example/Cargo.toml @@ -0,0 +1,9 @@ +[package] +name = "keychain_tracker_electrum_example" +version = "0.1.0" +edition = "2021" + +[dependencies] +bdk_chain = { path = "../../crates/chain", version = "0.3", features = ["serde"] } +bdk_electrum = { path = "../../crates/electrum" } +keychain_tracker_example_cli = { path = "../keychain_tracker_example_cli"} diff --git a/example-crates/keychain_tracker_electrum_example/README.md b/example-crates/keychain_tracker_electrum_example/README.md new file mode 100644 index 00000000..b8bdea21 --- /dev/null +++ b/example-crates/keychain_tracker_electrum_example/README.md @@ -0,0 +1,6 @@ +# Keychain Tracker with electrum + +This example shows how you use the `KeychainTracker` from `bdk_chain` to create a simple command +line wallet. 
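
Both this example and the esplora one added later in this patch share the same structure: take a short lock on the tracker to copy out what the scan needs, scan the chain source without the lock, then re-lock briefly to persist and apply the result. Below is a condensed sketch of that pattern using the esplora client (the simpler of the two); the `Keychain` type comes from `keychain_tracker_example_cli`, while the helper name `scan_and_apply`, the stop gap of 5 and the 4 parallel requests are illustrative assumptions only:

use bdk_chain::{keychain::KeychainTracker, ConfirmationTime};
use bdk_esplora::{esplora_client::BlockingClient, EsploraExt};
use bdk_file_store::KeychainStore;
use keychain_tracker_example_cli::{anyhow, Keychain};
use std::sync::Mutex;

/// Hypothetical helper: scan via esplora, then persist and apply the resulting changeset.
fn scan_and_apply(
    client: &BlockingClient,
    tracker: &Mutex<KeychainTracker<Keychain, ConfirmationTime>>,
    db: &Mutex<KeychainStore<Keychain, ConfirmationTime>>,
) -> anyhow::Result<()> {
    // Short lock: copy out the script pubkey iterators and the local chain state.
    let (keychain_spks, local_chain) = {
        let tracker = tracker.lock().unwrap();
        (
            tracker.txout_index.spks_of_all_keychains(),
            tracker.chain().checkpoints().clone(),
        )
    };

    // Scan the blockchain *without* holding the tracker lock
    // (stop gap of 5, up to 4 parallel requests).
    let keychain_scan = client.scan(&local_chain, keychain_spks, [], [], 5, 4)?;

    // Re-lock briefly to apply the update and persist the changeset.
    let mut tracker = tracker.lock().unwrap();
    let changeset = tracker.apply_update(keychain_scan)?;
    db.lock().unwrap().append_changeset(&changeset)?;
    Ok(())
}

The design point, also called out in the comments of the example sources below, is that the tracker `Mutex` is only held for the short copy-out and apply phases, never across network I/O.
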
+ + diff --git a/example-crates/keychain_tracker_electrum_example/src/main.rs b/example-crates/keychain_tracker_electrum_example/src/main.rs new file mode 100644 index 00000000..0fe27fd0 --- /dev/null +++ b/example-crates/keychain_tracker_electrum_example/src/main.rs @@ -0,0 +1,248 @@ +use bdk_chain::bitcoin::{Address, OutPoint, Txid}; +use bdk_electrum::bdk_chain::{self, bitcoin::Network, TxHeight}; +use bdk_electrum::{ + electrum_client::{self, ElectrumApi}, + ElectrumExt, ElectrumUpdate, +}; +use keychain_tracker_example_cli::{ + self as cli, + anyhow::{self, Context}, + clap::{self, Parser, Subcommand}, +}; +use std::{collections::BTreeMap, fmt::Debug, io, io::Write}; + +#[derive(Subcommand, Debug, Clone)] +enum ElectrumCommands { + /// Scans the addresses in the wallet using esplora API. + Scan { + /// When a gap this large has been found for a keychain it will stop. + #[clap(long, default_value = "5")] + stop_gap: usize, + #[clap(flatten)] + scan_options: ScanOptions, + }, + /// Scans particular addresses using esplora API + Sync { + /// Scan all the unused addresses + #[clap(long)] + unused_spks: bool, + /// Scan every address that you have derived + #[clap(long)] + all_spks: bool, + /// Scan unspent outpoints for spends or changes to confirmation status of residing tx + #[clap(long)] + utxos: bool, + /// Scan unconfirmed transactions for updates + #[clap(long)] + unconfirmed: bool, + #[clap(flatten)] + scan_options: ScanOptions, + }, +} + +#[derive(Parser, Debug, Clone, PartialEq)] +pub struct ScanOptions { + /// Set batch size for each script_history call to electrum client + #[clap(long, default_value = "25")] + pub batch_size: usize, +} + +fn main() -> anyhow::Result<()> { + let (args, keymap, mut tracker, mut db) = cli::init::()?; + + let electrum_url = match args.network { + Network::Bitcoin => "ssl://electrum.blockstream.info:50002", + Network::Testnet => "ssl://electrum.blockstream.info:60002", + Network::Regtest => "tcp://localhost:60401", + Network::Signet => "tcp://signet-electrumx.wakiyamap.dev:50001", + }; + let config = electrum_client::Config::builder() + .validate_domain(match args.network { + Network::Bitcoin => true, + _ => false, + }) + .build(); + + let client = electrum_client::Client::from_config(electrum_url, config)?; + + let electrum_cmd = match args.command { + cli::Commands::ChainSpecific(electrum_cmd) => electrum_cmd, + general_command => { + return cli::handle_commands( + general_command, + |transaction| { + let _txid = client.transaction_broadcast(transaction)?; + Ok(()) + }, + &mut tracker, + &mut db, + args.network, + &keymap, + ) + } + }; + + let response = match electrum_cmd { + ElectrumCommands::Scan { + stop_gap, + scan_options: scan_option, + } => { + let (spk_iterators, local_chain) = { + // Get a short lock on the tracker to get the spks iterators + // and local chain state + let tracker = &*tracker.lock().unwrap(); + let spk_iterators = tracker + .txout_index + .spks_of_all_keychains() + .into_iter() + .map(|(keychain, iter)| { + let mut first = true; + let spk_iter = iter.inspect(move |(i, _)| { + if first { + eprint!("\nscanning {}: ", keychain); + first = false; + } + + eprint!("{} ", i); + let _ = io::stdout().flush(); + }); + (keychain, spk_iter) + }) + .collect::>(); + let local_chain = tracker.chain().checkpoints().clone(); + (spk_iterators, local_chain) + }; + + // we scan the spks **without** a lock on the tracker + client.scan( + &local_chain, + spk_iterators, + core::iter::empty(), + core::iter::empty(), + stop_gap, + 
scan_option.batch_size, + )? + } + ElectrumCommands::Sync { + mut unused_spks, + mut utxos, + mut unconfirmed, + all_spks, + scan_options, + } => { + // Get a short lock on the tracker to get the spks we're interested in + let tracker = tracker.lock().unwrap(); + + if !(all_spks || unused_spks || utxos || unconfirmed) { + unused_spks = true; + unconfirmed = true; + utxos = true; + } else if all_spks { + unused_spks = false; + } + + let mut spks: Box> = + Box::new(core::iter::empty()); + if all_spks { + let all_spks = tracker + .txout_index + .all_spks() + .iter() + .map(|(k, v)| (k.clone(), v.clone())) + .collect::>(); + spks = Box::new(spks.chain(all_spks.into_iter().map(|(index, script)| { + eprintln!("scanning {:?}", index); + script + }))); + } + if unused_spks { + let unused_spks = tracker + .txout_index + .unused_spks(..) + .map(|(k, v)| (k.clone(), v.clone())) + .collect::>(); + spks = Box::new(spks.chain(unused_spks.into_iter().map(|(index, script)| { + eprintln!( + "Checking if address {} {:?} has been used", + Address::from_script(&script, args.network).unwrap(), + index + ); + + script + }))); + } + + let mut outpoints: Box> = Box::new(core::iter::empty()); + + if utxos { + let utxos = tracker + .full_utxos() + .map(|(_, utxo)| utxo) + .collect::>(); + outpoints = Box::new( + utxos + .into_iter() + .inspect(|utxo| { + eprintln!( + "Checking if outpoint {} (value: {}) has been spent", + utxo.outpoint, utxo.txout.value + ); + }) + .map(|utxo| utxo.outpoint), + ); + }; + + let mut txids: Box> = Box::new(core::iter::empty()); + + if unconfirmed { + let unconfirmed_txids = tracker + .chain() + .range_txids_by_height(TxHeight::Unconfirmed..) + .map(|(_, txid)| *txid) + .collect::>(); + + txids = Box::new(unconfirmed_txids.into_iter().inspect(|txid| { + eprintln!("Checking if {} is confirmed yet", txid); + })); + } + + let local_chain = tracker.chain().checkpoints().clone(); + // drop lock on tracker + drop(tracker); + + // we scan the spks **without** a lock on the tracker + ElectrumUpdate { + chain_update: client + .scan_without_keychain( + &local_chain, + spks, + txids, + outpoints, + scan_options.batch_size, + ) + .context("scanning the blockchain")?, + ..Default::default() + } + } + }; + + let missing_txids = response.missing_full_txs(&*tracker.lock().unwrap()); + + // fetch the missing full transactions **without** a lock on the tracker + let new_txs = client + .batch_transaction_get(missing_txids) + .context("fetching full transactions")?; + + { + // Get a final short lock to apply the changes + let mut tracker = tracker.lock().unwrap(); + let changeset = { + let scan = response.into_keychain_scan(new_txs, &*tracker)?; + tracker.determine_changeset(&scan)? 
+ }; + db.lock().unwrap().append_changeset(&changeset)?; + tracker.apply_changeset(changeset); + }; + + Ok(()) +} diff --git a/example-crates/keychain_tracker_esplora_example/.gitignore b/example-crates/keychain_tracker_esplora_example/.gitignore new file mode 100644 index 00000000..8359723a --- /dev/null +++ b/example-crates/keychain_tracker_esplora_example/.gitignore @@ -0,0 +1,3 @@ +/target +Cargo.lock +.bdk_example_db diff --git a/example-crates/keychain_tracker_esplora_example/Cargo.toml b/example-crates/keychain_tracker_esplora_example/Cargo.toml new file mode 100644 index 00000000..57e9d9c0 --- /dev/null +++ b/example-crates/keychain_tracker_esplora_example/Cargo.toml @@ -0,0 +1,11 @@ +[package] +name = "keychain_tracker_esplora_example" +version = "0.1.0" +edition = "2021" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +bdk_chain = { path = "../../crates/chain", version = "0.3", features = ["serde", "miniscript"] } +bdk_esplora = { path = "../../crates/esplora" } +keychain_tracker_example_cli = { path = "../keychain_tracker_example_cli" } diff --git a/example-crates/keychain_tracker_esplora_example/src/main.rs b/example-crates/keychain_tracker_esplora_example/src/main.rs new file mode 100644 index 00000000..fba4a3b4 --- /dev/null +++ b/example-crates/keychain_tracker_esplora_example/src/main.rs @@ -0,0 +1,241 @@ +use bdk_chain::bitcoin::{Address, OutPoint, Txid}; +use bdk_chain::{bitcoin::Network, TxHeight}; +use bdk_esplora::esplora_client; +use bdk_esplora::EsploraExt; + +use std::io::{self, Write}; + +use keychain_tracker_example_cli::{ + self as cli, + anyhow::{self, Context}, + clap::{self, Parser, Subcommand}, +}; + +#[derive(Subcommand, Debug, Clone)] +enum EsploraCommands { + /// Scans the addresses in the wallet using esplora API. + Scan { + /// When a gap this large has been found for a keychain it will stop. 
+ #[clap(long, default_value = "5")] + stop_gap: usize, + + #[clap(flatten)] + scan_options: ScanOptions, + }, + /// Scans particular addresses using esplora API + Sync { + /// Scan all the unused addresses + #[clap(long)] + unused_spks: bool, + /// Scan every address that you have derived + #[clap(long)] + all_spks: bool, + /// Scan unspent outpoints for spends or changes to confirmation status of residing tx + #[clap(long)] + utxos: bool, + /// Scan unconfirmed transactions for updates + #[clap(long)] + unconfirmed: bool, + + #[clap(flatten)] + scan_options: ScanOptions, + }, +} + +#[derive(Parser, Debug, Clone, PartialEq)] +pub struct ScanOptions { + #[clap(long, default_value = "5")] + pub parallel_requests: usize, +} + +fn main() -> anyhow::Result<()> { + let (args, keymap, keychain_tracker, db) = cli::init::()?; + let esplora_url = match args.network { + Network::Bitcoin => "https://mempool.space/api", + Network::Testnet => "https://mempool.space/testnet/api", + Network::Regtest => "http://localhost:3002", + Network::Signet => "https://mempool.space/signet/api", + }; + + let client = esplora_client::Builder::new(esplora_url).build_blocking()?; + + let esplora_cmd = match args.command { + cli::Commands::ChainSpecific(esplora_cmd) => esplora_cmd, + general_command => { + return cli::handle_commands( + general_command, + |transaction| Ok(client.broadcast(transaction)?), + &keychain_tracker, + &db, + args.network, + &keymap, + ) + } + }; + + match esplora_cmd { + EsploraCommands::Scan { + stop_gap, + scan_options, + } => { + let (spk_iterators, local_chain) = { + // Get a short lock on the tracker to get the spks iterators + // and local chain state + let tracker = &*keychain_tracker.lock().unwrap(); + let spk_iterators = tracker + .txout_index + .spks_of_all_keychains() + .into_iter() + .map(|(keychain, iter)| { + let mut first = true; + ( + keychain, + iter.inspect(move |(i, _)| { + if first { + eprint!("\nscanning {}: ", keychain); + first = false; + } + + eprint!("{} ", i); + let _ = io::stdout().flush(); + }), + ) + }) + .collect(); + + let local_chain = tracker.chain().checkpoints().clone(); + (spk_iterators, local_chain) + }; + + // we scan the iterators **without** a lock on the tracker + let wallet_scan = client + .scan( + &local_chain, + spk_iterators, + core::iter::empty(), + core::iter::empty(), + stop_gap, + scan_options.parallel_requests, + ) + .context("scanning the blockchain")?; + eprintln!(); + + { + // we take a short lock to apply results to tracker and db + let tracker = &mut *keychain_tracker.lock().unwrap(); + let db = &mut *db.lock().unwrap(); + let changeset = tracker.apply_update(wallet_scan)?; + db.append_changeset(&changeset)?; + } + } + EsploraCommands::Sync { + mut unused_spks, + mut utxos, + mut unconfirmed, + all_spks, + scan_options, + } => { + // Get a short lock on the tracker to get the spks we're interested in + let tracker = keychain_tracker.lock().unwrap(); + + if !(all_spks || unused_spks || utxos || unconfirmed) { + unused_spks = true; + unconfirmed = true; + utxos = true; + } else if all_spks { + unused_spks = false; + } + + let mut spks: Box> = + Box::new(core::iter::empty()); + if all_spks { + let all_spks = tracker + .txout_index + .all_spks() + .iter() + .map(|(k, v)| (k.clone(), v.clone())) + .collect::>(); + spks = Box::new(spks.chain(all_spks.into_iter().map(|(index, script)| { + eprintln!("scanning {:?}", index); + script + }))); + } + if unused_spks { + let unused_spks = tracker + .txout_index + .unused_spks(..) 
+ .map(|(k, v)| (k.clone(), v.clone())) + .collect::>(); + spks = Box::new(spks.chain(unused_spks.into_iter().map(|(index, script)| { + eprintln!( + "Checking if address {} {:?} has been used", + Address::from_script(&script, args.network).unwrap(), + index + ); + + script + }))); + } + + let mut outpoints: Box> = Box::new(core::iter::empty()); + + if utxos { + let utxos = tracker + .full_utxos() + .map(|(_, utxo)| utxo) + .collect::>(); + outpoints = Box::new( + utxos + .into_iter() + .inspect(|utxo| { + eprintln!( + "Checking if outpoint {} (value: {}) has been spent", + utxo.outpoint, utxo.txout.value + ); + }) + .map(|utxo| utxo.outpoint), + ); + }; + + let mut txids: Box> = Box::new(core::iter::empty()); + + if unconfirmed { + let unconfirmed_txids = tracker + .chain() + .range_txids_by_height(TxHeight::Unconfirmed..) + .map(|(_, txid)| *txid) + .collect::>(); + + txids = Box::new(unconfirmed_txids.into_iter().inspect(|txid| { + eprintln!("Checking if {} is confirmed yet", txid); + })); + } + + let local_chain = tracker.chain().checkpoints().clone(); + + // drop lock on tracker + drop(tracker); + + // we scan the desired spks **without** a lock on the tracker + let scan = client + .scan_without_keychain( + &local_chain, + spks, + txids, + outpoints, + scan_options.parallel_requests, + ) + .context("scanning the blockchain")?; + + { + // we take a short lock to apply the results to the tracker and db + let tracker = &mut *keychain_tracker.lock().unwrap(); + let changeset = tracker.apply_update(scan.into())?; + let db = &mut *db.lock().unwrap(); + db.append_changeset(&changeset)?; + } + } + } + + Ok(()) +} diff --git a/example-crates/keychain_tracker_example_cli/.gitignore b/example-crates/keychain_tracker_example_cli/.gitignore new file mode 100644 index 00000000..ea8c4bf7 --- /dev/null +++ b/example-crates/keychain_tracker_example_cli/.gitignore @@ -0,0 +1 @@ +/target diff --git a/example-crates/keychain_tracker_example_cli/Cargo.toml b/example-crates/keychain_tracker_example_cli/Cargo.toml new file mode 100644 index 00000000..e2565e48 --- /dev/null +++ b/example-crates/keychain_tracker_example_cli/Cargo.toml @@ -0,0 +1,16 @@ +[package] +name = "keychain_tracker_example_cli" +version = "0.1.0" +edition = "2021" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html +[dependencies] +bdk_chain = { path = "../../crates/chain", version = "0.3", features = ["serde", "miniscript"]} +bdk_file_store = { path = "../../crates/file_store" } +bdk_tmp_plan = { path = "../../nursery/tmp_plan" } +bdk_coin_select = { path = "../../nursery/coin_select" } + +clap = { version = "4", features = ["derive", "env"] } +anyhow = "1" +serde = { version = "1", features = ["derive"] } +serde_json = { version = "^1.0" } diff --git a/example-crates/keychain_tracker_example_cli/README.md b/example-crates/keychain_tracker_example_cli/README.md new file mode 100644 index 00000000..1d9370d1 --- /dev/null +++ b/example-crates/keychain_tracker_example_cli/README.md @@ -0,0 +1 @@ +Provides common command line processing logic between examples using the `KeychainTracker` diff --git a/example-crates/keychain_tracker_example_cli/src/lib.rs b/example-crates/keychain_tracker_example_cli/src/lib.rs new file mode 100644 index 00000000..019abc8e --- /dev/null +++ b/example-crates/keychain_tracker_example_cli/src/lib.rs @@ -0,0 +1,688 @@ +pub extern crate anyhow; +use anyhow::{anyhow, Context, Result}; +use bdk_chain::{ + bitcoin::{ + secp256k1::Secp256k1, + util::sighash::{Prevouts, 
SighashCache}, + Address, LockTime, Network, Sequence, Transaction, TxIn, TxOut, + }, + chain_graph::InsertTxError, + keychain::{DerivationAdditions, KeychainChangeSet, KeychainTracker}, + miniscript::{ + descriptor::{DescriptorSecretKey, KeyMap}, + Descriptor, DescriptorPublicKey, + }, + sparse_chain::{self, ChainPosition}, + DescriptorExt, FullTxOut, +}; +use bdk_coin_select::{coin_select_bnb, CoinSelector, CoinSelectorOpt, WeightedValue}; +use bdk_file_store::KeychainStore; +use clap::{Parser, Subcommand}; +use std::{ + cmp::Reverse, collections::HashMap, fmt::Debug, path::PathBuf, sync::Mutex, time::Duration, +}; + +pub use bdk_file_store; +pub use clap; + +#[derive(Parser)] +#[clap(author, version, about, long_about = None)] +#[clap(propagate_version = true)] +pub struct Args { + #[clap(env = "DESCRIPTOR")] + pub descriptor: String, + #[clap(env = "CHANGE_DESCRIPTOR")] + pub change_descriptor: Option, + + #[clap(env = "BITCOIN_NETWORK", long, default_value = "signet")] + pub network: Network, + + #[clap(env = "BDK_DB_PATH", long, default_value = ".bdk_example_db")] + pub db_path: PathBuf, + + #[clap(env = "BDK_CP_LIMIT", long, default_value = "20")] + pub cp_limit: usize, + + #[clap(subcommand)] + pub command: Commands, +} + +#[derive(Subcommand, Debug, Clone)] +pub enum Commands { + #[clap(flatten)] + ChainSpecific(C), + /// Address generation and inspection + Address { + #[clap(subcommand)] + addr_cmd: AddressCmd, + }, + /// Get the wallet balance + Balance, + /// TxOut related commands + #[clap(name = "txout")] + TxOut { + #[clap(subcommand)] + txout_cmd: TxOutCmd, + }, + /// Send coins to an address + Send { + value: u64, + address: Address, + #[clap(short, default_value = "largest-first")] + coin_select: CoinSelectionAlgo, + }, +} + +#[derive(Clone, Debug)] +pub enum CoinSelectionAlgo { + LargestFirst, + SmallestFirst, + OldestFirst, + NewestFirst, + BranchAndBound, +} + +impl Default for CoinSelectionAlgo { + fn default() -> Self { + Self::LargestFirst + } +} + +impl core::str::FromStr for CoinSelectionAlgo { + type Err = anyhow::Error; + + fn from_str(s: &str) -> Result { + use CoinSelectionAlgo::*; + Ok(match s { + "largest-first" => LargestFirst, + "smallest-first" => SmallestFirst, + "oldest-first" => OldestFirst, + "newest-first" => NewestFirst, + "bnb" => BranchAndBound, + unknown => return Err(anyhow!("unknown coin selection algorithm '{}'", unknown)), + }) + } +} + +impl core::fmt::Display for CoinSelectionAlgo { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + use CoinSelectionAlgo::*; + write!( + f, + "{}", + match self { + LargestFirst => "largest-first", + SmallestFirst => "smallest-first", + OldestFirst => "oldest-first", + NewestFirst => "newest-first", + BranchAndBound => "bnb", + } + ) + } +} + +#[derive(Subcommand, Debug, Clone)] +pub enum AddressCmd { + /// Get the next unused address + Next, + /// Get a new address regardless if the existing ones haven't been used + New, + /// List all addresses + List { + #[clap(long)] + change: bool, + }, + Index, +} + +#[derive(Subcommand, Debug, Clone)] +pub enum TxOutCmd { + List { + /// Return only spent outputs + #[clap(short, long)] + spent: bool, + /// Return only unspent outputs + #[clap(short, long)] + unspent: bool, + /// Return only confirmed outputs + #[clap(long)] + confirmed: bool, + /// Return only unconfirmed outputs + #[clap(long)] + unconfirmed: bool, + }, +} + +#[derive( + Debug, Clone, Copy, PartialOrd, Ord, PartialEq, Eq, serde::Deserialize, serde::Serialize, +)] +pub enum 
Keychain { + External, + Internal, +} + +impl core::fmt::Display for Keychain { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Keychain::External => write!(f, "external"), + Keychain::Internal => write!(f, "internal"), + } + } +} + +/// A structure defining output of a AddressCmd execution. +#[derive(serde::Serialize, serde::Deserialize)] +pub struct AddrsOutput { + keychain: String, + index: u32, + addrs: Address, + used: bool, +} + +pub fn run_address_cmd
<P>
( + tracker: &Mutex>, + db: &Mutex>, + addr_cmd: AddressCmd, + network: Network, +) -> Result<()> +where + P: bdk_chain::sparse_chain::ChainPosition, + KeychainChangeSet: serde::Serialize + serde::de::DeserializeOwned, +{ + let mut tracker = tracker.lock().unwrap(); + let txout_index = &mut tracker.txout_index; + + let addr_cmmd_output = match addr_cmd { + AddressCmd::Next => Some(txout_index.next_unused_spk(&Keychain::External)), + AddressCmd::New => Some(txout_index.reveal_next_spk(&Keychain::External)), + _ => None, + }; + + if let Some(((index, spk), additions)) = addr_cmmd_output { + let mut db = db.lock().unwrap(); + // update database since we're about to give out a new address + db.append_changeset(&additions.into())?; + + let spk = spk.clone(); + let address = + Address::from_script(&spk, network).expect("should always be able to derive address"); + eprintln!("This is the address at index {}", index); + println!("{}", address); + } + + match addr_cmd { + AddressCmd::Next | AddressCmd::New => { + /* covered */ + Ok(()) + } + AddressCmd::Index => { + for (keychain, derivation_index) in txout_index.last_revealed_indices() { + println!("{:?}: {}", keychain, derivation_index); + } + Ok(()) + } + AddressCmd::List { change } => { + let target_keychain = match change { + true => Keychain::Internal, + false => Keychain::External, + }; + for (index, spk) in txout_index.revealed_spks_of_keychain(&target_keychain) { + let address = Address::from_script(&spk, network) + .expect("should always be able to derive address"); + println!( + "{:?} {} used:{}", + index, + address, + txout_index.is_used(&(target_keychain, index)) + ); + } + Ok(()) + } + } +} + +pub fn run_balance_cmd(tracker: &Mutex>) { + let tracker = tracker.lock().unwrap(); + let (confirmed, unconfirmed) = + tracker + .full_utxos() + .fold((0, 0), |(confirmed, unconfirmed), (_, utxo)| { + if utxo.chain_position.height().is_confirmed() { + (confirmed + utxo.txout.value, unconfirmed) + } else { + (confirmed, unconfirmed + utxo.txout.value) + } + }); + + println!("confirmed: {}", confirmed); + println!("unconfirmed: {}", unconfirmed); +} + +pub fn run_txo_cmd( + txout_cmd: TxOutCmd, + tracker: &Mutex>, + network: Network, +) { + match txout_cmd { + TxOutCmd::List { + unspent, + spent, + confirmed, + unconfirmed, + } => { + let tracker = tracker.lock().unwrap(); + let txouts: Box)>> = match (unspent, spent) + { + (true, false) => Box::new(tracker.full_utxos()), + (false, true) => Box::new( + tracker + .full_txouts() + .filter(|(_, txout)| txout.spent_by.is_some()), + ), + _ => Box::new(tracker.full_txouts()), + }; + + let txouts: Box)>> = + match (confirmed, unconfirmed) { + (true, false) => Box::new( + txouts.filter(|(_, txout)| txout.chain_position.height().is_confirmed()), + ), + (false, true) => Box::new( + txouts.filter(|(_, txout)| !txout.chain_position.height().is_confirmed()), + ), + _ => txouts, + }; + + for (spk_index, full_txout) in txouts { + let address = + Address::from_script(&full_txout.txout.script_pubkey, network).unwrap(); + + println!( + "{:?} {} {} {} spent:{:?}", + spk_index, + full_txout.txout.value, + full_txout.outpoint, + address, + full_txout.spent_by + ) + } + } + } +} + +pub fn create_tx( + value: u64, + address: Address, + coin_select: CoinSelectionAlgo, + keychain_tracker: &mut KeychainTracker, + keymap: &HashMap, +) -> Result<( + Transaction, + Option<(DerivationAdditions, (Keychain, u32))>, +)> { + let mut additions = DerivationAdditions::default(); + + let assets = bdk_tmp_plan::Assets { + keys: 
keymap.iter().map(|(pk, _)| pk.clone()).collect(), + ..Default::default() + }; + + // TODO use planning module + let mut candidates = planned_utxos(keychain_tracker, &assets).collect::>(); + + // apply coin selection algorithm + match coin_select { + CoinSelectionAlgo::LargestFirst => { + candidates.sort_by_key(|(_, utxo)| Reverse(utxo.txout.value)) + } + CoinSelectionAlgo::SmallestFirst => candidates.sort_by_key(|(_, utxo)| utxo.txout.value), + CoinSelectionAlgo::OldestFirst => { + candidates.sort_by_key(|(_, utxo)| utxo.chain_position.clone()) + } + CoinSelectionAlgo::NewestFirst => { + candidates.sort_by_key(|(_, utxo)| Reverse(utxo.chain_position.clone())) + } + CoinSelectionAlgo::BranchAndBound => {} + } + + // turn the txos we chose into a weight and value + let wv_candidates = candidates + .iter() + .map(|(plan, utxo)| { + WeightedValue::new( + utxo.txout.value, + plan.expected_weight() as _, + plan.witness_version().is_some(), + ) + }) + .collect(); + + let mut outputs = vec![TxOut { + value, + script_pubkey: address.script_pubkey(), + }]; + + let internal_keychain = if keychain_tracker + .txout_index + .keychains() + .get(&Keychain::Internal) + .is_some() + { + Keychain::Internal + } else { + Keychain::External + }; + + let ((change_index, change_script), change_additions) = keychain_tracker + .txout_index + .next_unused_spk(&internal_keychain); + additions.append(change_additions); + + // Clone to drop the immutable reference. + let change_script = change_script.clone(); + + let change_plan = bdk_tmp_plan::plan_satisfaction( + &keychain_tracker + .txout_index + .keychains() + .get(&internal_keychain) + .expect("must exist") + .at_derivation_index(change_index), + &assets, + ) + .expect("failed to obtain change plan"); + + let mut change_output = TxOut { + value: 0, + script_pubkey: change_script, + }; + + let cs_opts = CoinSelectorOpt { + target_feerate: 0.5, + min_drain_value: keychain_tracker + .txout_index + .keychains() + .get(&internal_keychain) + .expect("must exist") + .dust_value(), + ..CoinSelectorOpt::fund_outputs( + &outputs, + &change_output, + change_plan.expected_weight() as u32, + ) + }; + + // TODO: How can we make it easy to shuffle in order of inputs and outputs here? + // apply coin selection by saying we need to fund these outputs + let mut coin_selector = CoinSelector::new(&wv_candidates, &cs_opts); + + // just select coins in the order provided until we have enough + // only use first result (least waste) + let selection = match coin_select { + CoinSelectionAlgo::BranchAndBound => { + coin_select_bnb(Duration::from_secs(10), coin_selector.clone()) + .map_or_else(|| coin_selector.select_until_finished(), |cs| cs.finish())? 
+ } + _ => coin_selector.select_until_finished()?, + }; + let (_, selection_meta) = selection.best_strategy(); + + // get the selected utxos + let selected_txos = selection.apply_selection(&candidates).collect::>(); + + if let Some(drain_value) = selection_meta.drain_value { + change_output.value = drain_value; + // if the selection tells us to use change and the change value is sufficient we add it as an output + outputs.push(change_output) + } + + let mut transaction = Transaction { + version: 0x02, + lock_time: keychain_tracker + .chain() + .latest_checkpoint() + .and_then(|block_id| LockTime::from_height(block_id.height).ok()) + .unwrap_or(LockTime::ZERO) + .into(), + input: selected_txos + .iter() + .map(|(_, utxo)| TxIn { + previous_output: utxo.outpoint, + sequence: Sequence::ENABLE_RBF_NO_LOCKTIME, + ..Default::default() + }) + .collect(), + output: outputs, + }; + + let prevouts = selected_txos + .iter() + .map(|(_, utxo)| utxo.txout.clone()) + .collect::>(); + let sighash_prevouts = Prevouts::All(&prevouts); + + // first set tx values for plan so that we don't change them while signing + for (i, (plan, _)) in selected_txos.iter().enumerate() { + if let Some(sequence) = plan.required_sequence() { + transaction.input[i].sequence = sequence + } + } + + // create a short lived transaction + let _sighash_tx = transaction.clone(); + let mut sighash_cache = SighashCache::new(&_sighash_tx); + + for (i, (plan, _)) in selected_txos.iter().enumerate() { + let requirements = plan.requirements(); + let mut auth_data = bdk_tmp_plan::SatisfactionMaterial::default(); + assert!( + !requirements.requires_hash_preimages(), + "can't have hash pre-images since we didn't provide any" + ); + assert!( + requirements.signatures.sign_with_keymap( + i, + &keymap, + &sighash_prevouts, + None, + None, + &mut sighash_cache, + &mut auth_data, + &Secp256k1::default(), + )?, + "we should have signed with this input" + ); + + match plan.try_complete(&auth_data) { + bdk_tmp_plan::PlanState::Complete { + final_script_sig, + final_script_witness, + } => { + if let Some(witness) = final_script_witness { + transaction.input[i].witness = witness; + } + + if let Some(script_sig) = final_script_sig { + transaction.input[i].script_sig = script_sig; + } + } + bdk_tmp_plan::PlanState::Incomplete(_) => { + return Err(anyhow!( + "we weren't able to complete the plan with our keys" + )); + } + } + } + + let change_info = if selection_meta.drain_value.is_some() { + Some((additions, (internal_keychain, change_index))) + } else { + None + }; + + Ok((transaction, change_info)) +} + +pub fn handle_commands( + command: Commands, + broadcast: impl FnOnce(&Transaction) -> Result<()>, + // we Mutexes around these not because we need them for a simple CLI app but to demonsrate how + // all the stuff we're doing can be thread safe and also not keep locks up over an IO bound. 
+ tracker: &Mutex>, + store: &Mutex>, + network: Network, + keymap: &HashMap, +) -> Result<()> +where + P: ChainPosition, + KeychainChangeSet: serde::Serialize + serde::de::DeserializeOwned, +{ + match command { + // TODO: Make these functions return stuffs + Commands::Address { addr_cmd } => run_address_cmd(&tracker, &store, addr_cmd, network), + Commands::Balance => { + run_balance_cmd(&tracker); + Ok(()) + } + Commands::TxOut { txout_cmd } => { + run_txo_cmd(txout_cmd, &tracker, network); + Ok(()) + } + Commands::Send { + value, + address, + coin_select, + } => { + let (transaction, change_index) = { + // take mutable ref to construct tx -- it is only open for a short time while building it. + let tracker = &mut *tracker.lock().unwrap(); + let (transaction, change_info) = + create_tx(value, address, coin_select, tracker, &keymap)?; + + if let Some((change_derivation_changes, (change_keychain, index))) = change_info { + // We must first persist to disk the fact that we've got a new address from the + // change keychain so future scans will find the tx we're about to broadcast. + // If we're unable to persist this then we don't want to broadcast. + let store = &mut *store.lock().unwrap(); + store.append_changeset(&change_derivation_changes.into())?; + + // We don't want other callers/threads to use this address while we're using it + // but we also don't want to scan the tx we just created because it's not + // technically in the blockchain yet. + tracker.txout_index.mark_used(&change_keychain, index); + (transaction, Some((change_keychain, index))) + } else { + (transaction, None) + } + }; + + match (broadcast)(&transaction) { + Ok(_) => { + println!("Broadcasted Tx : {}", transaction.txid()); + let mut tracker = tracker.lock().unwrap(); + match tracker.insert_tx(transaction.clone(), P::unconfirmed()) { + Ok(changeset) => { + let store = &mut *store.lock().unwrap(); + // We know the tx is at least unconfirmed now. Note if persisting here + // fails it's not a big deal since we can always find it again form + // blockchain. + store.append_changeset(&changeset)?; + Ok(()) + } + Err(e) => match e { + InsertTxError::Chain(e) => match e { + // TODO: add insert_unconfirmed_tx to chain graph and sparse chain + sparse_chain::InsertTxError::TxTooHigh { .. } => unreachable!("we are inserting at unconfirmed position"), + sparse_chain::InsertTxError::TxMovedUnexpectedly { txid, original_pos, ..} => Err(anyhow!("the tx we created {} has already been confirmed at block {:?}", txid, original_pos)), + }, + InsertTxError::UnresolvableConflict(e) => Err(e).context("another tx that conflicts with the one we tried to create has been confirmed"), + } + } + } + Err(e) => { + let tracker = &mut *tracker.lock().unwrap(); + if let Some((keychain, index)) = change_index { + // We failed to broadcast so allow our change address to be used in the future + tracker.txout_index.unmark_used(&keychain, index); + } + Err(e.into()) + } + } + } + Commands::ChainSpecific(_) => { + todo!("example code is meant to handle this!") + } + } +} + +pub fn init() -> anyhow::Result<( + Args, + KeyMap, + // These don't need to have mutexes around them but we want the cli example code to make it obvious how they + // are thread safe so this forces the example developer to show where they would lock and unlock things. 
+ Mutex>, + Mutex>, +)> +where + P: sparse_chain::ChainPosition, + KeychainChangeSet: serde::Serialize + serde::de::DeserializeOwned, +{ + let args = Args::::parse(); + let secp = Secp256k1::default(); + let (descriptor, mut keymap) = + Descriptor::::parse_descriptor(&secp, &args.descriptor)?; + + let mut tracker = KeychainTracker::default(); + tracker.set_checkpoint_limit(Some(args.cp_limit)); + + tracker + .txout_index + .add_keychain(Keychain::External, descriptor); + + let internal = args + .change_descriptor + .clone() + .map(|descriptor| Descriptor::::parse_descriptor(&secp, &descriptor)) + .transpose()?; + if let Some((internal_descriptor, internal_keymap)) = internal { + keymap.extend(internal_keymap); + tracker + .txout_index + .add_keychain(Keychain::Internal, internal_descriptor); + }; + + let mut db = KeychainStore::::new_from_path(args.db_path.as_path())?; + + if let Err(e) = db.load_into_keychain_tracker(&mut tracker) { + match tracker.chain().latest_checkpoint() { + Some(checkpoint) => eprintln!("Failed to load all changesets from {}. Last checkpoint was at height {}. Error: {}", args.db_path.display(), checkpoint.height, e), + None => eprintln!("Failed to load any checkpoints from {}: {}", args.db_path.display(), e), + + } + eprintln!("âš  Consider running a rescan of chain data."); + } + + Ok((args, keymap, Mutex::new(tracker), Mutex::new(db))) +} + +pub fn planned_utxos<'a, AK: bdk_tmp_plan::CanDerive + Clone, P: ChainPosition>( + tracker: &'a KeychainTracker, + assets: &'a bdk_tmp_plan::Assets, +) -> impl Iterator, FullTxOut
<P>
)> + 'a { + tracker + .full_utxos() + .filter_map(|((keychain, derivation_index), full_txout)| { + Some(( + bdk_tmp_plan::plan_satisfaction( + &tracker + .txout_index + .keychains() + .get(keychain) + .expect("must exist since we have a utxo for it") + .at_derivation_index(*derivation_index), + assets, + )?, + full_txout, + )) + }) +} diff --git a/nursery/README.md b/nursery/README.md new file mode 100644 index 00000000..e136a20d --- /dev/null +++ b/nursery/README.md @@ -0,0 +1,5 @@ +# Bitcoin Dev Kit Nursery + +This is a directory for crates that are experimental and have not been released yet. +Keep in mind that they may never be released. +Things in `/example-crates` may use them to demonsrate how things might look in the future. diff --git a/nursery/coin_select/Cargo.toml b/nursery/coin_select/Cargo.toml new file mode 100644 index 00000000..b1350752 --- /dev/null +++ b/nursery/coin_select/Cargo.toml @@ -0,0 +1,11 @@ +[package] +name = "bdk_coin_select" +version = "0.0.1" +authors = [ "LLFourn " ] + +[dependencies] +bdk_chain = { version = "0.3", path = "../../crates/chain" } + +[features] +default = ["std"] +std = [] diff --git a/nursery/coin_select/src/bnb.rs b/nursery/coin_select/src/bnb.rs new file mode 100644 index 00000000..f9a956c3 --- /dev/null +++ b/nursery/coin_select/src/bnb.rs @@ -0,0 +1,651 @@ +use super::*; + +/// Strategy in which we should branch. +pub enum BranchStrategy { + /// We continue exploring subtrees of this node, starting with the inclusion branch. + Continue, + /// We continue exploring ONY the omission branch of this node, skipping the inclusion branch. + SkipInclusion, + /// We skip both the inclusion and omission branches of this node. + SkipBoth, +} + +impl BranchStrategy { + pub fn will_continue(&self) -> bool { + match self { + Self::Continue | Self::SkipInclusion => true, + _ => false, + } + } +} + +/// Closure to decide the branching strategy, alongside a score (if the current selection is a +/// candidate solution). +pub type DecideStrategy<'c, S> = dyn Fn(&Bnb<'c, S>) -> (BranchStrategy, Option); + +/// [`Bnb`] represents the current state of the BnB algorithm. +pub struct Bnb<'c, S> { + pub pool: Vec<(usize, &'c WeightedValue)>, + pub pool_pos: usize, + pub best_score: S, + + pub selection: CoinSelector<'c>, + pub rem_abs: u64, + pub rem_eff: i64, +} + +impl<'c, S: Ord> Bnb<'c, S> { + /// Creates a new [`Bnb`]. + pub fn new(selector: CoinSelector<'c>, pool: Vec<(usize, &'c WeightedValue)>, max: S) -> Self { + let (rem_abs, rem_eff) = pool.iter().fold((0, 0), |(abs, eff), (_, c)| { + ( + abs + c.value, + eff + c.effective_value(selector.opts.target_feerate), + ) + }); + + Self { + pool, + pool_pos: 0, + best_score: max, + selection: selector, + rem_abs, + rem_eff, + } + } + + /// Turns our [`Bnb`] state into an iterator. + /// + /// `strategy` should assess our current selection/node and determine the branching strategy and + /// whether this selection is a candidate solution (if so, return the score of the selection). + pub fn into_iter<'f>(self, strategy: &'f DecideStrategy<'c, S>) -> BnbIter<'c, 'f, S> { + BnbIter { + state: self, + done: false, + strategy, + } + } + + /// Attempt to backtrack to the previously selected node's omission branch, return false + /// otherwise (no more solutions). 
+ pub fn backtrack(&mut self) -> bool { + (0..self.pool_pos) + .rev() + .find(|&pos| { + let (index, candidate) = self.pool[pos]; + + if self.selection.is_selected(index) { + // deselect last `pos`, so next round will check omission branch + self.pool_pos = pos; + self.selection.deselect(index); + return true; + } else { + self.rem_abs += candidate.value; + self.rem_eff += candidate.effective_value(self.selection.opts.target_feerate); + return false; + } + }) + .is_some() + } + + /// Continue down this branch, skip inclusion branch if specified. + pub fn forward(&mut self, skip: bool) { + let (index, candidate) = self.pool[self.pool_pos]; + self.rem_abs -= candidate.value; + self.rem_eff -= candidate.effective_value(self.selection.opts.target_feerate); + + if !skip { + self.selection.select(index); + } + } + + /// Compare advertised score with current best. New best will be the smaller value. Return true + /// if best is replaced. + pub fn advertise_new_score(&mut self, score: S) -> bool { + if score <= self.best_score { + self.best_score = score; + return true; + } + return false; + } +} + +pub struct BnbIter<'c, 'f, S> { + state: Bnb<'c, S>, + done: bool, + + /// Check our current selection (node), and returns the branching strategy, alongside a score + /// (if the current selection is a candidate solution). + strategy: &'f DecideStrategy<'c, S>, +} + +impl<'c, 'f, S: Ord + Copy + Display> Iterator for BnbIter<'c, 'f, S> { + type Item = Option>; + + fn next(&mut self) -> Option { + if self.done { + return None; + } + + let (strategy, score) = (self.strategy)(&self.state); + + let mut found_best = Option::::None; + + if let Some(score) = score { + if self.state.advertise_new_score(score) { + found_best = Some(self.state.selection.clone()); + } + } + + debug_assert!( + !strategy.will_continue() || self.state.pool_pos < self.state.pool.len(), + "Faulty strategy implementation! Strategy suggested that we continue traversing, however we have already reached the end of the candidates pool! pool_len={}, pool_pos={}", + self.state.pool.len(), self.state.pool_pos, + ); + + match strategy { + BranchStrategy::Continue => { + self.state.forward(false); + } + BranchStrategy::SkipInclusion => { + self.state.forward(true); + } + BranchStrategy::SkipBoth => { + if !self.state.backtrack() { + self.done = true; + } + } + }; + + // increment selection pool position for next round + self.state.pool_pos += 1; + + if found_best.is_some() || !self.done { + Some(found_best) + } else { + // we have traversed all branches + None + } + } +} + +/// Determines how we should limit rounds of branch and bound. +pub enum BnbLimit { + Rounds(usize), + #[cfg(feature = "std")] + Duration(core::time::Duration), +} + +impl From for BnbLimit { + fn from(v: usize) -> Self { + Self::Rounds(v) + } +} + +#[cfg(feature = "std")] +impl From for BnbLimit { + fn from(v: core::time::Duration) -> Self { + Self::Duration(v) + } +} + +/// This is a variation of the Branch and Bound Coin Selection algorithm designed by Murch (as seen +/// in Bitcoin Core). +/// +/// The differences are as follows: +/// * In additional to working with effective values, we also work with absolute values. +/// This way, we can use bounds of absolute values to enforce `min_absolute_fee` (which is used by +/// RBF), and `max_extra_target` (which can be used to increase the possible solution set, given +/// that the sender is okay with sending extra to the receiver). 
+/// +/// Murch's Master Thesis: +/// Bitcoin Core Implementation: +/// +/// TODO: Another optimization we could do is figure out the candidate with the smallest waste, and +/// if we find a result with waste equal to that, we can just break. +pub fn coin_select_bnb<L>(limit: L, selector: CoinSelector) -> Option<CoinSelector> +where + L: Into<BnbLimit>, +{ + let opts = selector.opts; + + // prepare pool of candidates to select from: + // * filter out candidates with negative/zero effective values + // * sort candidates by descending effective value + let pool = { + let mut pool = selector + .unselected() + .filter(|(_, c)| c.effective_value(opts.target_feerate) > 0) + .collect::<Vec<_>>(); + pool.sort_unstable_by(|(_, a), (_, b)| { + let a = a.effective_value(opts.target_feerate); + let b = b.effective_value(opts.target_feerate); + b.cmp(&a) + }); + pool + }; + + let feerate_decreases = opts.target_feerate > opts.long_term_feerate(); + + let target_abs = opts.target_value.unwrap_or(0) + opts.min_absolute_fee; + let target_eff = selector.effective_target(); + + let upper_bound_abs = target_abs + (opts.drain_weight as f32 * opts.target_feerate) as u64; + let upper_bound_eff = target_eff + opts.drain_waste(); + + let strategy = move |bnb: &Bnb<i64>| -> (BranchStrategy, Option<i64>) { + let selected_abs = bnb.selection.selected_absolute_value(); + let selected_eff = bnb.selection.selected_effective_value(); + + // backtrack if remaining value is not enough to reach target + if selected_abs + bnb.rem_abs < target_abs || selected_eff + bnb.rem_eff < target_eff { + return (BranchStrategy::SkipBoth, None); + } + + // backtrack if selected value already surpassed upper bounds + if selected_abs > upper_bound_abs && selected_eff > upper_bound_eff { + return (BranchStrategy::SkipBoth, None); + } + + let selected_waste = bnb.selection.selected_waste(); + + // when feerate decreases, waste without excess is guaranteed to increase with each + // selection. So if we have already surpassed best score, we can backtrack. + if feerate_decreases && selected_waste > bnb.best_score { + return (BranchStrategy::SkipBoth, None); + } + + // solution? + if selected_abs >= target_abs && selected_eff >= target_eff { + let waste = selected_waste + bnb.selection.current_excess(); + return (BranchStrategy::SkipBoth, Some(waste)); + } + + // early bailout optimization: + // If the candidate at the previous position is NOT selected and has the same weight and + // value as the current candidate, we can skip selecting the current candidate.
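+ // (Illustrative numbers, not from the source: with two candidates that are both 100_000 sats
+ // and the same weight, once the branch that omits the first has been explored, including the
+ // second instead cannot produce a different solution, so its inclusion branch is skipped.)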
+ if bnb.pool_pos > 0 && !bnb.selection.is_empty() { + let (_, candidate) = bnb.pool[bnb.pool_pos]; + let (prev_index, prev_candidate) = bnb.pool[bnb.pool_pos - 1]; + + if !bnb.selection.is_selected(prev_index) + && candidate.value == prev_candidate.value + && candidate.weight == prev_candidate.weight + { + return (BranchStrategy::SkipInclusion, None); + } + } + + // check out inclusion branch first + return (BranchStrategy::Continue, None); + }; + + // determine sum of absolute and effective values for current selection + let (selected_abs, selected_eff) = selector.selected().fold((0, 0), |(abs, eff), (_, c)| { + ( + abs + c.value, + eff + c.effective_value(selector.opts.target_feerate), + ) + }); + + let bnb = Bnb::new(selector, pool, i64::MAX); + + // not enough to select anyway + if selected_abs + bnb.rem_abs < target_abs || selected_eff + bnb.rem_eff < target_eff { + return None; + } + + match limit.into() { + BnbLimit::Rounds(rounds) => { + bnb.into_iter(&strategy) + .take(rounds) + .reduce(|b, c| if c.is_some() { c } else { b }) + } + #[cfg(feature = "std")] + BnbLimit::Duration(duration) => { + let start = std::time::SystemTime::now(); + bnb.into_iter(&strategy) + .take_while(|_| start.elapsed().expect("failed to get system time") <= duration) + .reduce(|b, c| if c.is_some() { c } else { b }) + } + }? +} + +#[cfg(all(test, feature = "miniscript"))] +mod test { + use bitcoin::secp256k1::Secp256k1; + + use crate::coin_select::{evaluate_cs::evaluate, ExcessStrategyKind}; + + use super::{ + coin_select_bnb, + evaluate_cs::{Evaluation, EvaluationError}, + tester::Tester, + CoinSelector, CoinSelectorOpt, Vec, WeightedValue, + }; + + fn tester() -> Tester { + const DESC_STR: &str = "tr(xprv9uBuvtdjghkz8D1qzsSXS9Vs64mqrUnXqzNccj2xcvnCHPpXKYE1U2Gbh9CDHk8UPyF2VuXpVkDA7fk5ZP4Hd9KnhUmTscKmhee9Dp5sBMK)"; + Tester::new(&Secp256k1::default(), DESC_STR) + } + + fn evaluate_bnb( + initial_selector: CoinSelector, + max_tries: usize, + ) -> Result { + evaluate(initial_selector, |cs| { + coin_select_bnb(max_tries, cs.clone()).map_or(false, |new_cs| { + *cs = new_cs; + true + }) + }) + } + + #[test] + fn not_enough_coins() { + let t = tester(); + let candidates: Vec = vec![ + t.gen_candidate(0, 100_000).into(), + t.gen_candidate(1, 100_000).into(), + ]; + let opts = t.gen_opts(200_000); + let selector = CoinSelector::new(&candidates, &opts); + assert!(!coin_select_bnb(10_000, selector).is_some()); + } + + #[test] + fn exactly_enough_coins_preselected() { + let t = tester(); + let candidates: Vec = vec![ + t.gen_candidate(0, 100_000).into(), // to preselect + t.gen_candidate(1, 100_000).into(), // to preselect + t.gen_candidate(2, 100_000).into(), + ]; + let opts = CoinSelectorOpt { + target_feerate: 0.0, + ..t.gen_opts(200_000) + }; + let selector = { + let mut selector = CoinSelector::new(&candidates, &opts); + selector.select(0); // preselect + selector.select(1); // preselect + selector + }; + + let evaluation = evaluate_bnb(selector, 10_000).expect("eval failed"); + println!("{}", evaluation); + assert_eq!(evaluation.solution.selected, (0..=1).collect()); + assert_eq!(evaluation.solution.excess_strategies.len(), 1); + assert_eq!( + evaluation.feerate_offset(ExcessStrategyKind::ToFee).floor(), + 0.0 + ); + } + + /// `cost_of_change` acts as the upper-bound in Bnb, we check whether these boundaries are + /// enforced in code + #[test] + fn cost_of_change() { + let t = tester(); + let candidates: Vec = vec![ + t.gen_candidate(0, 200_000).into(), + t.gen_candidate(1, 200_000).into(), + 
t.gen_candidate(2, 200_000).into(), + ]; + + // lowest and highest possible `recipient_value` opts for derived `drain_waste`, assuming + // that we want 2 candidates selected + let (lowest_opts, highest_opts) = { + let opts = t.gen_opts(0); + + let fee_from_inputs = + (candidates[0].weight as f32 * opts.target_feerate).ceil() as u64 * 2; + let fee_from_template = + ((opts.base_weight + 2) as f32 * opts.target_feerate).ceil() as u64; + + let lowest_opts = CoinSelectorOpt { + target_value: Some( + 400_000 - fee_from_inputs - fee_from_template - opts.drain_waste() as u64, + ), + ..opts + }; + + let highest_opts = CoinSelectorOpt { + target_value: Some(400_000 - fee_from_inputs - fee_from_template), + ..opts + }; + + (lowest_opts, highest_opts) + }; + + // test lowest possible target we are able to select + let lowest_eval = evaluate_bnb(CoinSelector::new(&candidates, &lowest_opts), 10_000); + assert!(lowest_eval.is_ok()); + let lowest_eval = lowest_eval.unwrap(); + println!("LB {}", lowest_eval); + assert_eq!(lowest_eval.solution.selected.len(), 2); + assert_eq!(lowest_eval.solution.excess_strategies.len(), 1); + assert_eq!( + lowest_eval + .feerate_offset(ExcessStrategyKind::ToFee) + .floor(), + 0.0 + ); + + // test highest possible target we are able to select + let highest_eval = evaluate_bnb(CoinSelector::new(&candidates, &highest_opts), 10_000); + assert!(highest_eval.is_ok()); + let highest_eval = highest_eval.unwrap(); + println!("UB {}", highest_eval); + assert_eq!(highest_eval.solution.selected.len(), 2); + assert_eq!(highest_eval.solution.excess_strategies.len(), 1); + assert_eq!( + highest_eval + .feerate_offset(ExcessStrategyKind::ToFee) + .floor(), + 0.0 + ); + + // test lower out of bounds + let loob_opts = CoinSelectorOpt { + target_value: lowest_opts.target_value.map(|v| v - 1), + ..lowest_opts + }; + let loob_eval = evaluate_bnb(CoinSelector::new(&candidates, &loob_opts), 10_000); + assert!(loob_eval.is_err()); + println!("Lower OOB: {}", loob_eval.unwrap_err()); + + // test upper out of bounds + let uoob_opts = CoinSelectorOpt { + target_value: highest_opts.target_value.map(|v| v + 1), + ..highest_opts + }; + let uoob_eval = evaluate_bnb(CoinSelector::new(&candidates, &uoob_opts), 10_000); + assert!(uoob_eval.is_err()); + println!("Upper OOB: {}", uoob_eval.unwrap_err()); + } + + #[test] + fn try_select() { + let t = tester(); + let candidates: Vec = vec![ + t.gen_candidate(0, 300_000).into(), + t.gen_candidate(1, 300_000).into(), + t.gen_candidate(2, 300_000).into(), + t.gen_candidate(3, 200_000).into(), + t.gen_candidate(4, 200_000).into(), + ]; + let make_opts = |v: u64| -> CoinSelectorOpt { + CoinSelectorOpt { + target_feerate: 0.0, + ..t.gen_opts(v) + } + }; + + let test_cases = vec![ + (make_opts(100_000), false, 0), + (make_opts(200_000), true, 1), + (make_opts(300_000), true, 1), + (make_opts(500_000), true, 2), + (make_opts(1_000_000), true, 4), + (make_opts(1_200_000), false, 0), + (make_opts(1_300_000), true, 5), + (make_opts(1_400_000), false, 0), + ]; + + for (opts, expect_solution, expect_selected) in test_cases { + let res = evaluate_bnb(CoinSelector::new(&candidates, &opts), 10_000); + assert_eq!(res.is_ok(), expect_solution); + + match res { + Ok(eval) => { + println!("{}", eval); + assert_eq!(eval.feerate_offset(ExcessStrategyKind::ToFee), 0.0); + assert_eq!(eval.solution.selected.len(), expect_selected as _); + } + Err(err) => println!("expected failure: {}", err), + } + } + } + + #[test] + fn early_bailout_optimization() { + let t = tester(); + + // 
target: 300_000 + // candidates: 2x of 125_000, 1000x of 100_000, 1x of 50_000 + // expected solution: 2x 125_000, 1x 50_000 + // set bnb max tries: 1100, should succeed + let candidates = { + let mut candidates: Vec = vec![ + t.gen_candidate(0, 125_000).into(), + t.gen_candidate(1, 125_000).into(), + t.gen_candidate(2, 50_000).into(), + ]; + (3..3 + 1000_u32) + .for_each(|index| candidates.push(t.gen_candidate(index, 100_000).into())); + candidates + }; + let opts = CoinSelectorOpt { + target_feerate: 0.0, + ..t.gen_opts(300_000) + }; + + let result = evaluate_bnb(CoinSelector::new(&candidates, &opts), 1100); + assert!(result.is_ok()); + + let eval = result.unwrap(); + println!("{}", eval); + assert_eq!(eval.solution.selected, (0..=2).collect()); + } + + #[test] + fn should_exhaust_iteration() { + static MAX_TRIES: usize = 1000; + let t = tester(); + let candidates = (0..MAX_TRIES + 1) + .map(|index| t.gen_candidate(index as _, 10_000).into()) + .collect::>(); + let opts = t.gen_opts(10_001 * MAX_TRIES as u64); + let result = evaluate_bnb(CoinSelector::new(&candidates, &opts), MAX_TRIES); + assert!(result.is_err()); + println!("error as expected: {}", result.unwrap_err()); + } + + /// Solution should have fee >= min_absolute_fee (or no solution at all) + #[test] + fn min_absolute_fee() { + let t = tester(); + let candidates = { + let mut candidates = Vec::new(); + t.gen_weighted_values(&mut candidates, 5, 10_000); + t.gen_weighted_values(&mut candidates, 5, 20_000); + t.gen_weighted_values(&mut candidates, 5, 30_000); + t.gen_weighted_values(&mut candidates, 10, 10_300); + t.gen_weighted_values(&mut candidates, 10, 10_500); + t.gen_weighted_values(&mut candidates, 10, 10_700); + t.gen_weighted_values(&mut candidates, 10, 10_900); + t.gen_weighted_values(&mut candidates, 10, 11_000); + t.gen_weighted_values(&mut candidates, 10, 12_000); + t.gen_weighted_values(&mut candidates, 10, 13_000); + candidates + }; + let mut opts = CoinSelectorOpt { + min_absolute_fee: 1, + ..t.gen_opts(100_000) + }; + + (1..=120_u64).for_each(|fee_factor| { + opts.min_absolute_fee = fee_factor * 31; + + let result = evaluate_bnb(CoinSelector::new(&candidates, &opts), 21_000); + match result { + Ok(result) => { + println!("Solution {}", result); + let fee = result.solution.excess_strategies[&ExcessStrategyKind::ToFee].fee; + assert!(fee >= opts.min_absolute_fee); + assert_eq!(result.solution.excess_strategies.len(), 1); + } + Err(err) => { + println!("No Solution: {}", err); + } + } + }); + } + + /// For a decreasing feerate (longterm feerate is lower than effective feerate), we should + /// select less. For increasing feerate (longterm feerate is higher than effective feerate), we + /// should select more. 
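+ ///
+ /// (Rationale sketch: per `CoinSelector::selected_waste`, input waste is
+ /// `weight * (target_feerate - long_term_feerate)`, which is positive for a decreasing feerate,
+ /// favouring fewer inputs, and negative for an increasing feerate, favouring more.)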
+ #[test] + fn feerate_difference() { + let t = tester(); + let candidates = { + let mut candidates = Vec::new(); + t.gen_weighted_values(&mut candidates, 10, 2_000); + t.gen_weighted_values(&mut candidates, 10, 5_000); + t.gen_weighted_values(&mut candidates, 10, 20_000); + candidates + }; + + let decreasing_feerate_opts = CoinSelectorOpt { + target_feerate: 1.25, + long_term_feerate: Some(0.25), + ..t.gen_opts(100_000) + }; + + let increasing_feerate_opts = CoinSelectorOpt { + target_feerate: 0.25, + long_term_feerate: Some(1.25), + ..t.gen_opts(100_000) + }; + + let decreasing_res = evaluate_bnb( + CoinSelector::new(&candidates, &decreasing_feerate_opts), + 21_000, + ) + .expect("no result"); + let decreasing_len = decreasing_res.solution.selected.len(); + + let increasing_res = evaluate_bnb( + CoinSelector::new(&candidates, &increasing_feerate_opts), + 21_000, + ) + .expect("no result"); + let increasing_len = increasing_res.solution.selected.len(); + + println!("decreasing_len: {}", decreasing_len); + println!("increasing_len: {}", increasing_len); + assert!(decreasing_len < increasing_len); + } + + /// TODO: UNIMPLEMENTED TESTS: + /// * Excess strategies: + /// * We should always have `ExcessStrategy::ToFee`. + /// * We should only have `ExcessStrategy::ToRecipient` when `max_extra_target > 0`. + /// * We should only have `ExcessStrategy::ToDrain` when `drain_value >= min_drain_value`. + /// * Fuzz + /// * Solution feerate should never be lower than target feerate + /// * Solution fee should never be lower than `min_absolute_fee` + /// * Preselected should always remain selected + fn _todo() {} +} diff --git a/nursery/coin_select/src/coin_selector.rs b/nursery/coin_select/src/coin_selector.rs new file mode 100644 index 00000000..b6bde0dc --- /dev/null +++ b/nursery/coin_select/src/coin_selector.rs @@ -0,0 +1,617 @@ +use super::*; + +/// A [`WeightedValue`] represents an input candidate for [`CoinSelector`]. This can either be a +/// single UTXO, or a group of UTXOs that should be spent together. +#[derive(Debug, Clone, Copy)] +pub struct WeightedValue { + /// Total value of the UTXO(s) that this [`WeightedValue`] represents. + pub value: u64, + /// Total weight of including this/these UTXO(s). + /// `txin` fields: `prevout`, `nSequence`, `scriptSigLen`, `scriptSig`, `scriptWitnessLen`, + /// `scriptWitness` should all be included. + pub weight: u32, + /// Total number of inputs; so we can calculate extra `varint` weight due to `vin` len changes. + pub input_count: usize, + /// Whether this [`WeightedValue`] contains at least one segwit spend. + pub is_segwit: bool, +} + +impl WeightedValue { + /// Create a new [`WeightedValue`] that represents a single input. + /// + /// `satisfaction_weight` is the weight of `scriptSigLen + scriptSig + scriptWitnessLen + + /// scriptWitness`. + pub fn new(value: u64, satisfaction_weight: u32, is_segwit: bool) -> WeightedValue { + let weight = TXIN_BASE_WEIGHT + satisfaction_weight; + WeightedValue { + value, + weight, + input_count: 1, + is_segwit, + } + } + + /// Effective value of this input candidate: `actual_value - input_weight * feerate (sats/wu)`. + pub fn effective_value(&self, effective_feerate: f32) -> i64 { + // We prefer undershooting the candidate's effective value (so we overestimate the fee of a + // candidate). If we overshoot the candidate's effective value, it may be possible to find a + // solution which does not meet the target feerate.
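+ // For illustration (numbers assumed, not from the source): a 68 wu candidate worth 10_000 sats
+ // at 0.25 sats/wu has an effective value of 10_000 - ceil(68 * 0.25) = 9_983 sats.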
+ self.value as i64 - (self.weight as f32 * effective_feerate).ceil() as i64 + } +} + +#[derive(Debug, Clone, Copy)] +pub struct CoinSelectorOpt { + /// The value we need to select. + /// If the value is `None` then the selection will be complete if it can pay for the drain + /// output and satisfy the other constraints (e.g. minimum fees). + pub target_value: Option<u64>, + /// Additional leeway for the target value. + pub max_extra_target: u64, // TODO: Maybe out of scope here? + + /// The feerate we should try to achieve in sats per weight unit. + pub target_feerate: f32, + /// The long-term feerate, used for waste calculations; defaults to `target_feerate` when `None`. + pub long_term_feerate: Option<f32>, // TODO: Maybe out of scope? (waste) + /// The minimum absolute fee, e.g. as needed for RBF. + pub min_absolute_fee: u64, + + /// The weight of the template transaction including fixed fields and outputs. + pub base_weight: u32, + /// Additional weight if we include the drain (change) output. + pub drain_weight: u32, + /// Weight of spending the drain (change) output in the future. + pub spend_drain_weight: u32, // TODO: Maybe out of scope? (waste) + + /// Minimum value allowed for a drain (change) output. + pub min_drain_value: u64, +} + +impl CoinSelectorOpt { + fn from_weights(base_weight: u32, drain_weight: u32, spend_drain_weight: u32) -> Self { + // 0.25 sats/wu == 1 sat/vb + let target_feerate = 0.25_f32; + + // set `min_drain_value` to dust limit + let min_drain_value = + 3 * ((drain_weight + spend_drain_weight) as f32 * target_feerate) as u64; + + Self { + target_value: None, + max_extra_target: 0, + target_feerate, + long_term_feerate: None, + min_absolute_fee: 0, + base_weight, + drain_weight, + spend_drain_weight, + min_drain_value, + } + } + + pub fn fund_outputs( + txouts: &[TxOut], + drain_output: &TxOut, + drain_satisfaction_weight: u32, + ) -> Self { + let mut tx = Transaction { + input: vec![], + version: 1, + lock_time: LockTime::ZERO.into(), + output: txouts.to_vec(), + }; + let base_weight = tx.weight(); + // this awkward calculation is necessary since TxOut doesn't have `.weight()` + let drain_weight = { + tx.output.push(drain_output.clone()); + tx.weight() - base_weight + }; + Self { + target_value: if txouts.is_empty() { + None + } else { + Some(txouts.iter().map(|txout| txout.value).sum()) + }, + ..Self::from_weights( + base_weight as u32, + drain_weight as u32, + TXIN_BASE_WEIGHT + drain_satisfaction_weight, + ) + } + } + + pub fn long_term_feerate(&self) -> f32 { + self.long_term_feerate.unwrap_or(self.target_feerate) + } + + pub fn drain_waste(&self) -> i64 { + (self.drain_weight as f32 * self.target_feerate + + self.spend_drain_weight as f32 * self.long_term_feerate()) as i64 + } +} + +/// [`CoinSelector`] is responsible for selecting and deselecting from a set of candidates.
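+///
+/// A rough usage sketch (the `candidates` and `opts` bindings here are assumed for illustration):
+///
+/// ```ignore
+/// let mut selector = CoinSelector::new(&candidates, &opts);
+/// selector.select(0); // select the candidate at index 0
+/// match selector.finish() {
+///     Ok(selection) => println!("selected indexes: {:?}", selection.selected),
+///     Err(err) => println!("cannot finish selection: {}", err),
+/// }
+/// ```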
+#[derive(Debug, Clone)] +pub struct CoinSelector<'a> { + pub opts: &'a CoinSelectorOpt, + pub candidates: &'a Vec, + selected: BTreeSet, +} + +impl<'a> CoinSelector<'a> { + pub fn candidate(&self, index: usize) -> &WeightedValue { + &self.candidates[index] + } + + pub fn new(candidates: &'a Vec, opts: &'a CoinSelectorOpt) -> Self { + Self { + candidates, + selected: Default::default(), + opts, + } + } + + pub fn select(&mut self, index: usize) -> bool { + assert!(index < self.candidates.len()); + self.selected.insert(index) + } + + pub fn deselect(&mut self, index: usize) -> bool { + self.selected.remove(&index) + } + + pub fn is_selected(&self, index: usize) -> bool { + self.selected.contains(&index) + } + + pub fn is_empty(&self) -> bool { + self.selected.is_empty() + } + + /// Weight sum of all selected inputs. + pub fn selected_weight(&self) -> u32 { + self.selected + .iter() + .map(|&index| self.candidates[index].weight) + .sum() + } + + /// Effective value sum of all selected inputs. + pub fn selected_effective_value(&self) -> i64 { + self.selected + .iter() + .map(|&index| self.candidates[index].effective_value(self.opts.target_feerate)) + .sum() + } + + /// Absolute value sum of all selected inputs. + pub fn selected_absolute_value(&self) -> u64 { + self.selected + .iter() + .map(|&index| self.candidates[index].value) + .sum() + } + + /// Waste sum of all selected inputs. + pub fn selected_waste(&self) -> i64 { + (self.selected_weight() as f32 * (self.opts.target_feerate - self.opts.long_term_feerate())) + as i64 + } + + /// Current weight of template tx + selected inputs. + pub fn current_weight(&self) -> u32 { + let witness_header_extra_weight = self + .selected() + .find(|(_, wv)| wv.is_segwit) + .map(|_| 2) + .unwrap_or(0); + let vin_count_varint_extra_weight = { + let input_count = self.selected().map(|(_, wv)| wv.input_count).sum::(); + (varint_size(input_count) - 1) * 4 + }; + self.opts.base_weight + + self.selected_weight() + + witness_header_extra_weight + + vin_count_varint_extra_weight + } + + /// Current excess. + pub fn current_excess(&self) -> i64 { + self.selected_effective_value() - self.effective_target() + } + + /// This is the effective target value. 
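+ ///
+ /// Roughly, this is the `target_value` plus the fee needed to cover the fixed parts of the
+ /// transaction (base weight, a possible segwit header, and varint growth of the input count)
+ /// at the target feerate.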
+ pub fn effective_target(&self) -> i64 { + let (has_segwit, max_input_count) = self + .candidates + .iter() + .fold((false, 0_usize), |(is_segwit, input_count), c| { + (is_segwit || c.is_segwit, input_count + c.input_count) + }); + + let effective_base_weight = self.opts.base_weight + + if has_segwit { 2_u32 } else { 0_u32 } + + (varint_size(max_input_count) - 1) * 4; + + self.opts.target_value.unwrap_or(0) as i64 + + (effective_base_weight as f32 * self.opts.target_feerate).ceil() as i64 + } + + pub fn selected_count(&self) -> usize { + self.selected.len() + } + + pub fn selected(&self) -> impl Iterator + '_ { + self.selected + .iter() + .map(move |&index| (index, &self.candidates[index])) + } + + pub fn unselected(&self) -> impl Iterator + '_ { + self.candidates + .iter() + .enumerate() + .filter(move |(index, _)| !self.selected.contains(index)) + } + + pub fn selected_indexes(&self) -> impl Iterator + '_ { + self.selected.iter().cloned() + } + + pub fn unselected_indexes(&self) -> impl Iterator + '_ { + (0..self.candidates.len()).filter(move |index| !self.selected.contains(index)) + } + + pub fn all_selected(&self) -> bool { + self.selected.len() == self.candidates.len() + } + + pub fn select_all(&mut self) { + self.selected = (0..self.candidates.len()).collect(); + } + + pub fn select_until_finished(&mut self) -> Result { + let mut selection = self.finish(); + + if selection.is_ok() { + return selection; + } + + let unselected = self.unselected_indexes().collect::>(); + + for index in unselected { + self.select(index); + selection = self.finish(); + + if selection.is_ok() { + break; + } + } + + selection + } + + pub fn finish(&self) -> Result { + let weight_without_drain = self.current_weight(); + let weight_with_drain = weight_without_drain + self.opts.drain_weight; + + let fee_without_drain = + (weight_without_drain as f32 * self.opts.target_feerate).ceil() as u64; + let fee_with_drain = (weight_with_drain as f32 * self.opts.target_feerate).ceil() as u64; + + let inputs_minus_outputs = { + let target_value = self.opts.target_value.unwrap_or(0); + let selected = self.selected_absolute_value(); + + // find the largest unsatisfied constraint (if any), and return error of that constraint + // "selected" should always be greater than or equal to these selected values + [ + ( + SelectionConstraint::TargetValue, + target_value.saturating_sub(selected), + ), + ( + SelectionConstraint::TargetFee, + (target_value + fee_without_drain).saturating_sub(selected), + ), + ( + SelectionConstraint::MinAbsoluteFee, + (target_value + self.opts.min_absolute_fee).saturating_sub(selected), + ), + ( + SelectionConstraint::MinDrainValue, + // when we have no target value (hence no recipient txouts), we need to ensure + // the selected amount can satisfy requirements for a drain output (so we at + // least have one txout) + if self.opts.target_value.is_none() { + (fee_with_drain + self.opts.min_drain_value).saturating_sub(selected) + } else { + 0 + }, + ), + ] + .iter() + .filter(|&(_, v)| v > &0) + .max_by_key(|&(_, v)| v) + .map_or(Ok(()), |(constraint, missing)| { + Err(SelectionError { + selected, + missing: *missing, + constraint: *constraint, + }) + })?; + + (selected - target_value) as u64 + }; + + let fee_without_drain = fee_without_drain.max(self.opts.min_absolute_fee); + let fee_with_drain = fee_with_drain.max(self.opts.min_absolute_fee); + + let excess_without_drain = inputs_minus_outputs - fee_without_drain; + let input_waste = self.selected_waste(); + + // begin preparing excess strategies for 
final selection + let mut excess_strategies = HashMap::new(); + + // only allow `ToFee` and `ToRecipient` excess strategies when we have a `target_value`, + // otherwise we will result in a result with no txouts, or attempt to add value to an output + // that does not exist + if self.opts.target_value.is_some() { + // no drain, excess to fee + excess_strategies.insert( + ExcessStrategyKind::ToFee, + ExcessStrategy { + recipient_value: self.opts.target_value, + drain_value: None, + fee: fee_without_drain + excess_without_drain, + weight: weight_without_drain, + waste: input_waste + excess_without_drain as i64, + }, + ); + + // no drain, excess to recipient + // if `excess == 0`, this result will be the same as the previous, so don't consider it + // if `max_extra_target == 0`, there is no leeway for this strategy + if excess_without_drain > 0 && self.opts.max_extra_target > 0 { + let extra_recipient_value = + core::cmp::min(self.opts.max_extra_target, excess_without_drain); + let extra_fee = excess_without_drain - extra_recipient_value; + excess_strategies.insert( + ExcessStrategyKind::ToRecipient, + ExcessStrategy { + recipient_value: self.opts.target_value.map(|v| v + extra_recipient_value), + drain_value: None, + fee: fee_without_drain + extra_fee, + weight: weight_without_drain, + waste: input_waste + extra_fee as i64, + }, + ); + } + } + + // with drain + if fee_with_drain >= self.opts.min_absolute_fee + && inputs_minus_outputs >= fee_with_drain + self.opts.min_drain_value + { + excess_strategies.insert( + ExcessStrategyKind::ToDrain, + ExcessStrategy { + recipient_value: self.opts.target_value, + drain_value: Some(inputs_minus_outputs.saturating_sub(fee_with_drain)), + fee: fee_with_drain, + weight: weight_with_drain, + waste: input_waste + self.opts.drain_waste(), + }, + ); + } + + debug_assert!( + !excess_strategies.is_empty(), + "should have at least one excess strategy" + ); + + Ok(Selection { + selected: self.selected.clone(), + excess: excess_without_drain, + excess_strategies, + }) + } +} + +#[derive(Clone, Debug)] +pub struct SelectionError { + selected: u64, + missing: u64, + constraint: SelectionConstraint, +} + +impl core::fmt::Display for SelectionError { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + match self { + SelectionError { + selected, + missing, + constraint, + } => write!( + f, + "insufficient coins selected; selected={}, missing={}, unsatisfied_constraint={:?}", + selected, missing, constraint + ), + } + } +} + +#[cfg(feature = "std")] +impl std::error::Error for SelectionError {} + +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub enum SelectionConstraint { + /// The target is not met + TargetValue, + /// The target fee (given the feerate) is not met + TargetFee, + /// Min absolute fee is not met + MinAbsoluteFee, + /// Min drain value is not met + MinDrainValue, +} + +impl core::fmt::Display for SelectionConstraint { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + match self { + SelectionConstraint::TargetValue => core::write!(f, "target_value"), + SelectionConstraint::TargetFee => core::write!(f, "target_fee"), + SelectionConstraint::MinAbsoluteFee => core::write!(f, "min_absolute_fee"), + SelectionConstraint::MinDrainValue => core::write!(f, "min_drain_value"), + } + } +} + +#[derive(Clone, Debug)] +pub struct Selection { + pub selected: BTreeSet, + pub excess: u64, + pub excess_strategies: HashMap, +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, core::hash::Hash)] +pub enum 
ExcessStrategyKind { + ToFee, + ToRecipient, + ToDrain, +} + +#[derive(Clone, Copy, Debug)] +pub struct ExcessStrategy { + pub recipient_value: Option, + pub drain_value: Option, + pub fee: u64, + pub weight: u32, + pub waste: i64, +} + +impl Selection { + pub fn apply_selection<'a, T>( + &'a self, + candidates: &'a [T], + ) -> impl Iterator + 'a { + self.selected.iter().map(move |i| &candidates[*i]) + } + + /// Returns the [`ExcessStrategy`] that results in the least waste. + pub fn best_strategy(&self) -> (&ExcessStrategyKind, &ExcessStrategy) { + self.excess_strategies + .iter() + .min_by_key(|&(_, a)| a.waste) + .expect("selection has no excess strategy") + } +} + +impl core::fmt::Display for ExcessStrategyKind { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + match self { + ExcessStrategyKind::ToFee => core::write!(f, "to_fee"), + ExcessStrategyKind::ToRecipient => core::write!(f, "to_recipient"), + ExcessStrategyKind::ToDrain => core::write!(f, "to_drain"), + } + } +} + +impl ExcessStrategy { + /// Returns feerate in sats/wu. + pub fn feerate(&self) -> f32 { + self.fee as f32 / self.weight as f32 + } +} + +#[cfg(test)] +mod test { + use crate::{ExcessStrategyKind, SelectionConstraint}; + + use super::{CoinSelector, CoinSelectorOpt, WeightedValue}; + + /// Ensure `target_value` is respected. Can't have no disrespect. + #[test] + fn target_value_respected() { + let target_value = 1000_u64; + + let candidates = (500..1500_u64) + .map(|value| WeightedValue { + value, + weight: 100, + input_count: 1, + is_segwit: false, + }) + .collect::>(); + + let opts = CoinSelectorOpt { + target_value: Some(target_value), + max_extra_target: 0, + target_feerate: 0.00, + long_term_feerate: None, + min_absolute_fee: 0, + base_weight: 10, + drain_weight: 10, + spend_drain_weight: 10, + min_drain_value: 10, + }; + + for (index, v) in candidates.iter().enumerate() { + let mut selector = CoinSelector::new(&candidates, &opts); + assert!(selector.select(index)); + + let res = selector.finish(); + if v.value < opts.target_value.unwrap_or(0) { + let err = res.expect_err("should have failed"); + assert_eq!(err.selected, v.value); + assert_eq!(err.missing, target_value - v.value); + assert_eq!(err.constraint, SelectionConstraint::MinAbsoluteFee); + } else { + let sel = res.expect("should have succeeded"); + assert_eq!(sel.excess, v.value - opts.target_value.unwrap_or(0)); + } + } + } + + #[test] + fn drain_all() { + let candidates = (0..100) + .map(|_| WeightedValue { + value: 666, + weight: 166, + input_count: 1, + is_segwit: false, + }) + .collect::>(); + + let opts = CoinSelectorOpt { + target_value: None, + max_extra_target: 0, + target_feerate: 0.25, + long_term_feerate: None, + min_absolute_fee: 0, + base_weight: 10, + drain_weight: 100, + spend_drain_weight: 66, + min_drain_value: 1000, + }; + + let selection = CoinSelector::new(&candidates, &opts) + .select_until_finished() + .expect("should succeed"); + + assert!(selection.selected.len() > 1); + assert_eq!(selection.excess_strategies.len(), 1); + + let (kind, strategy) = selection.best_strategy(); + assert_eq!(*kind, ExcessStrategyKind::ToDrain); + assert!(strategy.recipient_value.is_none()); + assert!(strategy.drain_value.is_some()); + } + + /// TODO: Tests to add: + /// * `finish` should ensure at least `target_value` is selected. + /// * actual feerate should be equal or higher than `target_feerate`. + /// * actual drain value should be equal or higher than `min_drain_value` (or else no drain). 
+ fn _todo() {} +} diff --git a/nursery/coin_select/src/lib.rs b/nursery/coin_select/src/lib.rs new file mode 100644 index 00000000..ff4d4539 --- /dev/null +++ b/nursery/coin_select/src/lib.rs @@ -0,0 +1,33 @@ +#![no_std] + +#[cfg(feature = "std")] +extern crate std; + +#[macro_use] +extern crate alloc; +extern crate bdk_chain; + +use alloc::vec::Vec; +use bdk_chain::{ + bitcoin, + collections::{BTreeSet, HashMap}, +}; +use bitcoin::{LockTime, Transaction, TxOut}; +use core::fmt::{Debug, Display}; + +mod coin_selector; +pub use coin_selector::*; + +mod bnb; +pub use bnb::*; + +/// Txin "base" fields include `outpoint` (32+4) and `nSequence` (4). This does not include +/// `scriptSigLen` or `scriptSig`. +pub const TXIN_BASE_WEIGHT: u32 = (32 + 4 + 4) * 4; + +/// Helper to calculate varint size. `v` is the value the varint represents. +// Shamelessly copied from +// https://github.com/rust-bitcoin/rust-miniscript/blob/d5615acda1a7fdc4041a11c1736af139b8c7ebe8/src/util.rs#L8 +pub(crate) fn varint_size(v: usize) -> u32 { + bitcoin::VarInt(v as u64).len() as u32 +} diff --git a/nursery/tmp_plan/Cargo.toml b/nursery/tmp_plan/Cargo.toml new file mode 100644 index 00000000..67eb57ef --- /dev/null +++ b/nursery/tmp_plan/Cargo.toml @@ -0,0 +1,13 @@ +[package] +name = "bdk_tmp_plan" +version = "0.1.0" +edition = "2021" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +bdk_chain = { version = "0.3", features = ["miniscript"], path = "../../crates/chain" } + +[features] +default = ["std"] +std = [] diff --git a/nursery/tmp_plan/README.md b/nursery/tmp_plan/README.md new file mode 100644 index 00000000..70cc100d --- /dev/null +++ b/nursery/tmp_plan/README.md @@ -0,0 +1,3 @@ +# Temporary planning module + +A temporary place to hold the planning module until https://github.com/rust-bitcoin/rust-miniscript/pull/481 is merged and released diff --git a/nursery/tmp_plan/bdk_tmp_plan/Cargo.toml b/nursery/tmp_plan/bdk_tmp_plan/Cargo.toml new file mode 100644 index 00000000..ecbfaad6 --- /dev/null +++ b/nursery/tmp_plan/bdk_tmp_plan/Cargo.toml @@ -0,0 +1,13 @@ +[package] +name = "bdk_tmp_plan" +version = "0.1.0" +edition = "2021" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +bdk_chain = { version = "0.3", features = ["miniscript"] } + +[features] +default = ["std"] +std = [] diff --git a/nursery/tmp_plan/bdk_tmp_plan/README.md b/nursery/tmp_plan/bdk_tmp_plan/README.md new file mode 100644 index 00000000..70cc100d --- /dev/null +++ b/nursery/tmp_plan/bdk_tmp_plan/README.md @@ -0,0 +1,3 @@ +# Temporary planning module + +A temporary place to hold the planning module until https://github.com/rust-bitcoin/rust-miniscript/pull/481 is merged and released diff --git a/nursery/tmp_plan/bdk_tmp_plan/src/lib.rs b/nursery/tmp_plan/bdk_tmp_plan/src/lib.rs new file mode 100644 index 00000000..a64d4492 --- /dev/null +++ b/nursery/tmp_plan/bdk_tmp_plan/src/lib.rs @@ -0,0 +1,436 @@ +#![allow(unused)] +#![allow(missing_docs)] +//! A spending plan, or *plan* for short, is a representation of a particular spending path on a +//! descriptor. This allows us to analyze a choice of spending path without producing any +//! signatures or other witness data for it. +//! +//! To make a plan you provide the descriptor with "assets" like which keys you are able to use, hash +//! pre-images you have access to, the current block height, etc. +//! +//! 
Once you've got a plan, it can tell you its expected satisfaction weight, which can be useful for +//! doing coin selection. Furthermore, it tells you which subset of those keys and hash pre-images you +//! will actually need, as well as what locktime or sequence number you need to set. +//! +//! Once you've obtained the signatures, hash pre-images, etc. required by the plan, it can create a +//! witness/script_sig for the input. +use bdk_chain::{bitcoin, collections::*, miniscript}; +use bitcoin::{ + blockdata::{locktime::LockTime, transaction::Sequence}, + hashes::{hash160, ripemd160, sha256}, + secp256k1::Secp256k1, + util::{ + address::WitnessVersion, + bip32::{DerivationPath, Fingerprint, KeySource}, + taproot::{LeafVersion, TapBranchHash, TapLeafHash}, + }, + EcdsaSig, SchnorrSig, Script, TxIn, Witness, +}; +use miniscript::{ + descriptor::{InnerXKey, Tr}, + hash256, DefiniteDescriptorKey, Descriptor, DescriptorPublicKey, ScriptContext, ToPublicKey, +}; + +pub(crate) fn varint_len(v: usize) -> usize { + bitcoin::VarInt(v as u64).len() as usize +} + +mod plan_impls; +mod requirements; +mod template; +pub use requirements::*; +pub use template::PlanKey; +use template::TemplateItem; + +#[derive(Clone, Debug)] +enum TrSpend { + KeySpend, + LeafSpend { + script: Script, + leaf_version: LeafVersion, + }, +} + +#[derive(Clone, Debug)] +enum Target { + Legacy, + Segwitv0 { + script_code: Script, + }, + Segwitv1 { + tr: Tr, + tr_plan: TrSpend, + }, +} + +impl Target {} + +#[derive(Clone, Debug)] +/// A plan represents a particular spending path for a descriptor. +/// +/// See the module level documentation for more info. +pub struct Plan { + template: Vec>, + target: Target, + set_locktime: Option, + set_sequence: Option, +} + +impl Default for Target { + fn default() -> Self { + Target::Legacy + } +} + +#[derive(Clone, Debug, Default)] +/// Signatures and hash pre-images that can be used to complete a plan. +pub struct SatisfactionMaterial { + /// Schnorr signatures under their keys + pub schnorr_sigs: BTreeMap, + /// ECDSA signatures under their keys + pub ecdsa_sigs: BTreeMap, + /// SHA256 pre-images under their images + pub sha256_preimages: BTreeMap>, + /// hash160 pre-images under their images + pub hash160_preimages: BTreeMap>, + /// hash256 pre-images under their images + pub hash256_preimages: BTreeMap>, + /// ripemd160 pre-images under their images + pub ripemd160_preimages: BTreeMap>, +} + +impl Plan +where + Ak: Clone, +{ + /// The expected satisfaction weight for the plan if it is completed. + pub fn expected_weight(&self) -> usize { + let script_sig_size = match self.target { + Target::Legacy => unimplemented!(), // self + // .template + // .iter() + // .map(|step| { + // let size = step.expected_size(); + // size + push_opcode_size(size) + // }) + // .sum() + Target::Segwitv0 { .. } | Target::Segwitv1 { .. } => 1, + }; + let witness_elem_sizes: Option> = match &self.target { + Target::Legacy => None, + Target::Segwitv0 { .. 
} => Some( + self.template + .iter() + .map(|step| step.expected_size()) + .collect(), + ), + Target::Segwitv1 { tr, tr_plan } => { + let mut witness_elems = self + .template + .iter() + .map(|step| step.expected_size()) + .collect::>(); + + if let TrSpend::LeafSpend { + script, + leaf_version, + } = tr_plan + { + let control_block = tr + .spend_info() + .control_block(&(script.clone(), *leaf_version)) + .expect("must exist"); + witness_elems.push(script.len()); + witness_elems.push(control_block.size()); + } + + Some(witness_elems) + } + }; + + let witness_size: usize = match witness_elem_sizes { + Some(elems) => { + varint_len(elems.len()) + + elems + .into_iter() + .map(|elem| varint_len(elem) + elem) + .sum::() + } + None => 0, + }; + + script_sig_size * 4 + witness_size + } + + pub fn requirements(&self) -> Requirements { + match self.try_complete(&SatisfactionMaterial::default()) { + PlanState::Complete { .. } => Requirements::default(), + PlanState::Incomplete(requirements) => requirements, + } + } + + pub fn try_complete(&self, auth_data: &SatisfactionMaterial) -> PlanState { + let unsatisfied_items = self + .template + .iter() + .filter(|step| match step { + TemplateItem::Sign(key) => { + !auth_data.schnorr_sigs.contains_key(&key.descriptor_key) + } + TemplateItem::Hash160(image) => !auth_data.hash160_preimages.contains_key(image), + TemplateItem::Hash256(image) => !auth_data.hash256_preimages.contains_key(image), + TemplateItem::Sha256(image) => !auth_data.sha256_preimages.contains_key(image), + TemplateItem::Ripemd160(image) => { + !auth_data.ripemd160_preimages.contains_key(image) + } + TemplateItem::Pk { .. } | TemplateItem::One | TemplateItem::Zero => false, + }) + .collect::>(); + + if unsatisfied_items.is_empty() { + let mut witness = self + .template + .iter() + .flat_map(|step| step.to_witness_stack(&auth_data)) + .collect::>(); + match &self.target { + Target::Segwitv0 { .. } => todo!(), + Target::Legacy => todo!(), + Target::Segwitv1 { + tr_plan: TrSpend::KeySpend, + .. + } => PlanState::Complete { + final_script_sig: None, + final_script_witness: Some(Witness::from_vec(witness)), + }, + Target::Segwitv1 { + tr, + tr_plan: + TrSpend::LeafSpend { + script, + leaf_version, + }, + } => { + let spend_info = tr.spend_info(); + let control_block = spend_info + .control_block(&(script.clone(), *leaf_version)) + .expect("must exist"); + witness.push(script.clone().into_bytes()); + witness.push(control_block.serialize()); + + PlanState::Complete { + final_script_sig: None, + final_script_witness: Some(Witness::from_vec(witness)), + } + } + } + } else { + let mut requirements = Requirements::default(); + + match &self.target { + Target::Legacy => { + todo!() + } + Target::Segwitv0 { .. } => { + todo!() + } + Target::Segwitv1 { tr, tr_plan } => { + let spend_info = tr.spend_info(); + match tr_plan { + TrSpend::KeySpend => match &self.template[..] { + [TemplateItem::Sign(ref plan_key)] => { + requirements.signatures = RequiredSignatures::TapKey { + merkle_root: spend_info.merkle_root(), + plan_key: plan_key.clone(), + }; + } + _ => unreachable!("tapkey spend will always have only one sign step"), + }, + TrSpend::LeafSpend { + script, + leaf_version, + } => { + let leaf_hash = TapLeafHash::from_script(&script, *leaf_version); + requirements.signatures = RequiredSignatures::TapScript { + leaf_hash, + plan_keys: vec![], + } + } + } + } + } + + let required_signatures = match requirements.signatures { + RequiredSignatures::Legacy { .. } => todo!(), + RequiredSignatures::Segwitv0 { .. 
} => todo!(), + RequiredSignatures::TapKey { .. } => return PlanState::Incomplete(requirements), + RequiredSignatures::TapScript { + plan_keys: ref mut keys, + .. + } => keys, + }; + + for step in unsatisfied_items { + match step { + TemplateItem::Sign(plan_key) => { + required_signatures.push(plan_key.clone()); + } + TemplateItem::Hash160(image) => { + requirements.hash160_images.insert(image.clone()); + } + TemplateItem::Hash256(image) => { + requirements.hash256_images.insert(image.clone()); + } + TemplateItem::Sha256(image) => { + requirements.sha256_images.insert(image.clone()); + } + TemplateItem::Ripemd160(image) => { + requirements.ripemd160_images.insert(image.clone()); + } + TemplateItem::Pk { .. } | TemplateItem::One | TemplateItem::Zero => { /* no requirements */ + } + } + } + + PlanState::Incomplete(requirements) + } + } + + /// Witness version for the plan + pub fn witness_version(&self) -> Option { + match self.target { + Target::Legacy => None, + Target::Segwitv0 { .. } => Some(WitnessVersion::V0), + Target::Segwitv1 { .. } => Some(WitnessVersion::V1), + } + } + + /// The minimum required locktime height or time on the transaction using the plan. + pub fn required_locktime(&self) -> Option { + self.set_locktime.clone() + } + + /// The minimum required sequence (height or time) on the input to satisfy the plan + pub fn required_sequence(&self) -> Option { + self.set_sequence.clone() + } + + /// The minimum transaction version required on the transaction using the plan. + pub fn min_version(&self) -> Option { + if let Some(_) = self.set_sequence { + Some(2) + } else { + Some(1) + } + } +} + +/// The returned value from [`Plan::try_complete`]. +pub enum PlanState { + /// The plan is complete + Complete { + /// The script sig that should be set on the input + final_script_sig: Option