Compare commits
128 Commits
v1.0.0-alp
...
master
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
785371e0a1 | ||
|
|
18626c66ac | ||
|
|
8bf8c7d080 | ||
|
|
a8efeaa0fb | ||
|
|
82141a8201 | ||
|
|
28d75304e1 | ||
|
|
17a9850cba | ||
|
|
53bea0d902 | ||
|
|
5478bb1ebb | ||
|
|
79262185d5 | ||
|
|
7c07b9de02 | ||
|
|
0c8ee1dfe2 | ||
|
|
64eb576348 | ||
|
|
8875c92ec1 | ||
|
|
2cf07d686b | ||
|
|
93f9b83e27 | ||
|
|
892b97d441 | ||
|
|
3aed4cf179 | ||
|
|
af4ee0fa4b | ||
|
|
22d02ed3d1 | ||
|
|
eb73f0659e | ||
|
|
6b43001951 | ||
|
|
d99b3ef4b4 | ||
|
|
1a62488abf | ||
|
|
e761adf481 | ||
|
|
d7f4ab71e2 | ||
|
|
1a39821b88 | ||
|
|
3f9ed95e2e | ||
|
|
8714e9d806 | ||
|
|
43f093d918 | ||
|
|
962305f415 | ||
|
|
db8fbd729d | ||
|
|
e7ec5a8733 | ||
|
|
139eec7da0 | ||
|
|
3bee563c81 | ||
|
|
e5cb7b2066 | ||
|
|
c3fc1dd123 | ||
|
|
a112b4d97c | ||
|
|
af75817d4b | ||
|
|
6204d2c766 | ||
|
|
496601b8b1 | ||
|
|
c4057297a9 | ||
|
|
b34790c6b6 | ||
|
|
2ce4bb4dfc | ||
|
|
36f58870cb | ||
|
|
bbc19c3536 | ||
|
|
22368ab7b0 | ||
|
|
d75d9f94ce | ||
|
|
8f5b172e59 | ||
|
|
46c6f18cc3 | ||
|
|
cf7aca84d1 | ||
|
|
5c7cc30978 | ||
|
|
438cd4682d | ||
|
|
275e069cf4 | ||
|
|
55a17293a4 | ||
|
|
f2a2dae84c | ||
|
|
324eeb3eb4 | ||
|
|
6dab68d35b | ||
|
|
e406675f43 | ||
|
|
4bddb0de62 | ||
|
|
996605f2bf | ||
|
|
45c0cae0a4 | ||
|
|
0543801787 | ||
|
|
e21affdbbb | ||
|
|
410ba173e4 | ||
|
|
a0bf45bef1 | ||
|
|
feb27df180 | ||
|
|
1eca568be5 | ||
|
|
bc420923c2 | ||
|
|
782eb56bd4 | ||
|
|
ec36c7ecca | ||
|
|
19328d4999 | ||
|
|
2e40b0118c | ||
|
|
36e82ec686 | ||
|
|
9e97ac0330 | ||
|
|
54b0c11cbe | ||
|
|
aa640ab277 | ||
|
|
1c593a34ee | ||
|
|
8dd174479f | ||
|
|
639d735ca0 | ||
|
|
5a02f40122 | ||
|
|
c77e12bae7 | ||
|
|
4d3846abf4 | ||
|
|
8779afdb0b | ||
|
|
69f2a695f7 | ||
|
|
5a584d0fd8 | ||
|
|
b8ba5a0206 | ||
|
|
101a09a97f | ||
|
|
bce070b1d6 | ||
|
|
4d2442c37f | ||
|
|
bc2a8be979 | ||
|
|
3b2ff0cc95 | ||
|
|
3b040a7ee6 | ||
|
|
11200810d0 | ||
|
|
2a4564097b | ||
|
|
473ef9714f | ||
|
|
25b914ba0a | ||
|
|
b4a847f801 | ||
|
|
c5a3b62d63 | ||
|
|
29c8a00b43 | ||
|
|
8bc3d35f6c | ||
|
|
412dee1f5b | ||
|
|
c2513e1090 | ||
|
|
9d954cf7d2 | ||
|
|
8eef350bd0 | ||
|
|
20341a3ca1 | ||
|
|
363d9f42e5 | ||
|
|
26586fa7fe | ||
|
|
2d2656acfa | ||
|
|
53fa35096f | ||
|
|
a03949adb0 | ||
|
|
50137b0425 | ||
|
|
4a8452f9b8 | ||
|
|
108061dddb | ||
|
|
a2d940132d | ||
|
|
2a055de555 | ||
|
|
096b8ef781 | ||
|
|
2eea0f4e90 | ||
|
|
475c5024ec | ||
|
|
b8aa76cd05 | ||
|
|
0958ff56b2 | ||
|
|
54942a902d | ||
|
|
d975a48e7c | ||
|
|
2f059a1588 | ||
|
|
af15ebba94 | ||
|
|
1b7c6df569 | ||
|
|
7607b49283 | ||
|
|
f6781652b7 |
21
.github/workflows/cont_integration.yml
vendored
21
.github/workflows/cont_integration.yml
vendored
@@ -34,6 +34,9 @@ jobs:
|
|||||||
cargo update -p time --precise "0.3.20"
|
cargo update -p time --precise "0.3.20"
|
||||||
cargo update -p home --precise "0.5.5"
|
cargo update -p home --precise "0.5.5"
|
||||||
cargo update -p proptest --precise "1.2.0"
|
cargo update -p proptest --precise "1.2.0"
|
||||||
|
cargo update -p url --precise "2.5.0"
|
||||||
|
cargo update -p cc --precise "1.0.105"
|
||||||
|
cargo update -p tokio --precise "1.38.1"
|
||||||
- name: Build
|
- name: Build
|
||||||
run: cargo build ${{ matrix.features }}
|
run: cargo build ${{ matrix.features }}
|
||||||
- name: Test
|
- name: Test
|
||||||
@@ -57,15 +60,15 @@ jobs:
|
|||||||
- name: Check bdk_chain
|
- name: Check bdk_chain
|
||||||
working-directory: ./crates/chain
|
working-directory: ./crates/chain
|
||||||
# TODO "--target thumbv6m-none-eabi" should work but currently does not
|
# TODO "--target thumbv6m-none-eabi" should work but currently does not
|
||||||
run: cargo check --no-default-features --features bitcoin/no-std,miniscript/no-std,hashbrown
|
run: cargo check --no-default-features --features miniscript/no-std,hashbrown
|
||||||
- name: Check bdk
|
- name: Check bdk wallet
|
||||||
working-directory: ./crates/bdk
|
working-directory: ./crates/wallet
|
||||||
# TODO "--target thumbv6m-none-eabi" should work but currently does not
|
# TODO "--target thumbv6m-none-eabi" should work but currently does not
|
||||||
run: cargo check --no-default-features --features bitcoin/no-std,miniscript/no-std,bdk_chain/hashbrown
|
run: cargo check --no-default-features --features miniscript/no-std,bdk_chain/hashbrown
|
||||||
- name: Check esplora
|
- name: Check esplora
|
||||||
working-directory: ./crates/esplora
|
working-directory: ./crates/esplora
|
||||||
# TODO "--target thumbv6m-none-eabi" should work but currently does not
|
# TODO "--target thumbv6m-none-eabi" should work but currently does not
|
||||||
run: cargo check --no-default-features --features bitcoin/no-std,miniscript/no-std,bdk_chain/hashbrown
|
run: cargo check --no-default-features --features miniscript/no-std,bdk_chain/hashbrown
|
||||||
|
|
||||||
check-wasm:
|
check-wasm:
|
||||||
name: Check WASM
|
name: Check WASM
|
||||||
@@ -89,12 +92,12 @@ jobs:
|
|||||||
target: "wasm32-unknown-unknown"
|
target: "wasm32-unknown-unknown"
|
||||||
- name: Rust Cache
|
- name: Rust Cache
|
||||||
uses: Swatinem/rust-cache@v2.2.1
|
uses: Swatinem/rust-cache@v2.2.1
|
||||||
- name: Check bdk
|
- name: Check bdk wallet
|
||||||
working-directory: ./crates/bdk
|
working-directory: ./crates/wallet
|
||||||
run: cargo check --target wasm32-unknown-unknown --no-default-features --features bitcoin/no-std,miniscript/no-std,bdk_chain/hashbrown,dev-getrandom-wasm
|
run: cargo check --target wasm32-unknown-unknown --no-default-features --features miniscript/no-std,bdk_chain/hashbrown
|
||||||
- name: Check esplora
|
- name: Check esplora
|
||||||
working-directory: ./crates/esplora
|
working-directory: ./crates/esplora
|
||||||
run: cargo check --target wasm32-unknown-unknown --no-default-features --features bitcoin/no-std,miniscript/no-std,bdk_chain/hashbrown,async
|
run: cargo check --target wasm32-unknown-unknown --no-default-features --features miniscript/no-std,bdk_chain/hashbrown,async
|
||||||
|
|
||||||
fmt:
|
fmt:
|
||||||
name: Rust fmt
|
name: Rust fmt
|
||||||
|
|||||||
4
.github/workflows/nightly_docs.yml
vendored
4
.github/workflows/nightly_docs.yml
vendored
@@ -10,15 +10,13 @@ jobs:
|
|||||||
- name: Checkout sources
|
- name: Checkout sources
|
||||||
uses: actions/checkout@v2
|
uses: actions/checkout@v2
|
||||||
- name: Set default toolchain
|
- name: Set default toolchain
|
||||||
run: rustup default nightly-2022-12-14
|
run: rustup default nightly-2024-05-12
|
||||||
- name: Set profile
|
- name: Set profile
|
||||||
run: rustup set profile minimal
|
run: rustup set profile minimal
|
||||||
- name: Update toolchain
|
- name: Update toolchain
|
||||||
run: rustup update
|
run: rustup update
|
||||||
- name: Rust Cache
|
- name: Rust Cache
|
||||||
uses: Swatinem/rust-cache@v2.2.1
|
uses: Swatinem/rust-cache@v2.2.1
|
||||||
- name: Pin dependencies for MSRV
|
|
||||||
run: cargo update -p home --precise "0.5.5"
|
|
||||||
- name: Build docs
|
- name: Build docs
|
||||||
run: cargo doc --no-deps
|
run: cargo doc --no-deps
|
||||||
env:
|
env:
|
||||||
|
|||||||
1
.gitignore
vendored
1
.gitignore
vendored
@@ -7,3 +7,4 @@ Cargo.lock
|
|||||||
|
|
||||||
# Example persisted files.
|
# Example persisted files.
|
||||||
*.db
|
*.db
|
||||||
|
*.sqlite*
|
||||||
|
|||||||
@@ -1,14 +1,13 @@
|
|||||||
[workspace]
|
[workspace]
|
||||||
resolver = "2"
|
resolver = "2"
|
||||||
members = [
|
members = [
|
||||||
"crates/bdk",
|
"crates/wallet",
|
||||||
"crates/chain",
|
"crates/chain",
|
||||||
"crates/file_store",
|
"crates/file_store",
|
||||||
"crates/electrum",
|
"crates/electrum",
|
||||||
"crates/esplora",
|
"crates/esplora",
|
||||||
"crates/bitcoind_rpc",
|
"crates/bitcoind_rpc",
|
||||||
"crates/hwi",
|
"crates/hwi",
|
||||||
"crates/persist",
|
|
||||||
"crates/testenv",
|
"crates/testenv",
|
||||||
"example-crates/example_cli",
|
"example-crates/example_cli",
|
||||||
"example-crates/example_electrum",
|
"example-crates/example_electrum",
|
||||||
|
|||||||
27
README.md
27
README.md
@@ -10,11 +10,11 @@
|
|||||||
</p>
|
</p>
|
||||||
|
|
||||||
<p>
|
<p>
|
||||||
<a href="https://crates.io/crates/bdk"><img alt="Crate Info" src="https://img.shields.io/crates/v/bdk.svg"/></a>
|
<a href="https://crates.io/crates/bdk_wallet"><img alt="Crate Info" src="https://img.shields.io/crates/v/bdk_wallet.svg"/></a>
|
||||||
<a href="https://github.com/bitcoindevkit/bdk/blob/master/LICENSE"><img alt="MIT or Apache-2.0 Licensed" src="https://img.shields.io/badge/license-MIT%2FApache--2.0-blue.svg"/></a>
|
<a href="https://github.com/bitcoindevkit/bdk/blob/master/LICENSE"><img alt="MIT or Apache-2.0 Licensed" src="https://img.shields.io/badge/license-MIT%2FApache--2.0-blue.svg"/></a>
|
||||||
<a href="https://github.com/bitcoindevkit/bdk/actions?query=workflow%3ACI"><img alt="CI Status" src="https://github.com/bitcoindevkit/bdk/workflows/CI/badge.svg"></a>
|
<a href="https://github.com/bitcoindevkit/bdk/actions?query=workflow%3ACI"><img alt="CI Status" src="https://github.com/bitcoindevkit/bdk/workflows/CI/badge.svg"></a>
|
||||||
<a href="https://coveralls.io/github/bitcoindevkit/bdk?branch=master"><img src="https://coveralls.io/repos/github/bitcoindevkit/bdk/badge.svg?branch=master"/></a>
|
<a href="https://coveralls.io/github/bitcoindevkit/bdk?branch=master"><img src="https://coveralls.io/repos/github/bitcoindevkit/bdk/badge.svg?branch=master"/></a>
|
||||||
<a href="https://docs.rs/bdk"><img alt="API Docs" src="https://img.shields.io/badge/docs.rs-bdk-green"/></a>
|
<a href="https://docs.rs/bdk_wallet"><img alt="Wallet API Docs" src="https://img.shields.io/badge/docs.rs-bdk_wallet-green"/></a>
|
||||||
<a href="https://blog.rust-lang.org/2022/08/11/Rust-1.63.0.html"><img alt="Rustc Version 1.63.0+" src="https://img.shields.io/badge/rustc-1.63.0%2B-lightgrey.svg"/></a>
|
<a href="https://blog.rust-lang.org/2022/08/11/Rust-1.63.0.html"><img alt="Rustc Version 1.63.0+" src="https://img.shields.io/badge/rustc-1.63.0%2B-lightgrey.svg"/></a>
|
||||||
<a href="https://discord.gg/d7NkDKm"><img alt="Chat on Discord" src="https://img.shields.io/discord/753336465005608961?logo=discord"></a>
|
<a href="https://discord.gg/d7NkDKm"><img alt="Chat on Discord" src="https://img.shields.io/discord/753336465005608961?logo=discord"></a>
|
||||||
</p>
|
</p>
|
||||||
@@ -22,7 +22,7 @@
|
|||||||
<h4>
|
<h4>
|
||||||
<a href="https://bitcoindevkit.org">Project Homepage</a>
|
<a href="https://bitcoindevkit.org">Project Homepage</a>
|
||||||
<span> | </span>
|
<span> | </span>
|
||||||
<a href="https://docs.rs/bdk">Documentation</a>
|
<a href="https://docs.rs/bdk_wallet">Documentation</a>
|
||||||
</h4>
|
</h4>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
@@ -39,7 +39,7 @@ It is built upon the excellent [`rust-bitcoin`] and [`rust-miniscript`] crates.
|
|||||||
|
|
||||||
The project is split up into several crates in the `/crates` directory:
|
The project is split up into several crates in the `/crates` directory:
|
||||||
|
|
||||||
- [`bdk`](./crates/bdk): Contains the central high level `Wallet` type that is built from the low-level mechanisms provided by the other components
|
- [`wallet`](./crates/wallet): Contains the central high level `Wallet` type that is built from the low-level mechanisms provided by the other components
|
||||||
- [`chain`](./crates/chain): Tools for storing and indexing chain data
|
- [`chain`](./crates/chain): Tools for storing and indexing chain data
|
||||||
- [`persist`](./crates/persist): Types that define data persistence of a BDK wallet
|
- [`persist`](./crates/persist): Types that define data persistence of a BDK wallet
|
||||||
- [`file_store`](./crates/file_store): A (experimental) persistence backend for storing chain data in a single file.
|
- [`file_store`](./crates/file_store): A (experimental) persistence backend for storing chain data in a single file.
|
||||||
@@ -47,10 +47,10 @@ The project is split up into several crates in the `/crates` directory:
|
|||||||
- [`electrum`](./crates/electrum): Extends the [`electrum-client`] crate with methods to fetch chain data from an electrum server in the form that [`bdk_chain`] and `Wallet` can consume.
|
- [`electrum`](./crates/electrum): Extends the [`electrum-client`] crate with methods to fetch chain data from an electrum server in the form that [`bdk_chain`] and `Wallet` can consume.
|
||||||
|
|
||||||
Fully working examples of how to use these components are in `/example-crates`:
|
Fully working examples of how to use these components are in `/example-crates`:
|
||||||
- [`example_cli`](./example-crates/example_cli): Library used by the `example_*` crates. Provides utilities for syncing, showing the balance, generating addresses and creating transactions without using the bdk `Wallet`.
|
- [`example_cli`](./example-crates/example_cli): Library used by the `example_*` crates. Provides utilities for syncing, showing the balance, generating addresses and creating transactions without using the bdk_wallet `Wallet`.
|
||||||
- [`example_electrum`](./example-crates/example_electrum): A command line Bitcoin wallet application built on top of `example_cli` and the `electrum` crate. It shows the power of the bdk tools (`chain` + `file_store` + `electrum`), without depending on the main `bdk` library.
|
- [`example_electrum`](./example-crates/example_electrum): A command line Bitcoin wallet application built on top of `example_cli` and the `electrum` crate. It shows the power of the bdk tools (`chain` + `file_store` + `electrum`), without depending on the main `bdk_wallet` library.
|
||||||
- [`example_esplora`](./example-crates/example_esplora): A command line Bitcoin wallet application built on top of `example_cli` and the `esplora` crate. It shows the power of the bdk tools (`chain` + `file_store` + `esplora`), without depending on the main `bdk` library.
|
- [`example_esplora`](./example-crates/example_esplora): A command line Bitcoin wallet application built on top of `example_cli` and the `esplora` crate. It shows the power of the bdk tools (`chain` + `file_store` + `esplora`), without depending on the main `bdk_wallet` library.
|
||||||
- [`example_bitcoind_rpc_polling`](./example-crates/example_bitcoind_rpc_polling): A command line Bitcoin wallet application built on top of `example_cli` and the `bitcoind_rpc` crate. It shows the power of the bdk tools (`chain` + `file_store` + `bitcoind_rpc`), without depending on the main `bdk` library.
|
- [`example_bitcoind_rpc_polling`](./example-crates/example_bitcoind_rpc_polling): A command line Bitcoin wallet application built on top of `example_cli` and the `bitcoind_rpc` crate. It shows the power of the bdk tools (`chain` + `file_store` + `bitcoind_rpc`), without depending on the main `bdk_wallet` library.
|
||||||
- [`wallet_esplora_blocking`](./example-crates/wallet_esplora_blocking): Uses the `Wallet` to sync and spend using the Esplora blocking interface.
|
- [`wallet_esplora_blocking`](./example-crates/wallet_esplora_blocking): Uses the `Wallet` to sync and spend using the Esplora blocking interface.
|
||||||
- [`wallet_esplora_async`](./example-crates/wallet_esplora_async): Uses the `Wallet` to sync and spend using the Esplora asynchronous interface.
|
- [`wallet_esplora_async`](./example-crates/wallet_esplora_async): Uses the `Wallet` to sync and spend using the Esplora asynchronous interface.
|
||||||
- [`wallet_electrum`](./example-crates/wallet_electrum): Uses the `Wallet` to sync and spend using Electrum.
|
- [`wallet_electrum`](./example-crates/wallet_electrum): Uses the `Wallet` to sync and spend using Electrum.
|
||||||
@@ -68,16 +68,13 @@ This library should compile with any combination of features with Rust 1.63.0.
|
|||||||
To build with the MSRV you will need to pin dependencies as follows:
|
To build with the MSRV you will need to pin dependencies as follows:
|
||||||
|
|
||||||
```shell
|
```shell
|
||||||
# zip 0.6.3 has MSRV 1.64.0
|
cargo update -p zstd-sys --precise "2.0.8+zstd.1.5.5"
|
||||||
cargo update -p zip --precise "0.6.2"
|
|
||||||
# time 0.3.21 has MSRV 1.65.0
|
|
||||||
cargo update -p time --precise "0.3.20"
|
cargo update -p time --precise "0.3.20"
|
||||||
# jobserver 0.1.27 has MSRV 1.66.0
|
|
||||||
cargo update -p jobserver --precise "0.1.26"
|
|
||||||
# home 0.5.9 has MSRV 1.70.0
|
|
||||||
cargo update -p home --precise "0.5.5"
|
cargo update -p home --precise "0.5.5"
|
||||||
# proptest 1.4.0 has MSRV 1.65.0
|
|
||||||
cargo update -p proptest --precise "1.2.0"
|
cargo update -p proptest --precise "1.2.0"
|
||||||
|
cargo update -p url --precise "2.5.0"
|
||||||
|
cargo update -p cc --precise "1.0.105"
|
||||||
|
cargo update -p tokio --precise "1.38.1"
|
||||||
```
|
```
|
||||||
|
|
||||||
## License
|
## License
|
||||||
|
|||||||
@@ -1 +1 @@
|
|||||||
msrv="1.63.0"
|
msrv="1.63.0"
|
||||||
@@ -1,65 +0,0 @@
|
|||||||
// Bitcoin Dev Kit
|
|
||||||
// Written in 2020 by Alekos Filini <alekos.filini@gmail.com>
|
|
||||||
//
|
|
||||||
// Copyright (c) 2020-2021 Bitcoin Dev Kit Developers
|
|
||||||
//
|
|
||||||
// This file is licensed under the Apache License, Version 2.0 <LICENSE-APACHE
|
|
||||||
// or http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
|
||||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your option.
|
|
||||||
// You may not use this file except in accordance with one or both of these
|
|
||||||
// licenses.
|
|
||||||
|
|
||||||
extern crate bdk;
|
|
||||||
extern crate bitcoin;
|
|
||||||
extern crate miniscript;
|
|
||||||
extern crate serde_json;
|
|
||||||
|
|
||||||
use std::error::Error;
|
|
||||||
use std::str::FromStr;
|
|
||||||
|
|
||||||
use bitcoin::Network;
|
|
||||||
use miniscript::policy::Concrete;
|
|
||||||
use miniscript::Descriptor;
|
|
||||||
|
|
||||||
use bdk::{KeychainKind, Wallet};
|
|
||||||
|
|
||||||
/// Miniscript policy is a high level abstraction of spending conditions. Defined in the
|
|
||||||
/// rust-miniscript library here https://docs.rs/miniscript/7.0.0/miniscript/policy/index.html
|
|
||||||
/// rust-miniscript provides a `compile()` function that can be used to compile any miniscript policy
|
|
||||||
/// into a descriptor. This descriptor then in turn can be used in bdk a fully functioning wallet
|
|
||||||
/// can be derived from the policy.
|
|
||||||
///
|
|
||||||
/// This example demonstrates the interaction between a bdk wallet and miniscript policy.
|
|
||||||
|
|
||||||
fn main() -> Result<(), Box<dyn Error>> {
|
|
||||||
// We start with a generic miniscript policy string
|
|
||||||
let policy_str = "or(10@thresh(4,pk(029ffbe722b147f3035c87cb1c60b9a5947dd49c774cc31e94773478711a929ac0),pk(025f05815e3a1a8a83bfbb03ce016c9a2ee31066b98f567f6227df1d76ec4bd143),pk(025625f41e4a065efc06d5019cbbd56fe8c07595af1231e7cbc03fafb87ebb71ec),pk(02a27c8b850a00f67da3499b60562673dcf5fdfb82b7e17652a7ac54416812aefd),pk(03e618ec5f384d6e19ca9ebdb8e2119e5bef978285076828ce054e55c4daf473e2)),1@and(older(4209713),thresh(2,pk(03deae92101c790b12653231439f27b8897264125ecb2f46f48278603102573165),pk(033841045a531e1adf9910a6ec279589a90b3b8a904ee64ffd692bd08a8996c1aa),pk(02aebf2d10b040eb936a6f02f44ee82f8b34f5c1ccb20ff3949c2b28206b7c1068))))";
|
|
||||||
println!("Compiling policy: \n{}", policy_str);
|
|
||||||
|
|
||||||
// Parse the string as a [`Concrete`] type miniscript policy.
|
|
||||||
let policy = Concrete::<String>::from_str(policy_str)?;
|
|
||||||
|
|
||||||
// Create a `wsh` type descriptor from the policy.
|
|
||||||
// `policy.compile()` returns the resulting miniscript from the policy.
|
|
||||||
let descriptor = Descriptor::new_wsh(policy.compile()?)?;
|
|
||||||
|
|
||||||
println!("Compiled into following Descriptor: \n{}", descriptor);
|
|
||||||
|
|
||||||
// Create a new wallet from this descriptor
|
|
||||||
let mut wallet = Wallet::new_no_persist(&format!("{}", descriptor), None, Network::Regtest)?;
|
|
||||||
|
|
||||||
println!(
|
|
||||||
"First derived address from the descriptor: \n{}",
|
|
||||||
wallet.next_unused_address(KeychainKind::External)?,
|
|
||||||
);
|
|
||||||
|
|
||||||
// BDK also has it's own `Policy` structure to represent the spending condition in a more
|
|
||||||
// human readable json format.
|
|
||||||
let spending_policy = wallet.policies(KeychainKind::External)?;
|
|
||||||
println!(
|
|
||||||
"The BDK spending policy: \n{}",
|
|
||||||
serde_json::to_string_pretty(&spending_policy)?
|
|
||||||
);
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
@@ -1,6 +1,6 @@
|
|||||||
[package]
|
[package]
|
||||||
name = "bdk_bitcoind_rpc"
|
name = "bdk_bitcoind_rpc"
|
||||||
version = "0.10.0"
|
version = "0.13.0"
|
||||||
edition = "2021"
|
edition = "2021"
|
||||||
rust-version = "1.63"
|
rust-version = "1.63"
|
||||||
homepage = "https://bitcoindevkit.org"
|
homepage = "https://bitcoindevkit.org"
|
||||||
@@ -13,13 +13,12 @@ readme = "README.md"
|
|||||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
# For no-std, remember to enable the bitcoin/no-std feature
|
bitcoin = { version = "0.32.0", default-features = false }
|
||||||
bitcoin = { version = "0.31", default-features = false }
|
bitcoincore-rpc = { version = "0.19.0" }
|
||||||
bitcoincore-rpc = { version = "0.18" }
|
bdk_chain = { path = "../chain", version = "0.17", default-features = false }
|
||||||
bdk_chain = { path = "../chain", version = "0.14", default-features = false }
|
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
bdk_testenv = { path = "../testenv", default_features = false }
|
bdk_testenv = { path = "../testenv", default-features = false }
|
||||||
|
|
||||||
[features]
|
[features]
|
||||||
default = ["std"]
|
default = ["std"]
|
||||||
|
|||||||
@@ -3,9 +3,9 @@ use std::collections::{BTreeMap, BTreeSet};
|
|||||||
use bdk_bitcoind_rpc::Emitter;
|
use bdk_bitcoind_rpc::Emitter;
|
||||||
use bdk_chain::{
|
use bdk_chain::{
|
||||||
bitcoin::{Address, Amount, Txid},
|
bitcoin::{Address, Amount, Txid},
|
||||||
keychain::Balance,
|
|
||||||
local_chain::{CheckPoint, LocalChain},
|
local_chain::{CheckPoint, LocalChain},
|
||||||
Append, BlockId, IndexedTxGraph, SpkTxOutIndex,
|
spk_txout::SpkTxOutIndex,
|
||||||
|
Balance, BlockId, IndexedTxGraph, Merge,
|
||||||
};
|
};
|
||||||
use bdk_testenv::{anyhow, TestEnv};
|
use bdk_testenv::{anyhow, TestEnv};
|
||||||
use bitcoin::{hashes::Hash, Block, OutPoint, ScriptBuf, WScriptHash};
|
use bitcoin::{hashes::Hash, Block, OutPoint, ScriptBuf, WScriptHash};
|
||||||
@@ -48,7 +48,7 @@ pub fn test_sync_local_chain() -> anyhow::Result<()> {
|
|||||||
|
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
local_chain.apply_update(emission.checkpoint,)?,
|
local_chain.apply_update(emission.checkpoint,)?,
|
||||||
BTreeMap::from([(height, Some(hash))]),
|
[(height, Some(hash))].into(),
|
||||||
"chain update changeset is unexpected",
|
"chain update changeset is unexpected",
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
@@ -94,11 +94,13 @@ pub fn test_sync_local_chain() -> anyhow::Result<()> {
|
|||||||
assert_eq!(
|
assert_eq!(
|
||||||
local_chain.apply_update(emission.checkpoint,)?,
|
local_chain.apply_update(emission.checkpoint,)?,
|
||||||
if exp_height == exp_hashes.len() - reorged_blocks.len() {
|
if exp_height == exp_hashes.len() - reorged_blocks.len() {
|
||||||
core::iter::once((height, Some(hash)))
|
bdk_chain::local_chain::ChangeSet {
|
||||||
.chain((height + 1..exp_hashes.len() as u32).map(|h| (h, None)))
|
blocks: core::iter::once((height, Some(hash)))
|
||||||
.collect::<bdk_chain::local_chain::ChangeSet>()
|
.chain((height + 1..exp_hashes.len() as u32).map(|h| (h, None)))
|
||||||
|
.collect(),
|
||||||
|
}
|
||||||
} else {
|
} else {
|
||||||
BTreeMap::from([(height, Some(hash))])
|
[(height, Some(hash))].into()
|
||||||
},
|
},
|
||||||
"chain update changeset is unexpected",
|
"chain update changeset is unexpected",
|
||||||
);
|
);
|
||||||
@@ -194,15 +196,15 @@ fn test_into_tx_graph() -> anyhow::Result<()> {
|
|||||||
let indexed_additions = indexed_tx_graph.batch_insert_unconfirmed(mempool_txs);
|
let indexed_additions = indexed_tx_graph.batch_insert_unconfirmed(mempool_txs);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
indexed_additions
|
indexed_additions
|
||||||
.graph
|
.tx_graph
|
||||||
.txs
|
.txs
|
||||||
.iter()
|
.iter()
|
||||||
.map(|tx| tx.txid())
|
.map(|tx| tx.compute_txid())
|
||||||
.collect::<BTreeSet<Txid>>(),
|
.collect::<BTreeSet<Txid>>(),
|
||||||
exp_txids,
|
exp_txids,
|
||||||
"changeset should have the 3 mempool transactions",
|
"changeset should have the 3 mempool transactions",
|
||||||
);
|
);
|
||||||
assert!(indexed_additions.graph.anchors.is_empty());
|
assert!(indexed_additions.tx_graph.anchors.is_empty());
|
||||||
}
|
}
|
||||||
|
|
||||||
// mine a block that confirms the 3 txs
|
// mine a block that confirms the 3 txs
|
||||||
@@ -225,9 +227,9 @@ fn test_into_tx_graph() -> anyhow::Result<()> {
|
|||||||
let height = emission.block_height();
|
let height = emission.block_height();
|
||||||
let _ = chain.apply_update(emission.checkpoint)?;
|
let _ = chain.apply_update(emission.checkpoint)?;
|
||||||
let indexed_additions = indexed_tx_graph.apply_block_relevant(&emission.block, height);
|
let indexed_additions = indexed_tx_graph.apply_block_relevant(&emission.block, height);
|
||||||
assert!(indexed_additions.graph.txs.is_empty());
|
assert!(indexed_additions.tx_graph.txs.is_empty());
|
||||||
assert!(indexed_additions.graph.txouts.is_empty());
|
assert!(indexed_additions.tx_graph.txouts.is_empty());
|
||||||
assert_eq!(indexed_additions.graph.anchors, exp_anchors);
|
assert_eq!(indexed_additions.tx_graph.anchors, exp_anchors);
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
@@ -392,7 +394,6 @@ fn tx_can_become_unconfirmed_after_reorg() -> anyhow::Result<()> {
|
|||||||
get_balance(&recv_chain, &recv_graph)?,
|
get_balance(&recv_chain, &recv_graph)?,
|
||||||
Balance {
|
Balance {
|
||||||
confirmed: SEND_AMOUNT * (ADDITIONAL_COUNT - reorg_count) as u64,
|
confirmed: SEND_AMOUNT * (ADDITIONAL_COUNT - reorg_count) as u64,
|
||||||
trusted_pending: SEND_AMOUNT * reorg_count as u64,
|
|
||||||
..Balance::default()
|
..Balance::default()
|
||||||
},
|
},
|
||||||
"reorg_count: {}",
|
"reorg_count: {}",
|
||||||
@@ -440,7 +441,7 @@ fn mempool_avoids_re_emission() -> anyhow::Result<()> {
|
|||||||
let emitted_txids = emitter
|
let emitted_txids = emitter
|
||||||
.mempool()?
|
.mempool()?
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.map(|(tx, _)| tx.txid())
|
.map(|(tx, _)| tx.compute_txid())
|
||||||
.collect::<BTreeSet<Txid>>();
|
.collect::<BTreeSet<Txid>>();
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
emitted_txids, exp_txids,
|
emitted_txids, exp_txids,
|
||||||
@@ -509,7 +510,7 @@ fn mempool_re_emits_if_tx_introduction_height_not_reached() -> anyhow::Result<()
|
|||||||
emitter
|
emitter
|
||||||
.mempool()?
|
.mempool()?
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.map(|(tx, _)| tx.txid())
|
.map(|(tx, _)| tx.compute_txid())
|
||||||
.collect::<BTreeSet<_>>(),
|
.collect::<BTreeSet<_>>(),
|
||||||
tx_introductions.iter().map(|&(_, txid)| txid).collect(),
|
tx_introductions.iter().map(|&(_, txid)| txid).collect(),
|
||||||
"first mempool emission should include all txs",
|
"first mempool emission should include all txs",
|
||||||
@@ -518,7 +519,7 @@ fn mempool_re_emits_if_tx_introduction_height_not_reached() -> anyhow::Result<()
|
|||||||
emitter
|
emitter
|
||||||
.mempool()?
|
.mempool()?
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.map(|(tx, _)| tx.txid())
|
.map(|(tx, _)| tx.compute_txid())
|
||||||
.collect::<BTreeSet<_>>(),
|
.collect::<BTreeSet<_>>(),
|
||||||
tx_introductions.iter().map(|&(_, txid)| txid).collect(),
|
tx_introductions.iter().map(|&(_, txid)| txid).collect(),
|
||||||
"second mempool emission should still include all txs",
|
"second mempool emission should still include all txs",
|
||||||
@@ -538,7 +539,7 @@ fn mempool_re_emits_if_tx_introduction_height_not_reached() -> anyhow::Result<()
|
|||||||
let emitted_txids = emitter
|
let emitted_txids = emitter
|
||||||
.mempool()?
|
.mempool()?
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.map(|(tx, _)| tx.txid())
|
.map(|(tx, _)| tx.compute_txid())
|
||||||
.collect::<BTreeSet<_>>();
|
.collect::<BTreeSet<_>>();
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
emitted_txids, exp_txids,
|
emitted_txids, exp_txids,
|
||||||
@@ -596,7 +597,7 @@ fn mempool_during_reorg() -> anyhow::Result<()> {
|
|||||||
emitter
|
emitter
|
||||||
.mempool()?
|
.mempool()?
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.map(|(tx, _)| tx.txid())
|
.map(|(tx, _)| tx.compute_txid())
|
||||||
.collect::<BTreeSet<_>>(),
|
.collect::<BTreeSet<_>>(),
|
||||||
env.rpc_client()
|
env.rpc_client()
|
||||||
.get_raw_mempool()?
|
.get_raw_mempool()?
|
||||||
@@ -633,7 +634,7 @@ fn mempool_during_reorg() -> anyhow::Result<()> {
|
|||||||
let mempool = emitter
|
let mempool = emitter
|
||||||
.mempool()?
|
.mempool()?
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.map(|(tx, _)| tx.txid())
|
.map(|(tx, _)| tx.compute_txid())
|
||||||
.collect::<BTreeSet<_>>();
|
.collect::<BTreeSet<_>>();
|
||||||
let exp_mempool = tx_introductions
|
let exp_mempool = tx_introductions
|
||||||
.iter()
|
.iter()
|
||||||
@@ -648,7 +649,7 @@ fn mempool_during_reorg() -> anyhow::Result<()> {
|
|||||||
let mempool = emitter
|
let mempool = emitter
|
||||||
.mempool()?
|
.mempool()?
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.map(|(tx, _)| tx.txid())
|
.map(|(tx, _)| tx.compute_txid())
|
||||||
.collect::<BTreeSet<_>>();
|
.collect::<BTreeSet<_>>();
|
||||||
let exp_mempool = tx_introductions
|
let exp_mempool = tx_introductions
|
||||||
.iter()
|
.iter()
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
[package]
|
[package]
|
||||||
name = "bdk_chain"
|
name = "bdk_chain"
|
||||||
version = "0.14.0"
|
version = "0.17.0"
|
||||||
edition = "2021"
|
edition = "2021"
|
||||||
rust-version = "1.63"
|
rust-version = "1.63"
|
||||||
homepage = "https://bitcoindevkit.org"
|
homepage = "https://bitcoindevkit.org"
|
||||||
@@ -13,13 +13,16 @@ readme = "README.md"
|
|||||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
# For no-std, remember to enable the bitcoin/no-std feature
|
bitcoin = { version = "0.32.0", default-features = false }
|
||||||
bitcoin = { version = "0.31.0", default-features = false }
|
|
||||||
serde_crate = { package = "serde", version = "1", optional = true, features = ["derive", "rc"] }
|
serde_crate = { package = "serde", version = "1", optional = true, features = ["derive", "rc"] }
|
||||||
|
|
||||||
# Use hashbrown as a feature flag to have HashSet and HashMap from it.
|
# Use hashbrown as a feature flag to have HashSet and HashMap from it.
|
||||||
hashbrown = { version = "0.9.1", optional = true, features = ["serde"] }
|
hashbrown = { version = "0.9.1", optional = true, features = ["serde"] }
|
||||||
miniscript = { version = "11.0.0", optional = true, default-features = false }
|
miniscript = { version = "12.0.0", optional = true, default-features = false }
|
||||||
|
|
||||||
|
# Feature dependencies
|
||||||
|
rusqlite_crate = { package = "rusqlite", version = "0.31.0", features = ["bundled"], optional = true }
|
||||||
|
serde_json = {version = "1", optional = true }
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
rand = "0.8"
|
rand = "0.8"
|
||||||
@@ -29,3 +32,4 @@ proptest = "1.2.0"
|
|||||||
default = ["std", "miniscript"]
|
default = ["std", "miniscript"]
|
||||||
std = ["bitcoin/std", "miniscript?/std"]
|
std = ["bitcoin/std", "miniscript?/std"]
|
||||||
serde = ["serde_crate", "bitcoin/serde", "miniscript?/serde"]
|
serde = ["serde_crate", "bitcoin/serde", "miniscript?/serde"]
|
||||||
|
rusqlite = ["std", "rusqlite_crate", "serde", "serde_json"]
|
||||||
|
|||||||
@@ -1,20 +1,4 @@
|
|||||||
//! Module for keychain related structures.
|
|
||||||
//!
|
|
||||||
//! A keychain here is a set of application-defined indexes for a miniscript descriptor where we can
|
|
||||||
//! derive script pubkeys at a particular derivation index. The application's index is simply
|
|
||||||
//! anything that implements `Ord`.
|
|
||||||
//!
|
|
||||||
//! [`KeychainTxOutIndex`] indexes script pubkeys of keychains and scans in relevant outpoints (that
|
|
||||||
//! has a `txout` containing an indexed script pubkey). Internally, this uses [`SpkTxOutIndex`], but
|
|
||||||
//! also maintains "revealed" and "lookahead" index counts per keychain.
|
|
||||||
//!
|
|
||||||
//! [`SpkTxOutIndex`]: crate::SpkTxOutIndex
|
|
||||||
|
|
||||||
#[cfg(feature = "miniscript")]
|
|
||||||
mod txout_index;
|
|
||||||
use bitcoin::Amount;
|
use bitcoin::Amount;
|
||||||
#[cfg(feature = "miniscript")]
|
|
||||||
pub use txout_index::*;
|
|
||||||
|
|
||||||
/// Balance, differentiated into various categories.
|
/// Balance, differentiated into various categories.
|
||||||
#[derive(Debug, PartialEq, Eq, Clone, Default)]
|
#[derive(Debug, PartialEq, Eq, Clone, Default)]
|
||||||
@@ -74,11 +74,11 @@ impl ConfirmationTime {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl From<ChainPosition<ConfirmationTimeHeightAnchor>> for ConfirmationTime {
|
impl From<ChainPosition<ConfirmationBlockTime>> for ConfirmationTime {
|
||||||
fn from(observed_as: ChainPosition<ConfirmationTimeHeightAnchor>) -> Self {
|
fn from(observed_as: ChainPosition<ConfirmationBlockTime>) -> Self {
|
||||||
match observed_as {
|
match observed_as {
|
||||||
ChainPosition::Confirmed(a) => Self::Confirmed {
|
ChainPosition::Confirmed(a) => Self::Confirmed {
|
||||||
height: a.confirmation_height,
|
height: a.block_id.height,
|
||||||
time: a.confirmation_time,
|
time: a.confirmation_time,
|
||||||
},
|
},
|
||||||
ChainPosition::Unconfirmed(last_seen) => Self::Unconfirmed { last_seen },
|
ChainPosition::Unconfirmed(last_seen) => Self::Unconfirmed { last_seen },
|
||||||
@@ -145,9 +145,7 @@ impl From<(&u32, &BlockHash)> for BlockId {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// An [`Anchor`] implementation that also records the exact confirmation height of the transaction.
|
/// An [`Anchor`] implementation that also records the exact confirmation time of the transaction.
|
||||||
///
|
|
||||||
/// Note that the confirmation block and the anchor block can be different here.
|
|
||||||
///
|
///
|
||||||
/// Refer to [`Anchor`] for more details.
|
/// Refer to [`Anchor`] for more details.
|
||||||
#[derive(Debug, Default, Clone, PartialEq, Eq, Copy, PartialOrd, Ord, core::hash::Hash)]
|
#[derive(Debug, Default, Clone, PartialEq, Eq, Copy, PartialOrd, Ord, core::hash::Hash)]
|
||||||
@@ -156,70 +154,27 @@ impl From<(&u32, &BlockHash)> for BlockId {
|
|||||||
derive(serde::Deserialize, serde::Serialize),
|
derive(serde::Deserialize, serde::Serialize),
|
||||||
serde(crate = "serde_crate")
|
serde(crate = "serde_crate")
|
||||||
)]
|
)]
|
||||||
pub struct ConfirmationHeightAnchor {
|
pub struct ConfirmationBlockTime {
|
||||||
/// The exact confirmation height of the transaction.
|
|
||||||
///
|
|
||||||
/// It is assumed that this value is never larger than the height of the anchor block.
|
|
||||||
pub confirmation_height: u32,
|
|
||||||
/// The anchor block.
|
/// The anchor block.
|
||||||
pub anchor_block: BlockId,
|
pub block_id: BlockId,
|
||||||
}
|
|
||||||
|
|
||||||
impl Anchor for ConfirmationHeightAnchor {
|
|
||||||
fn anchor_block(&self) -> BlockId {
|
|
||||||
self.anchor_block
|
|
||||||
}
|
|
||||||
|
|
||||||
fn confirmation_height_upper_bound(&self) -> u32 {
|
|
||||||
self.confirmation_height
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl AnchorFromBlockPosition for ConfirmationHeightAnchor {
|
|
||||||
fn from_block_position(_block: &bitcoin::Block, block_id: BlockId, _tx_pos: usize) -> Self {
|
|
||||||
Self {
|
|
||||||
anchor_block: block_id,
|
|
||||||
confirmation_height: block_id.height,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// An [`Anchor`] implementation that also records the exact confirmation time and height of the
|
|
||||||
/// transaction.
|
|
||||||
///
|
|
||||||
/// Note that the confirmation block and the anchor block can be different here.
|
|
||||||
///
|
|
||||||
/// Refer to [`Anchor`] for more details.
|
|
||||||
#[derive(Debug, Default, Clone, PartialEq, Eq, Copy, PartialOrd, Ord, core::hash::Hash)]
|
|
||||||
#[cfg_attr(
|
|
||||||
feature = "serde",
|
|
||||||
derive(serde::Deserialize, serde::Serialize),
|
|
||||||
serde(crate = "serde_crate")
|
|
||||||
)]
|
|
||||||
pub struct ConfirmationTimeHeightAnchor {
|
|
||||||
/// The confirmation height of the transaction being anchored.
|
|
||||||
pub confirmation_height: u32,
|
|
||||||
/// The confirmation time of the transaction being anchored.
|
/// The confirmation time of the transaction being anchored.
|
||||||
pub confirmation_time: u64,
|
pub confirmation_time: u64,
|
||||||
/// The anchor block.
|
|
||||||
pub anchor_block: BlockId,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Anchor for ConfirmationTimeHeightAnchor {
|
impl Anchor for ConfirmationBlockTime {
|
||||||
fn anchor_block(&self) -> BlockId {
|
fn anchor_block(&self) -> BlockId {
|
||||||
self.anchor_block
|
self.block_id
|
||||||
}
|
}
|
||||||
|
|
||||||
fn confirmation_height_upper_bound(&self) -> u32 {
|
fn confirmation_height_upper_bound(&self) -> u32 {
|
||||||
self.confirmation_height
|
self.block_id.height
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl AnchorFromBlockPosition for ConfirmationTimeHeightAnchor {
|
impl AnchorFromBlockPosition for ConfirmationBlockTime {
|
||||||
fn from_block_position(block: &bitcoin::Block, block_id: BlockId, _tx_pos: usize) -> Self {
|
fn from_block_position(block: &bitcoin::Block, block_id: BlockId, _tx_pos: usize) -> Self {
|
||||||
Self {
|
Self {
|
||||||
anchor_block: block_id,
|
block_id,
|
||||||
confirmation_height: block_id.height,
|
|
||||||
confirmation_time: block.header.time as _,
|
confirmation_time: block.header.time as _,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -305,19 +260,19 @@ mod test {
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn chain_position_ord() {
|
fn chain_position_ord() {
|
||||||
let unconf1 = ChainPosition::<ConfirmationHeightAnchor>::Unconfirmed(10);
|
let unconf1 = ChainPosition::<ConfirmationBlockTime>::Unconfirmed(10);
|
||||||
let unconf2 = ChainPosition::<ConfirmationHeightAnchor>::Unconfirmed(20);
|
let unconf2 = ChainPosition::<ConfirmationBlockTime>::Unconfirmed(20);
|
||||||
let conf1 = ChainPosition::Confirmed(ConfirmationHeightAnchor {
|
let conf1 = ChainPosition::Confirmed(ConfirmationBlockTime {
|
||||||
confirmation_height: 9,
|
confirmation_time: 20,
|
||||||
anchor_block: BlockId {
|
block_id: BlockId {
|
||||||
height: 20,
|
height: 9,
|
||||||
..Default::default()
|
..Default::default()
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
let conf2 = ChainPosition::Confirmed(ConfirmationHeightAnchor {
|
let conf2 = ChainPosition::Confirmed(ConfirmationBlockTime {
|
||||||
confirmation_height: 12,
|
confirmation_time: 15,
|
||||||
anchor_block: BlockId {
|
block_id: BlockId {
|
||||||
height: 15,
|
height: 12,
|
||||||
..Default::default()
|
..Default::default()
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -1,12 +1,8 @@
|
|||||||
use crate::{
|
use crate::miniscript::{Descriptor, DescriptorPublicKey};
|
||||||
alloc::{string::ToString, vec::Vec},
|
|
||||||
miniscript::{Descriptor, DescriptorPublicKey},
|
|
||||||
};
|
|
||||||
use bitcoin::hashes::{hash_newtype, sha256, Hash};
|
use bitcoin::hashes::{hash_newtype, sha256, Hash};
|
||||||
|
|
||||||
hash_newtype! {
|
hash_newtype! {
|
||||||
/// Represents the ID of a descriptor, defined as the sha256 hash of
|
/// Represents the unique ID of a descriptor.
|
||||||
/// the descriptor string, checksum excluded.
|
|
||||||
///
|
///
|
||||||
/// This is useful for having a fixed-length unique representation of a descriptor,
|
/// This is useful for having a fixed-length unique representation of a descriptor,
|
||||||
/// in particular, we use it to persist application state changes related to the
|
/// in particular, we use it to persist application state changes related to the
|
||||||
@@ -21,8 +17,8 @@ pub trait DescriptorExt {
|
|||||||
/// Panics if the descriptor wildcard is hardened.
|
/// Panics if the descriptor wildcard is hardened.
|
||||||
fn dust_value(&self) -> u64;
|
fn dust_value(&self) -> u64;
|
||||||
|
|
||||||
/// Returns the descriptor id, calculated as the sha256 of the descriptor, checksum not
|
/// Returns the descriptor ID, calculated as the sha256 hash of the spk derived from the
|
||||||
/// included.
|
/// descriptor at index 0.
|
||||||
fn descriptor_id(&self) -> DescriptorId;
|
fn descriptor_id(&self) -> DescriptorId;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -31,14 +27,12 @@ impl DescriptorExt for Descriptor<DescriptorPublicKey> {
|
|||||||
self.at_derivation_index(0)
|
self.at_derivation_index(0)
|
||||||
.expect("descriptor can't have hardened derivation")
|
.expect("descriptor can't have hardened derivation")
|
||||||
.script_pubkey()
|
.script_pubkey()
|
||||||
.dust_value()
|
.minimal_non_dust()
|
||||||
.to_sat()
|
.to_sat()
|
||||||
}
|
}
|
||||||
|
|
||||||
fn descriptor_id(&self) -> DescriptorId {
|
fn descriptor_id(&self) -> DescriptorId {
|
||||||
let desc = self.to_string();
|
let spk = self.at_derivation_index(0).unwrap().script_pubkey();
|
||||||
let desc_without_checksum = desc.split('#').next().expect("Must be here");
|
DescriptorId(sha256::Hash::hash(spk.as_bytes()))
|
||||||
let descriptor_bytes = <Vec<u8>>::from(desc_without_checksum.as_bytes());
|
|
||||||
DescriptorId(sha256::Hash::hash(&descriptor_bytes))
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,11 +1,13 @@
|
|||||||
//! Contains the [`IndexedTxGraph`] and associated types. Refer to the
|
//! Contains the [`IndexedTxGraph`] and associated types. Refer to the
|
||||||
//! [`IndexedTxGraph`] documentation for more.
|
//! [`IndexedTxGraph`] documentation for more.
|
||||||
|
use core::fmt::Debug;
|
||||||
|
|
||||||
use alloc::vec::Vec;
|
use alloc::vec::Vec;
|
||||||
use bitcoin::{Block, OutPoint, Transaction, TxOut, Txid};
|
use bitcoin::{Block, OutPoint, Transaction, TxOut, Txid};
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
tx_graph::{self, TxGraph},
|
tx_graph::{self, TxGraph},
|
||||||
Anchor, AnchorFromBlockPosition, Append, BlockId,
|
Anchor, AnchorFromBlockPosition, BlockId, Indexer, Merge,
|
||||||
};
|
};
|
||||||
|
|
||||||
/// The [`IndexedTxGraph`] combines a [`TxGraph`] and an [`Indexer`] implementation.
|
/// The [`IndexedTxGraph`] combines a [`TxGraph`] and an [`Indexer`] implementation.
|
||||||
@@ -47,27 +49,30 @@ impl<A: Anchor, I: Indexer> IndexedTxGraph<A, I> {
|
|||||||
pub fn apply_changeset(&mut self, changeset: ChangeSet<A, I::ChangeSet>) {
|
pub fn apply_changeset(&mut self, changeset: ChangeSet<A, I::ChangeSet>) {
|
||||||
self.index.apply_changeset(changeset.indexer);
|
self.index.apply_changeset(changeset.indexer);
|
||||||
|
|
||||||
for tx in &changeset.graph.txs {
|
for tx in &changeset.tx_graph.txs {
|
||||||
self.index.index_tx(tx);
|
self.index.index_tx(tx);
|
||||||
}
|
}
|
||||||
for (&outpoint, txout) in &changeset.graph.txouts {
|
for (&outpoint, txout) in &changeset.tx_graph.txouts {
|
||||||
self.index.index_txout(outpoint, txout);
|
self.index.index_txout(outpoint, txout);
|
||||||
}
|
}
|
||||||
|
|
||||||
self.graph.apply_changeset(changeset.graph);
|
self.graph.apply_changeset(changeset.tx_graph);
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Determines the [`ChangeSet`] between `self` and an empty [`IndexedTxGraph`].
|
/// Determines the [`ChangeSet`] between `self` and an empty [`IndexedTxGraph`].
|
||||||
pub fn initial_changeset(&self) -> ChangeSet<A, I::ChangeSet> {
|
pub fn initial_changeset(&self) -> ChangeSet<A, I::ChangeSet> {
|
||||||
let graph = self.graph.initial_changeset();
|
let graph = self.graph.initial_changeset();
|
||||||
let indexer = self.index.initial_changeset();
|
let indexer = self.index.initial_changeset();
|
||||||
ChangeSet { graph, indexer }
|
ChangeSet {
|
||||||
|
tx_graph: graph,
|
||||||
|
indexer,
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<A: Anchor, I: Indexer> IndexedTxGraph<A, I>
|
impl<A: Anchor, I: Indexer> IndexedTxGraph<A, I>
|
||||||
where
|
where
|
||||||
I::ChangeSet: Default + Append,
|
I::ChangeSet: Default + Merge,
|
||||||
{
|
{
|
||||||
fn index_tx_graph_changeset(
|
fn index_tx_graph_changeset(
|
||||||
&mut self,
|
&mut self,
|
||||||
@@ -75,10 +80,10 @@ where
|
|||||||
) -> I::ChangeSet {
|
) -> I::ChangeSet {
|
||||||
let mut changeset = I::ChangeSet::default();
|
let mut changeset = I::ChangeSet::default();
|
||||||
for added_tx in &tx_graph_changeset.txs {
|
for added_tx in &tx_graph_changeset.txs {
|
||||||
changeset.append(self.index.index_tx(added_tx));
|
changeset.merge(self.index.index_tx(added_tx));
|
||||||
}
|
}
|
||||||
for (&added_outpoint, added_txout) in &tx_graph_changeset.txouts {
|
for (&added_outpoint, added_txout) in &tx_graph_changeset.txouts {
|
||||||
changeset.append(self.index.index_txout(added_outpoint, added_txout));
|
changeset.merge(self.index.index_txout(added_outpoint, added_txout));
|
||||||
}
|
}
|
||||||
changeset
|
changeset
|
||||||
}
|
}
|
||||||
@@ -89,21 +94,30 @@ where
|
|||||||
pub fn apply_update(&mut self, update: TxGraph<A>) -> ChangeSet<A, I::ChangeSet> {
|
pub fn apply_update(&mut self, update: TxGraph<A>) -> ChangeSet<A, I::ChangeSet> {
|
||||||
let graph = self.graph.apply_update(update);
|
let graph = self.graph.apply_update(update);
|
||||||
let indexer = self.index_tx_graph_changeset(&graph);
|
let indexer = self.index_tx_graph_changeset(&graph);
|
||||||
ChangeSet { graph, indexer }
|
ChangeSet {
|
||||||
|
tx_graph: graph,
|
||||||
|
indexer,
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Insert a floating `txout` of given `outpoint`.
|
/// Insert a floating `txout` of given `outpoint`.
|
||||||
pub fn insert_txout(&mut self, outpoint: OutPoint, txout: TxOut) -> ChangeSet<A, I::ChangeSet> {
|
pub fn insert_txout(&mut self, outpoint: OutPoint, txout: TxOut) -> ChangeSet<A, I::ChangeSet> {
|
||||||
let graph = self.graph.insert_txout(outpoint, txout);
|
let graph = self.graph.insert_txout(outpoint, txout);
|
||||||
let indexer = self.index_tx_graph_changeset(&graph);
|
let indexer = self.index_tx_graph_changeset(&graph);
|
||||||
ChangeSet { graph, indexer }
|
ChangeSet {
|
||||||
|
tx_graph: graph,
|
||||||
|
indexer,
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Insert and index a transaction into the graph.
|
/// Insert and index a transaction into the graph.
|
||||||
pub fn insert_tx(&mut self, tx: Transaction) -> ChangeSet<A, I::ChangeSet> {
|
pub fn insert_tx(&mut self, tx: Transaction) -> ChangeSet<A, I::ChangeSet> {
|
||||||
let graph = self.graph.insert_tx(tx);
|
let graph = self.graph.insert_tx(tx);
|
||||||
let indexer = self.index_tx_graph_changeset(&graph);
|
let indexer = self.index_tx_graph_changeset(&graph);
|
||||||
ChangeSet { graph, indexer }
|
ChangeSet {
|
||||||
|
tx_graph: graph,
|
||||||
|
indexer,
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Insert an `anchor` for a given transaction.
|
/// Insert an `anchor` for a given transaction.
|
||||||
@@ -137,21 +151,24 @@ where
|
|||||||
|
|
||||||
let mut indexer = I::ChangeSet::default();
|
let mut indexer = I::ChangeSet::default();
|
||||||
for (tx, _) in &txs {
|
for (tx, _) in &txs {
|
||||||
indexer.append(self.index.index_tx(tx));
|
indexer.merge(self.index.index_tx(tx));
|
||||||
}
|
}
|
||||||
|
|
||||||
let mut graph = tx_graph::ChangeSet::default();
|
let mut graph = tx_graph::ChangeSet::default();
|
||||||
for (tx, anchors) in txs {
|
for (tx, anchors) in txs {
|
||||||
if self.index.is_tx_relevant(tx) {
|
if self.index.is_tx_relevant(tx) {
|
||||||
let txid = tx.txid();
|
let txid = tx.compute_txid();
|
||||||
graph.append(self.graph.insert_tx(tx.clone()));
|
graph.merge(self.graph.insert_tx(tx.clone()));
|
||||||
for anchor in anchors {
|
for anchor in anchors {
|
||||||
graph.append(self.graph.insert_anchor(txid, anchor));
|
graph.merge(self.graph.insert_anchor(txid, anchor));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
ChangeSet { graph, indexer }
|
ChangeSet {
|
||||||
|
tx_graph: graph,
|
||||||
|
indexer,
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Batch insert unconfirmed transactions, filtering out those that are irrelevant.
|
/// Batch insert unconfirmed transactions, filtering out those that are irrelevant.
|
||||||
@@ -176,7 +193,7 @@ where
|
|||||||
|
|
||||||
let mut indexer = I::ChangeSet::default();
|
let mut indexer = I::ChangeSet::default();
|
||||||
for (tx, _) in &txs {
|
for (tx, _) in &txs {
|
||||||
indexer.append(self.index.index_tx(tx));
|
indexer.merge(self.index.index_tx(tx));
|
||||||
}
|
}
|
||||||
|
|
||||||
let graph = self.graph.batch_insert_unconfirmed(
|
let graph = self.graph.batch_insert_unconfirmed(
|
||||||
@@ -185,7 +202,10 @@ where
|
|||||||
.map(|(tx, seen_at)| (tx.clone(), seen_at)),
|
.map(|(tx, seen_at)| (tx.clone(), seen_at)),
|
||||||
);
|
);
|
||||||
|
|
||||||
ChangeSet { graph, indexer }
|
ChangeSet {
|
||||||
|
tx_graph: graph,
|
||||||
|
indexer,
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Batch insert unconfirmed transactions.
|
/// Batch insert unconfirmed transactions.
|
||||||
@@ -203,14 +223,17 @@ where
|
|||||||
) -> ChangeSet<A, I::ChangeSet> {
|
) -> ChangeSet<A, I::ChangeSet> {
|
||||||
let graph = self.graph.batch_insert_unconfirmed(txs);
|
let graph = self.graph.batch_insert_unconfirmed(txs);
|
||||||
let indexer = self.index_tx_graph_changeset(&graph);
|
let indexer = self.index_tx_graph_changeset(&graph);
|
||||||
ChangeSet { graph, indexer }
|
ChangeSet {
|
||||||
|
tx_graph: graph,
|
||||||
|
indexer,
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Methods are available if the anchor (`A`) implements [`AnchorFromBlockPosition`].
|
/// Methods are available if the anchor (`A`) implements [`AnchorFromBlockPosition`].
|
||||||
impl<A: Anchor, I: Indexer> IndexedTxGraph<A, I>
|
impl<A: Anchor, I: Indexer> IndexedTxGraph<A, I>
|
||||||
where
|
where
|
||||||
I::ChangeSet: Default + Append,
|
I::ChangeSet: Default + Merge,
|
||||||
A: AnchorFromBlockPosition,
|
A: AnchorFromBlockPosition,
|
||||||
{
|
{
|
||||||
/// Batch insert all transactions of the given `block` of `height`, filtering out those that are
|
/// Batch insert all transactions of the given `block` of `height`, filtering out those that are
|
||||||
@@ -232,14 +255,14 @@ where
|
|||||||
};
|
};
|
||||||
let mut changeset = ChangeSet::<A, I::ChangeSet>::default();
|
let mut changeset = ChangeSet::<A, I::ChangeSet>::default();
|
||||||
for (tx_pos, tx) in block.txdata.iter().enumerate() {
|
for (tx_pos, tx) in block.txdata.iter().enumerate() {
|
||||||
changeset.indexer.append(self.index.index_tx(tx));
|
changeset.indexer.merge(self.index.index_tx(tx));
|
||||||
if self.index.is_tx_relevant(tx) {
|
if self.index.is_tx_relevant(tx) {
|
||||||
let txid = tx.txid();
|
let txid = tx.compute_txid();
|
||||||
let anchor = A::from_block_position(block, block_id, tx_pos);
|
let anchor = A::from_block_position(block, block_id, tx_pos);
|
||||||
changeset.graph.append(self.graph.insert_tx(tx.clone()));
|
changeset.tx_graph.merge(self.graph.insert_tx(tx.clone()));
|
||||||
changeset
|
changeset
|
||||||
.graph
|
.tx_graph
|
||||||
.append(self.graph.insert_anchor(txid, anchor));
|
.merge(self.graph.insert_anchor(txid, anchor));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
changeset
|
changeset
|
||||||
@@ -261,11 +284,20 @@ where
|
|||||||
let mut graph = tx_graph::ChangeSet::default();
|
let mut graph = tx_graph::ChangeSet::default();
|
||||||
for (tx_pos, tx) in block.txdata.iter().enumerate() {
|
for (tx_pos, tx) in block.txdata.iter().enumerate() {
|
||||||
let anchor = A::from_block_position(&block, block_id, tx_pos);
|
let anchor = A::from_block_position(&block, block_id, tx_pos);
|
||||||
graph.append(self.graph.insert_anchor(tx.txid(), anchor));
|
graph.merge(self.graph.insert_anchor(tx.compute_txid(), anchor));
|
||||||
graph.append(self.graph.insert_tx(tx.clone()));
|
graph.merge(self.graph.insert_tx(tx.clone()));
|
||||||
}
|
}
|
||||||
let indexer = self.index_tx_graph_changeset(&graph);
|
let indexer = self.index_tx_graph_changeset(&graph);
|
||||||
ChangeSet { graph, indexer }
|
ChangeSet {
|
||||||
|
tx_graph: graph,
|
||||||
|
indexer,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<A, I> AsRef<TxGraph<A>> for IndexedTxGraph<A, I> {
|
||||||
|
fn as_ref(&self) -> &TxGraph<A> {
|
||||||
|
&self.graph
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -285,7 +317,7 @@ where
|
|||||||
#[must_use]
|
#[must_use]
|
||||||
pub struct ChangeSet<A, IA> {
|
pub struct ChangeSet<A, IA> {
|
||||||
/// [`TxGraph`] changeset.
|
/// [`TxGraph`] changeset.
|
||||||
pub graph: tx_graph::ChangeSet<A>,
|
pub tx_graph: tx_graph::ChangeSet<A>,
|
||||||
/// [`Indexer`] changeset.
|
/// [`Indexer`] changeset.
|
||||||
pub indexer: IA,
|
pub indexer: IA,
|
||||||
}
|
}
|
||||||
@@ -293,62 +325,38 @@ pub struct ChangeSet<A, IA> {
|
|||||||
impl<A, IA: Default> Default for ChangeSet<A, IA> {
|
impl<A, IA: Default> Default for ChangeSet<A, IA> {
|
||||||
fn default() -> Self {
|
fn default() -> Self {
|
||||||
Self {
|
Self {
|
||||||
graph: Default::default(),
|
tx_graph: Default::default(),
|
||||||
indexer: Default::default(),
|
indexer: Default::default(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<A: Anchor, IA: Append> Append for ChangeSet<A, IA> {
|
impl<A: Anchor, IA: Merge> Merge for ChangeSet<A, IA> {
|
||||||
fn append(&mut self, other: Self) {
|
fn merge(&mut self, other: Self) {
|
||||||
self.graph.append(other.graph);
|
self.tx_graph.merge(other.tx_graph);
|
||||||
self.indexer.append(other.indexer);
|
self.indexer.merge(other.indexer);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn is_empty(&self) -> bool {
|
fn is_empty(&self) -> bool {
|
||||||
self.graph.is_empty() && self.indexer.is_empty()
|
self.tx_graph.is_empty() && self.indexer.is_empty()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<A, IA: Default> From<tx_graph::ChangeSet<A>> for ChangeSet<A, IA> {
|
impl<A, IA: Default> From<tx_graph::ChangeSet<A>> for ChangeSet<A, IA> {
|
||||||
fn from(graph: tx_graph::ChangeSet<A>) -> Self {
|
fn from(graph: tx_graph::ChangeSet<A>) -> Self {
|
||||||
Self {
|
Self {
|
||||||
graph,
|
tx_graph: graph,
|
||||||
..Default::default()
|
..Default::default()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(feature = "miniscript")]
|
#[cfg(feature = "miniscript")]
|
||||||
impl<A, K> From<crate::keychain::ChangeSet<K>> for ChangeSet<A, crate::keychain::ChangeSet<K>> {
|
impl<A> From<crate::keychain_txout::ChangeSet> for ChangeSet<A, crate::keychain_txout::ChangeSet> {
|
||||||
fn from(indexer: crate::keychain::ChangeSet<K>) -> Self {
|
fn from(indexer: crate::keychain_txout::ChangeSet) -> Self {
|
||||||
Self {
|
Self {
|
||||||
graph: Default::default(),
|
tx_graph: Default::default(),
|
||||||
indexer,
|
indexer,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Utilities for indexing transaction data.
|
|
||||||
///
|
|
||||||
/// Types which implement this trait can be used to construct an [`IndexedTxGraph`].
|
|
||||||
/// This trait's methods should rarely be called directly.
|
|
||||||
pub trait Indexer {
|
|
||||||
/// The resultant "changeset" when new transaction data is indexed.
|
|
||||||
type ChangeSet;
|
|
||||||
|
|
||||||
/// Scan and index the given `outpoint` and `txout`.
|
|
||||||
fn index_txout(&mut self, outpoint: OutPoint, txout: &TxOut) -> Self::ChangeSet;
|
|
||||||
|
|
||||||
/// Scans a transaction for relevant outpoints, which are stored and indexed internally.
|
|
||||||
fn index_tx(&mut self, tx: &Transaction) -> Self::ChangeSet;
|
|
||||||
|
|
||||||
/// Apply changeset to itself.
|
|
||||||
fn apply_changeset(&mut self, changeset: Self::ChangeSet);
|
|
||||||
|
|
||||||
/// Determines the [`ChangeSet`] between `self` and an empty [`Indexer`].
|
|
||||||
fn initial_changeset(&self) -> Self::ChangeSet;
|
|
||||||
|
|
||||||
/// Determines whether the transaction should be included in the index.
|
|
||||||
fn is_tx_relevant(&self, tx: &Transaction) -> bool;
|
|
||||||
}
|
|
||||||
|
|||||||
33
crates/chain/src/indexer.rs
Normal file
33
crates/chain/src/indexer.rs
Normal file
@@ -0,0 +1,33 @@
|
|||||||
|
//! [`Indexer`] provides utilities for indexing transaction data.
|
||||||
|
|
||||||
|
use bitcoin::{OutPoint, Transaction, TxOut};
|
||||||
|
|
||||||
|
#[cfg(feature = "miniscript")]
|
||||||
|
pub mod keychain_txout;
|
||||||
|
pub mod spk_txout;
|
||||||
|
|
||||||
|
/// Utilities for indexing transaction data.
|
||||||
|
///
|
||||||
|
/// Types which implement this trait can be used to construct an [`IndexedTxGraph`].
|
||||||
|
/// This trait's methods should rarely be called directly.
|
||||||
|
///
|
||||||
|
/// [`IndexedTxGraph`]: crate::IndexedTxGraph
|
||||||
|
pub trait Indexer {
|
||||||
|
/// The resultant "changeset" when new transaction data is indexed.
|
||||||
|
type ChangeSet;
|
||||||
|
|
||||||
|
/// Scan and index the given `outpoint` and `txout`.
|
||||||
|
fn index_txout(&mut self, outpoint: OutPoint, txout: &TxOut) -> Self::ChangeSet;
|
||||||
|
|
||||||
|
/// Scans a transaction for relevant outpoints, which are stored and indexed internally.
|
||||||
|
fn index_tx(&mut self, tx: &Transaction) -> Self::ChangeSet;
|
||||||
|
|
||||||
|
/// Apply changeset to itself.
|
||||||
|
fn apply_changeset(&mut self, changeset: Self::ChangeSet);
|
||||||
|
|
||||||
|
/// Determines the [`ChangeSet`](Indexer::ChangeSet) between `self` and an empty [`Indexer`].
|
||||||
|
fn initial_changeset(&self) -> Self::ChangeSet;
|
||||||
|
|
||||||
|
/// Determines whether the transaction should be included in the index.
|
||||||
|
fn is_tx_relevant(&self, tx: &Transaction) -> bool;
|
||||||
|
}
|
||||||
881
crates/chain/src/indexer/keychain_txout.rs
Normal file
881
crates/chain/src/indexer/keychain_txout.rs
Normal file
@@ -0,0 +1,881 @@
|
|||||||
|
//! [`KeychainTxOutIndex`] controls how script pubkeys are revealed for multiple keychains and
|
||||||
|
//! indexes [`TxOut`]s with them.
|
||||||
|
|
||||||
|
use crate::{
|
||||||
|
collections::*,
|
||||||
|
miniscript::{Descriptor, DescriptorPublicKey},
|
||||||
|
spk_iter::BIP32_MAX_INDEX,
|
||||||
|
spk_txout::SpkTxOutIndex,
|
||||||
|
DescriptorExt, DescriptorId, Indexed, Indexer, KeychainIndexed, SpkIterator,
|
||||||
|
};
|
||||||
|
use alloc::{borrow::ToOwned, vec::Vec};
|
||||||
|
use bitcoin::{Amount, OutPoint, ScriptBuf, SignedAmount, Transaction, TxOut, Txid};
|
||||||
|
use core::{
|
||||||
|
fmt::Debug,
|
||||||
|
ops::{Bound, RangeBounds},
|
||||||
|
};
|
||||||
|
|
||||||
|
use crate::Merge;
|
||||||
|
|
||||||
|
/// The default lookahead for a [`KeychainTxOutIndex`]
|
||||||
|
pub const DEFAULT_LOOKAHEAD: u32 = 25;
|
||||||
|
|
||||||
|
/// [`KeychainTxOutIndex`] controls how script pubkeys are revealed for multiple keychains, and
|
||||||
|
/// indexes [`TxOut`]s with them.
|
||||||
|
///
|
||||||
|
/// A single keychain is a chain of script pubkeys derived from a single [`Descriptor`]. Keychains
|
||||||
|
/// are identified using the `K` generic. Script pubkeys are identified by the keychain that they
|
||||||
|
/// are derived from `K`, as well as the derivation index `u32`.
|
||||||
|
///
|
||||||
|
/// There is a strict 1-to-1 relationship between descriptors and keychains. Each keychain has one
|
||||||
|
/// and only one descriptor and each descriptor has one and only one keychain. The
|
||||||
|
/// [`insert_descriptor`] method will return an error if you try and violate this invariant. This
|
||||||
|
/// rule is a proxy for a stronger rule: no two descriptors should produce the same script pubkey.
|
||||||
|
/// Having two descriptors produce the same script pubkey should cause whichever keychain derives
|
||||||
|
/// the script pubkey first to be the effective owner of it but you should not rely on this
|
||||||
|
/// behaviour. ⚠ It is up you, the developer, not to violate this invariant.
|
||||||
|
///
|
||||||
|
/// # Revealed script pubkeys
|
||||||
|
///
|
||||||
|
/// Tracking how script pubkeys are revealed is useful for collecting chain data. For example, if
|
||||||
|
/// the user has requested 5 script pubkeys (to receive money with), we only need to use those
|
||||||
|
/// script pubkeys to scan for chain data.
|
||||||
|
///
|
||||||
|
/// Call [`reveal_to_target`] or [`reveal_next_spk`] to reveal more script pubkeys.
|
||||||
|
/// Call [`revealed_keychain_spks`] or [`revealed_spks`] to iterate through revealed script pubkeys.
|
||||||
|
///
|
||||||
|
/// # Lookahead script pubkeys
|
||||||
|
///
|
||||||
|
/// When an user first recovers a wallet (i.e. from a recovery phrase and/or descriptor), we will
|
||||||
|
/// NOT have knowledge of which script pubkeys are revealed. So when we index a transaction or
|
||||||
|
/// txout (using [`index_tx`]/[`index_txout`]) we scan the txouts against script pubkeys derived
|
||||||
|
/// above the last revealed index. These additionally-derived script pubkeys are called the
|
||||||
|
/// lookahead.
|
||||||
|
///
|
||||||
|
/// The [`KeychainTxOutIndex`] is constructed with the `lookahead` and cannot be altered. See
|
||||||
|
/// [`DEFAULT_LOOKAHEAD`] for the value used in the `Default` implementation. Use [`new`] to set a
|
||||||
|
/// custom `lookahead`.
|
||||||
|
///
|
||||||
|
/// # Unbounded script pubkey iterator
|
||||||
|
///
|
||||||
|
/// For script-pubkey-based chain sources (such as Electrum/Esplora), an initial scan is best done
|
||||||
|
/// by iterating though derived script pubkeys one by one and requesting transaction histories for
|
||||||
|
/// each script pubkey. We will stop after x-number of script pubkeys have empty histories. An
|
||||||
|
/// unbounded script pubkey iterator is useful to pass to such a chain source because it doesn't
|
||||||
|
/// require holding a reference to the index.
|
||||||
|
///
|
||||||
|
/// Call [`unbounded_spk_iter`] to get an unbounded script pubkey iterator for a given keychain.
|
||||||
|
/// Call [`all_unbounded_spk_iters`] to get unbounded script pubkey iterators for all keychains.
|
||||||
|
///
|
||||||
|
/// # Change sets
|
||||||
|
///
|
||||||
|
/// Methods that can update the last revealed index or add keychains will return [`ChangeSet`] to report
|
||||||
|
/// these changes. This should be persisted for future recovery.
|
||||||
|
///
|
||||||
|
/// ## Synopsis
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// use bdk_chain::indexer::keychain_txout::KeychainTxOutIndex;
|
||||||
|
/// # use bdk_chain::{ miniscript::{Descriptor, DescriptorPublicKey} };
|
||||||
|
/// # use core::str::FromStr;
|
||||||
|
///
|
||||||
|
/// // imagine our service has internal and external addresses but also addresses for users
|
||||||
|
/// #[derive(Clone, Debug, PartialEq, Eq, Ord, PartialOrd)]
|
||||||
|
/// enum MyKeychain {
|
||||||
|
/// External,
|
||||||
|
/// Internal,
|
||||||
|
/// MyAppUser {
|
||||||
|
/// user_id: u32
|
||||||
|
/// }
|
||||||
|
/// }
|
||||||
|
///
|
||||||
|
/// let mut txout_index = KeychainTxOutIndex::<MyKeychain>::default();
|
||||||
|
///
|
||||||
|
/// # let secp = bdk_chain::bitcoin::secp256k1::Secp256k1::signing_only();
|
||||||
|
/// # let (external_descriptor,_) = Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/0/*)").unwrap();
|
||||||
|
/// # let (internal_descriptor,_) = Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/1/*)").unwrap();
|
||||||
|
/// # let (descriptor_42, _) = Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/2/*)").unwrap();
|
||||||
|
/// let _ = txout_index.insert_descriptor(MyKeychain::External, external_descriptor)?;
|
||||||
|
/// let _ = txout_index.insert_descriptor(MyKeychain::Internal, internal_descriptor)?;
|
||||||
|
/// let _ = txout_index.insert_descriptor(MyKeychain::MyAppUser { user_id: 42 }, descriptor_42)?;
|
||||||
|
///
|
||||||
|
/// let new_spk_for_user = txout_index.reveal_next_spk(MyKeychain::MyAppUser{ user_id: 42 });
|
||||||
|
/// # Ok::<_, bdk_chain::indexer::keychain_txout::InsertDescriptorError<_>>(())
|
||||||
|
/// ```
|
||||||
|
///
|
||||||
|
/// [`Ord`]: core::cmp::Ord
|
||||||
|
/// [`SpkTxOutIndex`]: crate::spk_txout_index::SpkTxOutIndex
|
||||||
|
/// [`Descriptor`]: crate::miniscript::Descriptor
|
||||||
|
/// [`reveal_to_target`]: Self::reveal_to_target
|
||||||
|
/// [`reveal_next_spk`]: Self::reveal_next_spk
|
||||||
|
/// [`revealed_keychain_spks`]: Self::revealed_keychain_spks
|
||||||
|
/// [`revealed_spks`]: Self::revealed_spks
|
||||||
|
/// [`index_tx`]: Self::index_tx
|
||||||
|
/// [`index_txout`]: Self::index_txout
|
||||||
|
/// [`new`]: Self::new
|
||||||
|
/// [`unbounded_spk_iter`]: Self::unbounded_spk_iter
|
||||||
|
/// [`all_unbounded_spk_iters`]: Self::all_unbounded_spk_iters
|
||||||
|
/// [`outpoints`]: Self::outpoints
|
||||||
|
/// [`txouts`]: Self::txouts
|
||||||
|
/// [`unused_spks`]: Self::unused_spks
|
||||||
|
/// [`insert_descriptor`]: Self::insert_descriptor
|
||||||
|
#[derive(Clone, Debug)]
pub struct KeychainTxOutIndex<K> {
    // Underlying spk index, keyed by `(keychain, derivation_index)`. Note: this also
    // stores unrevealed `lookahead` spks (see the WARNING on `Self::inner`).
    inner: SpkTxOutIndex<(K, u32)>,
    // keychain -> descriptor id. One-to-one; invariant: mirrored exactly by
    // `descriptor_id_to_keychain`, and every descriptor id stored here has a
    // corresponding entry in `descriptors` (methods `expect("invariant")` on this).
    keychain_to_descriptor_id: BTreeMap<K, DescriptorId>,
    // descriptor id -> keychain. Reverse of `keychain_to_descriptor_id`.
    descriptor_id_to_keychain: HashMap<DescriptorId, K>,
    // All inserted descriptors, keyed by descriptor id.
    descriptors: HashMap<DescriptorId, Descriptor<DescriptorPublicKey>>,
    // Highest revealed derivation index per descriptor. Descriptors with no revealed
    // scripts have no entry (callers treat a missing entry as "nothing revealed").
    last_revealed: HashMap<DescriptorId, u32>,
    // Number of spks to derive and cache beyond the last revealed index.
    // Fixed at construction time (see `new`).
    lookahead: u32,
}
|
||||||
|
|
||||||
|
impl<K> Default for KeychainTxOutIndex<K> {
|
||||||
|
fn default() -> Self {
|
||||||
|
Self::new(DEFAULT_LOOKAHEAD)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<K: Clone + Ord + Debug> Indexer for KeychainTxOutIndex<K> {
    type ChangeSet = ChangeSet;

    // Index a single txout. If its spk belongs to a tracked keychain and its
    // derivation index is beyond the last revealed index for that descriptor, the
    // revealed range is advanced and the change is reported in the returned
    // `ChangeSet`.
    fn index_txout(&mut self, outpoint: OutPoint, txout: &TxOut) -> Self::ChangeSet {
        let mut changeset = ChangeSet::default();
        // `scan_txout` yields the `(keychain, derivation_index)` of the spk if it is one
        // we store — including lookahead spks that have not been revealed yet.
        if let Some((keychain, index)) = self.inner.scan_txout(outpoint, txout).cloned() {
            let did = self
                .keychain_to_descriptor_id
                .get(&keychain)
                .expect("invariant");
            // `None < Some(_)` under `Option`'s ordering, so this branch is also taken
            // when nothing has been revealed for this descriptor yet.
            if self.last_revealed.get(did) < Some(&index) {
                self.last_revealed.insert(*did, index);
                changeset.last_revealed.insert(*did, index);
                // The revealed range grew, so derive fresh lookahead spks beyond it.
                self.replenish_inner_index(*did, &keychain, self.lookahead);
            }
        }
        changeset
    }

    // Index every output of `tx` (inputs are irrelevant here — only outputs can
    // introduce new txouts). Accumulates the per-output changesets.
    fn index_tx(&mut self, tx: &bitcoin::Transaction) -> Self::ChangeSet {
        let mut changeset = ChangeSet::default();
        let txid = tx.compute_txid();
        // `op` is the output's position, i.e. the vout of the resulting `OutPoint`.
        for (op, txout) in tx.output.iter().enumerate() {
            changeset.merge(self.index_txout(OutPoint::new(txid, op as u32), txout));
        }
        changeset
    }

    // Report the current state as a changeset. Only the last-revealed indices are
    // emitted — this `ChangeSet` carries no other fields.
    fn initial_changeset(&self) -> Self::ChangeSet {
        ChangeSet {
            last_revealed: self.last_revealed.clone().into_iter().collect(),
        }
    }

    // Delegates to the inherent `apply_changeset` method of the same name.
    fn apply_changeset(&mut self, changeset: Self::ChangeSet) {
        self.apply_changeset(changeset)
    }

    // A tx is relevant if any of its inputs or outputs touch a tracked spk
    // (per the inner index's notion of relevance).
    fn is_tx_relevant(&self, tx: &bitcoin::Transaction) -> bool {
        self.inner.is_relevant(tx)
    }
}
|
||||||
|
|
||||||
|
impl<K> KeychainTxOutIndex<K> {
|
||||||
|
/// Construct a [`KeychainTxOutIndex`] with the given `lookahead`.
|
||||||
|
///
|
||||||
|
/// The `lookahead` is the number of script pubkeys to derive and cache from the internal
|
||||||
|
/// descriptors over and above the last revealed script index. Without a lookahead the index
|
||||||
|
/// will miss outputs you own when processing transactions whose output script pubkeys lie
|
||||||
|
/// beyond the last revealed index. In certain situations, such as when performing an initial
|
||||||
|
/// scan of the blockchain during wallet import, it may be uncertain or unknown what the index
|
||||||
|
/// of the last revealed script pubkey actually is.
|
||||||
|
///
|
||||||
|
/// Refer to [struct-level docs](KeychainTxOutIndex) for more about `lookahead`.
|
||||||
|
pub fn new(lookahead: u32) -> Self {
|
||||||
|
Self {
|
||||||
|
inner: SpkTxOutIndex::default(),
|
||||||
|
keychain_to_descriptor_id: Default::default(),
|
||||||
|
descriptors: Default::default(),
|
||||||
|
descriptor_id_to_keychain: Default::default(),
|
||||||
|
last_revealed: Default::default(),
|
||||||
|
lookahead,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Methods that are *re-exposed* from the internal [`SpkTxOutIndex`].
impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
    /// Return a reference to the internal [`SpkTxOutIndex`].
    ///
    /// **WARNING**: The internal index will contain lookahead spks. Refer to
    /// [struct-level docs](KeychainTxOutIndex) for more about `lookahead`.
    pub fn inner(&self) -> &SpkTxOutIndex<(K, u32)> {
        &self.inner
    }

    /// Get the set of indexed outpoints, corresponding to tracked keychains.
    pub fn outpoints(&self) -> &BTreeSet<KeychainIndexed<K, OutPoint>> {
        self.inner.outpoints()
    }

    /// Iterate over known txouts that spend to tracked script pubkeys.
    pub fn txouts(
        &self,
    ) -> impl DoubleEndedIterator<Item = KeychainIndexed<K, (OutPoint, &TxOut)>> + ExactSizeIterator
    {
        // Reshape the inner `(index, outpoint, txout)` triple into the
        // `(index, (outpoint, txout))` pair this API exposes.
        self.inner
            .txouts()
            .map(|(index, op, txout)| (index.clone(), (op, txout)))
    }

    /// Finds all txouts on a transaction that has previously been scanned and indexed.
    pub fn txouts_in_tx(
        &self,
        txid: Txid,
    ) -> impl DoubleEndedIterator<Item = KeychainIndexed<K, (OutPoint, &TxOut)>> {
        self.inner
            .txouts_in_tx(txid)
            .map(|(index, op, txout)| (index.clone(), (op, txout)))
    }

    /// Return the [`TxOut`] of `outpoint` if it has been indexed, and if it corresponds to a
    /// tracked keychain.
    ///
    /// The associated keychain and keychain index of the txout's spk is also returned.
    ///
    /// This calls [`SpkTxOutIndex::txout`] internally.
    pub fn txout(&self, outpoint: OutPoint) -> Option<KeychainIndexed<K, &TxOut>> {
        self.inner
            .txout(outpoint)
            .map(|(index, txout)| (index.clone(), txout))
    }

    /// Return the script that exists under the given `keychain`'s `index`.
    ///
    /// Note: the internal index also stores lookahead spks, so this may return a script
    /// that has not been revealed yet.
    ///
    /// This calls [`SpkTxOutIndex::spk_at_index`] internally.
    pub fn spk_at_index(&self, keychain: K, index: u32) -> Option<ScriptBuf> {
        self.inner.spk_at_index(&(keychain.clone(), index))
    }

    /// Returns the keychain and keychain index associated with the spk.
    ///
    /// This calls [`SpkTxOutIndex::index_of_spk`] internally.
    pub fn index_of_spk(&self, script: ScriptBuf) -> Option<&(K, u32)> {
        self.inner.index_of_spk(script)
    }

    /// Returns whether the spk under the `keychain`'s `index` has been used.
    ///
    /// Here, "unused" means that after the script pubkey was stored in the index, the index has
    /// never scanned a transaction output with it.
    ///
    /// This calls [`SpkTxOutIndex::is_used`] internally.
    pub fn is_used(&self, keychain: K, index: u32) -> bool {
        self.inner.is_used(&(keychain, index))
    }

    /// Marks the script pubkey at `index` as used even though the tracker hasn't seen an output
    /// with it.
    ///
    /// This only has an effect when the `index` had been added to `self` already and was unused.
    ///
    /// Returns whether the spk under the given `keychain` and `index` is successfully
    /// marked as used. Returns false either when there is no descriptor under the given
    /// keychain, or when the spk is already marked as used.
    ///
    /// This is useful when you want to reserve a script pubkey for something but don't want to add
    /// the transaction output using it to the index yet. Other callers will consider `index` on
    /// `keychain` used until you call [`unmark_used`].
    ///
    /// This calls [`SpkTxOutIndex::mark_used`] internally.
    ///
    /// [`unmark_used`]: Self::unmark_used
    pub fn mark_used(&mut self, keychain: K, index: u32) -> bool {
        self.inner.mark_used(&(keychain, index))
    }

    /// Undoes the effect of [`mark_used`]. Returns whether the `index` is inserted back into
    /// `unused`.
    ///
    /// Note that if `self` has scanned an output with this script pubkey, then this will have no
    /// effect.
    ///
    /// This calls [`SpkTxOutIndex::unmark_used`] internally.
    ///
    /// [`mark_used`]: Self::mark_used
    pub fn unmark_used(&mut self, keychain: K, index: u32) -> bool {
        self.inner.unmark_used(&(keychain, index))
    }

    /// Computes the total value transfer effect `tx` has on the script pubkeys belonging to the
    /// keychains in `range`. Value is *sent* when a script pubkey in the `range` is on an input and
    /// *received* when it is on an output. For `sent` to be computed correctly, the output being
    /// spent must have already been scanned by the index. Calculating received just uses the
    /// [`Transaction`] outputs directly, so it will be correct even if it has not been scanned.
    pub fn sent_and_received(
        &self,
        tx: &Transaction,
        range: impl RangeBounds<K>,
    ) -> (Amount, Amount) {
        // The keychain range must be widened to the inner `(K, u32)` key space first.
        self.inner
            .sent_and_received(tx, self.map_to_inner_bounds(range))
    }

    /// Computes the net value that this transaction gives to the script pubkeys in the index and
    /// *takes* from the transaction outputs in the index. Shorthand for calling
    /// [`sent_and_received`] and subtracting sent from received.
    ///
    /// This calls [`SpkTxOutIndex::net_value`] internally.
    ///
    /// [`sent_and_received`]: Self::sent_and_received
    pub fn net_value(&self, tx: &Transaction, range: impl RangeBounds<K>) -> SignedAmount {
        self.inner.net_value(tx, self.map_to_inner_bounds(range))
    }
}
|
||||||
|
|
||||||
|
impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
|
||||||
|
/// Return all keychains and their corresponding descriptors.
|
||||||
|
pub fn keychains(
|
||||||
|
&self,
|
||||||
|
) -> impl DoubleEndedIterator<Item = (K, &Descriptor<DescriptorPublicKey>)> + ExactSizeIterator + '_
|
||||||
|
{
|
||||||
|
self.keychain_to_descriptor_id
|
||||||
|
.iter()
|
||||||
|
.map(|(k, did)| (k.clone(), self.descriptors.get(did).expect("invariant")))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Insert a descriptor with a keychain associated to it.
|
||||||
|
///
|
||||||
|
/// Adding a descriptor means you will be able to derive new script pubkeys under it and the
|
||||||
|
/// txout index will discover transaction outputs with those script pubkeys (once they've been
|
||||||
|
/// derived and added to the index).
|
||||||
|
///
|
||||||
|
/// keychain <-> descriptor is a one-to-one mapping that cannot be changed. Attempting to do so
|
||||||
|
/// will return a [`InsertDescriptorError<K>`].
|
||||||
|
///
|
||||||
|
/// [`KeychainTxOutIndex`] will prevent you from inserting two descriptors which derive the same
|
||||||
|
/// script pubkey at index 0, but it's up to you to ensure that descriptors don't collide at
|
||||||
|
/// other indices. If they do nothing catastrophic happens at the `KeychainTxOutIndex` level
|
||||||
|
/// (one keychain just becomes the defacto owner of that spk arbitrarily) but this may have
|
||||||
|
/// subtle implications up the application stack like one UTXO being missing from one keychain
|
||||||
|
/// because it has been assigned to another which produces the same script pubkey.
|
||||||
|
pub fn insert_descriptor(
|
||||||
|
&mut self,
|
||||||
|
keychain: K,
|
||||||
|
descriptor: Descriptor<DescriptorPublicKey>,
|
||||||
|
) -> Result<bool, InsertDescriptorError<K>> {
|
||||||
|
let did = descriptor.descriptor_id();
|
||||||
|
if !self.keychain_to_descriptor_id.contains_key(&keychain)
|
||||||
|
&& !self.descriptor_id_to_keychain.contains_key(&did)
|
||||||
|
{
|
||||||
|
self.descriptors.insert(did, descriptor.clone());
|
||||||
|
self.keychain_to_descriptor_id.insert(keychain.clone(), did);
|
||||||
|
self.descriptor_id_to_keychain.insert(did, keychain.clone());
|
||||||
|
self.replenish_inner_index(did, &keychain, self.lookahead);
|
||||||
|
return Ok(true);
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(existing_desc_id) = self.keychain_to_descriptor_id.get(&keychain) {
|
||||||
|
let descriptor = self.descriptors.get(existing_desc_id).expect("invariant");
|
||||||
|
if *existing_desc_id != did {
|
||||||
|
return Err(InsertDescriptorError::KeychainAlreadyAssigned {
|
||||||
|
existing_assignment: descriptor.clone(),
|
||||||
|
keychain,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(existing_keychain) = self.descriptor_id_to_keychain.get(&did) {
|
||||||
|
let descriptor = self.descriptors.get(&did).expect("invariant").clone();
|
||||||
|
|
||||||
|
if *existing_keychain != keychain {
|
||||||
|
return Err(InsertDescriptorError::DescriptorAlreadyAssigned {
|
||||||
|
existing_assignment: existing_keychain.clone(),
|
||||||
|
descriptor,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(false)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Gets the descriptor associated with the keychain. Returns `None` if the keychain doesn't
|
||||||
|
/// have a descriptor associated with it.
|
||||||
|
pub fn get_descriptor(&self, keychain: K) -> Option<&Descriptor<DescriptorPublicKey>> {
|
||||||
|
let did = self.keychain_to_descriptor_id.get(&keychain)?;
|
||||||
|
self.descriptors.get(did)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get the lookahead setting.
|
||||||
|
///
|
||||||
|
/// Refer to [`new`] for more information on the `lookahead`.
|
||||||
|
///
|
||||||
|
/// [`new`]: Self::new
|
||||||
|
pub fn lookahead(&self) -> u32 {
|
||||||
|
self.lookahead
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Store lookahead scripts until `target_index` (inclusive).
|
||||||
|
///
|
||||||
|
/// This does not change the global `lookahead` setting.
|
||||||
|
pub fn lookahead_to_target(&mut self, keychain: K, target_index: u32) {
|
||||||
|
if let Some((next_index, _)) = self.next_index(keychain.clone()) {
|
||||||
|
let temp_lookahead = (target_index + 1)
|
||||||
|
.checked_sub(next_index)
|
||||||
|
.filter(|&index| index > 0);
|
||||||
|
|
||||||
|
if let Some(temp_lookahead) = temp_lookahead {
|
||||||
|
self.replenish_inner_index_keychain(keychain, temp_lookahead);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn replenish_inner_index_did(&mut self, did: DescriptorId, lookahead: u32) {
|
||||||
|
if let Some(keychain) = self.descriptor_id_to_keychain.get(&did).cloned() {
|
||||||
|
self.replenish_inner_index(did, &keychain, lookahead);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn replenish_inner_index_keychain(&mut self, keychain: K, lookahead: u32) {
|
||||||
|
if let Some(did) = self.keychain_to_descriptor_id.get(&keychain) {
|
||||||
|
self.replenish_inner_index(*did, &keychain, lookahead);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Syncs the state of the inner spk index after changes to a keychain
|
||||||
|
fn replenish_inner_index(&mut self, did: DescriptorId, keychain: &K, lookahead: u32) {
|
||||||
|
let descriptor = self.descriptors.get(&did).expect("invariant");
|
||||||
|
let next_store_index = self
|
||||||
|
.inner
|
||||||
|
.all_spks()
|
||||||
|
.range(&(keychain.clone(), u32::MIN)..=&(keychain.clone(), u32::MAX))
|
||||||
|
.last()
|
||||||
|
.map_or(0, |((_, index), _)| *index + 1);
|
||||||
|
let next_reveal_index = self.last_revealed.get(&did).map_or(0, |v| *v + 1);
|
||||||
|
for (new_index, new_spk) in
|
||||||
|
SpkIterator::new_with_range(descriptor, next_store_index..next_reveal_index + lookahead)
|
||||||
|
{
|
||||||
|
let _inserted = self
|
||||||
|
.inner
|
||||||
|
.insert_spk((keychain.clone(), new_index), new_spk);
|
||||||
|
debug_assert!(_inserted, "replenish lookahead: must not have existing spk: keychain={:?}, lookahead={}, next_store_index={}, next_reveal_index={}", keychain, lookahead, next_store_index, next_reveal_index);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get an unbounded spk iterator over a given `keychain`. Returns `None` if the provided
|
||||||
|
/// keychain doesn't exist
|
||||||
|
pub fn unbounded_spk_iter(
|
||||||
|
&self,
|
||||||
|
keychain: K,
|
||||||
|
) -> Option<SpkIterator<Descriptor<DescriptorPublicKey>>> {
|
||||||
|
let descriptor = self.get_descriptor(keychain)?.clone();
|
||||||
|
Some(SpkIterator::new(descriptor))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get unbounded spk iterators for all keychains.
|
||||||
|
pub fn all_unbounded_spk_iters(
|
||||||
|
&self,
|
||||||
|
) -> BTreeMap<K, SpkIterator<Descriptor<DescriptorPublicKey>>> {
|
||||||
|
self.keychain_to_descriptor_id
|
||||||
|
.iter()
|
||||||
|
.map(|(k, did)| {
|
||||||
|
(
|
||||||
|
k.clone(),
|
||||||
|
SpkIterator::new(self.descriptors.get(did).expect("invariant").clone()),
|
||||||
|
)
|
||||||
|
})
|
||||||
|
.collect()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Iterate over revealed spks of keychains in `range`
|
||||||
|
pub fn revealed_spks(
|
||||||
|
&self,
|
||||||
|
range: impl RangeBounds<K>,
|
||||||
|
) -> impl Iterator<Item = KeychainIndexed<K, ScriptBuf>> + '_ {
|
||||||
|
let start = range.start_bound();
|
||||||
|
let end = range.end_bound();
|
||||||
|
let mut iter_last_revealed = self
|
||||||
|
.keychain_to_descriptor_id
|
||||||
|
.range((start, end))
|
||||||
|
.map(|(k, did)| (k, self.last_revealed.get(did).cloned()));
|
||||||
|
let mut iter_spks = self
|
||||||
|
.inner
|
||||||
|
.all_spks()
|
||||||
|
.range(self.map_to_inner_bounds((start, end)));
|
||||||
|
let mut current_keychain = iter_last_revealed.next();
|
||||||
|
// The reason we need a tricky algorithm is because of the "lookahead" feature which means
|
||||||
|
// that some of the spks in the SpkTxoutIndex will not have been revealed yet. So we need to
|
||||||
|
// filter out those spks that are above the last_revealed for that keychain. To do this we
|
||||||
|
// iterate through the last_revealed for each keychain and the spks for each keychain in
|
||||||
|
// tandem. This minimizes BTreeMap queries.
|
||||||
|
core::iter::from_fn(move || loop {
|
||||||
|
let ((keychain, index), spk) = iter_spks.next()?;
|
||||||
|
// We need to find the last revealed that matches the current spk we are considering so
|
||||||
|
// we skip ahead.
|
||||||
|
while current_keychain?.0 < keychain {
|
||||||
|
current_keychain = iter_last_revealed.next();
|
||||||
|
}
|
||||||
|
let (current_keychain, last_revealed) = current_keychain?;
|
||||||
|
|
||||||
|
if current_keychain == keychain && Some(*index) <= last_revealed {
|
||||||
|
break Some(((keychain.clone(), *index), spk.clone()));
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Iterate over revealed spks of the given `keychain` with ascending indices.
|
||||||
|
///
|
||||||
|
/// This is a double ended iterator so you can easily reverse it to get an iterator where
|
||||||
|
/// the script pubkeys that were most recently revealed are first.
|
||||||
|
pub fn revealed_keychain_spks(
|
||||||
|
&self,
|
||||||
|
keychain: K,
|
||||||
|
) -> impl DoubleEndedIterator<Item = Indexed<ScriptBuf>> + '_ {
|
||||||
|
let end = self
|
||||||
|
.last_revealed_index(keychain.clone())
|
||||||
|
.map(|v| v + 1)
|
||||||
|
.unwrap_or(0);
|
||||||
|
self.inner
|
||||||
|
.all_spks()
|
||||||
|
.range((keychain.clone(), 0)..(keychain.clone(), end))
|
||||||
|
.map(|((_, index), spk)| (*index, spk.clone()))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Iterate over revealed, but unused, spks of all keychains.
|
||||||
|
pub fn unused_spks(
|
||||||
|
&self,
|
||||||
|
) -> impl DoubleEndedIterator<Item = KeychainIndexed<K, ScriptBuf>> + Clone + '_ {
|
||||||
|
self.keychain_to_descriptor_id.keys().flat_map(|keychain| {
|
||||||
|
self.unused_keychain_spks(keychain.clone())
|
||||||
|
.map(|(i, spk)| ((keychain.clone(), i), spk.clone()))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Iterate over revealed, but unused, spks of the given `keychain`.
|
||||||
|
/// Returns an empty iterator if the provided keychain doesn't exist.
|
||||||
|
pub fn unused_keychain_spks(
|
||||||
|
&self,
|
||||||
|
keychain: K,
|
||||||
|
) -> impl DoubleEndedIterator<Item = Indexed<ScriptBuf>> + Clone + '_ {
|
||||||
|
let end = match self.keychain_to_descriptor_id.get(&keychain) {
|
||||||
|
Some(did) => self.last_revealed.get(did).map(|v| *v + 1).unwrap_or(0),
|
||||||
|
None => 0,
|
||||||
|
};
|
||||||
|
|
||||||
|
self.inner
|
||||||
|
.unused_spks((keychain.clone(), 0)..(keychain.clone(), end))
|
||||||
|
.map(|((_, i), spk)| (*i, spk))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get the next derivation index for `keychain`. The next index is the index after the last revealed
|
||||||
|
/// derivation index.
|
||||||
|
///
|
||||||
|
/// The second field in the returned tuple represents whether the next derivation index is new.
|
||||||
|
/// There are two scenarios where the next derivation index is reused (not new):
|
||||||
|
///
|
||||||
|
/// 1. The keychain's descriptor has no wildcard, and a script has already been revealed.
|
||||||
|
/// 2. The number of revealed scripts has already reached 2^31 (refer to BIP-32).
|
||||||
|
///
|
||||||
|
/// Not checking the second field of the tuple may result in address reuse.
|
||||||
|
///
|
||||||
|
/// Returns None if the provided `keychain` doesn't exist.
|
||||||
|
pub fn next_index(&self, keychain: K) -> Option<(u32, bool)> {
|
||||||
|
let did = self.keychain_to_descriptor_id.get(&keychain)?;
|
||||||
|
let last_index = self.last_revealed.get(did).cloned();
|
||||||
|
let descriptor = self.descriptors.get(did).expect("invariant");
|
||||||
|
|
||||||
|
// we can only get the next index if the wildcard exists.
|
||||||
|
let has_wildcard = descriptor.has_wildcard();
|
||||||
|
|
||||||
|
Some(match last_index {
|
||||||
|
// if there is no index, next_index is always 0.
|
||||||
|
None => (0, true),
|
||||||
|
// descriptors without wildcards can only have one index.
|
||||||
|
Some(_) if !has_wildcard => (0, false),
|
||||||
|
// derivation index must be < 2^31 (BIP-32).
|
||||||
|
Some(index) if index > BIP32_MAX_INDEX => {
|
||||||
|
unreachable!("index is out of bounds")
|
||||||
|
}
|
||||||
|
Some(index) if index == BIP32_MAX_INDEX => (index, false),
|
||||||
|
// get the next derivation index.
|
||||||
|
Some(index) => (index + 1, true),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get the last derivation index that is revealed for each keychain.
|
||||||
|
///
|
||||||
|
/// Keychains with no revealed indices will not be included in the returned [`BTreeMap`].
|
||||||
|
pub fn last_revealed_indices(&self) -> BTreeMap<K, u32> {
|
||||||
|
self.last_revealed
|
||||||
|
.iter()
|
||||||
|
.filter_map(|(desc_id, index)| {
|
||||||
|
let keychain = self.descriptor_id_to_keychain.get(desc_id)?;
|
||||||
|
Some((keychain.clone(), *index))
|
||||||
|
})
|
||||||
|
.collect()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get the last derivation index revealed for `keychain`. Returns None if the keychain doesn't
|
||||||
|
/// exist, or if the keychain doesn't have any revealed scripts.
|
||||||
|
pub fn last_revealed_index(&self, keychain: K) -> Option<u32> {
|
||||||
|
let descriptor_id = self.keychain_to_descriptor_id.get(&keychain)?;
|
||||||
|
self.last_revealed.get(descriptor_id).cloned()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Convenience method to call [`Self::reveal_to_target`] on multiple keychains.
|
||||||
|
pub fn reveal_to_target_multi(&mut self, keychains: &BTreeMap<K, u32>) -> ChangeSet {
|
||||||
|
let mut changeset = ChangeSet::default();
|
||||||
|
|
||||||
|
for (keychain, &index) in keychains {
|
||||||
|
if let Some((_, new_changeset)) = self.reveal_to_target(keychain.clone(), index) {
|
||||||
|
changeset.merge(new_changeset);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
changeset
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Reveals script pubkeys of the `keychain`'s descriptor **up to and including** the
|
||||||
|
/// `target_index`.
|
||||||
|
///
|
||||||
|
/// If the `target_index` cannot be reached (due to the descriptor having no wildcard and/or
|
||||||
|
/// the `target_index` is in the hardened index range), this method will make a best-effort and
|
||||||
|
/// reveal up to the last possible index.
|
||||||
|
///
|
||||||
|
/// This returns list of newly revealed indices (alongside their scripts) and a
|
||||||
|
/// [`ChangeSet`], which reports updates to the latest revealed index. If no new script
|
||||||
|
/// pubkeys are revealed, then both of these will be empty.
|
||||||
|
///
|
||||||
|
/// Returns None if the provided `keychain` doesn't exist.
|
||||||
|
#[must_use]
|
||||||
|
pub fn reveal_to_target(
|
||||||
|
&mut self,
|
||||||
|
keychain: K,
|
||||||
|
target_index: u32,
|
||||||
|
) -> Option<(Vec<Indexed<ScriptBuf>>, ChangeSet)> {
|
||||||
|
let mut changeset = ChangeSet::default();
|
||||||
|
let mut spks: Vec<Indexed<ScriptBuf>> = vec![];
|
||||||
|
while let Some((i, new)) = self.next_index(keychain.clone()) {
|
||||||
|
if !new || i > target_index {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
match self.reveal_next_spk(keychain.clone()) {
|
||||||
|
Some(((i, spk), change)) => {
|
||||||
|
spks.push((i, spk));
|
||||||
|
changeset.merge(change);
|
||||||
|
}
|
||||||
|
None => break,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Some((spks, changeset))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Attempts to reveal the next script pubkey for `keychain`.
|
||||||
|
///
|
||||||
|
/// Returns the derivation index of the revealed script pubkey, the revealed script pubkey and a
|
||||||
|
/// [`ChangeSet`] which represents changes in the last revealed index (if any).
|
||||||
|
/// Returns None if the provided keychain doesn't exist.
|
||||||
|
///
|
||||||
|
/// When a new script cannot be revealed, we return the last revealed script and an empty
|
||||||
|
/// [`ChangeSet`]. There are two scenarios when a new script pubkey cannot be derived:
|
||||||
|
///
|
||||||
|
/// 1. The descriptor has no wildcard and already has one script revealed.
|
||||||
|
/// 2. The descriptor has already revealed scripts up to the numeric bound.
|
||||||
|
/// 3. There is no descriptor associated with the given keychain.
|
||||||
|
pub fn reveal_next_spk(&mut self, keychain: K) -> Option<(Indexed<ScriptBuf>, ChangeSet)> {
|
||||||
|
let (next_index, new) = self.next_index(keychain.clone())?;
|
||||||
|
let mut changeset = ChangeSet::default();
|
||||||
|
|
||||||
|
if new {
|
||||||
|
let did = self.keychain_to_descriptor_id.get(&keychain)?;
|
||||||
|
self.last_revealed.insert(*did, next_index);
|
||||||
|
changeset.last_revealed.insert(*did, next_index);
|
||||||
|
self.replenish_inner_index(*did, &keychain, self.lookahead);
|
||||||
|
}
|
||||||
|
let script = self
|
||||||
|
.inner
|
||||||
|
.spk_at_index(&(keychain.clone(), next_index))
|
||||||
|
.expect("we just inserted it");
|
||||||
|
Some(((next_index, script), changeset))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Gets the next unused script pubkey in the keychain. I.e., the script pubkey with the lowest
|
||||||
|
/// index that has not been used yet.
|
||||||
|
///
|
||||||
|
/// This will derive and reveal a new script pubkey if no more unused script pubkeys exist.
|
||||||
|
///
|
||||||
|
/// If the descriptor has no wildcard and already has a used script pubkey or if a descriptor
|
||||||
|
/// has used all scripts up to the derivation bounds, then the last derived script pubkey will be
|
||||||
|
/// returned.
|
||||||
|
///
|
||||||
|
/// Returns `None` if there are no script pubkeys that have been used and no new script pubkey
|
||||||
|
/// could be revealed (see [`reveal_next_spk`] for when this happens).
|
||||||
|
///
|
||||||
|
/// [`reveal_next_spk`]: Self::reveal_next_spk
|
||||||
|
pub fn next_unused_spk(&mut self, keychain: K) -> Option<(Indexed<ScriptBuf>, ChangeSet)> {
|
||||||
|
let next_unused = self
|
||||||
|
.unused_keychain_spks(keychain.clone())
|
||||||
|
.next()
|
||||||
|
.map(|(i, spk)| ((i, spk.to_owned()), ChangeSet::default()));
|
||||||
|
|
||||||
|
next_unused.or_else(|| self.reveal_next_spk(keychain))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Iterate over all [`OutPoint`]s that have `TxOut`s with script pubkeys derived from
|
||||||
|
/// `keychain`.
|
||||||
|
pub fn keychain_outpoints(
|
||||||
|
&self,
|
||||||
|
keychain: K,
|
||||||
|
) -> impl DoubleEndedIterator<Item = Indexed<OutPoint>> + '_ {
|
||||||
|
self.keychain_outpoints_in_range(keychain.clone()..=keychain)
|
||||||
|
.map(|((_, i), op)| (i, op))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Iterate over [`OutPoint`]s that have script pubkeys derived from keychains in `range`.
|
||||||
|
pub fn keychain_outpoints_in_range<'a>(
|
||||||
|
&'a self,
|
||||||
|
range: impl RangeBounds<K> + 'a,
|
||||||
|
) -> impl DoubleEndedIterator<Item = KeychainIndexed<K, OutPoint>> + 'a {
|
||||||
|
self.inner
|
||||||
|
.outputs_in_range(self.map_to_inner_bounds(range))
|
||||||
|
.map(|((k, i), op)| ((k.clone(), *i), op))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn map_to_inner_bounds(&self, bound: impl RangeBounds<K>) -> impl RangeBounds<(K, u32)> {
|
||||||
|
let start = match bound.start_bound() {
|
||||||
|
Bound::Included(keychain) => Bound::Included((keychain.clone(), u32::MIN)),
|
||||||
|
Bound::Excluded(keychain) => Bound::Excluded((keychain.clone(), u32::MAX)),
|
||||||
|
Bound::Unbounded => Bound::Unbounded,
|
||||||
|
};
|
||||||
|
let end = match bound.end_bound() {
|
||||||
|
Bound::Included(keychain) => Bound::Included((keychain.clone(), u32::MAX)),
|
||||||
|
Bound::Excluded(keychain) => Bound::Excluded((keychain.clone(), u32::MIN)),
|
||||||
|
Bound::Unbounded => Bound::Unbounded,
|
||||||
|
};
|
||||||
|
|
||||||
|
(start, end)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns the highest derivation index of the `keychain` where [`KeychainTxOutIndex`] has
|
||||||
|
/// found a [`TxOut`] with it's script pubkey.
|
||||||
|
pub fn last_used_index(&self, keychain: K) -> Option<u32> {
|
||||||
|
self.keychain_outpoints(keychain).last().map(|(i, _)| i)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns the highest derivation index of each keychain that [`KeychainTxOutIndex`] has found
|
||||||
|
/// a [`TxOut`] with it's script pubkey.
|
||||||
|
pub fn last_used_indices(&self) -> BTreeMap<K, u32> {
|
||||||
|
self.keychain_to_descriptor_id
|
||||||
|
.iter()
|
||||||
|
.filter_map(|(keychain, _)| {
|
||||||
|
self.last_used_index(keychain.clone())
|
||||||
|
.map(|index| (keychain.clone(), index))
|
||||||
|
})
|
||||||
|
.collect()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Applies the `ChangeSet<K>` to the [`KeychainTxOutIndex<K>`]
|
||||||
|
pub fn apply_changeset(&mut self, changeset: ChangeSet) {
|
||||||
|
for (&desc_id, &index) in &changeset.last_revealed {
|
||||||
|
let v = self.last_revealed.entry(desc_id).or_default();
|
||||||
|
*v = index.max(*v);
|
||||||
|
self.replenish_inner_index_did(desc_id, self.lookahead);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Debug, PartialEq)]
|
||||||
|
/// Error returned from [`KeychainTxOutIndex::insert_descriptor`]
|
||||||
|
pub enum InsertDescriptorError<K> {
|
||||||
|
/// The descriptor has already been assigned to a keychain so you can't assign it to another
|
||||||
|
DescriptorAlreadyAssigned {
|
||||||
|
/// The descriptor you have attempted to reassign
|
||||||
|
descriptor: Descriptor<DescriptorPublicKey>,
|
||||||
|
/// The keychain that the descriptor is already assigned to
|
||||||
|
existing_assignment: K,
|
||||||
|
},
|
||||||
|
/// The keychain is already assigned to a descriptor so you can't reassign it
|
||||||
|
KeychainAlreadyAssigned {
|
||||||
|
/// The keychain that you have attempted to reassign
|
||||||
|
keychain: K,
|
||||||
|
/// The descriptor that the keychain is already assigned to
|
||||||
|
existing_assignment: Descriptor<DescriptorPublicKey>,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<K: core::fmt::Debug> core::fmt::Display for InsertDescriptorError<K> {
|
||||||
|
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
|
||||||
|
match self {
|
||||||
|
InsertDescriptorError::DescriptorAlreadyAssigned {
|
||||||
|
existing_assignment: existing,
|
||||||
|
descriptor,
|
||||||
|
} => {
|
||||||
|
write!(
|
||||||
|
f,
|
||||||
|
"attempt to re-assign descriptor {descriptor:?} already assigned to {existing:?}"
|
||||||
|
)
|
||||||
|
}
|
||||||
|
InsertDescriptorError::KeychainAlreadyAssigned {
|
||||||
|
existing_assignment: existing,
|
||||||
|
keychain,
|
||||||
|
} => {
|
||||||
|
write!(
|
||||||
|
f,
|
||||||
|
"attempt to re-assign keychain {keychain:?} already assigned to {existing:?}"
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(feature = "std")]
|
||||||
|
impl<K: core::fmt::Debug> std::error::Error for InsertDescriptorError<K> {}
|
||||||
|
|
||||||
|
/// Represents updates to the derivation index of a [`KeychainTxOutIndex`].
|
||||||
|
/// It maps each keychain `K` to a descriptor and its last revealed index.
|
||||||
|
///
|
||||||
|
/// It can be applied to [`KeychainTxOutIndex`] with [`apply_changeset`].
|
||||||
|
///
|
||||||
|
/// The `last_revealed` field is monotone in that [`merge`] will never decrease it.
|
||||||
|
/// `keychains_added` is *not* monotone, once it is set any attempt to change it is subject to the
|
||||||
|
/// same *one-to-one* keychain <-> descriptor mapping invariant as [`KeychainTxOutIndex`] itself.
|
||||||
|
///
|
||||||
|
/// [`KeychainTxOutIndex`]: crate::keychain_txout::KeychainTxOutIndex
|
||||||
|
/// [`apply_changeset`]: crate::keychain_txout::KeychainTxOutIndex::apply_changeset
|
||||||
|
/// [`merge`]: Self::merge
|
||||||
|
#[derive(Clone, Debug, Default, PartialEq)]
|
||||||
|
#[cfg_attr(
|
||||||
|
feature = "serde",
|
||||||
|
derive(serde::Deserialize, serde::Serialize),
|
||||||
|
serde(crate = "serde_crate")
|
||||||
|
)]
|
||||||
|
#[must_use]
|
||||||
|
pub struct ChangeSet {
|
||||||
|
/// Contains for each descriptor_id the last revealed index of derivation
|
||||||
|
pub last_revealed: BTreeMap<DescriptorId, u32>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Merge for ChangeSet {
|
||||||
|
/// Merge another [`ChangeSet`] into self.
|
||||||
|
fn merge(&mut self, other: Self) {
|
||||||
|
// for `last_revealed`, entries of `other` will take precedence ONLY if it is greater than
|
||||||
|
// what was originally in `self`.
|
||||||
|
for (desc_id, index) in other.last_revealed {
|
||||||
|
use crate::collections::btree_map::Entry;
|
||||||
|
match self.last_revealed.entry(desc_id) {
|
||||||
|
Entry::Vacant(entry) => {
|
||||||
|
entry.insert(index);
|
||||||
|
}
|
||||||
|
Entry::Occupied(mut entry) => {
|
||||||
|
if *entry.get() < index {
|
||||||
|
entry.insert(index);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns whether the changeset are empty.
|
||||||
|
fn is_empty(&self) -> bool {
|
||||||
|
self.last_revealed.is_empty()
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,10 +1,12 @@
|
|||||||
|
//! [`SpkTxOutIndex`] is an index storing [`TxOut`]s that have a script pubkey that matches those in a list.
|
||||||
|
|
||||||
use core::ops::RangeBounds;
|
use core::ops::RangeBounds;
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
collections::{hash_map::Entry, BTreeMap, BTreeSet, HashMap},
|
collections::{hash_map::Entry, BTreeMap, BTreeSet, HashMap},
|
||||||
indexed_tx_graph::Indexer,
|
Indexer,
|
||||||
};
|
};
|
||||||
use bitcoin::{Amount, OutPoint, Script, ScriptBuf, SignedAmount, Transaction, TxOut, Txid};
|
use bitcoin::{Amount, OutPoint, ScriptBuf, SignedAmount, Transaction, TxOut, Txid};
|
||||||
|
|
||||||
/// An index storing [`TxOut`]s that have a script pubkey that matches those in a list.
|
/// An index storing [`TxOut`]s that have a script pubkey that matches those in a list.
|
||||||
///
|
///
|
||||||
@@ -52,7 +54,7 @@ impl<I> Default for SpkTxOutIndex<I> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<I: Clone + Ord> Indexer for SpkTxOutIndex<I> {
|
impl<I: Clone + Ord + core::fmt::Debug> Indexer for SpkTxOutIndex<I> {
|
||||||
type ChangeSet = ();
|
type ChangeSet = ();
|
||||||
|
|
||||||
fn index_txout(&mut self, outpoint: OutPoint, txout: &TxOut) -> Self::ChangeSet {
|
fn index_txout(&mut self, outpoint: OutPoint, txout: &TxOut) -> Self::ChangeSet {
|
||||||
@@ -76,17 +78,17 @@ impl<I: Clone + Ord> Indexer for SpkTxOutIndex<I> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<I: Clone + Ord> SpkTxOutIndex<I> {
|
impl<I: Clone + Ord + core::fmt::Debug> SpkTxOutIndex<I> {
|
||||||
/// Scans a transaction's outputs for matching script pubkeys.
|
/// Scans a transaction's outputs for matching script pubkeys.
|
||||||
///
|
///
|
||||||
/// Typically, this is used in two situations:
|
/// Typically, this is used in two situations:
|
||||||
///
|
///
|
||||||
/// 1. After loading transaction data from the disk, you may scan over all the txouts to restore all
|
/// 1. After loading transaction data from the disk, you may scan over all the txouts to restore all
|
||||||
/// your txouts.
|
/// your txouts.
|
||||||
/// 2. When getting new data from the chain, you usually scan it before incorporating it into your chain state.
|
/// 2. When getting new data from the chain, you usually scan it before incorporating it into your chain state.
|
||||||
pub fn scan(&mut self, tx: &Transaction) -> BTreeSet<I> {
|
pub fn scan(&mut self, tx: &Transaction) -> BTreeSet<I> {
|
||||||
let mut scanned_indices = BTreeSet::new();
|
let mut scanned_indices = BTreeSet::new();
|
||||||
let txid = tx.txid();
|
let txid = tx.compute_txid();
|
||||||
for (i, txout) in tx.output.iter().enumerate() {
|
for (i, txout) in tx.output.iter().enumerate() {
|
||||||
let op = OutPoint::new(txid, i as u32);
|
let op = OutPoint::new(txid, i as u32);
|
||||||
if let Some(spk_i) = self.scan_txout(op, txout) {
|
if let Some(spk_i) = self.scan_txout(op, txout) {
|
||||||
@@ -174,8 +176,8 @@ impl<I: Clone + Ord> SpkTxOutIndex<I> {
|
|||||||
/// Returns the script that has been inserted at the `index`.
|
/// Returns the script that has been inserted at the `index`.
|
||||||
///
|
///
|
||||||
/// If that index hasn't been inserted yet, it will return `None`.
|
/// If that index hasn't been inserted yet, it will return `None`.
|
||||||
pub fn spk_at_index(&self, index: &I) -> Option<&Script> {
|
pub fn spk_at_index(&self, index: &I) -> Option<ScriptBuf> {
|
||||||
self.spks.get(index).map(|s| s.as_script())
|
self.spks.get(index).cloned()
|
||||||
}
|
}
|
||||||
|
|
||||||
/// The script pubkeys that are being tracked by the index.
|
/// The script pubkeys that are being tracked by the index.
|
||||||
@@ -206,7 +208,7 @@ impl<I: Clone + Ord> SpkTxOutIndex<I> {
|
|||||||
/// # Example
|
/// # Example
|
||||||
///
|
///
|
||||||
/// ```rust
|
/// ```rust
|
||||||
/// # use bdk_chain::SpkTxOutIndex;
|
/// # use bdk_chain::spk_txout::SpkTxOutIndex;
|
||||||
///
|
///
|
||||||
/// // imagine our spks are indexed like (keychain, derivation_index).
|
/// // imagine our spks are indexed like (keychain, derivation_index).
|
||||||
/// let txout_index = SpkTxOutIndex::<(u32, u32)>::default();
|
/// let txout_index = SpkTxOutIndex::<(u32, u32)>::default();
|
||||||
@@ -215,7 +217,10 @@ impl<I: Clone + Ord> SpkTxOutIndex<I> {
|
|||||||
/// let unused_change_spks =
|
/// let unused_change_spks =
|
||||||
/// txout_index.unused_spks((change_index, u32::MIN)..(change_index, u32::MAX));
|
/// txout_index.unused_spks((change_index, u32::MIN)..(change_index, u32::MAX));
|
||||||
/// ```
|
/// ```
|
||||||
pub fn unused_spks<R>(&self, range: R) -> impl DoubleEndedIterator<Item = (&I, &Script)> + Clone
|
pub fn unused_spks<R>(
|
||||||
|
&self,
|
||||||
|
range: R,
|
||||||
|
) -> impl DoubleEndedIterator<Item = (&I, ScriptBuf)> + Clone + '_
|
||||||
where
|
where
|
||||||
R: RangeBounds<I>,
|
R: RangeBounds<I>,
|
||||||
{
|
{
|
||||||
@@ -266,8 +271,8 @@ impl<I: Clone + Ord> SpkTxOutIndex<I> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// Returns the index associated with the script pubkey.
|
/// Returns the index associated with the script pubkey.
|
||||||
pub fn index_of_spk(&self, script: &Script) -> Option<&I> {
|
pub fn index_of_spk(&self, script: ScriptBuf) -> Option<&I> {
|
||||||
self.spk_indices.get(script)
|
self.spk_indices.get(script.as_script())
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Computes the total value transfer effect `tx` has on the script pubkeys in `range`. Value is
|
/// Computes the total value transfer effect `tx` has on the script pubkeys in `range`. Value is
|
||||||
@@ -291,7 +296,7 @@ impl<I: Clone + Ord> SpkTxOutIndex<I> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
for txout in &tx.output {
|
for txout in &tx.output {
|
||||||
if let Some(index) = self.index_of_spk(&txout.script_pubkey) {
|
if let Some(index) = self.index_of_spk(txout.script_pubkey.clone()) {
|
||||||
if range.contains(index) {
|
if range.contains(index) {
|
||||||
received += txout.value;
|
received += txout.value;
|
||||||
}
|
}
|
||||||
@@ -1,971 +0,0 @@
|
|||||||
use crate::{
|
|
||||||
collections::*,
|
|
||||||
indexed_tx_graph::Indexer,
|
|
||||||
miniscript::{Descriptor, DescriptorPublicKey},
|
|
||||||
spk_iter::BIP32_MAX_INDEX,
|
|
||||||
DescriptorExt, DescriptorId, SpkIterator, SpkTxOutIndex,
|
|
||||||
};
|
|
||||||
use bitcoin::{hashes::Hash, Amount, OutPoint, Script, SignedAmount, Transaction, TxOut, Txid};
|
|
||||||
use core::{
|
|
||||||
fmt::Debug,
|
|
||||||
ops::{Bound, RangeBounds},
|
|
||||||
};
|
|
||||||
|
|
||||||
use crate::Append;
|
|
||||||
|
|
||||||
/// Represents updates to the derivation index of a [`KeychainTxOutIndex`].
|
|
||||||
/// It maps each keychain `K` to a descriptor and its last revealed index.
|
|
||||||
///
|
|
||||||
/// It can be applied to [`KeychainTxOutIndex`] with [`apply_changeset`]. [`ChangeSet] are
|
|
||||||
/// monotone in that they will never decrease the revealed derivation index.
|
|
||||||
///
|
|
||||||
/// [`KeychainTxOutIndex`]: crate::keychain::KeychainTxOutIndex
|
|
||||||
/// [`apply_changeset`]: crate::keychain::KeychainTxOutIndex::apply_changeset
|
|
||||||
#[derive(Clone, Debug, PartialEq)]
|
|
||||||
#[cfg_attr(
|
|
||||||
feature = "serde",
|
|
||||||
derive(serde::Deserialize, serde::Serialize),
|
|
||||||
serde(
|
|
||||||
crate = "serde_crate",
|
|
||||||
bound(
|
|
||||||
deserialize = "K: Ord + serde::Deserialize<'de>",
|
|
||||||
serialize = "K: Ord + serde::Serialize"
|
|
||||||
)
|
|
||||||
)
|
|
||||||
)]
|
|
||||||
#[must_use]
|
|
||||||
pub struct ChangeSet<K> {
|
|
||||||
/// Contains the keychains that have been added and their respective descriptor
|
|
||||||
pub keychains_added: BTreeMap<K, Descriptor<DescriptorPublicKey>>,
|
|
||||||
/// Contains for each descriptor_id the last revealed index of derivation
|
|
||||||
pub last_revealed: BTreeMap<DescriptorId, u32>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<K: Ord> Append for ChangeSet<K> {
|
|
||||||
/// Append another [`ChangeSet`] into self.
|
|
||||||
///
|
|
||||||
/// For each keychain in `keychains_added` in the given [`ChangeSet`]:
|
|
||||||
/// If the keychain already exist with a different descriptor, we overwrite the old descriptor.
|
|
||||||
///
|
|
||||||
/// For each `last_revealed` in the given [`ChangeSet`]:
|
|
||||||
/// If the keychain already exists, increase the index when the other's index > self's index.
|
|
||||||
fn append(&mut self, other: Self) {
|
|
||||||
// We use `extend` instead of `BTreeMap::append` due to performance issues with `append`.
|
|
||||||
// Refer to https://github.com/rust-lang/rust/issues/34666#issuecomment-675658420
|
|
||||||
self.keychains_added.extend(other.keychains_added);
|
|
||||||
|
|
||||||
// for `last_revealed`, entries of `other` will take precedence ONLY if it is greater than
|
|
||||||
// what was originally in `self`.
|
|
||||||
for (desc_id, index) in other.last_revealed {
|
|
||||||
use crate::collections::btree_map::Entry;
|
|
||||||
match self.last_revealed.entry(desc_id) {
|
|
||||||
Entry::Vacant(entry) => {
|
|
||||||
entry.insert(index);
|
|
||||||
}
|
|
||||||
Entry::Occupied(mut entry) => {
|
|
||||||
if *entry.get() < index {
|
|
||||||
entry.insert(index);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns whether the changeset are empty.
|
|
||||||
fn is_empty(&self) -> bool {
|
|
||||||
self.last_revealed.is_empty() && self.keychains_added.is_empty()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<K> Default for ChangeSet<K> {
|
|
||||||
fn default() -> Self {
|
|
||||||
Self {
|
|
||||||
last_revealed: BTreeMap::default(),
|
|
||||||
keychains_added: BTreeMap::default(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const DEFAULT_LOOKAHEAD: u32 = 25;
|
|
||||||
|
|
||||||
/// [`KeychainTxOutIndex`] controls how script pubkeys are revealed for multiple keychains, and
|
|
||||||
/// indexes [`TxOut`]s with them.
|
|
||||||
///
|
|
||||||
/// A single keychain is a chain of script pubkeys derived from a single [`Descriptor`]. Keychains
|
|
||||||
/// are identified using the `K` generic. Script pubkeys are identified by the keychain that they
|
|
||||||
/// are derived from `K`, as well as the derivation index `u32`.
|
|
||||||
///
|
|
||||||
/// # Revealed script pubkeys
|
|
||||||
///
|
|
||||||
/// Tracking how script pubkeys are revealed is useful for collecting chain data. For example, if
|
|
||||||
/// the user has requested 5 script pubkeys (to receive money with), we only need to use those
|
|
||||||
/// script pubkeys to scan for chain data.
|
|
||||||
///
|
|
||||||
/// Call [`reveal_to_target`] or [`reveal_next_spk`] to reveal more script pubkeys.
|
|
||||||
/// Call [`revealed_keychain_spks`] or [`revealed_spks`] to iterate through revealed script pubkeys.
|
|
||||||
///
|
|
||||||
/// # Lookahead script pubkeys
|
|
||||||
///
|
|
||||||
/// When an user first recovers a wallet (i.e. from a recovery phrase and/or descriptor), we will
|
|
||||||
/// NOT have knowledge of which script pubkeys are revealed. So when we index a transaction or
|
|
||||||
/// txout (using [`index_tx`]/[`index_txout`]) we scan the txouts against script pubkeys derived
|
|
||||||
/// above the last revealed index. These additionally-derived script pubkeys are called the
|
|
||||||
/// lookahead.
|
|
||||||
///
|
|
||||||
/// The [`KeychainTxOutIndex`] is constructed with the `lookahead` and cannot be altered. The
|
|
||||||
/// default `lookahead` count is 1000. Use [`new`] to set a custom `lookahead`.
|
|
||||||
///
|
|
||||||
/// # Unbounded script pubkey iterator
|
|
||||||
///
|
|
||||||
/// For script-pubkey-based chain sources (such as Electrum/Esplora), an initial scan is best done
|
|
||||||
/// by iterating though derived script pubkeys one by one and requesting transaction histories for
|
|
||||||
/// each script pubkey. We will stop after x-number of script pubkeys have empty histories. An
|
|
||||||
/// unbounded script pubkey iterator is useful to pass to such a chain source.
|
|
||||||
///
|
|
||||||
/// Call [`unbounded_spk_iter`] to get an unbounded script pubkey iterator for a given keychain.
|
|
||||||
/// Call [`all_unbounded_spk_iters`] to get unbounded script pubkey iterators for all keychains.
|
|
||||||
///
|
|
||||||
/// # Change sets
|
|
||||||
///
|
|
||||||
/// Methods that can update the last revealed index or add keychains will return [`super::ChangeSet`] to report
|
|
||||||
/// these changes. This can be persisted for future recovery.
|
|
||||||
///
|
|
||||||
/// ## Synopsis
|
|
||||||
///
|
|
||||||
/// ```
|
|
||||||
/// use bdk_chain::keychain::KeychainTxOutIndex;
|
|
||||||
/// # use bdk_chain::{ miniscript::{Descriptor, DescriptorPublicKey} };
|
|
||||||
/// # use core::str::FromStr;
|
|
||||||
///
|
|
||||||
/// // imagine our service has internal and external addresses but also addresses for users
|
|
||||||
/// #[derive(Clone, Debug, PartialEq, Eq, Ord, PartialOrd)]
|
|
||||||
/// enum MyKeychain {
|
|
||||||
/// External,
|
|
||||||
/// Internal,
|
|
||||||
/// MyAppUser {
|
|
||||||
/// user_id: u32
|
|
||||||
/// }
|
|
||||||
/// }
|
|
||||||
///
|
|
||||||
/// let mut txout_index = KeychainTxOutIndex::<MyKeychain>::default();
|
|
||||||
///
|
|
||||||
/// # let secp = bdk_chain::bitcoin::secp256k1::Secp256k1::signing_only();
|
|
||||||
/// # let (external_descriptor,_) = Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/0/*)").unwrap();
|
|
||||||
/// # let (internal_descriptor,_) = Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/1/*)").unwrap();
|
|
||||||
/// # let (descriptor_42, _) = Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/2/*)").unwrap();
|
|
||||||
/// let _ = txout_index.insert_descriptor(MyKeychain::External, external_descriptor);
|
|
||||||
/// let _ = txout_index.insert_descriptor(MyKeychain::Internal, internal_descriptor);
|
|
||||||
/// let _ = txout_index.insert_descriptor(MyKeychain::MyAppUser { user_id: 42 }, descriptor_42);
|
|
||||||
///
|
|
||||||
/// let new_spk_for_user = txout_index.reveal_next_spk(&MyKeychain::MyAppUser{ user_id: 42 });
|
|
||||||
/// ```
|
|
||||||
///
|
|
||||||
/// # Non-recommend keychain to descriptor assignments
|
|
||||||
///
|
|
||||||
/// A keychain (`K`) is used to identify a descriptor. However, the following keychain to descriptor
|
|
||||||
/// arrangements result in behavior that is harder to reason about and is not recommended.
|
|
||||||
///
|
|
||||||
/// ## Multiple keychains identifying the same descriptor
|
|
||||||
///
|
|
||||||
/// Although a single keychain variant can only identify a single descriptor, multiple keychain
|
|
||||||
/// variants can identify the same descriptor.
|
|
||||||
///
|
|
||||||
/// If multiple keychains identify the same descriptor:
|
|
||||||
/// 1. Methods that take in a keychain (such as [`reveal_next_spk`]) will work normally when any
|
|
||||||
/// keychain (that identifies that descriptor) is passed in.
|
|
||||||
/// 2. Methods that return data which associates with a descriptor (such as [`outpoints`],
|
|
||||||
/// [`txouts`], [`unused_spks`], etc.) the method will return the highest-ranked keychain variant
|
|
||||||
/// that identifies the descriptor. Rank is determined by the [`Ord`] implementation of the keychain
|
|
||||||
/// type.
|
|
||||||
///
|
|
||||||
/// This arrangement is not recommended since some methods will return a single keychain variant
|
|
||||||
/// even though multiple keychain variants identify the same descriptor.
|
|
||||||
///
|
|
||||||
/// ## Reassigning the descriptor of a single keychain
|
|
||||||
///
|
|
||||||
/// Descriptors added to [`KeychainTxOutIndex`] are never removed. However, a keychain that
|
|
||||||
/// identifies a descriptor can be reassigned to identify a different descriptor. This may result in
|
|
||||||
/// a situation where a descriptor has no associated keychain(s), and relevant [`TxOut`]s,
|
|
||||||
/// [`OutPoint`]s and [`Script`]s (of that descriptor) will not be return by [`KeychainTxOutIndex`].
|
|
||||||
/// Therefore, reassigning the descriptor of a single keychain is not recommended.
|
|
||||||
///
|
|
||||||
/// [`Ord`]: core::cmp::Ord
|
|
||||||
/// [`SpkTxOutIndex`]: crate::spk_txout_index::SpkTxOutIndex
|
|
||||||
/// [`Descriptor`]: crate::miniscript::Descriptor
|
|
||||||
/// [`reveal_to_target`]: KeychainTxOutIndex::reveal_to_target
|
|
||||||
/// [`reveal_next_spk`]: KeychainTxOutIndex::reveal_next_spk
|
|
||||||
/// [`revealed_keychain_spks`]: KeychainTxOutIndex::revealed_keychain_spks
|
|
||||||
/// [`revealed_spks`]: KeychainTxOutIndex::revealed_spks
|
|
||||||
/// [`index_tx`]: KeychainTxOutIndex::index_tx
|
|
||||||
/// [`index_txout`]: KeychainTxOutIndex::index_txout
|
|
||||||
/// [`new`]: KeychainTxOutIndex::new
|
|
||||||
/// [`unbounded_spk_iter`]: KeychainTxOutIndex::unbounded_spk_iter
|
|
||||||
/// [`all_unbounded_spk_iters`]: KeychainTxOutIndex::all_unbounded_spk_iters
|
|
||||||
/// [`outpoints`]: KeychainTxOutIndex::outpoints
|
|
||||||
/// [`txouts`]: KeychainTxOutIndex::txouts
|
|
||||||
/// [`unused_spks`]: KeychainTxOutIndex::unused_spks
|
|
||||||
#[derive(Clone, Debug)]
|
|
||||||
pub struct KeychainTxOutIndex<K> {
|
|
||||||
inner: SpkTxOutIndex<(DescriptorId, u32)>,
|
|
||||||
// keychain -> (descriptor, descriptor id) map
|
|
||||||
keychains_to_descriptors: BTreeMap<K, (DescriptorId, Descriptor<DescriptorPublicKey>)>,
|
|
||||||
// descriptor id -> keychain set
|
|
||||||
// Because different keychains can have the same descriptor, we rank keychains by `Ord` so that
|
|
||||||
// that the first keychain variant (according to `Ord`) has the highest rank. When associated
|
|
||||||
// data (such as spks, outpoints) are returned with a keychain, we return the highest-ranked
|
|
||||||
// keychain with it.
|
|
||||||
descriptor_ids_to_keychain_set: HashMap<DescriptorId, BTreeSet<K>>,
|
|
||||||
// descriptor_id -> descriptor map
|
|
||||||
// This is a "monotone" map, meaning that its size keeps growing, i.e., we never delete
|
|
||||||
// descriptors from it. This is useful for revealing spks for descriptors that don't have
|
|
||||||
// keychains associated.
|
|
||||||
descriptor_ids_to_descriptors: BTreeMap<DescriptorId, Descriptor<DescriptorPublicKey>>,
|
|
||||||
// last revealed indexes
|
|
||||||
last_revealed: BTreeMap<DescriptorId, u32>,
|
|
||||||
// lookahead settings for each keychain
|
|
||||||
lookahead: u32,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<K> Default for KeychainTxOutIndex<K> {
|
|
||||||
fn default() -> Self {
|
|
||||||
Self::new(DEFAULT_LOOKAHEAD)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<K: Clone + Ord + Debug> Indexer for KeychainTxOutIndex<K> {
|
|
||||||
type ChangeSet = super::ChangeSet<K>;
|
|
||||||
|
|
||||||
fn index_txout(&mut self, outpoint: OutPoint, txout: &TxOut) -> Self::ChangeSet {
|
|
||||||
match self.inner.scan_txout(outpoint, txout).cloned() {
|
|
||||||
Some((descriptor_id, index)) => {
|
|
||||||
// We want to reveal spks for descriptors that aren't tracked by any keychain, and
|
|
||||||
// so we call reveal with descriptor_id
|
|
||||||
let (_, changeset) = self.reveal_to_target_with_id(descriptor_id, index)
|
|
||||||
.expect("descriptors are added in a monotone manner, there cannot be a descriptor id with no corresponding descriptor");
|
|
||||||
changeset
|
|
||||||
}
|
|
||||||
None => super::ChangeSet::default(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn index_tx(&mut self, tx: &bitcoin::Transaction) -> Self::ChangeSet {
|
|
||||||
let mut changeset = super::ChangeSet::<K>::default();
|
|
||||||
for (op, txout) in tx.output.iter().enumerate() {
|
|
||||||
changeset.append(self.index_txout(OutPoint::new(tx.txid(), op as u32), txout));
|
|
||||||
}
|
|
||||||
changeset
|
|
||||||
}
|
|
||||||
|
|
||||||
fn initial_changeset(&self) -> Self::ChangeSet {
|
|
||||||
super::ChangeSet {
|
|
||||||
keychains_added: self
|
|
||||||
.keychains()
|
|
||||||
.map(|(k, v)| (k.clone(), v.clone()))
|
|
||||||
.collect(),
|
|
||||||
last_revealed: self.last_revealed.clone(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn apply_changeset(&mut self, changeset: Self::ChangeSet) {
|
|
||||||
self.apply_changeset(changeset)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn is_tx_relevant(&self, tx: &bitcoin::Transaction) -> bool {
|
|
||||||
self.inner.is_relevant(tx)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<K> KeychainTxOutIndex<K> {
|
|
||||||
/// Construct a [`KeychainTxOutIndex`] with the given `lookahead`.
|
|
||||||
///
|
|
||||||
/// The `lookahead` is the number of script pubkeys to derive and cache from the internal
|
|
||||||
/// descriptors over and above the last revealed script index. Without a lookahead the index
|
|
||||||
/// will miss outputs you own when processing transactions whose output script pubkeys lie
|
|
||||||
/// beyond the last revealed index. In certain situations, such as when performing an initial
|
|
||||||
/// scan of the blockchain during wallet import, it may be uncertain or unknown what the index
|
|
||||||
/// of the last revealed script pubkey actually is.
|
|
||||||
///
|
|
||||||
/// Refer to [struct-level docs](KeychainTxOutIndex) for more about `lookahead`.
|
|
||||||
pub fn new(lookahead: u32) -> Self {
|
|
||||||
Self {
|
|
||||||
inner: SpkTxOutIndex::default(),
|
|
||||||
keychains_to_descriptors: BTreeMap::new(),
|
|
||||||
descriptor_ids_to_keychain_set: HashMap::new(),
|
|
||||||
descriptor_ids_to_descriptors: BTreeMap::new(),
|
|
||||||
last_revealed: BTreeMap::new(),
|
|
||||||
lookahead,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Methods that are *re-exposed* from the internal [`SpkTxOutIndex`].
|
|
||||||
impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
|
|
||||||
/// Get the highest-ranked keychain that is currently associated with the given `desc_id`.
|
|
||||||
fn keychain_of_desc_id(&self, desc_id: &DescriptorId) -> Option<&K> {
|
|
||||||
let keychains = self.descriptor_ids_to_keychain_set.get(desc_id)?;
|
|
||||||
keychains.iter().next()
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Return a reference to the internal [`SpkTxOutIndex`].
|
|
||||||
///
|
|
||||||
/// **WARNING:** The internal index will contain lookahead spks. Refer to
|
|
||||||
/// [struct-level docs](KeychainTxOutIndex) for more about `lookahead`.
|
|
||||||
pub fn inner(&self) -> &SpkTxOutIndex<(DescriptorId, u32)> {
|
|
||||||
&self.inner
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Get the set of indexed outpoints, corresponding to tracked keychains.
|
|
||||||
pub fn outpoints(&self) -> impl DoubleEndedIterator<Item = ((K, u32), OutPoint)> + '_ {
|
|
||||||
self.inner
|
|
||||||
.outpoints()
|
|
||||||
.iter()
|
|
||||||
.filter_map(|((desc_id, index), op)| {
|
|
||||||
let keychain = self.keychain_of_desc_id(desc_id)?;
|
|
||||||
Some(((keychain.clone(), *index), *op))
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Iterate over known txouts that spend to tracked script pubkeys.
|
|
||||||
pub fn txouts(&self) -> impl DoubleEndedIterator<Item = (K, u32, OutPoint, &TxOut)> + '_ {
|
|
||||||
self.inner.txouts().filter_map(|((desc_id, i), op, txo)| {
|
|
||||||
let keychain = self.keychain_of_desc_id(desc_id)?;
|
|
||||||
Some((keychain.clone(), *i, op, txo))
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Finds all txouts on a transaction that has previously been scanned and indexed.
|
|
||||||
pub fn txouts_in_tx(
|
|
||||||
&self,
|
|
||||||
txid: Txid,
|
|
||||||
) -> impl DoubleEndedIterator<Item = (K, u32, OutPoint, &TxOut)> {
|
|
||||||
self.inner
|
|
||||||
.txouts_in_tx(txid)
|
|
||||||
.filter_map(|((desc_id, i), op, txo)| {
|
|
||||||
let keychain = self.keychain_of_desc_id(desc_id)?;
|
|
||||||
Some((keychain.clone(), *i, op, txo))
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Return the [`TxOut`] of `outpoint` if it has been indexed, and if it corresponds to a
|
|
||||||
/// tracked keychain.
|
|
||||||
///
|
|
||||||
/// The associated keychain and keychain index of the txout's spk is also returned.
|
|
||||||
///
|
|
||||||
/// This calls [`SpkTxOutIndex::txout`] internally.
|
|
||||||
pub fn txout(&self, outpoint: OutPoint) -> Option<(K, u32, &TxOut)> {
|
|
||||||
let ((descriptor_id, index), txo) = self.inner.txout(outpoint)?;
|
|
||||||
let keychain = self.keychain_of_desc_id(descriptor_id)?;
|
|
||||||
Some((keychain.clone(), *index, txo))
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Return the script that exists under the given `keychain`'s `index`.
|
|
||||||
///
|
|
||||||
/// This calls [`SpkTxOutIndex::spk_at_index`] internally.
|
|
||||||
pub fn spk_at_index(&self, keychain: K, index: u32) -> Option<&Script> {
|
|
||||||
let descriptor_id = self.keychains_to_descriptors.get(&keychain)?.0;
|
|
||||||
self.inner.spk_at_index(&(descriptor_id, index))
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns the keychain and keychain index associated with the spk.
|
|
||||||
///
|
|
||||||
/// This calls [`SpkTxOutIndex::index_of_spk`] internally.
|
|
||||||
pub fn index_of_spk(&self, script: &Script) -> Option<(K, u32)> {
|
|
||||||
let (desc_id, last_index) = self.inner.index_of_spk(script)?;
|
|
||||||
let keychain = self.keychain_of_desc_id(desc_id)?;
|
|
||||||
Some((keychain.clone(), *last_index))
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns whether the spk under the `keychain`'s `index` has been used.
|
|
||||||
///
|
|
||||||
/// Here, "unused" means that after the script pubkey was stored in the index, the index has
|
|
||||||
/// never scanned a transaction output with it.
|
|
||||||
///
|
|
||||||
/// This calls [`SpkTxOutIndex::is_used`] internally.
|
|
||||||
pub fn is_used(&self, keychain: K, index: u32) -> bool {
|
|
||||||
let descriptor_id = self.keychains_to_descriptors.get(&keychain).map(|k| k.0);
|
|
||||||
match descriptor_id {
|
|
||||||
Some(descriptor_id) => self.inner.is_used(&(descriptor_id, index)),
|
|
||||||
None => false,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Marks the script pubkey at `index` as used even though the tracker hasn't seen an output
|
|
||||||
/// with it.
|
|
||||||
///
|
|
||||||
/// This only has an effect when the `index` had been added to `self` already and was unused.
|
|
||||||
///
|
|
||||||
/// Returns whether the spk under the given `keychain` and `index` is successfully
|
|
||||||
/// marked as used. Returns false either when there is no descriptor under the given
|
|
||||||
/// keychain, or when the spk is already marked as used.
|
|
||||||
///
|
|
||||||
/// This is useful when you want to reserve a script pubkey for something but don't want to add
|
|
||||||
/// the transaction output using it to the index yet. Other callers will consider `index` on
|
|
||||||
/// `keychain` used until you call [`unmark_used`].
|
|
||||||
///
|
|
||||||
/// This calls [`SpkTxOutIndex::mark_used`] internally.
|
|
||||||
///
|
|
||||||
/// [`unmark_used`]: Self::unmark_used
|
|
||||||
pub fn mark_used(&mut self, keychain: K, index: u32) -> bool {
|
|
||||||
let descriptor_id = self.keychains_to_descriptors.get(&keychain).map(|k| k.0);
|
|
||||||
match descriptor_id {
|
|
||||||
Some(descriptor_id) => self.inner.mark_used(&(descriptor_id, index)),
|
|
||||||
None => false,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Undoes the effect of [`mark_used`]. Returns whether the `index` is inserted back into
|
|
||||||
/// `unused`.
|
|
||||||
///
|
|
||||||
/// Note that if `self` has scanned an output with this script pubkey, then this will have no
|
|
||||||
/// effect.
|
|
||||||
///
|
|
||||||
/// This calls [`SpkTxOutIndex::unmark_used`] internally.
|
|
||||||
///
|
|
||||||
/// [`mark_used`]: Self::mark_used
|
|
||||||
pub fn unmark_used(&mut self, keychain: K, index: u32) -> bool {
|
|
||||||
let descriptor_id = self.keychains_to_descriptors.get(&keychain).map(|k| k.0);
|
|
||||||
match descriptor_id {
|
|
||||||
Some(descriptor_id) => self.inner.unmark_used(&(descriptor_id, index)),
|
|
||||||
None => false,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Computes the total value transfer effect `tx` has on the script pubkeys belonging to the
|
|
||||||
/// keychains in `range`. Value is *sent* when a script pubkey in the `range` is on an input and
|
|
||||||
/// *received* when it is on an output. For `sent` to be computed correctly, the output being
|
|
||||||
/// spent must have already been scanned by the index. Calculating received just uses the
|
|
||||||
/// [`Transaction`] outputs directly, so it will be correct even if it has not been scanned.
|
|
||||||
pub fn sent_and_received(
|
|
||||||
&self,
|
|
||||||
tx: &Transaction,
|
|
||||||
range: impl RangeBounds<K>,
|
|
||||||
) -> (Amount, Amount) {
|
|
||||||
self.inner
|
|
||||||
.sent_and_received(tx, self.map_to_inner_bounds(range))
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Computes the net value that this transaction gives to the script pubkeys in the index and
|
|
||||||
/// *takes* from the transaction outputs in the index. Shorthand for calling
|
|
||||||
/// [`sent_and_received`] and subtracting sent from received.
|
|
||||||
///
|
|
||||||
/// This calls [`SpkTxOutIndex::net_value`] internally.
|
|
||||||
///
|
|
||||||
/// [`sent_and_received`]: Self::sent_and_received
|
|
||||||
pub fn net_value(&self, tx: &Transaction, range: impl RangeBounds<K>) -> SignedAmount {
|
|
||||||
self.inner.net_value(tx, self.map_to_inner_bounds(range))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
|
|
||||||
/// Return the map of the keychain to descriptors.
|
|
||||||
pub fn keychains(
|
|
||||||
&self,
|
|
||||||
) -> impl DoubleEndedIterator<Item = (&K, &Descriptor<DescriptorPublicKey>)> + ExactSizeIterator + '_
|
|
||||||
{
|
|
||||||
self.keychains_to_descriptors
|
|
||||||
.iter()
|
|
||||||
.map(|(k, (_, d))| (k, d))
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Insert a descriptor with a keychain associated to it.
|
|
||||||
///
|
|
||||||
/// Adding a descriptor means you will be able to derive new script pubkeys under it
|
|
||||||
/// and the txout index will discover transaction outputs with those script pubkeys.
|
|
||||||
///
|
|
||||||
/// When trying to add a keychain that already existed under a different descriptor, or a descriptor
|
|
||||||
/// that already existed with a different keychain, the old keychain (or descriptor) will be
|
|
||||||
/// overwritten.
|
|
||||||
pub fn insert_descriptor(
|
|
||||||
&mut self,
|
|
||||||
keychain: K,
|
|
||||||
descriptor: Descriptor<DescriptorPublicKey>,
|
|
||||||
) -> super::ChangeSet<K> {
|
|
||||||
let mut changeset = super::ChangeSet::<K>::default();
|
|
||||||
let desc_id = descriptor.descriptor_id();
|
|
||||||
|
|
||||||
let old_desc = self
|
|
||||||
.keychains_to_descriptors
|
|
||||||
.insert(keychain.clone(), (desc_id, descriptor.clone()));
|
|
||||||
|
|
||||||
if let Some((old_desc_id, _)) = old_desc {
|
|
||||||
// nothing needs to be done if caller reinsterted the same descriptor under the same
|
|
||||||
// keychain
|
|
||||||
if old_desc_id == desc_id {
|
|
||||||
return changeset;
|
|
||||||
}
|
|
||||||
// we should remove old descriptor that is associated with this keychain as the index
|
|
||||||
// is designed to track one descriptor per keychain (however different keychains can
|
|
||||||
// share the same descriptor)
|
|
||||||
let _is_keychain_removed = self
|
|
||||||
.descriptor_ids_to_keychain_set
|
|
||||||
.get_mut(&old_desc_id)
|
|
||||||
.expect("we must have already inserted this descriptor")
|
|
||||||
.remove(&keychain);
|
|
||||||
debug_assert!(_is_keychain_removed);
|
|
||||||
}
|
|
||||||
|
|
||||||
self.descriptor_ids_to_keychain_set
|
|
||||||
.entry(desc_id)
|
|
||||||
.or_default()
|
|
||||||
.insert(keychain.clone());
|
|
||||||
self.descriptor_ids_to_descriptors
|
|
||||||
.insert(desc_id, descriptor.clone());
|
|
||||||
self.replenish_lookahead(&keychain, self.lookahead);
|
|
||||||
|
|
||||||
changeset
|
|
||||||
.keychains_added
|
|
||||||
.insert(keychain.clone(), descriptor);
|
|
||||||
changeset
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Gets the descriptor associated with the keychain. Returns `None` if the keychain doesn't
|
|
||||||
/// have a descriptor associated with it.
|
|
||||||
pub fn get_descriptor(&self, keychain: &K) -> Option<&Descriptor<DescriptorPublicKey>> {
|
|
||||||
self.keychains_to_descriptors.get(keychain).map(|(_, d)| d)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Get the lookahead setting.
|
|
||||||
///
|
|
||||||
/// Refer to [`new`] for more information on the `lookahead`.
|
|
||||||
///
|
|
||||||
/// [`new`]: Self::new
|
|
||||||
pub fn lookahead(&self) -> u32 {
|
|
||||||
self.lookahead
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Store lookahead scripts until `target_index` (inclusive).
|
|
||||||
///
|
|
||||||
/// This does not change the global `lookahead` setting.
|
|
||||||
pub fn lookahead_to_target(&mut self, keychain: &K, target_index: u32) {
|
|
||||||
if let Some((next_index, _)) = self.next_index(keychain) {
|
|
||||||
let temp_lookahead = (target_index + 1)
|
|
||||||
.checked_sub(next_index)
|
|
||||||
.filter(|&index| index > 0);
|
|
||||||
|
|
||||||
if let Some(temp_lookahead) = temp_lookahead {
|
|
||||||
self.replenish_lookahead(keychain, temp_lookahead);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn replenish_lookahead(&mut self, keychain: &K, lookahead: u32) {
|
|
||||||
let descriptor_opt = self.keychains_to_descriptors.get(keychain).cloned();
|
|
||||||
if let Some((descriptor_id, descriptor)) = descriptor_opt {
|
|
||||||
let next_store_index = self.next_store_index(descriptor_id);
|
|
||||||
let next_reveal_index = self.last_revealed.get(&descriptor_id).map_or(0, |v| *v + 1);
|
|
||||||
|
|
||||||
for (new_index, new_spk) in SpkIterator::new_with_range(
|
|
||||||
descriptor,
|
|
||||||
next_store_index..next_reveal_index + lookahead,
|
|
||||||
) {
|
|
||||||
let _inserted = self.inner.insert_spk((descriptor_id, new_index), new_spk);
|
|
||||||
debug_assert!(_inserted, "replenish lookahead: must not have existing spk: keychain={:?}, lookahead={}, next_store_index={}, next_reveal_index={}", keychain, lookahead, next_store_index, next_reveal_index);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn next_store_index(&self, descriptor_id: DescriptorId) -> u32 {
|
|
||||||
self.inner()
|
|
||||||
.all_spks()
|
|
||||||
// This range is keeping only the spks with descriptor_id equal to
|
|
||||||
// `descriptor_id`. We don't use filter here as range is more optimized.
|
|
||||||
.range((descriptor_id, u32::MIN)..(descriptor_id, u32::MAX))
|
|
||||||
.last()
|
|
||||||
.map_or(0, |((_, index), _)| *index + 1)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Get an unbounded spk iterator over a given `keychain`. Returns `None` if the provided
|
|
||||||
/// keychain doesn't exist
|
|
||||||
pub fn unbounded_spk_iter(
|
|
||||||
&self,
|
|
||||||
keychain: &K,
|
|
||||||
) -> Option<SpkIterator<Descriptor<DescriptorPublicKey>>> {
|
|
||||||
let descriptor = self.keychains_to_descriptors.get(keychain)?.1.clone();
|
|
||||||
Some(SpkIterator::new(descriptor))
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Get unbounded spk iterators for all keychains.
|
|
||||||
pub fn all_unbounded_spk_iters(
|
|
||||||
&self,
|
|
||||||
) -> BTreeMap<K, SpkIterator<Descriptor<DescriptorPublicKey>>> {
|
|
||||||
self.keychains_to_descriptors
|
|
||||||
.iter()
|
|
||||||
.map(|(k, (_, descriptor))| (k.clone(), SpkIterator::new(descriptor.clone())))
|
|
||||||
.collect()
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Iterate over revealed spks of keychains in `range`
|
|
||||||
pub fn revealed_spks(
|
|
||||||
&self,
|
|
||||||
range: impl RangeBounds<K>,
|
|
||||||
) -> impl DoubleEndedIterator<Item = (&K, u32, &Script)> + Clone {
|
|
||||||
self.keychains_to_descriptors
|
|
||||||
.range(range)
|
|
||||||
.flat_map(|(_, (descriptor_id, _))| {
|
|
||||||
let start = Bound::Included((*descriptor_id, u32::MIN));
|
|
||||||
let end = match self.last_revealed.get(descriptor_id) {
|
|
||||||
Some(last_revealed) => Bound::Included((*descriptor_id, *last_revealed)),
|
|
||||||
None => Bound::Excluded((*descriptor_id, u32::MIN)),
|
|
||||||
};
|
|
||||||
|
|
||||||
self.inner
|
|
||||||
.all_spks()
|
|
||||||
.range((start, end))
|
|
||||||
.map(|((descriptor_id, i), spk)| {
|
|
||||||
(
|
|
||||||
self.keychain_of_desc_id(descriptor_id)
|
|
||||||
.expect("must have keychain"),
|
|
||||||
*i,
|
|
||||||
spk.as_script(),
|
|
||||||
)
|
|
||||||
})
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Iterate over revealed spks of the given `keychain`.
|
|
||||||
pub fn revealed_keychain_spks<'a>(
|
|
||||||
&'a self,
|
|
||||||
keychain: &'a K,
|
|
||||||
) -> impl DoubleEndedIterator<Item = (u32, &Script)> + 'a {
|
|
||||||
self.revealed_spks(keychain..=keychain)
|
|
||||||
.map(|(_, i, spk)| (i, spk))
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Iterate over revealed, but unused, spks of all keychains.
|
|
||||||
pub fn unused_spks(&self) -> impl DoubleEndedIterator<Item = (K, u32, &Script)> + Clone {
|
|
||||||
self.keychains_to_descriptors.keys().flat_map(|keychain| {
|
|
||||||
self.unused_keychain_spks(keychain)
|
|
||||||
.map(|(i, spk)| (keychain.clone(), i, spk))
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Iterate over revealed, but unused, spks of the given `keychain`.
|
|
||||||
/// Returns an empty iterator if the provided keychain doesn't exist.
|
|
||||||
pub fn unused_keychain_spks(
|
|
||||||
&self,
|
|
||||||
keychain: &K,
|
|
||||||
) -> impl DoubleEndedIterator<Item = (u32, &Script)> + Clone {
|
|
||||||
let desc_id = self
|
|
||||||
.keychains_to_descriptors
|
|
||||||
.get(keychain)
|
|
||||||
.map(|(desc_id, _)| *desc_id)
|
|
||||||
// We use a dummy desc id if we can't find the real one in our map. In this way,
|
|
||||||
// if this method was to be called with a non-existent keychain, we would return an
|
|
||||||
// empty iterator
|
|
||||||
.unwrap_or_else(|| DescriptorId::from_byte_array([0; 32]));
|
|
||||||
let next_i = self.last_revealed.get(&desc_id).map_or(0, |&i| i + 1);
|
|
||||||
self.inner
|
|
||||||
.unused_spks((desc_id, u32::MIN)..(desc_id, next_i))
|
|
||||||
.map(|((_, i), spk)| (*i, spk))
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Get the next derivation index for `keychain`. The next index is the index after the last revealed
|
|
||||||
/// derivation index.
|
|
||||||
///
|
|
||||||
/// The second field in the returned tuple represents whether the next derivation index is new.
|
|
||||||
/// There are two scenarios where the next derivation index is reused (not new):
|
|
||||||
///
|
|
||||||
/// 1. The keychain's descriptor has no wildcard, and a script has already been revealed.
|
|
||||||
/// 2. The number of revealed scripts has already reached 2^31 (refer to BIP-32).
|
|
||||||
///
|
|
||||||
/// Not checking the second field of the tuple may result in address reuse.
|
|
||||||
///
|
|
||||||
/// Returns None if the provided `keychain` doesn't exist.
|
|
||||||
pub fn next_index(&self, keychain: &K) -> Option<(u32, bool)> {
|
|
||||||
let (descriptor_id, descriptor) = self.keychains_to_descriptors.get(keychain)?;
|
|
||||||
let last_index = self.last_revealed.get(descriptor_id).cloned();
|
|
||||||
|
|
||||||
// we can only get the next index if the wildcard exists.
|
|
||||||
let has_wildcard = descriptor.has_wildcard();
|
|
||||||
|
|
||||||
Some(match last_index {
|
|
||||||
// if there is no index, next_index is always 0.
|
|
||||||
None => (0, true),
|
|
||||||
// descriptors without wildcards can only have one index.
|
|
||||||
Some(_) if !has_wildcard => (0, false),
|
|
||||||
// derivation index must be < 2^31 (BIP-32).
|
|
||||||
Some(index) if index > BIP32_MAX_INDEX => {
|
|
||||||
unreachable!("index is out of bounds")
|
|
||||||
}
|
|
||||||
Some(index) if index == BIP32_MAX_INDEX => (index, false),
|
|
||||||
// get the next derivation index.
|
|
||||||
Some(index) => (index + 1, true),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Get the last derivation index that is revealed for each keychain.
|
|
||||||
///
|
|
||||||
/// Keychains with no revealed indices will not be included in the returned [`BTreeMap`].
|
|
||||||
pub fn last_revealed_indices(&self) -> BTreeMap<K, u32> {
|
|
||||||
self.last_revealed
|
|
||||||
.iter()
|
|
||||||
.filter_map(|(desc_id, index)| {
|
|
||||||
let keychain = self.keychain_of_desc_id(desc_id)?;
|
|
||||||
Some((keychain.clone(), *index))
|
|
||||||
})
|
|
||||||
.collect()
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Get the last derivation index revealed for `keychain`. Returns None if the keychain doesn't
|
|
||||||
/// exist, or if the keychain doesn't have any revealed scripts.
|
|
||||||
pub fn last_revealed_index(&self, keychain: &K) -> Option<u32> {
|
|
||||||
let descriptor_id = self.keychains_to_descriptors.get(keychain)?.0;
|
|
||||||
self.last_revealed.get(&descriptor_id).cloned()
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Convenience method to call [`Self::reveal_to_target`] on multiple keychains.
|
|
||||||
pub fn reveal_to_target_multi(
|
|
||||||
&mut self,
|
|
||||||
keychains: &BTreeMap<K, u32>,
|
|
||||||
) -> (
|
|
||||||
BTreeMap<K, SpkIterator<Descriptor<DescriptorPublicKey>>>,
|
|
||||||
super::ChangeSet<K>,
|
|
||||||
) {
|
|
||||||
let mut changeset = super::ChangeSet::default();
|
|
||||||
let mut spks = BTreeMap::new();
|
|
||||||
|
|
||||||
for (keychain, &index) in keychains {
|
|
||||||
if let Some((new_spks, new_changeset)) = self.reveal_to_target(keychain, index) {
|
|
||||||
if !new_changeset.is_empty() {
|
|
||||||
spks.insert(keychain.clone(), new_spks);
|
|
||||||
changeset.append(new_changeset.clone());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
(spks, changeset)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Convenience method to call `reveal_to_target` with a descriptor_id instead of a keychain.
|
|
||||||
/// This is useful for revealing spks of descriptors for which we don't have a keychain
|
|
||||||
/// tracked.
|
|
||||||
/// Refer to the `reveal_to_target` documentation for more.
|
|
||||||
///
|
|
||||||
/// Returns None if the provided `descriptor_id` doesn't correspond to a tracked descriptor.
|
|
||||||
fn reveal_to_target_with_id(
|
|
||||||
&mut self,
|
|
||||||
descriptor_id: DescriptorId,
|
|
||||||
target_index: u32,
|
|
||||||
) -> Option<(
|
|
||||||
SpkIterator<Descriptor<DescriptorPublicKey>>,
|
|
||||||
super::ChangeSet<K>,
|
|
||||||
)> {
|
|
||||||
let descriptor = self
|
|
||||||
.descriptor_ids_to_descriptors
|
|
||||||
.get(&descriptor_id)?
|
|
||||||
.clone();
|
|
||||||
let has_wildcard = descriptor.has_wildcard();
|
|
||||||
|
|
||||||
let target_index = if has_wildcard { target_index } else { 0 };
|
|
||||||
let next_reveal_index = self
|
|
||||||
.last_revealed
|
|
||||||
.get(&descriptor_id)
|
|
||||||
.map_or(0, |index| *index + 1);
|
|
||||||
|
|
||||||
debug_assert!(next_reveal_index + self.lookahead >= self.next_store_index(descriptor_id));
|
|
||||||
|
|
||||||
// If the target_index is already revealed, we are done
|
|
||||||
if next_reveal_index > target_index {
|
|
||||||
return Some((
|
|
||||||
SpkIterator::new_with_range(descriptor, next_reveal_index..next_reveal_index),
|
|
||||||
super::ChangeSet::default(),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
|
|
||||||
// We range over the indexes that are not stored and insert their spks in the index.
|
|
||||||
// Indexes from next_reveal_index to next_reveal_index + lookahead are already stored (due
|
|
||||||
// to lookahead), so we only range from next_reveal_index + lookahead to target + lookahead
|
|
||||||
let range = next_reveal_index + self.lookahead..=target_index + self.lookahead;
|
|
||||||
for (new_index, new_spk) in SpkIterator::new_with_range(descriptor.clone(), range) {
|
|
||||||
let _inserted = self.inner.insert_spk((descriptor_id, new_index), new_spk);
|
|
||||||
debug_assert!(_inserted, "must not have existing spk");
|
|
||||||
debug_assert!(
|
|
||||||
has_wildcard || new_index == 0,
|
|
||||||
"non-wildcard descriptors must not iterate past index 0"
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
let _old_index = self.last_revealed.insert(descriptor_id, target_index);
|
|
||||||
debug_assert!(_old_index < Some(target_index));
|
|
||||||
Some((
|
|
||||||
SpkIterator::new_with_range(descriptor, next_reveal_index..target_index + 1),
|
|
||||||
super::ChangeSet {
|
|
||||||
keychains_added: BTreeMap::new(),
|
|
||||||
last_revealed: core::iter::once((descriptor_id, target_index)).collect(),
|
|
||||||
},
|
|
||||||
))
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Reveals script pubkeys of the `keychain`'s descriptor **up to and including** the
|
|
||||||
/// `target_index`.
|
|
||||||
///
|
|
||||||
/// If the `target_index` cannot be reached (due to the descriptor having no wildcard and/or
|
|
||||||
/// the `target_index` is in the hardened index range), this method will make a best-effort and
|
|
||||||
/// reveal up to the last possible index.
|
|
||||||
///
|
|
||||||
/// This returns an iterator of newly revealed indices (alongside their scripts) and a
|
|
||||||
/// [`super::ChangeSet`], which reports updates to the latest revealed index. If no new script
|
|
||||||
/// pubkeys are revealed, then both of these will be empty.
|
|
||||||
///
|
|
||||||
/// Returns None if the provided `keychain` doesn't exist.
|
|
||||||
pub fn reveal_to_target(
|
|
||||||
&mut self,
|
|
||||||
keychain: &K,
|
|
||||||
target_index: u32,
|
|
||||||
) -> Option<(
|
|
||||||
SpkIterator<Descriptor<DescriptorPublicKey>>,
|
|
||||||
super::ChangeSet<K>,
|
|
||||||
)> {
|
|
||||||
let descriptor_id = self.keychains_to_descriptors.get(keychain)?.0;
|
|
||||||
self.reveal_to_target_with_id(descriptor_id, target_index)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Attempts to reveal the next script pubkey for `keychain`.
|
|
||||||
///
|
|
||||||
/// Returns the derivation index of the revealed script pubkey, the revealed script pubkey and a
|
|
||||||
/// [`super::ChangeSet`] which represents changes in the last revealed index (if any).
|
|
||||||
/// Returns None if the provided keychain doesn't exist.
|
|
||||||
///
|
|
||||||
/// When a new script cannot be revealed, we return the last revealed script and an empty
|
|
||||||
/// [`super::ChangeSet`]. There are two scenarios when a new script pubkey cannot be derived:
|
|
||||||
///
|
|
||||||
/// 1. The descriptor has no wildcard and already has one script revealed.
|
|
||||||
/// 2. The descriptor has already revealed scripts up to the numeric bound.
|
|
||||||
/// 3. There is no descriptor associated with the given keychain.
|
|
||||||
pub fn reveal_next_spk(
|
|
||||||
&mut self,
|
|
||||||
keychain: &K,
|
|
||||||
) -> Option<((u32, &Script), super::ChangeSet<K>)> {
|
|
||||||
let descriptor_id = self.keychains_to_descriptors.get(keychain)?.0;
|
|
||||||
let (next_index, _) = self.next_index(keychain).expect("We know keychain exists");
|
|
||||||
let changeset = self
|
|
||||||
.reveal_to_target(keychain, next_index)
|
|
||||||
.expect("We know keychain exists")
|
|
||||||
.1;
|
|
||||||
let script = self
|
|
||||||
.inner
|
|
||||||
.spk_at_index(&(descriptor_id, next_index))
|
|
||||||
.expect("script must already be stored");
|
|
||||||
Some(((next_index, script), changeset))
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Gets the next unused script pubkey in the keychain. I.e., the script pubkey with the lowest
|
|
||||||
/// index that has not been used yet.
|
|
||||||
///
|
|
||||||
/// This will derive and reveal a new script pubkey if no more unused script pubkeys exist.
|
|
||||||
///
|
|
||||||
/// If the descriptor has no wildcard and already has a used script pubkey or if a descriptor
|
|
||||||
/// has used all scripts up to the derivation bounds, then the last derived script pubkey will be
|
|
||||||
/// returned.
|
|
||||||
///
|
|
||||||
/// Returns None if the provided keychain doesn't exist.
|
|
||||||
pub fn next_unused_spk(
|
|
||||||
&mut self,
|
|
||||||
keychain: &K,
|
|
||||||
) -> Option<((u32, &Script), super::ChangeSet<K>)> {
|
|
||||||
let need_new = self.unused_keychain_spks(keychain).next().is_none();
|
|
||||||
// this rather strange branch is needed because of some lifetime issues
|
|
||||||
if need_new {
|
|
||||||
self.reveal_next_spk(keychain)
|
|
||||||
} else {
|
|
||||||
Some((
|
|
||||||
self.unused_keychain_spks(keychain)
|
|
||||||
.next()
|
|
||||||
.expect("we already know next exists"),
|
|
||||||
super::ChangeSet::default(),
|
|
||||||
))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Iterate over all [`OutPoint`]s that have `TxOut`s with script pubkeys derived from
|
|
||||||
/// `keychain`.
|
|
||||||
pub fn keychain_outpoints<'a>(
|
|
||||||
&'a self,
|
|
||||||
keychain: &'a K,
|
|
||||||
) -> impl DoubleEndedIterator<Item = (u32, OutPoint)> + 'a {
|
|
||||||
self.keychain_outpoints_in_range(keychain..=keychain)
|
|
||||||
.map(move |(_, i, op)| (i, op))
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Iterate over [`OutPoint`]s that have script pubkeys derived from keychains in `range`.
|
|
||||||
pub fn keychain_outpoints_in_range<'a>(
|
|
||||||
&'a self,
|
|
||||||
range: impl RangeBounds<K> + 'a,
|
|
||||||
) -> impl DoubleEndedIterator<Item = (&'a K, u32, OutPoint)> + 'a {
|
|
||||||
let bounds = self.map_to_inner_bounds(range);
|
|
||||||
self.inner
|
|
||||||
.outputs_in_range(bounds)
|
|
||||||
.map(move |((desc_id, i), op)| {
|
|
||||||
let keychain = self
|
|
||||||
.keychain_of_desc_id(desc_id)
|
|
||||||
.expect("keychain must exist");
|
|
||||||
(keychain, *i, op)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
fn map_to_inner_bounds(
|
|
||||||
&self,
|
|
||||||
bound: impl RangeBounds<K>,
|
|
||||||
) -> impl RangeBounds<(DescriptorId, u32)> {
|
|
||||||
let get_desc_id = |keychain| {
|
|
||||||
self.keychains_to_descriptors
|
|
||||||
.get(keychain)
|
|
||||||
.map(|(desc_id, _)| *desc_id)
|
|
||||||
.unwrap_or_else(|| DescriptorId::from_byte_array([0; 32]))
|
|
||||||
};
|
|
||||||
let start = match bound.start_bound() {
|
|
||||||
Bound::Included(keychain) => Bound::Included((get_desc_id(keychain), u32::MIN)),
|
|
||||||
Bound::Excluded(keychain) => Bound::Excluded((get_desc_id(keychain), u32::MAX)),
|
|
||||||
Bound::Unbounded => Bound::Unbounded,
|
|
||||||
};
|
|
||||||
let end = match bound.end_bound() {
|
|
||||||
Bound::Included(keychain) => Bound::Included((get_desc_id(keychain), u32::MAX)),
|
|
||||||
Bound::Excluded(keychain) => Bound::Excluded((get_desc_id(keychain), u32::MIN)),
|
|
||||||
Bound::Unbounded => Bound::Unbounded,
|
|
||||||
};
|
|
||||||
|
|
||||||
(start, end)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns the highest derivation index of the `keychain` where [`KeychainTxOutIndex`] has
|
|
||||||
/// found a [`TxOut`] with it's script pubkey.
|
|
||||||
pub fn last_used_index(&self, keychain: &K) -> Option<u32> {
|
|
||||||
self.keychain_outpoints(keychain).last().map(|(i, _)| i)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns the highest derivation index of each keychain that [`KeychainTxOutIndex`] has found
|
|
||||||
/// a [`TxOut`] with it's script pubkey.
|
|
||||||
pub fn last_used_indices(&self) -> BTreeMap<K, u32> {
|
|
||||||
self.keychains_to_descriptors
|
|
||||||
.iter()
|
|
||||||
.filter_map(|(keychain, _)| {
|
|
||||||
self.last_used_index(keychain)
|
|
||||||
.map(|index| (keychain.clone(), index))
|
|
||||||
})
|
|
||||||
.collect()
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Applies the derivation changeset to the [`KeychainTxOutIndex`], as specified in the
|
|
||||||
/// [`ChangeSet::append`] documentation:
|
|
||||||
/// - Extends the number of derived scripts per keychain
|
|
||||||
/// - Adds new descriptors introduced
|
|
||||||
/// - If a descriptor is introduced for a keychain that already had a descriptor, overwrites
|
|
||||||
/// the old descriptor
|
|
||||||
pub fn apply_changeset(&mut self, changeset: super::ChangeSet<K>) {
|
|
||||||
let ChangeSet {
|
|
||||||
keychains_added,
|
|
||||||
last_revealed,
|
|
||||||
} = changeset;
|
|
||||||
for (keychain, descriptor) in keychains_added {
|
|
||||||
let _ = self.insert_descriptor(keychain, descriptor);
|
|
||||||
}
|
|
||||||
let last_revealed = last_revealed
|
|
||||||
.into_iter()
|
|
||||||
.filter_map(|(desc_id, index)| {
|
|
||||||
let keychain = self.keychain_of_desc_id(&desc_id)?;
|
|
||||||
Some((keychain.clone(), index))
|
|
||||||
})
|
|
||||||
.collect();
|
|
||||||
let _ = self.reveal_to_target_multi(&last_revealed);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -21,13 +21,15 @@
|
|||||||
#![warn(missing_docs)]
|
#![warn(missing_docs)]
|
||||||
|
|
||||||
pub use bitcoin;
|
pub use bitcoin;
|
||||||
mod spk_txout_index;
|
mod balance;
|
||||||
pub use spk_txout_index::*;
|
pub use balance::*;
|
||||||
mod chain_data;
|
mod chain_data;
|
||||||
pub use chain_data::*;
|
pub use chain_data::*;
|
||||||
pub mod indexed_tx_graph;
|
pub mod indexed_tx_graph;
|
||||||
pub use indexed_tx_graph::IndexedTxGraph;
|
pub use indexed_tx_graph::IndexedTxGraph;
|
||||||
pub mod keychain;
|
pub mod indexer;
|
||||||
|
pub use indexer::spk_txout;
|
||||||
|
pub use indexer::Indexer;
|
||||||
pub mod local_chain;
|
pub mod local_chain;
|
||||||
mod tx_data_traits;
|
mod tx_data_traits;
|
||||||
pub mod tx_graph;
|
pub mod tx_graph;
|
||||||
@@ -35,6 +37,8 @@ pub use tx_data_traits::*;
|
|||||||
pub use tx_graph::TxGraph;
|
pub use tx_graph::TxGraph;
|
||||||
mod chain_oracle;
|
mod chain_oracle;
|
||||||
pub use chain_oracle::*;
|
pub use chain_oracle::*;
|
||||||
|
mod persist;
|
||||||
|
pub use persist::*;
|
||||||
|
|
||||||
#[doc(hidden)]
|
#[doc(hidden)]
|
||||||
pub mod example_utils;
|
pub mod example_utils;
|
||||||
@@ -48,19 +52,21 @@ pub use descriptor_ext::{DescriptorExt, DescriptorId};
|
|||||||
#[cfg(feature = "miniscript")]
|
#[cfg(feature = "miniscript")]
|
||||||
mod spk_iter;
|
mod spk_iter;
|
||||||
#[cfg(feature = "miniscript")]
|
#[cfg(feature = "miniscript")]
|
||||||
|
pub use indexer::keychain_txout;
|
||||||
|
#[cfg(feature = "miniscript")]
|
||||||
pub use spk_iter::*;
|
pub use spk_iter::*;
|
||||||
|
#[cfg(feature = "rusqlite")]
|
||||||
|
pub mod rusqlite_impl;
|
||||||
pub mod spk_client;
|
pub mod spk_client;
|
||||||
|
|
||||||
#[allow(unused_imports)]
|
#[allow(unused_imports)]
|
||||||
#[macro_use]
|
#[macro_use]
|
||||||
extern crate alloc;
|
extern crate alloc;
|
||||||
|
#[cfg(feature = "rusqlite")]
|
||||||
|
pub extern crate rusqlite_crate as rusqlite;
|
||||||
#[cfg(feature = "serde")]
|
#[cfg(feature = "serde")]
|
||||||
pub extern crate serde_crate as serde;
|
pub extern crate serde_crate as serde;
|
||||||
|
|
||||||
#[cfg(feature = "bincode")]
|
|
||||||
extern crate bincode;
|
|
||||||
|
|
||||||
#[cfg(feature = "std")]
|
#[cfg(feature = "std")]
|
||||||
#[macro_use]
|
#[macro_use]
|
||||||
extern crate std;
|
extern crate std;
|
||||||
@@ -98,3 +104,25 @@ pub mod collections {
|
|||||||
|
|
||||||
/// How many confirmations are needed f or a coinbase output to be spent.
|
/// How many confirmations are needed f or a coinbase output to be spent.
|
||||||
pub const COINBASE_MATURITY: u32 = 100;
|
pub const COINBASE_MATURITY: u32 = 100;
|
||||||
|
|
||||||
|
/// A tuple of keychain index and `T` representing the indexed value.
|
||||||
|
pub type Indexed<T> = (u32, T);
|
||||||
|
/// A tuple of keychain `K`, derivation index (`u32`) and a `T` associated with them.
|
||||||
|
pub type KeychainIndexed<K, T> = ((K, u32), T);
|
||||||
|
|
||||||
|
/// A wrapper that we use to impl remote traits for types in our crate or dependency crates.
|
||||||
|
pub struct Impl<T>(pub T);
|
||||||
|
|
||||||
|
impl<T> From<T> for Impl<T> {
|
||||||
|
fn from(value: T) -> Self {
|
||||||
|
Self(value)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<T> core::ops::Deref for Impl<T> {
|
||||||
|
type Target = T;
|
||||||
|
|
||||||
|
fn deref(&self) -> &Self::Target {
|
||||||
|
&self.0
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|||||||
@@ -4,17 +4,11 @@ use core::convert::Infallible;
|
|||||||
use core::ops::RangeBounds;
|
use core::ops::RangeBounds;
|
||||||
|
|
||||||
use crate::collections::BTreeMap;
|
use crate::collections::BTreeMap;
|
||||||
use crate::{BlockId, ChainOracle};
|
use crate::{BlockId, ChainOracle, Merge};
|
||||||
use alloc::sync::Arc;
|
use alloc::sync::Arc;
|
||||||
use bitcoin::block::Header;
|
use bitcoin::block::Header;
|
||||||
use bitcoin::BlockHash;
|
use bitcoin::BlockHash;
|
||||||
|
|
||||||
/// The [`ChangeSet`] represents changes to [`LocalChain`].
|
|
||||||
///
|
|
||||||
/// The key represents the block height, and the value either represents added a new [`CheckPoint`]
|
|
||||||
/// (if [`Some`]), or removing a [`CheckPoint`] (if [`None`]).
|
|
||||||
pub type ChangeSet = BTreeMap<u32, Option<BlockHash>>;
|
|
||||||
|
|
||||||
/// A [`LocalChain`] checkpoint is used to find the agreement point between two chains and as a
|
/// A [`LocalChain`] checkpoint is used to find the agreement point between two chains and as a
|
||||||
/// transaction anchor.
|
/// transaction anchor.
|
||||||
///
|
///
|
||||||
@@ -216,7 +210,7 @@ impl CheckPoint {
|
|||||||
|
|
||||||
/// Apply `changeset` to the checkpoint.
|
/// Apply `changeset` to the checkpoint.
|
||||||
fn apply_changeset(mut self, changeset: &ChangeSet) -> Result<CheckPoint, MissingGenesisError> {
|
fn apply_changeset(mut self, changeset: &ChangeSet) -> Result<CheckPoint, MissingGenesisError> {
|
||||||
if let Some(start_height) = changeset.keys().next().cloned() {
|
if let Some(start_height) = changeset.blocks.keys().next().cloned() {
|
||||||
// changes after point of agreement
|
// changes after point of agreement
|
||||||
let mut extension = BTreeMap::default();
|
let mut extension = BTreeMap::default();
|
||||||
// point of agreement
|
// point of agreement
|
||||||
@@ -231,7 +225,7 @@ impl CheckPoint {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
for (&height, &hash) in changeset {
|
for (&height, &hash) in &changeset.blocks {
|
||||||
match hash {
|
match hash {
|
||||||
Some(hash) => {
|
Some(hash) => {
|
||||||
extension.insert(height, hash);
|
extension.insert(height, hash);
|
||||||
@@ -331,7 +325,7 @@ impl LocalChain {
|
|||||||
|
|
||||||
/// Construct a [`LocalChain`] from an initial `changeset`.
|
/// Construct a [`LocalChain`] from an initial `changeset`.
|
||||||
pub fn from_changeset(changeset: ChangeSet) -> Result<Self, MissingGenesisError> {
|
pub fn from_changeset(changeset: ChangeSet) -> Result<Self, MissingGenesisError> {
|
||||||
let genesis_entry = changeset.get(&0).copied().flatten();
|
let genesis_entry = changeset.blocks.get(&0).copied().flatten();
|
||||||
let genesis_hash = match genesis_entry {
|
let genesis_hash = match genesis_entry {
|
||||||
Some(hash) => hash,
|
Some(hash) => hash,
|
||||||
None => return Err(MissingGenesisError),
|
None => return Err(MissingGenesisError),
|
||||||
@@ -521,12 +515,14 @@ impl LocalChain {
|
|||||||
}
|
}
|
||||||
|
|
||||||
let mut changeset = ChangeSet::default();
|
let mut changeset = ChangeSet::default();
|
||||||
changeset.insert(block_id.height, Some(block_id.hash));
|
changeset
|
||||||
|
.blocks
|
||||||
|
.insert(block_id.height, Some(block_id.hash));
|
||||||
self.apply_changeset(&changeset)
|
self.apply_changeset(&changeset)
|
||||||
.map_err(|_| AlterCheckPointError {
|
.map_err(|_| AlterCheckPointError {
|
||||||
height: 0,
|
height: 0,
|
||||||
original_hash: self.genesis_hash(),
|
original_hash: self.genesis_hash(),
|
||||||
update_hash: changeset.get(&0).cloned().flatten(),
|
update_hash: changeset.blocks.get(&0).cloned().flatten(),
|
||||||
})?;
|
})?;
|
||||||
Ok(changeset)
|
Ok(changeset)
|
||||||
}
|
}
|
||||||
@@ -548,7 +544,7 @@ impl LocalChain {
|
|||||||
if cp_id.height < block_id.height {
|
if cp_id.height < block_id.height {
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
changeset.insert(cp_id.height, None);
|
changeset.blocks.insert(cp_id.height, None);
|
||||||
if cp_id == block_id {
|
if cp_id == block_id {
|
||||||
remove_from = Some(cp);
|
remove_from = Some(cp);
|
||||||
}
|
}
|
||||||
@@ -569,13 +565,16 @@ impl LocalChain {
|
|||||||
/// Derives an initial [`ChangeSet`], meaning that it can be applied to an empty chain to
|
/// Derives an initial [`ChangeSet`], meaning that it can be applied to an empty chain to
|
||||||
/// recover the current chain.
|
/// recover the current chain.
|
||||||
pub fn initial_changeset(&self) -> ChangeSet {
|
pub fn initial_changeset(&self) -> ChangeSet {
|
||||||
self.tip
|
ChangeSet {
|
||||||
.iter()
|
blocks: self
|
||||||
.map(|cp| {
|
.tip
|
||||||
let block_id = cp.block_id();
|
.iter()
|
||||||
(block_id.height, Some(block_id.hash))
|
.map(|cp| {
|
||||||
})
|
let block_id = cp.block_id();
|
||||||
.collect()
|
(block_id.height, Some(block_id.hash))
|
||||||
|
})
|
||||||
|
.collect(),
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Iterate over checkpoints in descending height order.
|
/// Iterate over checkpoints in descending height order.
|
||||||
@@ -587,7 +586,7 @@ impl LocalChain {
|
|||||||
|
|
||||||
fn _check_changeset_is_applied(&self, changeset: &ChangeSet) -> bool {
|
fn _check_changeset_is_applied(&self, changeset: &ChangeSet) -> bool {
|
||||||
let mut curr_cp = self.tip.clone();
|
let mut curr_cp = self.tip.clone();
|
||||||
for (height, exp_hash) in changeset.iter().rev() {
|
for (height, exp_hash) in changeset.blocks.iter().rev() {
|
||||||
match curr_cp.get(*height) {
|
match curr_cp.get(*height) {
|
||||||
Some(query_cp) => {
|
Some(query_cp) => {
|
||||||
if query_cp.height() != *height || Some(query_cp.hash()) != *exp_hash {
|
if query_cp.height() != *height || Some(query_cp.hash()) != *exp_hash {
|
||||||
@@ -630,6 +629,58 @@ impl LocalChain {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// The [`ChangeSet`] represents changes to [`LocalChain`].
|
||||||
|
#[derive(Debug, Default, Clone, PartialEq)]
|
||||||
|
#[cfg_attr(
|
||||||
|
feature = "serde",
|
||||||
|
derive(serde::Deserialize, serde::Serialize),
|
||||||
|
serde(crate = "serde_crate")
|
||||||
|
)]
|
||||||
|
pub struct ChangeSet {
|
||||||
|
/// Changes to the [`LocalChain`] blocks.
|
||||||
|
///
|
||||||
|
/// The key represents the block height, and the value either represents added a new [`CheckPoint`]
|
||||||
|
/// (if [`Some`]), or removing a [`CheckPoint`] (if [`None`]).
|
||||||
|
pub blocks: BTreeMap<u32, Option<BlockHash>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Merge for ChangeSet {
|
||||||
|
fn merge(&mut self, other: Self) {
|
||||||
|
Merge::merge(&mut self.blocks, other.blocks)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn is_empty(&self) -> bool {
|
||||||
|
self.blocks.is_empty()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<B: IntoIterator<Item = (u32, Option<BlockHash>)>> From<B> for ChangeSet {
|
||||||
|
fn from(blocks: B) -> Self {
|
||||||
|
Self {
|
||||||
|
blocks: blocks.into_iter().collect(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl FromIterator<(u32, Option<BlockHash>)> for ChangeSet {
|
||||||
|
fn from_iter<T: IntoIterator<Item = (u32, Option<BlockHash>)>>(iter: T) -> Self {
|
||||||
|
Self {
|
||||||
|
blocks: iter.into_iter().collect(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl FromIterator<(u32, BlockHash)> for ChangeSet {
|
||||||
|
fn from_iter<T: IntoIterator<Item = (u32, BlockHash)>>(iter: T) -> Self {
|
||||||
|
Self {
|
||||||
|
blocks: iter
|
||||||
|
.into_iter()
|
||||||
|
.map(|(height, hash)| (height, Some(hash)))
|
||||||
|
.collect(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/// An error which occurs when a [`LocalChain`] is constructed without a genesis checkpoint.
|
/// An error which occurs when a [`LocalChain`] is constructed without a genesis checkpoint.
|
||||||
#[derive(Clone, Debug, PartialEq)]
|
#[derive(Clone, Debug, PartialEq)]
|
||||||
pub struct MissingGenesisError;
|
pub struct MissingGenesisError;
|
||||||
@@ -761,7 +812,7 @@ fn merge_chains(
|
|||||||
match (curr_orig.as_ref(), curr_update.as_ref()) {
|
match (curr_orig.as_ref(), curr_update.as_ref()) {
|
||||||
// Update block that doesn't exist in the original chain
|
// Update block that doesn't exist in the original chain
|
||||||
(o, Some(u)) if Some(u.height()) > o.map(|o| o.height()) => {
|
(o, Some(u)) if Some(u.height()) > o.map(|o| o.height()) => {
|
||||||
changeset.insert(u.height(), Some(u.hash()));
|
changeset.blocks.insert(u.height(), Some(u.hash()));
|
||||||
prev_update = curr_update.take();
|
prev_update = curr_update.take();
|
||||||
}
|
}
|
||||||
// Original block that isn't in the update
|
// Original block that isn't in the update
|
||||||
@@ -813,9 +864,9 @@ fn merge_chains(
|
|||||||
} else {
|
} else {
|
||||||
// We have an invalidation height so we set the height to the updated hash and
|
// We have an invalidation height so we set the height to the updated hash and
|
||||||
// also purge all the original chain block hashes above this block.
|
// also purge all the original chain block hashes above this block.
|
||||||
changeset.insert(u.height(), Some(u.hash()));
|
changeset.blocks.insert(u.height(), Some(u.hash()));
|
||||||
for invalidated_height in potentially_invalidated_heights.drain(..) {
|
for invalidated_height in potentially_invalidated_heights.drain(..) {
|
||||||
changeset.insert(invalidated_height, None);
|
changeset.blocks.insert(invalidated_height, None);
|
||||||
}
|
}
|
||||||
prev_orig_was_invalidated = true;
|
prev_orig_was_invalidated = true;
|
||||||
}
|
}
|
||||||
|
|||||||
169
crates/chain/src/persist.rs
Normal file
169
crates/chain/src/persist.rs
Normal file
@@ -0,0 +1,169 @@
|
|||||||
|
use core::{
|
||||||
|
future::Future,
|
||||||
|
ops::{Deref, DerefMut},
|
||||||
|
pin::Pin,
|
||||||
|
};
|
||||||
|
|
||||||
|
use alloc::boxed::Box;
|
||||||
|
|
||||||
|
use crate::Merge;
|
||||||
|
|
||||||
|
/// Represents a type that contains staged changes.
|
||||||
|
pub trait Staged {
|
||||||
|
/// Type for staged changes.
|
||||||
|
type ChangeSet: Merge;
|
||||||
|
|
||||||
|
/// Get mutable reference of staged changes.
|
||||||
|
fn staged(&mut self) -> &mut Self::ChangeSet;
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Trait that persists the type with `Db`.
|
||||||
|
///
|
||||||
|
/// Methods of this trait should not be called directly.
|
||||||
|
pub trait PersistWith<Db>: Staged + Sized {
|
||||||
|
/// Parameters for [`PersistWith::create`].
|
||||||
|
type CreateParams;
|
||||||
|
/// Parameters for [`PersistWith::load`].
|
||||||
|
type LoadParams;
|
||||||
|
/// Error type of [`PersistWith::create`].
|
||||||
|
type CreateError;
|
||||||
|
/// Error type of [`PersistWith::load`].
|
||||||
|
type LoadError;
|
||||||
|
/// Error type of [`PersistWith::persist`].
|
||||||
|
type PersistError;
|
||||||
|
|
||||||
|
/// Initialize the `Db` and create `Self`.
|
||||||
|
fn create(db: &mut Db, params: Self::CreateParams) -> Result<Self, Self::CreateError>;
|
||||||
|
|
||||||
|
/// Initialize the `Db` and load a previously-persisted `Self`.
|
||||||
|
fn load(db: &mut Db, params: Self::LoadParams) -> Result<Option<Self>, Self::LoadError>;
|
||||||
|
|
||||||
|
/// Persist changes to the `Db`.
|
||||||
|
fn persist(
|
||||||
|
db: &mut Db,
|
||||||
|
changeset: &<Self as Staged>::ChangeSet,
|
||||||
|
) -> Result<(), Self::PersistError>;
|
||||||
|
}
|
||||||
|
|
||||||
|
type FutureResult<'a, T, E> = Pin<Box<dyn Future<Output = Result<T, E>> + Send + 'a>>;
|
||||||
|
|
||||||
|
/// Trait that persists the type with an async `Db`.
|
||||||
|
pub trait PersistAsyncWith<Db>: Staged + Sized {
|
||||||
|
/// Parameters for [`PersistAsyncWith::create`].
|
||||||
|
type CreateParams;
|
||||||
|
/// Parameters for [`PersistAsyncWith::load`].
|
||||||
|
type LoadParams;
|
||||||
|
/// Error type of [`PersistAsyncWith::create`].
|
||||||
|
type CreateError;
|
||||||
|
/// Error type of [`PersistAsyncWith::load`].
|
||||||
|
type LoadError;
|
||||||
|
/// Error type of [`PersistAsyncWith::persist`].
|
||||||
|
type PersistError;
|
||||||
|
|
||||||
|
/// Initialize the `Db` and create `Self`.
|
||||||
|
fn create(db: &mut Db, params: Self::CreateParams) -> FutureResult<Self, Self::CreateError>;
|
||||||
|
|
||||||
|
/// Initialize the `Db` and load a previously-persisted `Self`.
|
||||||
|
fn load(db: &mut Db, params: Self::LoadParams) -> FutureResult<Option<Self>, Self::LoadError>;
|
||||||
|
|
||||||
|
/// Persist changes to the `Db`.
|
||||||
|
fn persist<'a>(
|
||||||
|
db: &'a mut Db,
|
||||||
|
changeset: &'a <Self as Staged>::ChangeSet,
|
||||||
|
) -> FutureResult<'a, (), Self::PersistError>;
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Represents a persisted `T`.
|
||||||
|
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord)]
|
||||||
|
pub struct Persisted<T> {
|
||||||
|
inner: T,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<T> Persisted<T> {
|
||||||
|
/// Create a new persisted `T`.
|
||||||
|
pub fn create<Db>(db: &mut Db, params: T::CreateParams) -> Result<Self, T::CreateError>
|
||||||
|
where
|
||||||
|
T: PersistWith<Db>,
|
||||||
|
{
|
||||||
|
T::create(db, params).map(|inner| Self { inner })
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Create a new persisted `T` with async `Db`.
|
||||||
|
pub async fn create_async<Db>(
|
||||||
|
db: &mut Db,
|
||||||
|
params: T::CreateParams,
|
||||||
|
) -> Result<Self, T::CreateError>
|
||||||
|
where
|
||||||
|
T: PersistAsyncWith<Db>,
|
||||||
|
{
|
||||||
|
T::create(db, params).await.map(|inner| Self { inner })
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Construct a persisted `T` from `Db`.
|
||||||
|
pub fn load<Db>(db: &mut Db, params: T::LoadParams) -> Result<Option<Self>, T::LoadError>
|
||||||
|
where
|
||||||
|
T: PersistWith<Db>,
|
||||||
|
{
|
||||||
|
Ok(T::load(db, params)?.map(|inner| Self { inner }))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Construct a persisted `T` from an async `Db`.
|
||||||
|
pub async fn load_async<Db>(
|
||||||
|
db: &mut Db,
|
||||||
|
params: T::LoadParams,
|
||||||
|
) -> Result<Option<Self>, T::LoadError>
|
||||||
|
where
|
||||||
|
T: PersistAsyncWith<Db>,
|
||||||
|
{
|
||||||
|
Ok(T::load(db, params).await?.map(|inner| Self { inner }))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Persist staged changes of `T` into `Db`.
|
||||||
|
///
|
||||||
|
/// If the database errors, the staged changes will not be cleared.
|
||||||
|
pub fn persist<Db>(&mut self, db: &mut Db) -> Result<bool, T::PersistError>
|
||||||
|
where
|
||||||
|
T: PersistWith<Db>,
|
||||||
|
{
|
||||||
|
let stage = T::staged(&mut self.inner);
|
||||||
|
if stage.is_empty() {
|
||||||
|
return Ok(false);
|
||||||
|
}
|
||||||
|
T::persist(db, &*stage)?;
|
||||||
|
stage.take();
|
||||||
|
Ok(true)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Persist staged changes of `T` into an async `Db`.
|
||||||
|
///
|
||||||
|
/// If the database errors, the staged changes will not be cleared.
|
||||||
|
pub async fn persist_async<'a, Db>(
|
||||||
|
&'a mut self,
|
||||||
|
db: &'a mut Db,
|
||||||
|
) -> Result<bool, T::PersistError>
|
||||||
|
where
|
||||||
|
T: PersistAsyncWith<Db>,
|
||||||
|
{
|
||||||
|
let stage = T::staged(&mut self.inner);
|
||||||
|
if stage.is_empty() {
|
||||||
|
return Ok(false);
|
||||||
|
}
|
||||||
|
T::persist(db, &*stage).await?;
|
||||||
|
stage.take();
|
||||||
|
Ok(true)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<T> Deref for Persisted<T> {
|
||||||
|
type Target = T;
|
||||||
|
|
||||||
|
fn deref(&self) -> &Self::Target {
|
||||||
|
&self.inner
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<T> DerefMut for Persisted<T> {
|
||||||
|
fn deref_mut(&mut self) -> &mut Self::Target {
|
||||||
|
&mut self.inner
|
||||||
|
}
|
||||||
|
}
|
||||||
530
crates/chain/src/rusqlite_impl.rs
Normal file
530
crates/chain/src/rusqlite_impl.rs
Normal file
@@ -0,0 +1,530 @@
|
|||||||
|
//! Module for stuff
|
||||||
|
|
||||||
|
use crate::*;
|
||||||
|
use core::str::FromStr;
|
||||||
|
|
||||||
|
use alloc::{borrow::ToOwned, boxed::Box, string::ToString, sync::Arc, vec::Vec};
|
||||||
|
use bitcoin::consensus::{Decodable, Encodable};
|
||||||
|
use rusqlite;
|
||||||
|
use rusqlite::named_params;
|
||||||
|
use rusqlite::types::{FromSql, FromSqlError, FromSqlResult, ToSql, ToSqlOutput, ValueRef};
|
||||||
|
use rusqlite::OptionalExtension;
|
||||||
|
use rusqlite::Transaction;
|
||||||
|
|
||||||
|
/// Table name for schemas.
|
||||||
|
pub const SCHEMAS_TABLE_NAME: &str = "bdk_schemas";
|
||||||
|
|
||||||
|
/// Initialize the schema table.
|
||||||
|
fn init_schemas_table(db_tx: &Transaction) -> rusqlite::Result<()> {
|
||||||
|
let sql = format!("CREATE TABLE IF NOT EXISTS {}( name TEXT PRIMARY KEY NOT NULL, version INTEGER NOT NULL ) STRICT", SCHEMAS_TABLE_NAME);
|
||||||
|
db_tx.execute(&sql, ())?;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get schema version of `schema_name`.
|
||||||
|
fn schema_version(db_tx: &Transaction, schema_name: &str) -> rusqlite::Result<Option<u32>> {
|
||||||
|
let sql = format!(
|
||||||
|
"SELECT version FROM {} WHERE name=:name",
|
||||||
|
SCHEMAS_TABLE_NAME
|
||||||
|
);
|
||||||
|
db_tx
|
||||||
|
.query_row(&sql, named_params! { ":name": schema_name }, |row| {
|
||||||
|
row.get::<_, u32>("version")
|
||||||
|
})
|
||||||
|
.optional()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Set the `schema_version` of `schema_name`.
|
||||||
|
fn set_schema_version(
|
||||||
|
db_tx: &Transaction,
|
||||||
|
schema_name: &str,
|
||||||
|
schema_version: u32,
|
||||||
|
) -> rusqlite::Result<()> {
|
||||||
|
let sql = format!(
|
||||||
|
"REPLACE INTO {}(name, version) VALUES(:name, :version)",
|
||||||
|
SCHEMAS_TABLE_NAME,
|
||||||
|
);
|
||||||
|
db_tx.execute(
|
||||||
|
&sql,
|
||||||
|
named_params! { ":name": schema_name, ":version": schema_version },
|
||||||
|
)?;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Runs logic that initializes/migrates the table schemas.
|
||||||
|
pub fn migrate_schema(
|
||||||
|
db_tx: &Transaction,
|
||||||
|
schema_name: &str,
|
||||||
|
versioned_scripts: &[&[&str]],
|
||||||
|
) -> rusqlite::Result<()> {
|
||||||
|
init_schemas_table(db_tx)?;
|
||||||
|
let current_version = schema_version(db_tx, schema_name)?;
|
||||||
|
let exec_from = current_version.map_or(0_usize, |v| v as usize + 1);
|
||||||
|
let scripts_to_exec = versioned_scripts.iter().enumerate().skip(exec_from);
|
||||||
|
for (version, &script) in scripts_to_exec {
|
||||||
|
set_schema_version(db_tx, schema_name, version as u32)?;
|
||||||
|
for statement in script {
|
||||||
|
db_tx.execute(statement, ())?;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
impl FromSql for Impl<bitcoin::Txid> {
|
||||||
|
fn column_result(value: ValueRef<'_>) -> FromSqlResult<Self> {
|
||||||
|
bitcoin::Txid::from_str(value.as_str()?)
|
||||||
|
.map(Self)
|
||||||
|
.map_err(from_sql_error)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ToSql for Impl<bitcoin::Txid> {
|
||||||
|
fn to_sql(&self) -> rusqlite::Result<ToSqlOutput<'_>> {
|
||||||
|
Ok(self.to_string().into())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl FromSql for Impl<bitcoin::BlockHash> {
|
||||||
|
fn column_result(value: ValueRef<'_>) -> FromSqlResult<Self> {
|
||||||
|
bitcoin::BlockHash::from_str(value.as_str()?)
|
||||||
|
.map(Self)
|
||||||
|
.map_err(from_sql_error)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ToSql for Impl<bitcoin::BlockHash> {
|
||||||
|
fn to_sql(&self) -> rusqlite::Result<ToSqlOutput<'_>> {
|
||||||
|
Ok(self.to_string().into())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(feature = "miniscript")]
|
||||||
|
impl FromSql for Impl<DescriptorId> {
|
||||||
|
fn column_result(value: ValueRef<'_>) -> FromSqlResult<Self> {
|
||||||
|
DescriptorId::from_str(value.as_str()?)
|
||||||
|
.map(Self)
|
||||||
|
.map_err(from_sql_error)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(feature = "miniscript")]
|
||||||
|
impl ToSql for Impl<DescriptorId> {
|
||||||
|
fn to_sql(&self) -> rusqlite::Result<ToSqlOutput<'_>> {
|
||||||
|
Ok(self.to_string().into())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl FromSql for Impl<bitcoin::Transaction> {
|
||||||
|
fn column_result(value: ValueRef<'_>) -> FromSqlResult<Self> {
|
||||||
|
bitcoin::Transaction::consensus_decode_from_finite_reader(&mut value.as_bytes()?)
|
||||||
|
.map(Self)
|
||||||
|
.map_err(from_sql_error)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ToSql for Impl<bitcoin::Transaction> {
|
||||||
|
fn to_sql(&self) -> rusqlite::Result<ToSqlOutput<'_>> {
|
||||||
|
let mut bytes = Vec::<u8>::new();
|
||||||
|
self.consensus_encode(&mut bytes).map_err(to_sql_error)?;
|
||||||
|
Ok(bytes.into())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl FromSql for Impl<bitcoin::ScriptBuf> {
|
||||||
|
fn column_result(value: ValueRef<'_>) -> FromSqlResult<Self> {
|
||||||
|
Ok(bitcoin::Script::from_bytes(value.as_bytes()?)
|
||||||
|
.to_owned()
|
||||||
|
.into())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ToSql for Impl<bitcoin::ScriptBuf> {
|
||||||
|
fn to_sql(&self) -> rusqlite::Result<ToSqlOutput<'_>> {
|
||||||
|
Ok(self.as_bytes().into())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl FromSql for Impl<bitcoin::Amount> {
|
||||||
|
fn column_result(value: ValueRef<'_>) -> FromSqlResult<Self> {
|
||||||
|
Ok(bitcoin::Amount::from_sat(value.as_i64()?.try_into().map_err(from_sql_error)?).into())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ToSql for Impl<bitcoin::Amount> {
|
||||||
|
fn to_sql(&self) -> rusqlite::Result<ToSqlOutput<'_>> {
|
||||||
|
let amount: i64 = self.to_sat().try_into().map_err(to_sql_error)?;
|
||||||
|
Ok(amount.into())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<A: Anchor + serde_crate::de::DeserializeOwned> FromSql for Impl<A> {
|
||||||
|
fn column_result(value: ValueRef<'_>) -> FromSqlResult<Self> {
|
||||||
|
serde_json::from_str(value.as_str()?)
|
||||||
|
.map(Impl)
|
||||||
|
.map_err(from_sql_error)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<A: Anchor + serde_crate::Serialize> ToSql for Impl<A> {
|
||||||
|
fn to_sql(&self) -> rusqlite::Result<ToSqlOutput<'_>> {
|
||||||
|
serde_json::to_string(&self.0)
|
||||||
|
.map(Into::into)
|
||||||
|
.map_err(to_sql_error)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(feature = "miniscript")]
|
||||||
|
impl FromSql for Impl<miniscript::Descriptor<miniscript::DescriptorPublicKey>> {
|
||||||
|
fn column_result(value: ValueRef<'_>) -> FromSqlResult<Self> {
|
||||||
|
miniscript::Descriptor::from_str(value.as_str()?)
|
||||||
|
.map(Self)
|
||||||
|
.map_err(from_sql_error)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(feature = "miniscript")]
|
||||||
|
impl ToSql for Impl<miniscript::Descriptor<miniscript::DescriptorPublicKey>> {
|
||||||
|
fn to_sql(&self) -> rusqlite::Result<ToSqlOutput<'_>> {
|
||||||
|
Ok(self.to_string().into())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl FromSql for Impl<bitcoin::Network> {
|
||||||
|
fn column_result(value: ValueRef<'_>) -> FromSqlResult<Self> {
|
||||||
|
bitcoin::Network::from_str(value.as_str()?)
|
||||||
|
.map(Self)
|
||||||
|
.map_err(from_sql_error)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ToSql for Impl<bitcoin::Network> {
|
||||||
|
fn to_sql(&self) -> rusqlite::Result<ToSqlOutput<'_>> {
|
||||||
|
Ok(self.to_string().into())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn from_sql_error<E: std::error::Error + Send + Sync + 'static>(err: E) -> FromSqlError {
|
||||||
|
FromSqlError::Other(Box::new(err))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn to_sql_error<E: std::error::Error + Send + Sync + 'static>(err: E) -> rusqlite::Error {
|
||||||
|
rusqlite::Error::ToSqlConversionFailure(Box::new(err))
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<A> tx_graph::ChangeSet<A>
|
||||||
|
where
|
||||||
|
A: Anchor + Clone + Ord + serde::Serialize + serde::de::DeserializeOwned,
|
||||||
|
{
|
||||||
|
/// Schema name for [`tx_graph::ChangeSet`].
|
||||||
|
pub const SCHEMA_NAME: &'static str = "bdk_txgraph";
|
||||||
|
/// Name of table that stores full transactions and `last_seen` timestamps.
|
||||||
|
pub const TXS_TABLE_NAME: &'static str = "bdk_txs";
|
||||||
|
/// Name of table that stores floating txouts.
|
||||||
|
pub const TXOUTS_TABLE_NAME: &'static str = "bdk_txouts";
|
||||||
|
/// Name of table that stores [`Anchor`]s.
|
||||||
|
pub const ANCHORS_TABLE_NAME: &'static str = "bdk_anchors";
|
||||||
|
|
||||||
|
/// Initialize sqlite tables.
|
||||||
|
fn init_sqlite_tables(db_tx: &rusqlite::Transaction) -> rusqlite::Result<()> {
|
||||||
|
let schema_v0: &[&str] = &[
|
||||||
|
// full transactions
|
||||||
|
&format!(
|
||||||
|
"CREATE TABLE {} ( \
|
||||||
|
txid TEXT PRIMARY KEY NOT NULL, \
|
||||||
|
raw_tx BLOB, \
|
||||||
|
last_seen INTEGER \
|
||||||
|
) STRICT",
|
||||||
|
Self::TXS_TABLE_NAME,
|
||||||
|
),
|
||||||
|
// floating txouts
|
||||||
|
&format!(
|
||||||
|
"CREATE TABLE {} ( \
|
||||||
|
txid TEXT NOT NULL, \
|
||||||
|
vout INTEGER NOT NULL, \
|
||||||
|
value INTEGER NOT NULL, \
|
||||||
|
script BLOB NOT NULL, \
|
||||||
|
PRIMARY KEY (txid, vout) \
|
||||||
|
) STRICT",
|
||||||
|
Self::TXOUTS_TABLE_NAME,
|
||||||
|
),
|
||||||
|
// anchors
|
||||||
|
&format!(
|
||||||
|
"CREATE TABLE {} ( \
|
||||||
|
txid TEXT NOT NULL REFERENCES {} (txid), \
|
||||||
|
block_height INTEGER NOT NULL, \
|
||||||
|
block_hash TEXT NOT NULL, \
|
||||||
|
anchor BLOB NOT NULL, \
|
||||||
|
PRIMARY KEY (txid, block_height, block_hash) \
|
||||||
|
) STRICT",
|
||||||
|
Self::ANCHORS_TABLE_NAME,
|
||||||
|
Self::TXS_TABLE_NAME,
|
||||||
|
),
|
||||||
|
];
|
||||||
|
migrate_schema(db_tx, Self::SCHEMA_NAME, &[schema_v0])
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Construct a [`TxGraph`] from an sqlite database.
|
||||||
|
pub fn from_sqlite(db_tx: &rusqlite::Transaction) -> rusqlite::Result<Self> {
|
||||||
|
Self::init_sqlite_tables(db_tx)?;
|
||||||
|
|
||||||
|
let mut changeset = Self::default();
|
||||||
|
|
||||||
|
let mut statement = db_tx.prepare(&format!(
|
||||||
|
"SELECT txid, raw_tx, last_seen FROM {}",
|
||||||
|
Self::TXS_TABLE_NAME,
|
||||||
|
))?;
|
||||||
|
let row_iter = statement.query_map([], |row| {
|
||||||
|
Ok((
|
||||||
|
row.get::<_, Impl<bitcoin::Txid>>("txid")?,
|
||||||
|
row.get::<_, Option<Impl<bitcoin::Transaction>>>("raw_tx")?,
|
||||||
|
row.get::<_, Option<u64>>("last_seen")?,
|
||||||
|
))
|
||||||
|
})?;
|
||||||
|
for row in row_iter {
|
||||||
|
let (Impl(txid), tx, last_seen) = row?;
|
||||||
|
if let Some(Impl(tx)) = tx {
|
||||||
|
changeset.txs.insert(Arc::new(tx));
|
||||||
|
}
|
||||||
|
if let Some(last_seen) = last_seen {
|
||||||
|
changeset.last_seen.insert(txid, last_seen);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut statement = db_tx.prepare(&format!(
|
||||||
|
"SELECT txid, vout, value, script FROM {}",
|
||||||
|
Self::TXOUTS_TABLE_NAME,
|
||||||
|
))?;
|
||||||
|
let row_iter = statement.query_map([], |row| {
|
||||||
|
Ok((
|
||||||
|
row.get::<_, Impl<bitcoin::Txid>>("txid")?,
|
||||||
|
row.get::<_, u32>("vout")?,
|
||||||
|
row.get::<_, Impl<bitcoin::Amount>>("value")?,
|
||||||
|
row.get::<_, Impl<bitcoin::ScriptBuf>>("script")?,
|
||||||
|
))
|
||||||
|
})?;
|
||||||
|
for row in row_iter {
|
||||||
|
let (Impl(txid), vout, Impl(value), Impl(script_pubkey)) = row?;
|
||||||
|
changeset.txouts.insert(
|
||||||
|
bitcoin::OutPoint { txid, vout },
|
||||||
|
bitcoin::TxOut {
|
||||||
|
value,
|
||||||
|
script_pubkey,
|
||||||
|
},
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut statement = db_tx.prepare(&format!(
|
||||||
|
"SELECT json(anchor), txid FROM {}",
|
||||||
|
Self::ANCHORS_TABLE_NAME,
|
||||||
|
))?;
|
||||||
|
let row_iter = statement.query_map([], |row| {
|
||||||
|
Ok((
|
||||||
|
row.get::<_, Impl<A>>("json(anchor)")?,
|
||||||
|
row.get::<_, Impl<bitcoin::Txid>>("txid")?,
|
||||||
|
))
|
||||||
|
})?;
|
||||||
|
for row in row_iter {
|
||||||
|
let (Impl(anchor), Impl(txid)) = row?;
|
||||||
|
changeset.anchors.insert((anchor, txid));
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(changeset)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Persist `changeset` to the sqlite database.
|
||||||
|
pub fn persist_to_sqlite(&self, db_tx: &rusqlite::Transaction) -> rusqlite::Result<()> {
|
||||||
|
Self::init_sqlite_tables(db_tx)?;
|
||||||
|
|
||||||
|
let mut statement = db_tx.prepare_cached(&format!(
|
||||||
|
"INSERT INTO {}(txid, raw_tx) VALUES(:txid, :raw_tx) ON CONFLICT(txid) DO UPDATE SET raw_tx=:raw_tx",
|
||||||
|
Self::TXS_TABLE_NAME,
|
||||||
|
))?;
|
||||||
|
for tx in &self.txs {
|
||||||
|
statement.execute(named_params! {
|
||||||
|
":txid": Impl(tx.compute_txid()),
|
||||||
|
":raw_tx": Impl(tx.as_ref().clone()),
|
||||||
|
})?;
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut statement = db_tx
|
||||||
|
.prepare_cached(&format!(
|
||||||
|
"INSERT INTO {}(txid, last_seen) VALUES(:txid, :last_seen) ON CONFLICT(txid) DO UPDATE SET last_seen=:last_seen",
|
||||||
|
Self::TXS_TABLE_NAME,
|
||||||
|
))?;
|
||||||
|
for (&txid, &last_seen) in &self.last_seen {
|
||||||
|
statement.execute(named_params! {
|
||||||
|
":txid": Impl(txid),
|
||||||
|
":last_seen": Some(last_seen),
|
||||||
|
})?;
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut statement = db_tx.prepare_cached(&format!(
|
||||||
|
"REPLACE INTO {}(txid, vout, value, script) VALUES(:txid, :vout, :value, :script)",
|
||||||
|
Self::TXOUTS_TABLE_NAME,
|
||||||
|
))?;
|
||||||
|
for (op, txo) in &self.txouts {
|
||||||
|
statement.execute(named_params! {
|
||||||
|
":txid": Impl(op.txid),
|
||||||
|
":vout": op.vout,
|
||||||
|
":value": Impl(txo.value),
|
||||||
|
":script": Impl(txo.script_pubkey.clone()),
|
||||||
|
})?;
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut statement = db_tx.prepare_cached(&format!(
|
||||||
|
"REPLACE INTO {}(txid, block_height, block_hash, anchor) VALUES(:txid, :block_height, :block_hash, jsonb(:anchor))",
|
||||||
|
Self::ANCHORS_TABLE_NAME,
|
||||||
|
))?;
|
||||||
|
for (anchor, txid) in &self.anchors {
|
||||||
|
let anchor_block = anchor.anchor_block();
|
||||||
|
statement.execute(named_params! {
|
||||||
|
":txid": Impl(*txid),
|
||||||
|
":block_height": anchor_block.height,
|
||||||
|
":block_hash": Impl(anchor_block.hash),
|
||||||
|
":anchor": Impl(anchor.clone()),
|
||||||
|
})?;
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl local_chain::ChangeSet {
|
||||||
|
/// Schema name for the changeset.
|
||||||
|
pub const SCHEMA_NAME: &'static str = "bdk_localchain";
|
||||||
|
/// Name of sqlite table that stores blocks of [`LocalChain`](local_chain::LocalChain).
|
||||||
|
pub const BLOCKS_TABLE_NAME: &'static str = "bdk_blocks";
|
||||||
|
|
||||||
|
/// Initialize sqlite tables for persisting [`local_chain::LocalChain`].
|
||||||
|
fn init_sqlite_tables(db_tx: &rusqlite::Transaction) -> rusqlite::Result<()> {
|
||||||
|
let schema_v0: &[&str] = &[
|
||||||
|
// blocks
|
||||||
|
&format!(
|
||||||
|
"CREATE TABLE {} ( \
|
||||||
|
block_height INTEGER PRIMARY KEY NOT NULL, \
|
||||||
|
block_hash TEXT NOT NULL \
|
||||||
|
) STRICT",
|
||||||
|
Self::BLOCKS_TABLE_NAME,
|
||||||
|
),
|
||||||
|
];
|
||||||
|
migrate_schema(db_tx, Self::SCHEMA_NAME, &[schema_v0])
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Construct a [`LocalChain`](local_chain::LocalChain) from sqlite database.
|
||||||
|
pub fn from_sqlite(db_tx: &rusqlite::Transaction) -> rusqlite::Result<Self> {
|
||||||
|
Self::init_sqlite_tables(db_tx)?;
|
||||||
|
|
||||||
|
let mut changeset = Self::default();
|
||||||
|
|
||||||
|
let mut statement = db_tx.prepare(&format!(
|
||||||
|
"SELECT block_height, block_hash FROM {}",
|
||||||
|
Self::BLOCKS_TABLE_NAME,
|
||||||
|
))?;
|
||||||
|
let row_iter = statement.query_map([], |row| {
|
||||||
|
Ok((
|
||||||
|
row.get::<_, u32>("block_height")?,
|
||||||
|
row.get::<_, Impl<bitcoin::BlockHash>>("block_hash")?,
|
||||||
|
))
|
||||||
|
})?;
|
||||||
|
for row in row_iter {
|
||||||
|
let (height, Impl(hash)) = row?;
|
||||||
|
changeset.blocks.insert(height, Some(hash));
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(changeset)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Persist `changeset` to the sqlite database.
|
||||||
|
pub fn persist_to_sqlite(&self, db_tx: &rusqlite::Transaction) -> rusqlite::Result<()> {
|
||||||
|
Self::init_sqlite_tables(db_tx)?;
|
||||||
|
|
||||||
|
let mut replace_statement = db_tx.prepare_cached(&format!(
|
||||||
|
"REPLACE INTO {}(block_height, block_hash) VALUES(:block_height, :block_hash)",
|
||||||
|
Self::BLOCKS_TABLE_NAME,
|
||||||
|
))?;
|
||||||
|
let mut delete_statement = db_tx.prepare_cached(&format!(
|
||||||
|
"DELETE FROM {} WHERE block_height=:block_height",
|
||||||
|
Self::BLOCKS_TABLE_NAME,
|
||||||
|
))?;
|
||||||
|
for (&height, &hash) in &self.blocks {
|
||||||
|
match hash {
|
||||||
|
Some(hash) => replace_statement.execute(named_params! {
|
||||||
|
":block_height": height,
|
||||||
|
":block_hash": Impl(hash),
|
||||||
|
})?,
|
||||||
|
None => delete_statement.execute(named_params! {
|
||||||
|
":block_height": height,
|
||||||
|
})?,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(feature = "miniscript")]
|
||||||
|
impl keychain_txout::ChangeSet {
|
||||||
|
/// Schema name for the changeset.
|
||||||
|
pub const SCHEMA_NAME: &'static str = "bdk_keychaintxout";
|
||||||
|
/// Name for table that stores last revealed indices per descriptor id.
|
||||||
|
pub const LAST_REVEALED_TABLE_NAME: &'static str = "bdk_descriptor_last_revealed";
|
||||||
|
|
||||||
|
/// Initialize sqlite tables for persisting
|
||||||
|
/// [`KeychainTxOutIndex`](keychain_txout::KeychainTxOutIndex).
|
||||||
|
fn init_sqlite_tables(db_tx: &rusqlite::Transaction) -> rusqlite::Result<()> {
|
||||||
|
let schema_v0: &[&str] = &[
|
||||||
|
// last revealed
|
||||||
|
&format!(
|
||||||
|
"CREATE TABLE {} ( \
|
||||||
|
descriptor_id TEXT PRIMARY KEY NOT NULL, \
|
||||||
|
last_revealed INTEGER NOT NULL \
|
||||||
|
) STRICT",
|
||||||
|
Self::LAST_REVEALED_TABLE_NAME,
|
||||||
|
),
|
||||||
|
];
|
||||||
|
migrate_schema(db_tx, Self::SCHEMA_NAME, &[schema_v0])
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Construct [`KeychainTxOutIndex`](keychain_txout::KeychainTxOutIndex) from sqlite database
|
||||||
|
/// and given parameters.
|
||||||
|
pub fn from_sqlite(db_tx: &rusqlite::Transaction) -> rusqlite::Result<Self> {
|
||||||
|
Self::init_sqlite_tables(db_tx)?;
|
||||||
|
|
||||||
|
let mut changeset = Self::default();
|
||||||
|
|
||||||
|
let mut statement = db_tx.prepare(&format!(
|
||||||
|
"SELECT descriptor_id, last_revealed FROM {}",
|
||||||
|
Self::LAST_REVEALED_TABLE_NAME,
|
||||||
|
))?;
|
||||||
|
let row_iter = statement.query_map([], |row| {
|
||||||
|
Ok((
|
||||||
|
row.get::<_, Impl<DescriptorId>>("descriptor_id")?,
|
||||||
|
row.get::<_, u32>("last_revealed")?,
|
||||||
|
))
|
||||||
|
})?;
|
||||||
|
for row in row_iter {
|
||||||
|
let (Impl(descriptor_id), last_revealed) = row?;
|
||||||
|
changeset.last_revealed.insert(descriptor_id, last_revealed);
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(changeset)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Persist `changeset` to the sqlite database.
|
||||||
|
pub fn persist_to_sqlite(&self, db_tx: &rusqlite::Transaction) -> rusqlite::Result<()> {
|
||||||
|
Self::init_sqlite_tables(db_tx)?;
|
||||||
|
|
||||||
|
let mut statement = db_tx.prepare_cached(&format!(
|
||||||
|
"REPLACE INTO {}(descriptor_id, last_revealed) VALUES(:descriptor_id, :last_revealed)",
|
||||||
|
Self::LAST_REVEALED_TABLE_NAME,
|
||||||
|
))?;
|
||||||
|
for (&descriptor_id, &last_revealed) in &self.last_revealed {
|
||||||
|
statement.execute(named_params! {
|
||||||
|
":descriptor_id": Impl(descriptor_id),
|
||||||
|
":last_revealed": last_revealed,
|
||||||
|
})?;
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,18 +1,11 @@
|
|||||||
//! Helper types for spk-based blockchain clients.
|
//! Helper types for spk-based blockchain clients.
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
collections::{BTreeMap, HashMap},
|
collections::BTreeMap, local_chain::CheckPoint, ConfirmationBlockTime, Indexed, TxGraph,
|
||||||
local_chain::CheckPoint,
|
|
||||||
ConfirmationTimeHeightAnchor, TxGraph,
|
|
||||||
};
|
};
|
||||||
use alloc::{boxed::Box, sync::Arc, vec::Vec};
|
use alloc::boxed::Box;
|
||||||
use bitcoin::{OutPoint, Script, ScriptBuf, Transaction, Txid};
|
use bitcoin::{OutPoint, Script, ScriptBuf, Txid};
|
||||||
use core::{fmt::Debug, marker::PhantomData, ops::RangeBounds};
|
use core::marker::PhantomData;
|
||||||
|
|
||||||
/// A cache of [`Arc`]-wrapped full transactions, identified by their [`Txid`]s.
|
|
||||||
///
|
|
||||||
/// This is used by the chain-source to avoid re-fetching full transactions.
|
|
||||||
pub type TxCache = HashMap<Txid, Arc<Transaction>>;
|
|
||||||
|
|
||||||
/// Data required to perform a spk-based blockchain client sync.
|
/// Data required to perform a spk-based blockchain client sync.
|
||||||
///
|
///
|
||||||
@@ -24,8 +17,6 @@ pub struct SyncRequest {
|
|||||||
///
|
///
|
||||||
/// [`LocalChain::tip`]: crate::local_chain::LocalChain::tip
|
/// [`LocalChain::tip`]: crate::local_chain::LocalChain::tip
|
||||||
pub chain_tip: CheckPoint,
|
pub chain_tip: CheckPoint,
|
||||||
/// Cache of full transactions, so the chain-source can avoid re-fetching.
|
|
||||||
pub tx_cache: TxCache,
|
|
||||||
/// Transactions that spend from or to these indexed script pubkeys.
|
/// Transactions that spend from or to these indexed script pubkeys.
|
||||||
pub spks: Box<dyn ExactSizeIterator<Item = ScriptBuf> + Send>,
|
pub spks: Box<dyn ExactSizeIterator<Item = ScriptBuf> + Send>,
|
||||||
/// Transactions with these txids.
|
/// Transactions with these txids.
|
||||||
@@ -39,36 +30,12 @@ impl SyncRequest {
|
|||||||
pub fn from_chain_tip(cp: CheckPoint) -> Self {
|
pub fn from_chain_tip(cp: CheckPoint) -> Self {
|
||||||
Self {
|
Self {
|
||||||
chain_tip: cp,
|
chain_tip: cp,
|
||||||
tx_cache: TxCache::new(),
|
|
||||||
spks: Box::new(core::iter::empty()),
|
spks: Box::new(core::iter::empty()),
|
||||||
txids: Box::new(core::iter::empty()),
|
txids: Box::new(core::iter::empty()),
|
||||||
outpoints: Box::new(core::iter::empty()),
|
outpoints: Box::new(core::iter::empty()),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Add to the [`TxCache`] held by the request.
|
|
||||||
///
|
|
||||||
/// This consumes the [`SyncRequest`] and returns the updated one.
|
|
||||||
#[must_use]
|
|
||||||
pub fn cache_txs<T>(mut self, full_txs: impl IntoIterator<Item = (Txid, T)>) -> Self
|
|
||||||
where
|
|
||||||
T: Into<Arc<Transaction>>,
|
|
||||||
{
|
|
||||||
self.tx_cache = full_txs
|
|
||||||
.into_iter()
|
|
||||||
.map(|(txid, tx)| (txid, tx.into()))
|
|
||||||
.collect();
|
|
||||||
self
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Add all transactions from [`TxGraph`] into the [`TxCache`].
|
|
||||||
///
|
|
||||||
/// This consumes the [`SyncRequest`] and returns the updated one.
|
|
||||||
#[must_use]
|
|
||||||
pub fn cache_graph_txs<A>(self, graph: &TxGraph<A>) -> Self {
|
|
||||||
self.cache_txs(graph.full_txs().map(|tx_node| (tx_node.txid, tx_node.tx)))
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Set the [`Script`]s that will be synced against.
|
/// Set the [`Script`]s that will be synced against.
|
||||||
///
|
///
|
||||||
/// This consumes the [`SyncRequest`] and returns the updated one.
|
/// This consumes the [`SyncRequest`] and returns the updated one.
|
||||||
@@ -190,16 +157,17 @@ impl SyncRequest {
|
|||||||
/// This consumes the [`SyncRequest`] and returns the updated one.
|
/// This consumes the [`SyncRequest`] and returns the updated one.
|
||||||
#[cfg(feature = "miniscript")]
|
#[cfg(feature = "miniscript")]
|
||||||
#[must_use]
|
#[must_use]
|
||||||
pub fn populate_with_revealed_spks<K: Clone + Ord + Debug + Send + Sync>(
|
pub fn populate_with_revealed_spks<K: Clone + Ord + core::fmt::Debug + Send + Sync>(
|
||||||
self,
|
self,
|
||||||
index: &crate::keychain::KeychainTxOutIndex<K>,
|
index: &crate::indexer::keychain_txout::KeychainTxOutIndex<K>,
|
||||||
spk_range: impl RangeBounds<K>,
|
spk_range: impl core::ops::RangeBounds<K>,
|
||||||
) -> Self {
|
) -> Self {
|
||||||
use alloc::borrow::ToOwned;
|
use alloc::borrow::ToOwned;
|
||||||
|
use alloc::vec::Vec;
|
||||||
self.chain_spks(
|
self.chain_spks(
|
||||||
index
|
index
|
||||||
.revealed_spks(spk_range)
|
.revealed_spks(spk_range)
|
||||||
.map(|(_, _, spk)| spk.to_owned())
|
.map(|(_, spk)| spk.to_owned())
|
||||||
.collect::<Vec<_>>(),
|
.collect::<Vec<_>>(),
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
@@ -208,7 +176,7 @@ impl SyncRequest {
|
|||||||
/// Data returned from a spk-based blockchain client sync.
|
/// Data returned from a spk-based blockchain client sync.
|
||||||
///
|
///
|
||||||
/// See also [`SyncRequest`].
|
/// See also [`SyncRequest`].
|
||||||
pub struct SyncResult<A = ConfirmationTimeHeightAnchor> {
|
pub struct SyncResult<A = ConfirmationBlockTime> {
|
||||||
/// The update to apply to the receiving [`TxGraph`].
|
/// The update to apply to the receiving [`TxGraph`].
|
||||||
pub graph_update: TxGraph<A>,
|
pub graph_update: TxGraph<A>,
|
||||||
/// The update to apply to the receiving [`LocalChain`](crate::local_chain::LocalChain).
|
/// The update to apply to the receiving [`LocalChain`](crate::local_chain::LocalChain).
|
||||||
@@ -227,10 +195,8 @@ pub struct FullScanRequest<K> {
|
|||||||
///
|
///
|
||||||
/// [`LocalChain::tip`]: crate::local_chain::LocalChain::tip
|
/// [`LocalChain::tip`]: crate::local_chain::LocalChain::tip
|
||||||
pub chain_tip: CheckPoint,
|
pub chain_tip: CheckPoint,
|
||||||
/// Cache of full transactions, so the chain-source can avoid re-fetching.
|
|
||||||
pub tx_cache: TxCache,
|
|
||||||
/// Iterators of script pubkeys indexed by the keychain index.
|
/// Iterators of script pubkeys indexed by the keychain index.
|
||||||
pub spks_by_keychain: BTreeMap<K, Box<dyn Iterator<Item = (u32, ScriptBuf)> + Send>>,
|
pub spks_by_keychain: BTreeMap<K, Box<dyn Iterator<Item = Indexed<ScriptBuf>> + Send>>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<K: Ord + Clone> FullScanRequest<K> {
|
impl<K: Ord + Clone> FullScanRequest<K> {
|
||||||
@@ -239,49 +205,25 @@ impl<K: Ord + Clone> FullScanRequest<K> {
|
|||||||
pub fn from_chain_tip(chain_tip: CheckPoint) -> Self {
|
pub fn from_chain_tip(chain_tip: CheckPoint) -> Self {
|
||||||
Self {
|
Self {
|
||||||
chain_tip,
|
chain_tip,
|
||||||
tx_cache: TxCache::new(),
|
|
||||||
spks_by_keychain: BTreeMap::new(),
|
spks_by_keychain: BTreeMap::new(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Add to the [`TxCache`] held by the request.
|
|
||||||
///
|
|
||||||
/// This consumes the [`SyncRequest`] and returns the updated one.
|
|
||||||
#[must_use]
|
|
||||||
pub fn cache_txs<T>(mut self, full_txs: impl IntoIterator<Item = (Txid, T)>) -> Self
|
|
||||||
where
|
|
||||||
T: Into<Arc<Transaction>>,
|
|
||||||
{
|
|
||||||
self.tx_cache = full_txs
|
|
||||||
.into_iter()
|
|
||||||
.map(|(txid, tx)| (txid, tx.into()))
|
|
||||||
.collect();
|
|
||||||
self
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Add all transactions from [`TxGraph`] into the [`TxCache`].
|
|
||||||
///
|
|
||||||
/// This consumes the [`SyncRequest`] and returns the updated one.
|
|
||||||
#[must_use]
|
|
||||||
pub fn cache_graph_txs<A>(self, graph: &TxGraph<A>) -> Self {
|
|
||||||
self.cache_txs(graph.full_txs().map(|tx_node| (tx_node.txid, tx_node.tx)))
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Construct a new [`FullScanRequest`] from a given `chain_tip` and `index`.
|
/// Construct a new [`FullScanRequest`] from a given `chain_tip` and `index`.
|
||||||
///
|
///
|
||||||
/// Unbounded script pubkey iterators for each keychain (`K`) are extracted using
|
/// Unbounded script pubkey iterators for each keychain (`K`) are extracted using
|
||||||
/// [`KeychainTxOutIndex::all_unbounded_spk_iters`] and is used to populate the
|
/// [`KeychainTxOutIndex::all_unbounded_spk_iters`] and is used to populate the
|
||||||
/// [`FullScanRequest`].
|
/// [`FullScanRequest`].
|
||||||
///
|
///
|
||||||
/// [`KeychainTxOutIndex::all_unbounded_spk_iters`]: crate::keychain::KeychainTxOutIndex::all_unbounded_spk_iters
|
/// [`KeychainTxOutIndex::all_unbounded_spk_iters`]: crate::indexer::keychain_txout::KeychainTxOutIndex::all_unbounded_spk_iters
|
||||||
#[cfg(feature = "miniscript")]
|
#[cfg(feature = "miniscript")]
|
||||||
#[must_use]
|
#[must_use]
|
||||||
pub fn from_keychain_txout_index(
|
pub fn from_keychain_txout_index(
|
||||||
chain_tip: CheckPoint,
|
chain_tip: CheckPoint,
|
||||||
index: &crate::keychain::KeychainTxOutIndex<K>,
|
index: &crate::indexer::keychain_txout::KeychainTxOutIndex<K>,
|
||||||
) -> Self
|
) -> Self
|
||||||
where
|
where
|
||||||
K: Debug,
|
K: core::fmt::Debug,
|
||||||
{
|
{
|
||||||
let mut req = Self::from_chain_tip(chain_tip);
|
let mut req = Self::from_chain_tip(chain_tip);
|
||||||
for (keychain, spks) in index.all_unbounded_spk_iters() {
|
for (keychain, spks) in index.all_unbounded_spk_iters() {
|
||||||
@@ -297,7 +239,7 @@ impl<K: Ord + Clone> FullScanRequest<K> {
|
|||||||
pub fn set_spks_for_keychain(
|
pub fn set_spks_for_keychain(
|
||||||
mut self,
|
mut self,
|
||||||
keychain: K,
|
keychain: K,
|
||||||
spks: impl IntoIterator<IntoIter = impl Iterator<Item = (u32, ScriptBuf)> + Send + 'static>,
|
spks: impl IntoIterator<IntoIter = impl Iterator<Item = Indexed<ScriptBuf>> + Send + 'static>,
|
||||||
) -> Self {
|
) -> Self {
|
||||||
self.spks_by_keychain
|
self.spks_by_keychain
|
||||||
.insert(keychain, Box::new(spks.into_iter()));
|
.insert(keychain, Box::new(spks.into_iter()));
|
||||||
@@ -311,7 +253,7 @@ impl<K: Ord + Clone> FullScanRequest<K> {
|
|||||||
pub fn chain_spks_for_keychain(
|
pub fn chain_spks_for_keychain(
|
||||||
mut self,
|
mut self,
|
||||||
keychain: K,
|
keychain: K,
|
||||||
spks: impl IntoIterator<IntoIter = impl Iterator<Item = (u32, ScriptBuf)> + Send + 'static>,
|
spks: impl IntoIterator<IntoIter = impl Iterator<Item = Indexed<ScriptBuf>> + Send + 'static>,
|
||||||
) -> Self {
|
) -> Self {
|
||||||
match self.spks_by_keychain.remove(&keychain) {
|
match self.spks_by_keychain.remove(&keychain) {
|
||||||
// clippy here suggests to remove `into_iter` from `spks.into_iter()`, but doing so
|
// clippy here suggests to remove `into_iter` from `spks.into_iter()`, but doing so
|
||||||
@@ -375,7 +317,7 @@ impl<K: Ord + Clone> FullScanRequest<K> {
|
|||||||
/// Data returned from a spk-based blockchain client full scan.
|
/// Data returned from a spk-based blockchain client full scan.
|
||||||
///
|
///
|
||||||
/// See also [`FullScanRequest`].
|
/// See also [`FullScanRequest`].
|
||||||
pub struct FullScanResult<K, A = ConfirmationTimeHeightAnchor> {
|
pub struct FullScanResult<K, A = ConfirmationBlockTime> {
|
||||||
/// The update to apply to the receiving [`LocalChain`](crate::local_chain::LocalChain).
|
/// The update to apply to the receiving [`LocalChain`](crate::local_chain::LocalChain).
|
||||||
pub graph_update: TxGraph<A>,
|
pub graph_update: TxGraph<A>,
|
||||||
/// The update to apply to the receiving [`TxGraph`].
|
/// The update to apply to the receiving [`TxGraph`].
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
|
|||||||
use crate::{
|
use crate::{
|
||||||
bitcoin::{secp256k1::Secp256k1, ScriptBuf},
|
bitcoin::{secp256k1::Secp256k1, ScriptBuf},
|
||||||
miniscript::{Descriptor, DescriptorPublicKey},
|
miniscript::{Descriptor, DescriptorPublicKey},
|
||||||
|
Indexed,
|
||||||
};
|
};
|
||||||
use core::{borrow::Borrow, ops::Bound, ops::RangeBounds};
|
use core::{borrow::Borrow, ops::Bound, ops::RangeBounds};
|
||||||
|
|
||||||
@@ -97,7 +98,7 @@ impl<D> Iterator for SpkIterator<D>
|
|||||||
where
|
where
|
||||||
D: Borrow<Descriptor<DescriptorPublicKey>>,
|
D: Borrow<Descriptor<DescriptorPublicKey>>,
|
||||||
{
|
{
|
||||||
type Item = (u32, ScriptBuf);
|
type Item = Indexed<ScriptBuf>;
|
||||||
|
|
||||||
fn next(&mut self) -> Option<Self::Item> {
|
fn next(&mut self) -> Option<Self::Item> {
|
||||||
// For non-wildcard descriptors, we expect the first element to be Some((0, spk)), then None after.
|
// For non-wildcard descriptors, we expect the first element to be Some((0, spk)), then None after.
|
||||||
@@ -136,7 +137,7 @@ where
|
|||||||
mod test {
|
mod test {
|
||||||
use crate::{
|
use crate::{
|
||||||
bitcoin::secp256k1::Secp256k1,
|
bitcoin::secp256k1::Secp256k1,
|
||||||
keychain::KeychainTxOutIndex,
|
indexer::keychain_txout::KeychainTxOutIndex,
|
||||||
miniscript::{Descriptor, DescriptorPublicKey},
|
miniscript::{Descriptor, DescriptorPublicKey},
|
||||||
spk_iter::{SpkIterator, BIP32_MAX_INDEX},
|
spk_iter::{SpkIterator, BIP32_MAX_INDEX},
|
||||||
};
|
};
|
||||||
@@ -158,8 +159,12 @@ mod test {
|
|||||||
let (external_descriptor,_) = Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/0/*)").unwrap();
|
let (external_descriptor,_) = Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/0/*)").unwrap();
|
||||||
let (internal_descriptor,_) = Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/1/*)").unwrap();
|
let (internal_descriptor,_) = Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/1/*)").unwrap();
|
||||||
|
|
||||||
let _ = txout_index.insert_descriptor(TestKeychain::External, external_descriptor.clone());
|
let _ = txout_index
|
||||||
let _ = txout_index.insert_descriptor(TestKeychain::Internal, internal_descriptor.clone());
|
.insert_descriptor(TestKeychain::External, external_descriptor.clone())
|
||||||
|
.unwrap();
|
||||||
|
let _ = txout_index
|
||||||
|
.insert_descriptor(TestKeychain::Internal, internal_descriptor.clone())
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
(txout_index, external_descriptor, internal_descriptor)
|
(txout_index, external_descriptor, internal_descriptor)
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -20,8 +20,7 @@ use alloc::vec::Vec;
|
|||||||
/// # use bdk_chain::local_chain::LocalChain;
|
/// # use bdk_chain::local_chain::LocalChain;
|
||||||
/// # use bdk_chain::tx_graph::TxGraph;
|
/// # use bdk_chain::tx_graph::TxGraph;
|
||||||
/// # use bdk_chain::BlockId;
|
/// # use bdk_chain::BlockId;
|
||||||
/// # use bdk_chain::ConfirmationHeightAnchor;
|
/// # use bdk_chain::ConfirmationBlockTime;
|
||||||
/// # use bdk_chain::ConfirmationTimeHeightAnchor;
|
|
||||||
/// # use bdk_chain::example_utils::*;
|
/// # use bdk_chain::example_utils::*;
|
||||||
/// # use bitcoin::hashes::Hash;
|
/// # use bitcoin::hashes::Hash;
|
||||||
/// // Initialize the local chain with two blocks.
|
/// // Initialize the local chain with two blocks.
|
||||||
@@ -43,46 +42,26 @@ use alloc::vec::Vec;
|
|||||||
/// let mut graph_a = TxGraph::<BlockId>::default();
|
/// let mut graph_a = TxGraph::<BlockId>::default();
|
||||||
/// let _ = graph_a.insert_tx(tx.clone());
|
/// let _ = graph_a.insert_tx(tx.clone());
|
||||||
/// graph_a.insert_anchor(
|
/// graph_a.insert_anchor(
|
||||||
/// tx.txid(),
|
/// tx.compute_txid(),
|
||||||
/// BlockId {
|
/// BlockId {
|
||||||
/// height: 1,
|
/// height: 1,
|
||||||
/// hash: Hash::hash("first".as_bytes()),
|
/// hash: Hash::hash("first".as_bytes()),
|
||||||
/// },
|
/// },
|
||||||
/// );
|
/// );
|
||||||
///
|
///
|
||||||
/// // Insert `tx` into a `TxGraph` that uses `ConfirmationHeightAnchor` as the anchor type.
|
/// // Insert `tx` into a `TxGraph` that uses `ConfirmationBlockTime` as the anchor type.
|
||||||
/// // This anchor records the anchor block and the confirmation height of the transaction.
|
/// // This anchor records the anchor block and the confirmation time of the transaction. When a
|
||||||
/// // When a transaction is anchored with `ConfirmationHeightAnchor`, the anchor block and
|
/// // transaction is anchored with `ConfirmationBlockTime`, the anchor block and confirmation block
|
||||||
/// // confirmation block can be different. However, the confirmation block cannot be higher than
|
/// // of the transaction is the same block.
|
||||||
/// // the anchor block and both blocks must be in the same chain for the anchor to be valid.
|
/// let mut graph_c = TxGraph::<ConfirmationBlockTime>::default();
|
||||||
/// let mut graph_b = TxGraph::<ConfirmationHeightAnchor>::default();
|
|
||||||
/// let _ = graph_b.insert_tx(tx.clone());
|
|
||||||
/// graph_b.insert_anchor(
|
|
||||||
/// tx.txid(),
|
|
||||||
/// ConfirmationHeightAnchor {
|
|
||||||
/// anchor_block: BlockId {
|
|
||||||
/// height: 2,
|
|
||||||
/// hash: Hash::hash("second".as_bytes()),
|
|
||||||
/// },
|
|
||||||
/// confirmation_height: 1,
|
|
||||||
/// },
|
|
||||||
/// );
|
|
||||||
///
|
|
||||||
/// // Insert `tx` into a `TxGraph` that uses `ConfirmationTimeHeightAnchor` as the anchor type.
|
|
||||||
/// // This anchor records the anchor block, the confirmation height and time of the transaction.
|
|
||||||
/// // When a transaction is anchored with `ConfirmationTimeHeightAnchor`, the anchor block and
|
|
||||||
/// // confirmation block can be different. However, the confirmation block cannot be higher than
|
|
||||||
/// // the anchor block and both blocks must be in the same chain for the anchor to be valid.
|
|
||||||
/// let mut graph_c = TxGraph::<ConfirmationTimeHeightAnchor>::default();
|
|
||||||
/// let _ = graph_c.insert_tx(tx.clone());
|
/// let _ = graph_c.insert_tx(tx.clone());
|
||||||
/// graph_c.insert_anchor(
|
/// graph_c.insert_anchor(
|
||||||
/// tx.txid(),
|
/// tx.compute_txid(),
|
||||||
/// ConfirmationTimeHeightAnchor {
|
/// ConfirmationBlockTime {
|
||||||
/// anchor_block: BlockId {
|
/// block_id: BlockId {
|
||||||
/// height: 2,
|
/// height: 2,
|
||||||
/// hash: Hash::hash("third".as_bytes()),
|
/// hash: Hash::hash("third".as_bytes()),
|
||||||
/// },
|
/// },
|
||||||
/// confirmation_height: 1,
|
|
||||||
/// confirmation_time: 123,
|
/// confirmation_time: 123,
|
||||||
/// },
|
/// },
|
||||||
/// );
|
/// );
|
||||||
@@ -113,17 +92,26 @@ pub trait AnchorFromBlockPosition: Anchor {
|
|||||||
fn from_block_position(block: &bitcoin::Block, block_id: BlockId, tx_pos: usize) -> Self;
|
fn from_block_position(block: &bitcoin::Block, block_id: BlockId, tx_pos: usize) -> Self;
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Trait that makes an object appendable.
|
/// Trait that makes an object mergeable.
|
||||||
pub trait Append {
|
pub trait Merge: Default {
|
||||||
/// Append another object of the same type onto `self`.
|
/// Merge another object of the same type onto `self`.
|
||||||
fn append(&mut self, other: Self);
|
fn merge(&mut self, other: Self);
|
||||||
|
|
||||||
/// Returns whether the structure is considered empty.
|
/// Returns whether the structure is considered empty.
|
||||||
fn is_empty(&self) -> bool;
|
fn is_empty(&self) -> bool;
|
||||||
|
|
||||||
|
/// Take the value, replacing it with the default value.
|
||||||
|
fn take(&mut self) -> Option<Self> {
|
||||||
|
if self.is_empty() {
|
||||||
|
None
|
||||||
|
} else {
|
||||||
|
Some(core::mem::take(self))
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<K: Ord, V> Append for BTreeMap<K, V> {
|
impl<K: Ord, V> Merge for BTreeMap<K, V> {
|
||||||
fn append(&mut self, other: Self) {
|
fn merge(&mut self, other: Self) {
|
||||||
// We use `extend` instead of `BTreeMap::append` due to performance issues with `append`.
|
// We use `extend` instead of `BTreeMap::append` due to performance issues with `append`.
|
||||||
// Refer to https://github.com/rust-lang/rust/issues/34666#issuecomment-675658420
|
// Refer to https://github.com/rust-lang/rust/issues/34666#issuecomment-675658420
|
||||||
BTreeMap::extend(self, other)
|
BTreeMap::extend(self, other)
|
||||||
@@ -134,8 +122,8 @@ impl<K: Ord, V> Append for BTreeMap<K, V> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<T: Ord> Append for BTreeSet<T> {
|
impl<T: Ord> Merge for BTreeSet<T> {
|
||||||
fn append(&mut self, other: Self) {
|
fn merge(&mut self, other: Self) {
|
||||||
// We use `extend` instead of `BTreeMap::append` due to performance issues with `append`.
|
// We use `extend` instead of `BTreeMap::append` due to performance issues with `append`.
|
||||||
// Refer to https://github.com/rust-lang/rust/issues/34666#issuecomment-675658420
|
// Refer to https://github.com/rust-lang/rust/issues/34666#issuecomment-675658420
|
||||||
BTreeSet::extend(self, other)
|
BTreeSet::extend(self, other)
|
||||||
@@ -146,8 +134,8 @@ impl<T: Ord> Append for BTreeSet<T> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<T> Append for Vec<T> {
|
impl<T> Merge for Vec<T> {
|
||||||
fn append(&mut self, mut other: Self) {
|
fn merge(&mut self, mut other: Self) {
|
||||||
Vec::append(self, &mut other)
|
Vec::append(self, &mut other)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -156,30 +144,30 @@ impl<T> Append for Vec<T> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
macro_rules! impl_append_for_tuple {
|
macro_rules! impl_merge_for_tuple {
|
||||||
($($a:ident $b:tt)*) => {
|
($($a:ident $b:tt)*) => {
|
||||||
impl<$($a),*> Append for ($($a,)*) where $($a: Append),* {
|
impl<$($a),*> Merge for ($($a,)*) where $($a: Merge),* {
|
||||||
|
|
||||||
fn append(&mut self, _other: Self) {
|
fn merge(&mut self, _other: Self) {
|
||||||
$(Append::append(&mut self.$b, _other.$b) );*
|
$(Merge::merge(&mut self.$b, _other.$b) );*
|
||||||
}
|
}
|
||||||
|
|
||||||
fn is_empty(&self) -> bool {
|
fn is_empty(&self) -> bool {
|
||||||
$(Append::is_empty(&self.$b) && )* true
|
$(Merge::is_empty(&self.$b) && )* true
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl_append_for_tuple!();
|
impl_merge_for_tuple!();
|
||||||
impl_append_for_tuple!(T0 0);
|
impl_merge_for_tuple!(T0 0);
|
||||||
impl_append_for_tuple!(T0 0 T1 1);
|
impl_merge_for_tuple!(T0 0 T1 1);
|
||||||
impl_append_for_tuple!(T0 0 T1 1 T2 2);
|
impl_merge_for_tuple!(T0 0 T1 1 T2 2);
|
||||||
impl_append_for_tuple!(T0 0 T1 1 T2 2 T3 3);
|
impl_merge_for_tuple!(T0 0 T1 1 T2 2 T3 3);
|
||||||
impl_append_for_tuple!(T0 0 T1 1 T2 2 T3 3 T4 4);
|
impl_merge_for_tuple!(T0 0 T1 1 T2 2 T3 3 T4 4);
|
||||||
impl_append_for_tuple!(T0 0 T1 1 T2 2 T3 3 T4 4 T5 5);
|
impl_merge_for_tuple!(T0 0 T1 1 T2 2 T3 3 T4 4 T5 5);
|
||||||
impl_append_for_tuple!(T0 0 T1 1 T2 2 T3 3 T4 4 T5 5 T6 6);
|
impl_merge_for_tuple!(T0 0 T1 1 T2 2 T3 3 T4 4 T5 5 T6 6);
|
||||||
impl_append_for_tuple!(T0 0 T1 1 T2 2 T3 3 T4 4 T5 5 T6 6 T7 7);
|
impl_merge_for_tuple!(T0 0 T1 1 T2 2 T3 3 T4 4 T5 5 T6 6 T7 7);
|
||||||
impl_append_for_tuple!(T0 0 T1 1 T2 2 T3 3 T4 4 T5 5 T6 6 T7 7 T8 8);
|
impl_merge_for_tuple!(T0 0 T1 1 T2 2 T3 3 T4 4 T5 5 T6 6 T7 7 T8 8);
|
||||||
impl_append_for_tuple!(T0 0 T1 1 T2 2 T3 3 T4 4 T5 5 T6 6 T7 7 T8 8 T9 9);
|
impl_merge_for_tuple!(T0 0 T1 1 T2 2 T3 3 T4 4 T5 5 T6 6 T7 7 T8 8 T9 9);
|
||||||
impl_append_for_tuple!(T0 0 T1 1 T2 2 T3 3 T4 4 T5 5 T6 6 T7 7 T8 8 T9 9 T10 10);
|
impl_merge_for_tuple!(T0 0 T1 1 T2 2 T3 3 T4 4 T5 5 T6 6 T7 7 T8 8 T9 9 T10 10);
|
||||||
|
|||||||
@@ -69,7 +69,7 @@
|
|||||||
//! A [`TxGraph`] can also be updated with another [`TxGraph`] which merges them together.
|
//! A [`TxGraph`] can also be updated with another [`TxGraph`] which merges them together.
|
||||||
//!
|
//!
|
||||||
//! ```
|
//! ```
|
||||||
//! # use bdk_chain::{Append, BlockId};
|
//! # use bdk_chain::{Merge, BlockId};
|
||||||
//! # use bdk_chain::tx_graph::TxGraph;
|
//! # use bdk_chain::tx_graph::TxGraph;
|
||||||
//! # use bdk_chain::example_utils::*;
|
//! # use bdk_chain::example_utils::*;
|
||||||
//! # use bitcoin::Transaction;
|
//! # use bitcoin::Transaction;
|
||||||
@@ -89,13 +89,12 @@
|
|||||||
//! [`insert_txout`]: TxGraph::insert_txout
|
//! [`insert_txout`]: TxGraph::insert_txout
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
collections::*, keychain::Balance, Anchor, Append, BlockId, ChainOracle, ChainPosition,
|
collections::*, Anchor, Balance, BlockId, ChainOracle, ChainPosition, FullTxOut, Merge,
|
||||||
FullTxOut,
|
|
||||||
};
|
};
|
||||||
use alloc::collections::vec_deque::VecDeque;
|
use alloc::collections::vec_deque::VecDeque;
|
||||||
use alloc::sync::Arc;
|
use alloc::sync::Arc;
|
||||||
use alloc::vec::Vec;
|
use alloc::vec::Vec;
|
||||||
use bitcoin::{Amount, OutPoint, Script, Transaction, TxOut, Txid};
|
use bitcoin::{Amount, OutPoint, ScriptBuf, SignedAmount, Transaction, TxOut, Txid};
|
||||||
use core::fmt::{self, Formatter};
|
use core::fmt::{self, Formatter};
|
||||||
use core::{
|
use core::{
|
||||||
convert::Infallible,
|
convert::Infallible,
|
||||||
@@ -109,10 +108,11 @@ use core::{
|
|||||||
/// [module-level documentation]: crate::tx_graph
|
/// [module-level documentation]: crate::tx_graph
|
||||||
#[derive(Clone, Debug, PartialEq)]
|
#[derive(Clone, Debug, PartialEq)]
|
||||||
pub struct TxGraph<A = ()> {
|
pub struct TxGraph<A = ()> {
|
||||||
// all transactions that the graph is aware of in format: `(tx_node, tx_anchors, tx_last_seen)`
|
// all transactions that the graph is aware of in format: `(tx_node, tx_anchors)`
|
||||||
txs: HashMap<Txid, (TxNodeInternal, BTreeSet<A>, u64)>,
|
txs: HashMap<Txid, (TxNodeInternal, BTreeSet<A>)>,
|
||||||
spends: BTreeMap<OutPoint, HashSet<Txid>>,
|
spends: BTreeMap<OutPoint, HashSet<Txid>>,
|
||||||
anchors: BTreeSet<(A, Txid)>,
|
anchors: BTreeSet<(A, Txid)>,
|
||||||
|
last_seen: HashMap<Txid, u64>,
|
||||||
|
|
||||||
// This atrocity exists so that `TxGraph::outspends()` can return a reference.
|
// This atrocity exists so that `TxGraph::outspends()` can return a reference.
|
||||||
// FIXME: This can be removed once `HashSet::new` is a const fn.
|
// FIXME: This can be removed once `HashSet::new` is a const fn.
|
||||||
@@ -125,6 +125,7 @@ impl<A> Default for TxGraph<A> {
|
|||||||
txs: Default::default(),
|
txs: Default::default(),
|
||||||
spends: Default::default(),
|
spends: Default::default(),
|
||||||
anchors: Default::default(),
|
anchors: Default::default(),
|
||||||
|
last_seen: Default::default(),
|
||||||
empty_outspends: Default::default(),
|
empty_outspends: Default::default(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -140,7 +141,7 @@ pub struct TxNode<'a, T, A> {
|
|||||||
/// The blocks that the transaction is "anchored" in.
|
/// The blocks that the transaction is "anchored" in.
|
||||||
pub anchors: &'a BTreeSet<A>,
|
pub anchors: &'a BTreeSet<A>,
|
||||||
/// The last-seen unix timestamp of the transaction as unconfirmed.
|
/// The last-seen unix timestamp of the transaction as unconfirmed.
|
||||||
pub last_seen_unconfirmed: u64,
|
pub last_seen_unconfirmed: Option<u64>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a, T, A> Deref for TxNode<'a, T, A> {
|
impl<'a, T, A> Deref for TxNode<'a, T, A> {
|
||||||
@@ -182,7 +183,7 @@ pub enum CalculateFeeError {
|
|||||||
/// Missing `TxOut` for one or more of the inputs of the tx
|
/// Missing `TxOut` for one or more of the inputs of the tx
|
||||||
MissingTxOut(Vec<OutPoint>),
|
MissingTxOut(Vec<OutPoint>),
|
||||||
/// When the transaction is invalid according to the graph it has a negative fee
|
/// When the transaction is invalid according to the graph it has a negative fee
|
||||||
NegativeFee(i64),
|
NegativeFee(SignedAmount),
|
||||||
}
|
}
|
||||||
|
|
||||||
impl fmt::Display for CalculateFeeError {
|
impl fmt::Display for CalculateFeeError {
|
||||||
@@ -196,7 +197,7 @@ impl fmt::Display for CalculateFeeError {
|
|||||||
CalculateFeeError::NegativeFee(fee) => write!(
|
CalculateFeeError::NegativeFee(fee) => write!(
|
||||||
f,
|
f,
|
||||||
"transaction is invalid according to the graph and has negative fee: {}",
|
"transaction is invalid according to the graph and has negative fee: {}",
|
||||||
fee
|
fee.display_dynamic()
|
||||||
),
|
),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -210,7 +211,7 @@ impl<A> TxGraph<A> {
|
|||||||
///
|
///
|
||||||
/// This includes txouts of both full transactions as well as floating transactions.
|
/// This includes txouts of both full transactions as well as floating transactions.
|
||||||
pub fn all_txouts(&self) -> impl Iterator<Item = (OutPoint, &TxOut)> {
|
pub fn all_txouts(&self) -> impl Iterator<Item = (OutPoint, &TxOut)> {
|
||||||
self.txs.iter().flat_map(|(txid, (tx, _, _))| match tx {
|
self.txs.iter().flat_map(|(txid, (tx, _))| match tx {
|
||||||
TxNodeInternal::Whole(tx) => tx
|
TxNodeInternal::Whole(tx) => tx
|
||||||
.as_ref()
|
.as_ref()
|
||||||
.output
|
.output
|
||||||
@@ -232,7 +233,7 @@ impl<A> TxGraph<A> {
|
|||||||
pub fn floating_txouts(&self) -> impl Iterator<Item = (OutPoint, &TxOut)> {
|
pub fn floating_txouts(&self) -> impl Iterator<Item = (OutPoint, &TxOut)> {
|
||||||
self.txs
|
self.txs
|
||||||
.iter()
|
.iter()
|
||||||
.filter_map(|(txid, (tx_node, _, _))| match tx_node {
|
.filter_map(|(txid, (tx_node, _))| match tx_node {
|
||||||
TxNodeInternal::Whole(_) => None,
|
TxNodeInternal::Whole(_) => None,
|
||||||
TxNodeInternal::Partial(txouts) => Some(
|
TxNodeInternal::Partial(txouts) => Some(
|
||||||
txouts
|
txouts
|
||||||
@@ -247,17 +248,30 @@ impl<A> TxGraph<A> {
|
|||||||
pub fn full_txs(&self) -> impl Iterator<Item = TxNode<'_, Arc<Transaction>, A>> {
|
pub fn full_txs(&self) -> impl Iterator<Item = TxNode<'_, Arc<Transaction>, A>> {
|
||||||
self.txs
|
self.txs
|
||||||
.iter()
|
.iter()
|
||||||
.filter_map(|(&txid, (tx, anchors, last_seen))| match tx {
|
.filter_map(|(&txid, (tx, anchors))| match tx {
|
||||||
TxNodeInternal::Whole(tx) => Some(TxNode {
|
TxNodeInternal::Whole(tx) => Some(TxNode {
|
||||||
txid,
|
txid,
|
||||||
tx: tx.clone(),
|
tx: tx.clone(),
|
||||||
anchors,
|
anchors,
|
||||||
last_seen_unconfirmed: *last_seen,
|
last_seen_unconfirmed: self.last_seen.get(&txid).copied(),
|
||||||
}),
|
}),
|
||||||
TxNodeInternal::Partial(_) => None,
|
TxNodeInternal::Partial(_) => None,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Iterate over graph transactions with no anchors or last-seen.
|
||||||
|
pub fn txs_with_no_anchor_or_last_seen(
|
||||||
|
&self,
|
||||||
|
) -> impl Iterator<Item = TxNode<'_, Arc<Transaction>, A>> {
|
||||||
|
self.full_txs().filter_map(|tx| {
|
||||||
|
if tx.anchors.is_empty() && tx.last_seen_unconfirmed.is_none() {
|
||||||
|
Some(tx)
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
/// Get a transaction by txid. This only returns `Some` for full transactions.
|
/// Get a transaction by txid. This only returns `Some` for full transactions.
|
||||||
///
|
///
|
||||||
/// Refer to [`get_txout`] for getting a specific [`TxOut`].
|
/// Refer to [`get_txout`] for getting a specific [`TxOut`].
|
||||||
@@ -270,11 +284,11 @@ impl<A> TxGraph<A> {
|
|||||||
/// Get a transaction node by txid. This only returns `Some` for full transactions.
|
/// Get a transaction node by txid. This only returns `Some` for full transactions.
|
||||||
pub fn get_tx_node(&self, txid: Txid) -> Option<TxNode<'_, Arc<Transaction>, A>> {
|
pub fn get_tx_node(&self, txid: Txid) -> Option<TxNode<'_, Arc<Transaction>, A>> {
|
||||||
match &self.txs.get(&txid)? {
|
match &self.txs.get(&txid)? {
|
||||||
(TxNodeInternal::Whole(tx), anchors, last_seen) => Some(TxNode {
|
(TxNodeInternal::Whole(tx), anchors) => Some(TxNode {
|
||||||
txid,
|
txid,
|
||||||
tx: tx.clone(),
|
tx: tx.clone(),
|
||||||
anchors,
|
anchors,
|
||||||
last_seen_unconfirmed: *last_seen,
|
last_seen_unconfirmed: self.last_seen.get(&txid).copied(),
|
||||||
}),
|
}),
|
||||||
_ => None,
|
_ => None,
|
||||||
}
|
}
|
||||||
@@ -307,7 +321,7 @@ impl<A> TxGraph<A> {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Calculates the fee of a given transaction. Returns 0 if `tx` is a coinbase transaction.
|
/// Calculates the fee of a given transaction. Returns [`Amount::ZERO`] if `tx` is a coinbase transaction.
|
||||||
/// Returns `OK(_)` if we have all the [`TxOut`]s being spent by `tx` in the graph (either as
|
/// Returns `OK(_)` if we have all the [`TxOut`]s being spent by `tx` in the graph (either as
|
||||||
/// the full transactions or individual txouts).
|
/// the full transactions or individual txouts).
|
||||||
///
|
///
|
||||||
@@ -318,20 +332,20 @@ impl<A> TxGraph<A> {
|
|||||||
/// Note `tx` does not have to be in the graph for this to work.
|
/// Note `tx` does not have to be in the graph for this to work.
|
||||||
///
|
///
|
||||||
/// [`insert_txout`]: Self::insert_txout
|
/// [`insert_txout`]: Self::insert_txout
|
||||||
pub fn calculate_fee(&self, tx: &Transaction) -> Result<u64, CalculateFeeError> {
|
pub fn calculate_fee(&self, tx: &Transaction) -> Result<Amount, CalculateFeeError> {
|
||||||
if tx.is_coinbase() {
|
if tx.is_coinbase() {
|
||||||
return Ok(0);
|
return Ok(Amount::ZERO);
|
||||||
}
|
}
|
||||||
|
|
||||||
let (inputs_sum, missing_outputs) = tx.input.iter().fold(
|
let (inputs_sum, missing_outputs) = tx.input.iter().fold(
|
||||||
(0_i64, Vec::new()),
|
(SignedAmount::ZERO, Vec::new()),
|
||||||
|(mut sum, mut missing_outpoints), txin| match self.get_txout(txin.previous_output) {
|
|(mut sum, mut missing_outpoints), txin| match self.get_txout(txin.previous_output) {
|
||||||
None => {
|
None => {
|
||||||
missing_outpoints.push(txin.previous_output);
|
missing_outpoints.push(txin.previous_output);
|
||||||
(sum, missing_outpoints)
|
(sum, missing_outpoints)
|
||||||
}
|
}
|
||||||
Some(txout) => {
|
Some(txout) => {
|
||||||
sum += txout.value.to_sat() as i64;
|
sum += txout.value.to_signed().expect("valid `SignedAmount`");
|
||||||
(sum, missing_outpoints)
|
(sum, missing_outpoints)
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
@@ -343,15 +357,12 @@ impl<A> TxGraph<A> {
|
|||||||
let outputs_sum = tx
|
let outputs_sum = tx
|
||||||
.output
|
.output
|
||||||
.iter()
|
.iter()
|
||||||
.map(|txout| txout.value.to_sat() as i64)
|
.map(|txout| txout.value.to_signed().expect("valid `SignedAmount`"))
|
||||||
.sum::<i64>();
|
.sum::<SignedAmount>();
|
||||||
|
|
||||||
let fee = inputs_sum - outputs_sum;
|
let fee = inputs_sum - outputs_sum;
|
||||||
if fee < 0 {
|
fee.to_unsigned()
|
||||||
Err(CalculateFeeError::NegativeFee(fee))
|
.map_err(|_| CalculateFeeError::NegativeFee(fee))
|
||||||
} else {
|
|
||||||
Ok(fee as u64)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// The transactions spending from this output.
|
/// The transactions spending from this output.
|
||||||
@@ -448,7 +459,7 @@ impl<A> TxGraph<A> {
|
|||||||
&'g self,
|
&'g self,
|
||||||
tx: &'g Transaction,
|
tx: &'g Transaction,
|
||||||
) -> impl Iterator<Item = (usize, Txid)> + '_ {
|
) -> impl Iterator<Item = (usize, Txid)> + '_ {
|
||||||
let txid = tx.txid();
|
let txid = tx.compute_txid();
|
||||||
tx.input
|
tx.input
|
||||||
.iter()
|
.iter()
|
||||||
.enumerate()
|
.enumerate()
|
||||||
@@ -507,7 +518,6 @@ impl<A: Clone + Ord> TxGraph<A> {
|
|||||||
(
|
(
|
||||||
TxNodeInternal::Partial([(outpoint.vout, txout)].into()),
|
TxNodeInternal::Partial([(outpoint.vout, txout)].into()),
|
||||||
BTreeSet::new(),
|
BTreeSet::new(),
|
||||||
0,
|
|
||||||
),
|
),
|
||||||
);
|
);
|
||||||
self.apply_update(update)
|
self.apply_update(update)
|
||||||
@@ -519,9 +529,10 @@ impl<A: Clone + Ord> TxGraph<A> {
|
|||||||
pub fn insert_tx<T: Into<Arc<Transaction>>>(&mut self, tx: T) -> ChangeSet<A> {
|
pub fn insert_tx<T: Into<Arc<Transaction>>>(&mut self, tx: T) -> ChangeSet<A> {
|
||||||
let tx = tx.into();
|
let tx = tx.into();
|
||||||
let mut update = Self::default();
|
let mut update = Self::default();
|
||||||
update
|
update.txs.insert(
|
||||||
.txs
|
tx.compute_txid(),
|
||||||
.insert(tx.txid(), (TxNodeInternal::Whole(tx), BTreeSet::new(), 0));
|
(TxNodeInternal::Whole(tx), BTreeSet::new()),
|
||||||
|
);
|
||||||
self.apply_update(update)
|
self.apply_update(update)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -536,8 +547,8 @@ impl<A: Clone + Ord> TxGraph<A> {
|
|||||||
) -> ChangeSet<A> {
|
) -> ChangeSet<A> {
|
||||||
let mut changeset = ChangeSet::<A>::default();
|
let mut changeset = ChangeSet::<A>::default();
|
||||||
for (tx, seen_at) in txs {
|
for (tx, seen_at) in txs {
|
||||||
changeset.append(self.insert_seen_at(tx.txid(), seen_at));
|
changeset.merge(self.insert_seen_at(tx.compute_txid(), seen_at));
|
||||||
changeset.append(self.insert_tx(tx));
|
changeset.merge(self.insert_tx(tx));
|
||||||
}
|
}
|
||||||
changeset
|
changeset
|
||||||
}
|
}
|
||||||
@@ -561,8 +572,7 @@ impl<A: Clone + Ord> TxGraph<A> {
|
|||||||
/// [`update_last_seen_unconfirmed`]: Self::update_last_seen_unconfirmed
|
/// [`update_last_seen_unconfirmed`]: Self::update_last_seen_unconfirmed
|
||||||
pub fn insert_seen_at(&mut self, txid: Txid, seen_at: u64) -> ChangeSet<A> {
|
pub fn insert_seen_at(&mut self, txid: Txid, seen_at: u64) -> ChangeSet<A> {
|
||||||
let mut update = Self::default();
|
let mut update = Self::default();
|
||||||
let (_, _, update_last_seen) = update.txs.entry(txid).or_default();
|
update.last_seen.insert(txid, seen_at);
|
||||||
*update_last_seen = seen_at;
|
|
||||||
self.apply_update(update)
|
self.apply_update(update)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -609,7 +619,7 @@ impl<A: Clone + Ord> TxGraph<A> {
|
|||||||
.txs
|
.txs
|
||||||
.iter()
|
.iter()
|
||||||
.filter_map(
|
.filter_map(
|
||||||
|(&txid, (_, anchors, _))| {
|
|(&txid, (_, anchors))| {
|
||||||
if anchors.is_empty() {
|
if anchors.is_empty() {
|
||||||
Some(txid)
|
Some(txid)
|
||||||
} else {
|
} else {
|
||||||
@@ -620,7 +630,7 @@ impl<A: Clone + Ord> TxGraph<A> {
|
|||||||
.collect();
|
.collect();
|
||||||
|
|
||||||
for txid in unanchored_txs {
|
for txid in unanchored_txs {
|
||||||
changeset.append(self.insert_seen_at(txid, seen_at));
|
changeset.merge(self.insert_seen_at(txid, seen_at));
|
||||||
}
|
}
|
||||||
changeset
|
changeset
|
||||||
}
|
}
|
||||||
@@ -645,7 +655,7 @@ impl<A: Clone + Ord> TxGraph<A> {
|
|||||||
pub fn apply_changeset(&mut self, changeset: ChangeSet<A>) {
|
pub fn apply_changeset(&mut self, changeset: ChangeSet<A>) {
|
||||||
for wrapped_tx in changeset.txs {
|
for wrapped_tx in changeset.txs {
|
||||||
let tx = wrapped_tx.as_ref();
|
let tx = wrapped_tx.as_ref();
|
||||||
let txid = tx.txid();
|
let txid = tx.compute_txid();
|
||||||
|
|
||||||
tx.input
|
tx.input
|
||||||
.iter()
|
.iter()
|
||||||
@@ -658,21 +668,19 @@ impl<A: Clone + Ord> TxGraph<A> {
|
|||||||
});
|
});
|
||||||
|
|
||||||
match self.txs.get_mut(&txid) {
|
match self.txs.get_mut(&txid) {
|
||||||
Some((tx_node @ TxNodeInternal::Partial(_), _, _)) => {
|
Some((tx_node @ TxNodeInternal::Partial(_), _)) => {
|
||||||
*tx_node = TxNodeInternal::Whole(wrapped_tx.clone());
|
*tx_node = TxNodeInternal::Whole(wrapped_tx.clone());
|
||||||
}
|
}
|
||||||
Some((TxNodeInternal::Whole(tx), _, _)) => {
|
Some((TxNodeInternal::Whole(tx), _)) => {
|
||||||
debug_assert_eq!(
|
debug_assert_eq!(
|
||||||
tx.as_ref().txid(),
|
tx.as_ref().compute_txid(),
|
||||||
txid,
|
txid,
|
||||||
"tx should produce txid that is same as key"
|
"tx should produce txid that is same as key"
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
None => {
|
None => {
|
||||||
self.txs.insert(
|
self.txs
|
||||||
txid,
|
.insert(txid, (TxNodeInternal::Whole(wrapped_tx), BTreeSet::new()));
|
||||||
(TxNodeInternal::Whole(wrapped_tx), BTreeSet::new(), 0),
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -681,9 +689,8 @@ impl<A: Clone + Ord> TxGraph<A> {
|
|||||||
let tx_entry = self.txs.entry(outpoint.txid).or_default();
|
let tx_entry = self.txs.entry(outpoint.txid).or_default();
|
||||||
|
|
||||||
match tx_entry {
|
match tx_entry {
|
||||||
(TxNodeInternal::Whole(_), _, _) => { /* do nothing since we already have full tx */
|
(TxNodeInternal::Whole(_), _) => { /* do nothing since we already have full tx */ }
|
||||||
}
|
(TxNodeInternal::Partial(txouts), _) => {
|
||||||
(TxNodeInternal::Partial(txouts), _, _) => {
|
|
||||||
txouts.insert(outpoint.vout, txout);
|
txouts.insert(outpoint.vout, txout);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -691,13 +698,13 @@ impl<A: Clone + Ord> TxGraph<A> {
|
|||||||
|
|
||||||
for (anchor, txid) in changeset.anchors {
|
for (anchor, txid) in changeset.anchors {
|
||||||
if self.anchors.insert((anchor.clone(), txid)) {
|
if self.anchors.insert((anchor.clone(), txid)) {
|
||||||
let (_, anchors, _) = self.txs.entry(txid).or_default();
|
let (_, anchors) = self.txs.entry(txid).or_default();
|
||||||
anchors.insert(anchor);
|
anchors.insert(anchor);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
for (txid, new_last_seen) in changeset.last_seen {
|
for (txid, new_last_seen) in changeset.last_seen {
|
||||||
let (_, _, last_seen) = self.txs.entry(txid).or_default();
|
let last_seen = self.last_seen.entry(txid).or_default();
|
||||||
if new_last_seen > *last_seen {
|
if new_last_seen > *last_seen {
|
||||||
*last_seen = new_last_seen;
|
*last_seen = new_last_seen;
|
||||||
}
|
}
|
||||||
@@ -711,11 +718,10 @@ impl<A: Clone + Ord> TxGraph<A> {
|
|||||||
pub(crate) fn determine_changeset(&self, update: TxGraph<A>) -> ChangeSet<A> {
|
pub(crate) fn determine_changeset(&self, update: TxGraph<A>) -> ChangeSet<A> {
|
||||||
let mut changeset = ChangeSet::<A>::default();
|
let mut changeset = ChangeSet::<A>::default();
|
||||||
|
|
||||||
for (&txid, (update_tx_node, _, update_last_seen)) in &update.txs {
|
for (&txid, (update_tx_node, _)) in &update.txs {
|
||||||
let prev_last_seen: u64 = match (self.txs.get(&txid), update_tx_node) {
|
match (self.txs.get(&txid), update_tx_node) {
|
||||||
(None, TxNodeInternal::Whole(update_tx)) => {
|
(None, TxNodeInternal::Whole(update_tx)) => {
|
||||||
changeset.txs.insert(update_tx.clone());
|
changeset.txs.insert(update_tx.clone());
|
||||||
0
|
|
||||||
}
|
}
|
||||||
(None, TxNodeInternal::Partial(update_txos)) => {
|
(None, TxNodeInternal::Partial(update_txos)) => {
|
||||||
changeset.txouts.extend(
|
changeset.txouts.extend(
|
||||||
@@ -723,18 +729,13 @@ impl<A: Clone + Ord> TxGraph<A> {
|
|||||||
.iter()
|
.iter()
|
||||||
.map(|(&vout, txo)| (OutPoint::new(txid, vout), txo.clone())),
|
.map(|(&vout, txo)| (OutPoint::new(txid, vout), txo.clone())),
|
||||||
);
|
);
|
||||||
0
|
|
||||||
}
|
}
|
||||||
(Some((TxNodeInternal::Whole(_), _, last_seen)), _) => *last_seen,
|
(Some((TxNodeInternal::Whole(_), _)), _) => {}
|
||||||
(
|
(Some((TxNodeInternal::Partial(_), _)), TxNodeInternal::Whole(update_tx)) => {
|
||||||
Some((TxNodeInternal::Partial(_), _, last_seen)),
|
|
||||||
TxNodeInternal::Whole(update_tx),
|
|
||||||
) => {
|
|
||||||
changeset.txs.insert(update_tx.clone());
|
changeset.txs.insert(update_tx.clone());
|
||||||
*last_seen
|
|
||||||
}
|
}
|
||||||
(
|
(
|
||||||
Some((TxNodeInternal::Partial(txos), _, last_seen)),
|
Some((TxNodeInternal::Partial(txos), _)),
|
||||||
TxNodeInternal::Partial(update_txos),
|
TxNodeInternal::Partial(update_txos),
|
||||||
) => {
|
) => {
|
||||||
changeset.txouts.extend(
|
changeset.txouts.extend(
|
||||||
@@ -743,12 +744,14 @@ impl<A: Clone + Ord> TxGraph<A> {
|
|||||||
.filter(|(vout, _)| !txos.contains_key(*vout))
|
.filter(|(vout, _)| !txos.contains_key(*vout))
|
||||||
.map(|(&vout, txo)| (OutPoint::new(txid, vout), txo.clone())),
|
.map(|(&vout, txo)| (OutPoint::new(txid, vout), txo.clone())),
|
||||||
);
|
);
|
||||||
*last_seen
|
|
||||||
}
|
}
|
||||||
};
|
}
|
||||||
|
}
|
||||||
|
|
||||||
if *update_last_seen > prev_last_seen {
|
for (txid, update_last_seen) in update.last_seen {
|
||||||
changeset.last_seen.insert(txid, *update_last_seen);
|
let prev_last_seen = self.last_seen.get(&txid).copied();
|
||||||
|
if Some(update_last_seen) > prev_last_seen {
|
||||||
|
changeset.last_seen.insert(txid, update_last_seen);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -788,7 +791,7 @@ impl<A: Anchor> TxGraph<A> {
|
|||||||
chain_tip: BlockId,
|
chain_tip: BlockId,
|
||||||
txid: Txid,
|
txid: Txid,
|
||||||
) -> Result<Option<ChainPosition<&A>>, C::Error> {
|
) -> Result<Option<ChainPosition<&A>>, C::Error> {
|
||||||
let (tx_node, anchors, last_seen) = match self.txs.get(&txid) {
|
let (tx_node, anchors) = match self.txs.get(&txid) {
|
||||||
Some(v) => v,
|
Some(v) => v,
|
||||||
None => return Ok(None),
|
None => return Ok(None),
|
||||||
};
|
};
|
||||||
@@ -800,6 +803,13 @@ impl<A: Anchor> TxGraph<A> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// If no anchors are in best chain and we don't have a last_seen, we can return
|
||||||
|
// early because by definition the tx doesn't have a chain position.
|
||||||
|
let last_seen = match self.last_seen.get(&txid) {
|
||||||
|
Some(t) => *t,
|
||||||
|
None => return Ok(None),
|
||||||
|
};
|
||||||
|
|
||||||
// The tx is not anchored to a block in the best chain, which means that it
|
// The tx is not anchored to a block in the best chain, which means that it
|
||||||
// might be in mempool, or it might have been dropped already.
|
// might be in mempool, or it might have been dropped already.
|
||||||
// Let's check conflicts to find out!
|
// Let's check conflicts to find out!
|
||||||
@@ -828,7 +838,7 @@ impl<A: Anchor> TxGraph<A> {
|
|||||||
// resulting array will also include `tx`
|
// resulting array will also include `tx`
|
||||||
let unconfirmed_ancestor_txs =
|
let unconfirmed_ancestor_txs =
|
||||||
TxAncestors::new_include_root(self, tx.clone(), |_, ancestor_tx: Arc<Transaction>| {
|
TxAncestors::new_include_root(self, tx.clone(), |_, ancestor_tx: Arc<Transaction>| {
|
||||||
let tx_node = self.get_tx_node(ancestor_tx.as_ref().txid())?;
|
let tx_node = self.get_tx_node(ancestor_tx.as_ref().compute_txid())?;
|
||||||
// We're filtering the ancestors to keep only the unconfirmed ones (= no anchors in
|
// We're filtering the ancestors to keep only the unconfirmed ones (= no anchors in
|
||||||
// the best chain)
|
// the best chain)
|
||||||
for block in tx_node.anchors {
|
for block in tx_node.anchors {
|
||||||
@@ -846,7 +856,7 @@ impl<A: Anchor> TxGraph<A> {
|
|||||||
// and our unconf descendants' last seen.
|
// and our unconf descendants' last seen.
|
||||||
let unconfirmed_descendants_txs = TxDescendants::new_include_root(
|
let unconfirmed_descendants_txs = TxDescendants::new_include_root(
|
||||||
self,
|
self,
|
||||||
tx.as_ref().txid(),
|
tx.as_ref().compute_txid(),
|
||||||
|_, descendant_txid: Txid| {
|
|_, descendant_txid: Txid| {
|
||||||
let tx_node = self.get_tx_node(descendant_txid)?;
|
let tx_node = self.get_tx_node(descendant_txid)?;
|
||||||
// We're filtering the ancestors to keep only the unconfirmed ones (= no anchors in
|
// We're filtering the ancestors to keep only the unconfirmed ones (= no anchors in
|
||||||
@@ -886,8 +896,8 @@ impl<A: Anchor> TxGraph<A> {
|
|||||||
if conflicting_tx.last_seen_unconfirmed > tx_last_seen {
|
if conflicting_tx.last_seen_unconfirmed > tx_last_seen {
|
||||||
return Ok(None);
|
return Ok(None);
|
||||||
}
|
}
|
||||||
if conflicting_tx.last_seen_unconfirmed == *last_seen
|
if conflicting_tx.last_seen_unconfirmed == Some(last_seen)
|
||||||
&& conflicting_tx.as_ref().txid() > tx.as_ref().txid()
|
&& conflicting_tx.as_ref().compute_txid() > tx.as_ref().compute_txid()
|
||||||
{
|
{
|
||||||
// Conflicting tx has priority if txid of conflicting tx > txid of original tx
|
// Conflicting tx has priority if txid of conflicting tx > txid of original tx
|
||||||
return Ok(None);
|
return Ok(None);
|
||||||
@@ -895,7 +905,7 @@ impl<A: Anchor> TxGraph<A> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(Some(ChainPosition::Unconfirmed(*last_seen)))
|
Ok(Some(ChainPosition::Unconfirmed(last_seen)))
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Get the position of the transaction in `chain` with tip `chain_tip`.
|
/// Get the position of the transaction in `chain` with tip `chain_tip`.
|
||||||
@@ -973,10 +983,10 @@ impl<A: Anchor> TxGraph<A> {
|
|||||||
/// If the [`ChainOracle`] implementation (`chain`) fails, an error will be returned with the
|
/// If the [`ChainOracle`] implementation (`chain`) fails, an error will be returned with the
|
||||||
/// returned item.
|
/// returned item.
|
||||||
///
|
///
|
||||||
/// If the [`ChainOracle`] is infallible, [`list_chain_txs`] can be used instead.
|
/// If the [`ChainOracle`] is infallible, [`list_canonical_txs`] can be used instead.
|
||||||
///
|
///
|
||||||
/// [`list_chain_txs`]: Self::list_chain_txs
|
/// [`list_canonical_txs`]: Self::list_canonical_txs
|
||||||
pub fn try_list_chain_txs<'a, C: ChainOracle + 'a>(
|
pub fn try_list_canonical_txs<'a, C: ChainOracle + 'a>(
|
||||||
&'a self,
|
&'a self,
|
||||||
chain: &'a C,
|
chain: &'a C,
|
||||||
chain_tip: BlockId,
|
chain_tip: BlockId,
|
||||||
@@ -995,15 +1005,15 @@ impl<A: Anchor> TxGraph<A> {
|
|||||||
|
|
||||||
/// List graph transactions that are in `chain` with `chain_tip`.
|
/// List graph transactions that are in `chain` with `chain_tip`.
|
||||||
///
|
///
|
||||||
/// This is the infallible version of [`try_list_chain_txs`].
|
/// This is the infallible version of [`try_list_canonical_txs`].
|
||||||
///
|
///
|
||||||
/// [`try_list_chain_txs`]: Self::try_list_chain_txs
|
/// [`try_list_canonical_txs`]: Self::try_list_canonical_txs
|
||||||
pub fn list_chain_txs<'a, C: ChainOracle + 'a>(
|
pub fn list_canonical_txs<'a, C: ChainOracle + 'a>(
|
||||||
&'a self,
|
&'a self,
|
||||||
chain: &'a C,
|
chain: &'a C,
|
||||||
chain_tip: BlockId,
|
chain_tip: BlockId,
|
||||||
) -> impl Iterator<Item = CanonicalTx<'a, Arc<Transaction>, A>> {
|
) -> impl Iterator<Item = CanonicalTx<'a, Arc<Transaction>, A>> {
|
||||||
self.try_list_chain_txs(chain, chain_tip)
|
self.try_list_canonical_txs(chain, chain_tip)
|
||||||
.map(|r| r.expect("oracle is infallible"))
|
.map(|r| r.expect("oracle is infallible"))
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1153,7 +1163,7 @@ impl<A: Anchor> TxGraph<A> {
|
|||||||
chain: &C,
|
chain: &C,
|
||||||
chain_tip: BlockId,
|
chain_tip: BlockId,
|
||||||
outpoints: impl IntoIterator<Item = (OI, OutPoint)>,
|
outpoints: impl IntoIterator<Item = (OI, OutPoint)>,
|
||||||
mut trust_predicate: impl FnMut(&OI, &Script) -> bool,
|
mut trust_predicate: impl FnMut(&OI, ScriptBuf) -> bool,
|
||||||
) -> Result<Balance, C::Error> {
|
) -> Result<Balance, C::Error> {
|
||||||
let mut immature = Amount::ZERO;
|
let mut immature = Amount::ZERO;
|
||||||
let mut trusted_pending = Amount::ZERO;
|
let mut trusted_pending = Amount::ZERO;
|
||||||
@@ -1172,7 +1182,7 @@ impl<A: Anchor> TxGraph<A> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
ChainPosition::Unconfirmed(_) => {
|
ChainPosition::Unconfirmed(_) => {
|
||||||
if trust_predicate(&spk_i, &txout.txout.script_pubkey) {
|
if trust_predicate(&spk_i, txout.txout.script_pubkey) {
|
||||||
trusted_pending += txout.txout.value;
|
trusted_pending += txout.txout.value;
|
||||||
} else {
|
} else {
|
||||||
untrusted_pending += txout.txout.value;
|
untrusted_pending += txout.txout.value;
|
||||||
@@ -1199,7 +1209,7 @@ impl<A: Anchor> TxGraph<A> {
|
|||||||
chain: &C,
|
chain: &C,
|
||||||
chain_tip: BlockId,
|
chain_tip: BlockId,
|
||||||
outpoints: impl IntoIterator<Item = (OI, OutPoint)>,
|
outpoints: impl IntoIterator<Item = (OI, OutPoint)>,
|
||||||
trust_predicate: impl FnMut(&OI, &Script) -> bool,
|
trust_predicate: impl FnMut(&OI, ScriptBuf) -> bool,
|
||||||
) -> Balance {
|
) -> Balance {
|
||||||
self.try_balance(chain, chain_tip, outpoints, trust_predicate)
|
self.try_balance(chain, chain_tip, outpoints, trust_predicate)
|
||||||
.expect("oracle is infallible")
|
.expect("oracle is infallible")
|
||||||
@@ -1258,7 +1268,7 @@ impl<A> ChangeSet<A> {
|
|||||||
tx.output
|
tx.output
|
||||||
.iter()
|
.iter()
|
||||||
.enumerate()
|
.enumerate()
|
||||||
.map(move |(vout, txout)| (OutPoint::new(tx.txid(), vout as _), txout))
|
.map(move |(vout, txout)| (OutPoint::new(tx.compute_txid(), vout as _), txout))
|
||||||
})
|
})
|
||||||
.chain(self.txouts.iter().map(|(op, txout)| (*op, txout)))
|
.chain(self.txouts.iter().map(|(op, txout)| (*op, txout)))
|
||||||
}
|
}
|
||||||
@@ -1283,8 +1293,8 @@ impl<A> ChangeSet<A> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<A: Ord> Append for ChangeSet<A> {
|
impl<A: Ord> Merge for ChangeSet<A> {
|
||||||
fn append(&mut self, other: Self) {
|
fn merge(&mut self, other: Self) {
|
||||||
// We use `extend` instead of `BTreeMap::append` due to performance issues with `append`.
|
// We use `extend` instead of `BTreeMap::append` due to performance issues with `append`.
|
||||||
// Refer to https://github.com/rust-lang/rust/issues/34666#issuecomment-675658420
|
// Refer to https://github.com/rust-lang/rust/issues/34666#issuecomment-675658420
|
||||||
self.txs.extend(other.txs);
|
self.txs.extend(other.txs);
|
||||||
|
|||||||
@@ -3,7 +3,7 @@
|
|||||||
use rand::distributions::{Alphanumeric, DistString};
|
use rand::distributions::{Alphanumeric, DistString};
|
||||||
use std::collections::HashMap;
|
use std::collections::HashMap;
|
||||||
|
|
||||||
use bdk_chain::{tx_graph::TxGraph, Anchor, SpkTxOutIndex};
|
use bdk_chain::{spk_txout::SpkTxOutIndex, tx_graph::TxGraph, Anchor};
|
||||||
use bitcoin::{
|
use bitcoin::{
|
||||||
locktime::absolute::LockTime, secp256k1::Secp256k1, transaction, Amount, OutPoint, ScriptBuf,
|
locktime::absolute::LockTime, secp256k1::Secp256k1, transaction, Amount, OutPoint, ScriptBuf,
|
||||||
Sequence, Transaction, TxIn, TxOut, Txid, Witness,
|
Sequence, Transaction, TxIn, TxOut, Txid, Witness,
|
||||||
@@ -119,21 +119,19 @@ pub fn init_graph<'a, A: Anchor + Clone + 'a>(
|
|||||||
},
|
},
|
||||||
Some(index) => TxOut {
|
Some(index) => TxOut {
|
||||||
value: Amount::from_sat(output.value),
|
value: Amount::from_sat(output.value),
|
||||||
script_pubkey: spk_index.spk_at_index(index).unwrap().to_owned(),
|
script_pubkey: spk_index.spk_at_index(index).unwrap(),
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
.collect(),
|
.collect(),
|
||||||
};
|
};
|
||||||
|
|
||||||
tx_ids.insert(tx_tmp.tx_name, tx.txid());
|
tx_ids.insert(tx_tmp.tx_name, tx.compute_txid());
|
||||||
spk_index.scan(&tx);
|
spk_index.scan(&tx);
|
||||||
let _ = graph.insert_tx(tx.clone());
|
let _ = graph.insert_tx(tx.clone());
|
||||||
for anchor in tx_tmp.anchors.iter() {
|
for anchor in tx_tmp.anchors.iter() {
|
||||||
let _ = graph.insert_anchor(tx.txid(), anchor.clone());
|
let _ = graph.insert_anchor(tx.compute_txid(), anchor.clone());
|
||||||
}
|
|
||||||
if let Some(seen_at) = tx_tmp.last_seen {
|
|
||||||
let _ = graph.insert_seen_at(tx.txid(), seen_at);
|
|
||||||
}
|
}
|
||||||
|
let _ = graph.insert_seen_at(tx.compute_txid(), tx_tmp.last_seen.unwrap_or(0));
|
||||||
}
|
}
|
||||||
(graph, spk_index, tx_ids)
|
(graph, spk_index, tx_ids)
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -8,13 +8,11 @@ use std::{collections::BTreeSet, sync::Arc};
|
|||||||
use crate::common::DESCRIPTORS;
|
use crate::common::DESCRIPTORS;
|
||||||
use bdk_chain::{
|
use bdk_chain::{
|
||||||
indexed_tx_graph::{self, IndexedTxGraph},
|
indexed_tx_graph::{self, IndexedTxGraph},
|
||||||
keychain::{self, Balance, KeychainTxOutIndex},
|
indexer::keychain_txout::KeychainTxOutIndex,
|
||||||
local_chain::LocalChain,
|
local_chain::LocalChain,
|
||||||
tx_graph, ChainPosition, ConfirmationHeightAnchor, DescriptorExt,
|
tx_graph, Balance, ChainPosition, ConfirmationBlockTime, DescriptorExt,
|
||||||
};
|
|
||||||
use bitcoin::{
|
|
||||||
secp256k1::Secp256k1, Amount, OutPoint, Script, ScriptBuf, Transaction, TxIn, TxOut,
|
|
||||||
};
|
};
|
||||||
|
use bitcoin::{secp256k1::Secp256k1, Amount, OutPoint, ScriptBuf, Transaction, TxIn, TxOut};
|
||||||
use miniscript::Descriptor;
|
use miniscript::Descriptor;
|
||||||
|
|
||||||
/// Ensure [`IndexedTxGraph::insert_relevant_txs`] can successfully index transactions NOT presented
|
/// Ensure [`IndexedTxGraph::insert_relevant_txs`] can successfully index transactions NOT presented
|
||||||
@@ -26,15 +24,19 @@ use miniscript::Descriptor;
|
|||||||
/// agnostic.
|
/// agnostic.
|
||||||
#[test]
|
#[test]
|
||||||
fn insert_relevant_txs() {
|
fn insert_relevant_txs() {
|
||||||
|
use bdk_chain::indexer::keychain_txout;
|
||||||
let (descriptor, _) = Descriptor::parse_descriptor(&Secp256k1::signing_only(), DESCRIPTORS[0])
|
let (descriptor, _) = Descriptor::parse_descriptor(&Secp256k1::signing_only(), DESCRIPTORS[0])
|
||||||
.expect("must be valid");
|
.expect("must be valid");
|
||||||
let spk_0 = descriptor.at_derivation_index(0).unwrap().script_pubkey();
|
let spk_0 = descriptor.at_derivation_index(0).unwrap().script_pubkey();
|
||||||
let spk_1 = descriptor.at_derivation_index(9).unwrap().script_pubkey();
|
let spk_1 = descriptor.at_derivation_index(9).unwrap().script_pubkey();
|
||||||
|
|
||||||
let mut graph = IndexedTxGraph::<ConfirmationHeightAnchor, KeychainTxOutIndex<()>>::new(
|
let mut graph = IndexedTxGraph::<ConfirmationBlockTime, KeychainTxOutIndex<()>>::new(
|
||||||
KeychainTxOutIndex::new(10),
|
KeychainTxOutIndex::new(10),
|
||||||
);
|
);
|
||||||
let _ = graph.index.insert_descriptor((), descriptor.clone());
|
let _ = graph
|
||||||
|
.index
|
||||||
|
.insert_descriptor((), descriptor.clone())
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
let tx_a = Transaction {
|
let tx_a = Transaction {
|
||||||
output: vec![
|
output: vec![
|
||||||
@@ -52,7 +54,7 @@ fn insert_relevant_txs() {
|
|||||||
|
|
||||||
let tx_b = Transaction {
|
let tx_b = Transaction {
|
||||||
input: vec![TxIn {
|
input: vec![TxIn {
|
||||||
previous_output: OutPoint::new(tx_a.txid(), 0),
|
previous_output: OutPoint::new(tx_a.compute_txid(), 0),
|
||||||
..Default::default()
|
..Default::default()
|
||||||
}],
|
}],
|
||||||
..common::new_tx(1)
|
..common::new_tx(1)
|
||||||
@@ -60,7 +62,7 @@ fn insert_relevant_txs() {
|
|||||||
|
|
||||||
let tx_c = Transaction {
|
let tx_c = Transaction {
|
||||||
input: vec![TxIn {
|
input: vec![TxIn {
|
||||||
previous_output: OutPoint::new(tx_a.txid(), 1),
|
previous_output: OutPoint::new(tx_a.compute_txid(), 1),
|
||||||
..Default::default()
|
..Default::default()
|
||||||
}],
|
}],
|
||||||
..common::new_tx(2)
|
..common::new_tx(2)
|
||||||
@@ -69,13 +71,12 @@ fn insert_relevant_txs() {
|
|||||||
let txs = [tx_c, tx_b, tx_a];
|
let txs = [tx_c, tx_b, tx_a];
|
||||||
|
|
||||||
let changeset = indexed_tx_graph::ChangeSet {
|
let changeset = indexed_tx_graph::ChangeSet {
|
||||||
graph: tx_graph::ChangeSet {
|
tx_graph: tx_graph::ChangeSet {
|
||||||
txs: txs.iter().cloned().map(Arc::new).collect(),
|
txs: txs.iter().cloned().map(Arc::new).collect(),
|
||||||
..Default::default()
|
..Default::default()
|
||||||
},
|
},
|
||||||
indexer: keychain::ChangeSet {
|
indexer: keychain_txout::ChangeSet {
|
||||||
last_revealed: [(descriptor.descriptor_id(), 9_u32)].into(),
|
last_revealed: [(descriptor.descriptor_id(), 9_u32)].into(),
|
||||||
keychains_added: [].into(),
|
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -86,10 +87,9 @@ fn insert_relevant_txs() {
|
|||||||
|
|
||||||
// The initial changeset will also contain info about the keychain we added
|
// The initial changeset will also contain info about the keychain we added
|
||||||
let initial_changeset = indexed_tx_graph::ChangeSet {
|
let initial_changeset = indexed_tx_graph::ChangeSet {
|
||||||
graph: changeset.graph,
|
tx_graph: changeset.tx_graph,
|
||||||
indexer: keychain::ChangeSet {
|
indexer: keychain_txout::ChangeSet {
|
||||||
last_revealed: changeset.indexer.last_revealed,
|
last_revealed: changeset.indexer.last_revealed,
|
||||||
keychains_added: [((), descriptor)].into(),
|
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -113,8 +113,8 @@ fn insert_relevant_txs() {
|
|||||||
/// tx1: A Coinbase, sending 70000 sats to "trusted" address. [Block 0]
|
/// tx1: A Coinbase, sending 70000 sats to "trusted" address. [Block 0]
|
||||||
/// tx2: A external Receive, sending 30000 sats to "untrusted" address. [Block 1]
|
/// tx2: A external Receive, sending 30000 sats to "untrusted" address. [Block 1]
|
||||||
/// tx3: Internal Spend. Spends tx2 and returns change of 10000 to "trusted" address. [Block 2]
|
/// tx3: Internal Spend. Spends tx2 and returns change of 10000 to "trusted" address. [Block 2]
|
||||||
/// tx4: Mempool tx, sending 20000 sats to "trusted" address.
|
/// tx4: Mempool tx, sending 20000 sats to "untrusted" address.
|
||||||
/// tx5: Mempool tx, sending 15000 sats to "untested" address.
|
/// tx5: Mempool tx, sending 15000 sats to "trusted" address.
|
||||||
/// tx6: Complete unrelated tx. [Block 3]
|
/// tx6: Complete unrelated tx. [Block 3]
|
||||||
///
|
///
|
||||||
/// Different transactions are added via `insert_relevant_txs`.
|
/// Different transactions are added via `insert_relevant_txs`.
|
||||||
@@ -136,12 +136,18 @@ fn test_list_owned_txouts() {
|
|||||||
let (desc_2, _) =
|
let (desc_2, _) =
|
||||||
Descriptor::parse_descriptor(&Secp256k1::signing_only(), common::DESCRIPTORS[3]).unwrap();
|
Descriptor::parse_descriptor(&Secp256k1::signing_only(), common::DESCRIPTORS[3]).unwrap();
|
||||||
|
|
||||||
let mut graph = IndexedTxGraph::<ConfirmationHeightAnchor, KeychainTxOutIndex<String>>::new(
|
let mut graph = IndexedTxGraph::<ConfirmationBlockTime, KeychainTxOutIndex<String>>::new(
|
||||||
KeychainTxOutIndex::new(10),
|
KeychainTxOutIndex::new(10),
|
||||||
);
|
);
|
||||||
|
|
||||||
let _ = graph.index.insert_descriptor("keychain_1".into(), desc_1);
|
assert!(graph
|
||||||
let _ = graph.index.insert_descriptor("keychain_2".into(), desc_2);
|
.index
|
||||||
|
.insert_descriptor("keychain_1".into(), desc_1)
|
||||||
|
.unwrap());
|
||||||
|
assert!(graph
|
||||||
|
.index
|
||||||
|
.insert_descriptor("keychain_2".into(), desc_2)
|
||||||
|
.unwrap());
|
||||||
|
|
||||||
// Get trusted and untrusted addresses
|
// Get trusted and untrusted addresses
|
||||||
|
|
||||||
@@ -149,11 +155,11 @@ fn test_list_owned_txouts() {
|
|||||||
let mut untrusted_spks: Vec<ScriptBuf> = Vec::new();
|
let mut untrusted_spks: Vec<ScriptBuf> = Vec::new();
|
||||||
|
|
||||||
{
|
{
|
||||||
// we need to scope here to take immutanble reference of the graph
|
// we need to scope here to take immutable reference of the graph
|
||||||
for _ in 0..10 {
|
for _ in 0..10 {
|
||||||
let ((_, script), _) = graph
|
let ((_, script), _) = graph
|
||||||
.index
|
.index
|
||||||
.reveal_next_spk(&"keychain_1".to_string())
|
.reveal_next_spk("keychain_1".to_string())
|
||||||
.unwrap();
|
.unwrap();
|
||||||
// TODO Assert indexes
|
// TODO Assert indexes
|
||||||
trusted_spks.push(script.to_owned());
|
trusted_spks.push(script.to_owned());
|
||||||
@@ -163,7 +169,7 @@ fn test_list_owned_txouts() {
|
|||||||
for _ in 0..10 {
|
for _ in 0..10 {
|
||||||
let ((_, script), _) = graph
|
let ((_, script), _) = graph
|
||||||
.index
|
.index
|
||||||
.reveal_next_spk(&"keychain_2".to_string())
|
.reveal_next_spk("keychain_2".to_string())
|
||||||
.unwrap();
|
.unwrap();
|
||||||
untrusted_spks.push(script.to_owned());
|
untrusted_spks.push(script.to_owned());
|
||||||
}
|
}
|
||||||
@@ -196,7 +202,7 @@ fn test_list_owned_txouts() {
|
|||||||
// tx3 spends tx2 and gives a change back in trusted keychain. Confirmed at Block 2.
|
// tx3 spends tx2 and gives a change back in trusted keychain. Confirmed at Block 2.
|
||||||
let tx3 = Transaction {
|
let tx3 = Transaction {
|
||||||
input: vec![TxIn {
|
input: vec![TxIn {
|
||||||
previous_output: OutPoint::new(tx2.txid(), 0),
|
previous_output: OutPoint::new(tx2.compute_txid(), 0),
|
||||||
..Default::default()
|
..Default::default()
|
||||||
}],
|
}],
|
||||||
output: vec![TxOut {
|
output: vec![TxOut {
|
||||||
@@ -215,7 +221,7 @@ fn test_list_owned_txouts() {
|
|||||||
..common::new_tx(0)
|
..common::new_tx(0)
|
||||||
};
|
};
|
||||||
|
|
||||||
// tx5 is spending tx3 and receiving change at trusted keychain, unconfirmed.
|
// tx5 is an external transaction receiving at trusted keychain, unconfirmed.
|
||||||
let tx5 = Transaction {
|
let tx5 = Transaction {
|
||||||
output: vec![TxOut {
|
output: vec![TxOut {
|
||||||
value: Amount::from_sat(15000),
|
value: Amount::from_sat(15000),
|
||||||
@@ -228,7 +234,7 @@ fn test_list_owned_txouts() {
|
|||||||
let tx6 = common::new_tx(0);
|
let tx6 = common::new_tx(0);
|
||||||
|
|
||||||
// Insert transactions into graph with respective anchors
|
// Insert transactions into graph with respective anchors
|
||||||
// For unconfirmed txs we pass in `None`.
|
// Insert unconfirmed txs with a last_seen timestamp
|
||||||
|
|
||||||
let _ =
|
let _ =
|
||||||
graph.batch_insert_relevant([&tx1, &tx2, &tx3, &tx6].iter().enumerate().map(|(i, tx)| {
|
graph.batch_insert_relevant([&tx1, &tx2, &tx3, &tx6].iter().enumerate().map(|(i, tx)| {
|
||||||
@@ -238,9 +244,9 @@ fn test_list_owned_txouts() {
|
|||||||
local_chain
|
local_chain
|
||||||
.get(height)
|
.get(height)
|
||||||
.map(|cp| cp.block_id())
|
.map(|cp| cp.block_id())
|
||||||
.map(|anchor_block| ConfirmationHeightAnchor {
|
.map(|block_id| ConfirmationBlockTime {
|
||||||
anchor_block,
|
block_id,
|
||||||
confirmation_height: anchor_block.height,
|
confirmation_time: 100,
|
||||||
}),
|
}),
|
||||||
)
|
)
|
||||||
}));
|
}));
|
||||||
@@ -249,32 +255,36 @@ fn test_list_owned_txouts() {
|
|||||||
|
|
||||||
// A helper lambda to extract and filter data from the graph.
|
// A helper lambda to extract and filter data from the graph.
|
||||||
let fetch =
|
let fetch =
|
||||||
|height: u32,
|
|height: u32, graph: &IndexedTxGraph<ConfirmationBlockTime, KeychainTxOutIndex<String>>| {
|
||||||
graph: &IndexedTxGraph<ConfirmationHeightAnchor, KeychainTxOutIndex<String>>| {
|
|
||||||
let chain_tip = local_chain
|
let chain_tip = local_chain
|
||||||
.get(height)
|
.get(height)
|
||||||
.map(|cp| cp.block_id())
|
.map(|cp| cp.block_id())
|
||||||
.unwrap_or_else(|| panic!("block must exist at {}", height));
|
.unwrap_or_else(|| panic!("block must exist at {}", height));
|
||||||
let txouts = graph
|
let txouts = graph
|
||||||
.graph()
|
.graph()
|
||||||
.filter_chain_txouts(&local_chain, chain_tip, graph.index.outpoints())
|
.filter_chain_txouts(
|
||||||
|
&local_chain,
|
||||||
|
chain_tip,
|
||||||
|
graph.index.outpoints().iter().cloned(),
|
||||||
|
)
|
||||||
.collect::<Vec<_>>();
|
.collect::<Vec<_>>();
|
||||||
|
|
||||||
let utxos = graph
|
let utxos = graph
|
||||||
.graph()
|
.graph()
|
||||||
.filter_chain_unspents(&local_chain, chain_tip, graph.index.outpoints())
|
.filter_chain_unspents(
|
||||||
|
&local_chain,
|
||||||
|
chain_tip,
|
||||||
|
graph.index.outpoints().iter().cloned(),
|
||||||
|
)
|
||||||
.collect::<Vec<_>>();
|
.collect::<Vec<_>>();
|
||||||
|
|
||||||
let balance = graph.graph().balance(
|
let balance = graph.graph().balance(
|
||||||
&local_chain,
|
&local_chain,
|
||||||
chain_tip,
|
chain_tip,
|
||||||
graph.index.outpoints(),
|
graph.index.outpoints().iter().cloned(),
|
||||||
|_, spk: &Script| trusted_spks.contains(&spk.to_owned()),
|
|_, spk: ScriptBuf| trusted_spks.contains(&spk),
|
||||||
);
|
);
|
||||||
|
|
||||||
assert_eq!(txouts.len(), 5);
|
|
||||||
assert_eq!(utxos.len(), 4);
|
|
||||||
|
|
||||||
let confirmed_txouts_txid = txouts
|
let confirmed_txouts_txid = txouts
|
||||||
.iter()
|
.iter()
|
||||||
.filter_map(|(_, full_txout)| {
|
.filter_map(|(_, full_txout)| {
|
||||||
@@ -340,23 +350,25 @@ fn test_list_owned_txouts() {
|
|||||||
balance,
|
balance,
|
||||||
) = fetch(0, &graph);
|
) = fetch(0, &graph);
|
||||||
|
|
||||||
assert_eq!(confirmed_txouts_txid, [tx1.txid()].into());
|
// tx1 is a confirmed txout and is unspent
|
||||||
|
// tx4, tx5 are unconfirmed
|
||||||
|
assert_eq!(confirmed_txouts_txid, [tx1.compute_txid()].into());
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
unconfirmed_txouts_txid,
|
unconfirmed_txouts_txid,
|
||||||
[tx2.txid(), tx3.txid(), tx4.txid(), tx5.txid()].into()
|
[tx4.compute_txid(), tx5.compute_txid()].into()
|
||||||
);
|
);
|
||||||
|
|
||||||
assert_eq!(confirmed_utxos_txid, [tx1.txid()].into());
|
assert_eq!(confirmed_utxos_txid, [tx1.compute_txid()].into());
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
unconfirmed_utxos_txid,
|
unconfirmed_utxos_txid,
|
||||||
[tx3.txid(), tx4.txid(), tx5.txid()].into()
|
[tx4.compute_txid(), tx5.compute_txid()].into()
|
||||||
);
|
);
|
||||||
|
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
balance,
|
balance,
|
||||||
Balance {
|
Balance {
|
||||||
immature: Amount::from_sat(70000), // immature coinbase
|
immature: Amount::from_sat(70000), // immature coinbase
|
||||||
trusted_pending: Amount::from_sat(25000), // tx3 + tx5
|
trusted_pending: Amount::from_sat(15000), // tx5
|
||||||
untrusted_pending: Amount::from_sat(20000), // tx4
|
untrusted_pending: Amount::from_sat(20000), // tx4
|
||||||
confirmed: Amount::ZERO // Nothing is confirmed yet
|
confirmed: Amount::ZERO // Nothing is confirmed yet
|
||||||
}
|
}
|
||||||
@@ -374,26 +386,32 @@ fn test_list_owned_txouts() {
|
|||||||
) = fetch(1, &graph);
|
) = fetch(1, &graph);
|
||||||
|
|
||||||
// tx2 gets into confirmed txout set
|
// tx2 gets into confirmed txout set
|
||||||
assert_eq!(confirmed_txouts_txid, [tx1.txid(), tx2.txid()].into());
|
assert_eq!(
|
||||||
|
confirmed_txouts_txid,
|
||||||
|
[tx1.compute_txid(), tx2.compute_txid()].into()
|
||||||
|
);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
unconfirmed_txouts_txid,
|
unconfirmed_txouts_txid,
|
||||||
[tx3.txid(), tx4.txid(), tx5.txid()].into()
|
[tx4.compute_txid(), tx5.compute_txid()].into()
|
||||||
);
|
);
|
||||||
|
|
||||||
// tx2 doesn't get into confirmed utxos set
|
// tx2 gets into confirmed utxos set
|
||||||
assert_eq!(confirmed_utxos_txid, [tx1.txid()].into());
|
assert_eq!(
|
||||||
|
confirmed_utxos_txid,
|
||||||
|
[tx1.compute_txid(), tx2.compute_txid()].into()
|
||||||
|
);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
unconfirmed_utxos_txid,
|
unconfirmed_utxos_txid,
|
||||||
[tx3.txid(), tx4.txid(), tx5.txid()].into()
|
[tx4.compute_txid(), tx5.compute_txid()].into()
|
||||||
);
|
);
|
||||||
|
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
balance,
|
balance,
|
||||||
Balance {
|
Balance {
|
||||||
immature: Amount::from_sat(70000), // immature coinbase
|
immature: Amount::from_sat(70000), // immature coinbase
|
||||||
trusted_pending: Amount::from_sat(25000), // tx3 + tx5
|
trusted_pending: Amount::from_sat(15000), // tx5
|
||||||
untrusted_pending: Amount::from_sat(20000), // tx4
|
untrusted_pending: Amount::from_sat(20000), // tx4
|
||||||
confirmed: Amount::ZERO // Nothing is confirmed yet
|
confirmed: Amount::from_sat(30_000) // tx2 got confirmed
|
||||||
}
|
}
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
@@ -411,13 +429,22 @@ fn test_list_owned_txouts() {
|
|||||||
// tx3 now gets into the confirmed txout set
|
// tx3 now gets into the confirmed txout set
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
confirmed_txouts_txid,
|
confirmed_txouts_txid,
|
||||||
[tx1.txid(), tx2.txid(), tx3.txid()].into()
|
[tx1.compute_txid(), tx2.compute_txid(), tx3.compute_txid()].into()
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
unconfirmed_txouts_txid,
|
||||||
|
[tx4.compute_txid(), tx5.compute_txid()].into()
|
||||||
);
|
);
|
||||||
assert_eq!(unconfirmed_txouts_txid, [tx4.txid(), tx5.txid()].into());
|
|
||||||
|
|
||||||
// tx3 also gets into confirmed utxo set
|
// tx3 also gets into confirmed utxo set
|
||||||
assert_eq!(confirmed_utxos_txid, [tx1.txid(), tx3.txid()].into());
|
assert_eq!(
|
||||||
assert_eq!(unconfirmed_utxos_txid, [tx4.txid(), tx5.txid()].into());
|
confirmed_utxos_txid,
|
||||||
|
[tx1.compute_txid(), tx3.compute_txid()].into()
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
unconfirmed_utxos_txid,
|
||||||
|
[tx4.compute_txid(), tx5.compute_txid()].into()
|
||||||
|
);
|
||||||
|
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
balance,
|
balance,
|
||||||
@@ -440,14 +467,24 @@ fn test_list_owned_txouts() {
|
|||||||
balance,
|
balance,
|
||||||
) = fetch(98, &graph);
|
) = fetch(98, &graph);
|
||||||
|
|
||||||
|
// no change compared to block 2
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
confirmed_txouts_txid,
|
confirmed_txouts_txid,
|
||||||
[tx1.txid(), tx2.txid(), tx3.txid()].into()
|
[tx1.compute_txid(), tx2.compute_txid(), tx3.compute_txid()].into()
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
unconfirmed_txouts_txid,
|
||||||
|
[tx4.compute_txid(), tx5.compute_txid()].into()
|
||||||
);
|
);
|
||||||
assert_eq!(unconfirmed_txouts_txid, [tx4.txid(), tx5.txid()].into());
|
|
||||||
|
|
||||||
assert_eq!(confirmed_utxos_txid, [tx1.txid(), tx3.txid()].into());
|
assert_eq!(
|
||||||
assert_eq!(unconfirmed_utxos_txid, [tx4.txid(), tx5.txid()].into());
|
confirmed_utxos_txid,
|
||||||
|
[tx1.compute_txid(), tx3.compute_txid()].into()
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
unconfirmed_utxos_txid,
|
||||||
|
[tx4.compute_txid(), tx5.compute_txid()].into()
|
||||||
|
);
|
||||||
|
|
||||||
// Coinbase is still immature
|
// Coinbase is still immature
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
@@ -456,14 +493,14 @@ fn test_list_owned_txouts() {
|
|||||||
immature: Amount::from_sat(70000), // immature coinbase
|
immature: Amount::from_sat(70000), // immature coinbase
|
||||||
trusted_pending: Amount::from_sat(15000), // tx5
|
trusted_pending: Amount::from_sat(15000), // tx5
|
||||||
untrusted_pending: Amount::from_sat(20000), // tx4
|
untrusted_pending: Amount::from_sat(20000), // tx4
|
||||||
confirmed: Amount::from_sat(10000) // tx1 got matured
|
confirmed: Amount::from_sat(10000) // tx3 is confirmed
|
||||||
}
|
}
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
// AT Block 99
|
// AT Block 99
|
||||||
{
|
{
|
||||||
let (_, _, _, _, balance) = fetch(100, &graph);
|
let (_, _, _, _, balance) = fetch(99, &graph);
|
||||||
|
|
||||||
// Coinbase maturity hits
|
// Coinbase maturity hits
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
@@ -477,3 +514,147 @@ fn test_list_owned_txouts() {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Given a `LocalChain`, `IndexedTxGraph`, and a `Transaction`, when we insert some anchor
|
||||||
|
/// (possibly non-canonical) and/or a last-seen timestamp into the graph, we expect the
|
||||||
|
/// result of `get_chain_position` in these cases:
|
||||||
|
///
|
||||||
|
/// - tx with no anchors or last_seen has no `ChainPosition`
|
||||||
|
/// - tx with any last_seen will be `Unconfirmed`
|
||||||
|
/// - tx with an anchor in best chain will be `Confirmed`
|
||||||
|
/// - tx with an anchor not in best chain (no last_seen) has no `ChainPosition`
|
||||||
|
#[test]
|
||||||
|
fn test_get_chain_position() {
|
||||||
|
use bdk_chain::local_chain::CheckPoint;
|
||||||
|
use bdk_chain::spk_txout::SpkTxOutIndex;
|
||||||
|
use bdk_chain::BlockId;
|
||||||
|
|
||||||
|
struct TestCase<A> {
|
||||||
|
name: &'static str,
|
||||||
|
tx: Transaction,
|
||||||
|
anchor: Option<A>,
|
||||||
|
last_seen: Option<u64>,
|
||||||
|
exp_pos: Option<ChainPosition<A>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
// addr: bcrt1qc6fweuf4xjvz4x3gx3t9e0fh4hvqyu2qw4wvxm
|
||||||
|
let spk = ScriptBuf::from_hex("0014c692ecf13534982a9a2834565cbd37add8027140").unwrap();
|
||||||
|
let mut graph = IndexedTxGraph::new({
|
||||||
|
let mut index = SpkTxOutIndex::default();
|
||||||
|
let _ = index.insert_spk(0u32, spk.clone());
|
||||||
|
index
|
||||||
|
});
|
||||||
|
|
||||||
|
// Anchors to test
|
||||||
|
let blocks = vec![block_id!(0, "g"), block_id!(1, "A"), block_id!(2, "B")];
|
||||||
|
|
||||||
|
let cp = CheckPoint::from_block_ids(blocks.clone()).unwrap();
|
||||||
|
let chain = LocalChain::from_tip(cp).unwrap();
|
||||||
|
|
||||||
|
// The test will insert a transaction into the indexed tx graph
|
||||||
|
// along with any anchors and timestamps, then check the value
|
||||||
|
// returned by `get_chain_position`.
|
||||||
|
fn run(
|
||||||
|
chain: &LocalChain,
|
||||||
|
graph: &mut IndexedTxGraph<BlockId, SpkTxOutIndex<u32>>,
|
||||||
|
test: TestCase<BlockId>,
|
||||||
|
) {
|
||||||
|
let TestCase {
|
||||||
|
name,
|
||||||
|
tx,
|
||||||
|
anchor,
|
||||||
|
last_seen,
|
||||||
|
exp_pos,
|
||||||
|
} = test;
|
||||||
|
|
||||||
|
// add data to graph
|
||||||
|
let txid = tx.compute_txid();
|
||||||
|
let _ = graph.insert_tx(tx);
|
||||||
|
if let Some(anchor) = anchor {
|
||||||
|
let _ = graph.insert_anchor(txid, anchor);
|
||||||
|
}
|
||||||
|
if let Some(seen_at) = last_seen {
|
||||||
|
let _ = graph.insert_seen_at(txid, seen_at);
|
||||||
|
}
|
||||||
|
|
||||||
|
// check chain position
|
||||||
|
let res = graph
|
||||||
|
.graph()
|
||||||
|
.get_chain_position(chain, chain.tip().block_id(), txid);
|
||||||
|
assert_eq!(
|
||||||
|
res.map(ChainPosition::cloned),
|
||||||
|
exp_pos,
|
||||||
|
"failed test case: {name}"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
[
|
||||||
|
TestCase {
|
||||||
|
name: "tx no anchors or last_seen - no chain pos",
|
||||||
|
tx: Transaction {
|
||||||
|
output: vec![TxOut {
|
||||||
|
value: Amount::ONE_BTC,
|
||||||
|
script_pubkey: spk.clone(),
|
||||||
|
}],
|
||||||
|
..common::new_tx(0)
|
||||||
|
},
|
||||||
|
anchor: None,
|
||||||
|
last_seen: None,
|
||||||
|
exp_pos: None,
|
||||||
|
},
|
||||||
|
TestCase {
|
||||||
|
name: "tx last_seen - unconfirmed",
|
||||||
|
tx: Transaction {
|
||||||
|
output: vec![TxOut {
|
||||||
|
value: Amount::ONE_BTC,
|
||||||
|
script_pubkey: spk.clone(),
|
||||||
|
}],
|
||||||
|
..common::new_tx(1)
|
||||||
|
},
|
||||||
|
anchor: None,
|
||||||
|
last_seen: Some(2),
|
||||||
|
exp_pos: Some(ChainPosition::Unconfirmed(2)),
|
||||||
|
},
|
||||||
|
TestCase {
|
||||||
|
name: "tx anchor in best chain - confirmed",
|
||||||
|
tx: Transaction {
|
||||||
|
output: vec![TxOut {
|
||||||
|
value: Amount::ONE_BTC,
|
||||||
|
script_pubkey: spk.clone(),
|
||||||
|
}],
|
||||||
|
..common::new_tx(2)
|
||||||
|
},
|
||||||
|
anchor: Some(blocks[1]),
|
||||||
|
last_seen: None,
|
||||||
|
exp_pos: Some(ChainPosition::Confirmed(blocks[1])),
|
||||||
|
},
|
||||||
|
TestCase {
|
||||||
|
name: "tx unknown anchor with last_seen - unconfirmed",
|
||||||
|
tx: Transaction {
|
||||||
|
output: vec![TxOut {
|
||||||
|
value: Amount::ONE_BTC,
|
||||||
|
script_pubkey: spk.clone(),
|
||||||
|
}],
|
||||||
|
..common::new_tx(3)
|
||||||
|
},
|
||||||
|
anchor: Some(block_id!(2, "B'")),
|
||||||
|
last_seen: Some(2),
|
||||||
|
exp_pos: Some(ChainPosition::Unconfirmed(2)),
|
||||||
|
},
|
||||||
|
TestCase {
|
||||||
|
name: "tx unknown anchor - no chain pos",
|
||||||
|
tx: Transaction {
|
||||||
|
output: vec![TxOut {
|
||||||
|
value: Amount::ONE_BTC,
|
||||||
|
script_pubkey: spk.clone(),
|
||||||
|
}],
|
||||||
|
..common::new_tx(4)
|
||||||
|
},
|
||||||
|
anchor: Some(block_id!(2, "B'")),
|
||||||
|
last_seen: None,
|
||||||
|
exp_pos: None,
|
||||||
|
},
|
||||||
|
]
|
||||||
|
.into_iter()
|
||||||
|
.for_each(|t| run(&chain, &mut graph, t));
|
||||||
|
}
|
||||||
|
|||||||
@@ -4,9 +4,8 @@
|
|||||||
mod common;
|
mod common;
|
||||||
use bdk_chain::{
|
use bdk_chain::{
|
||||||
collections::BTreeMap,
|
collections::BTreeMap,
|
||||||
indexed_tx_graph::Indexer,
|
indexer::keychain_txout::{ChangeSet, KeychainTxOutIndex},
|
||||||
keychain::{self, ChangeSet, KeychainTxOutIndex},
|
DescriptorExt, DescriptorId, Indexer, Merge,
|
||||||
Append, DescriptorExt, DescriptorId,
|
|
||||||
};
|
};
|
||||||
|
|
||||||
use bitcoin::{secp256k1::Secp256k1, Amount, OutPoint, ScriptBuf, Transaction, TxOut};
|
use bitcoin::{secp256k1::Secp256k1, Amount, OutPoint, ScriptBuf, Transaction, TxOut};
|
||||||
@@ -31,11 +30,15 @@ fn init_txout_index(
|
|||||||
external_descriptor: Descriptor<DescriptorPublicKey>,
|
external_descriptor: Descriptor<DescriptorPublicKey>,
|
||||||
internal_descriptor: Descriptor<DescriptorPublicKey>,
|
internal_descriptor: Descriptor<DescriptorPublicKey>,
|
||||||
lookahead: u32,
|
lookahead: u32,
|
||||||
) -> bdk_chain::keychain::KeychainTxOutIndex<TestKeychain> {
|
) -> KeychainTxOutIndex<TestKeychain> {
|
||||||
let mut txout_index = bdk_chain::keychain::KeychainTxOutIndex::<TestKeychain>::new(lookahead);
|
let mut txout_index = KeychainTxOutIndex::<TestKeychain>::new(lookahead);
|
||||||
|
|
||||||
let _ = txout_index.insert_descriptor(TestKeychain::External, external_descriptor);
|
let _ = txout_index
|
||||||
let _ = txout_index.insert_descriptor(TestKeychain::Internal, internal_descriptor);
|
.insert_descriptor(TestKeychain::External, external_descriptor)
|
||||||
|
.unwrap();
|
||||||
|
let _ = txout_index
|
||||||
|
.insert_descriptor(TestKeychain::Internal, internal_descriptor)
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
txout_index
|
txout_index
|
||||||
}
|
}
|
||||||
@@ -48,13 +51,13 @@ fn spk_at_index(descriptor: &Descriptor<DescriptorPublicKey>, index: u32) -> Scr
|
|||||||
}
|
}
|
||||||
|
|
||||||
// We create two empty changesets lhs and rhs, we then insert various descriptors with various
|
// We create two empty changesets lhs and rhs, we then insert various descriptors with various
|
||||||
// last_revealed, append rhs to lhs, and check that the result is consistent with these rules:
|
// last_revealed, merge rhs to lhs, and check that the result is consistent with these rules:
|
||||||
// - Existing index doesn't update if the new index in `other` is lower than `self`.
|
// - Existing index doesn't update if the new index in `other` is lower than `self`.
|
||||||
// - Existing index updates if the new index in `other` is higher than `self`.
|
// - Existing index updates if the new index in `other` is higher than `self`.
|
||||||
// - Existing index is unchanged if keychain doesn't exist in `other`.
|
// - Existing index is unchanged if keychain doesn't exist in `other`.
|
||||||
// - New keychain gets added if the keychain is in `other` but not in `self`.
|
// - New keychain gets added if the keychain is in `other` but not in `self`.
|
||||||
#[test]
|
#[test]
|
||||||
fn append_changesets_check_last_revealed() {
|
fn merge_changesets_check_last_revealed() {
|
||||||
let secp = bitcoin::secp256k1::Secp256k1::signing_only();
|
let secp = bitcoin::secp256k1::Secp256k1::signing_only();
|
||||||
let descriptor_ids: Vec<_> = DESCRIPTORS
|
let descriptor_ids: Vec<_> = DESCRIPTORS
|
||||||
.iter()
|
.iter()
|
||||||
@@ -78,14 +81,12 @@ fn append_changesets_check_last_revealed() {
|
|||||||
lhs_di.insert(descriptor_ids[3], 4); // key doesn't exist in lhs
|
lhs_di.insert(descriptor_ids[3], 4); // key doesn't exist in lhs
|
||||||
|
|
||||||
let mut lhs = ChangeSet {
|
let mut lhs = ChangeSet {
|
||||||
keychains_added: BTreeMap::<(), _>::new(),
|
|
||||||
last_revealed: lhs_di,
|
last_revealed: lhs_di,
|
||||||
};
|
};
|
||||||
let rhs = ChangeSet {
|
let rhs = ChangeSet {
|
||||||
keychains_added: BTreeMap::<(), _>::new(),
|
|
||||||
last_revealed: rhs_di,
|
last_revealed: rhs_di,
|
||||||
};
|
};
|
||||||
lhs.append(rhs);
|
lhs.merge(rhs);
|
||||||
|
|
||||||
// Existing index doesn't update if the new index in `other` is lower than `self`.
|
// Existing index doesn't update if the new index in `other` is lower than `self`.
|
||||||
assert_eq!(lhs.last_revealed.get(&descriptor_ids[0]), Some(&7));
|
assert_eq!(lhs.last_revealed.get(&descriptor_ids[0]), Some(&7));
|
||||||
@@ -97,53 +98,8 @@ fn append_changesets_check_last_revealed() {
|
|||||||
assert_eq!(lhs.last_revealed.get(&descriptor_ids[3]), Some(&4));
|
assert_eq!(lhs.last_revealed.get(&descriptor_ids[3]), Some(&4));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_apply_changeset_with_different_descriptors_to_same_keychain() {
|
|
||||||
let external_descriptor = parse_descriptor(DESCRIPTORS[0]);
|
|
||||||
let internal_descriptor = parse_descriptor(DESCRIPTORS[1]);
|
|
||||||
let mut txout_index =
|
|
||||||
init_txout_index(external_descriptor.clone(), internal_descriptor.clone(), 0);
|
|
||||||
assert_eq!(
|
|
||||||
txout_index.keychains().collect::<Vec<_>>(),
|
|
||||||
vec![
|
|
||||||
(&TestKeychain::External, &external_descriptor),
|
|
||||||
(&TestKeychain::Internal, &internal_descriptor)
|
|
||||||
]
|
|
||||||
);
|
|
||||||
|
|
||||||
let changeset = ChangeSet {
|
|
||||||
keychains_added: [(TestKeychain::External, internal_descriptor.clone())].into(),
|
|
||||||
last_revealed: [].into(),
|
|
||||||
};
|
|
||||||
txout_index.apply_changeset(changeset);
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
txout_index.keychains().collect::<Vec<_>>(),
|
|
||||||
vec![
|
|
||||||
(&TestKeychain::External, &internal_descriptor),
|
|
||||||
(&TestKeychain::Internal, &internal_descriptor)
|
|
||||||
]
|
|
||||||
);
|
|
||||||
|
|
||||||
let changeset = ChangeSet {
|
|
||||||
keychains_added: [(TestKeychain::Internal, external_descriptor.clone())].into(),
|
|
||||||
last_revealed: [].into(),
|
|
||||||
};
|
|
||||||
txout_index.apply_changeset(changeset);
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
txout_index.keychains().collect::<Vec<_>>(),
|
|
||||||
vec![
|
|
||||||
(&TestKeychain::External, &internal_descriptor),
|
|
||||||
(&TestKeychain::Internal, &external_descriptor)
|
|
||||||
]
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_set_all_derivation_indices() {
|
fn test_set_all_derivation_indices() {
|
||||||
use bdk_chain::indexed_tx_graph::Indexer;
|
|
||||||
|
|
||||||
let external_descriptor = parse_descriptor(DESCRIPTORS[0]);
|
let external_descriptor = parse_descriptor(DESCRIPTORS[0]);
|
||||||
let internal_descriptor = parse_descriptor(DESCRIPTORS[1]);
|
let internal_descriptor = parse_descriptor(DESCRIPTORS[1]);
|
||||||
let mut txout_index =
|
let mut txout_index =
|
||||||
@@ -156,16 +112,15 @@ fn test_set_all_derivation_indices() {
|
|||||||
]
|
]
|
||||||
.into();
|
.into();
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
txout_index.reveal_to_target_multi(&derive_to).1,
|
txout_index.reveal_to_target_multi(&derive_to),
|
||||||
ChangeSet {
|
ChangeSet {
|
||||||
keychains_added: BTreeMap::new(),
|
|
||||||
last_revealed: last_revealed.clone()
|
last_revealed: last_revealed.clone()
|
||||||
}
|
}
|
||||||
);
|
);
|
||||||
assert_eq!(txout_index.last_revealed_indices(), derive_to);
|
assert_eq!(txout_index.last_revealed_indices(), derive_to);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
txout_index.reveal_to_target_multi(&derive_to).1,
|
txout_index.reveal_to_target_multi(&derive_to),
|
||||||
keychain::ChangeSet::default(),
|
ChangeSet::default(),
|
||||||
"no changes if we set to the same thing"
|
"no changes if we set to the same thing"
|
||||||
);
|
);
|
||||||
assert_eq!(txout_index.initial_changeset().last_revealed, last_revealed);
|
assert_eq!(txout_index.initial_changeset().last_revealed, last_revealed);
|
||||||
@@ -187,10 +142,10 @@ fn test_lookahead() {
|
|||||||
// - stored scripts of external keychain should be of expected counts
|
// - stored scripts of external keychain should be of expected counts
|
||||||
for index in (0..20).skip_while(|i| i % 2 == 1) {
|
for index in (0..20).skip_while(|i| i % 2 == 1) {
|
||||||
let (revealed_spks, revealed_changeset) = txout_index
|
let (revealed_spks, revealed_changeset) = txout_index
|
||||||
.reveal_to_target(&TestKeychain::External, index)
|
.reveal_to_target(TestKeychain::External, index)
|
||||||
.unwrap();
|
.unwrap();
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
revealed_spks.collect::<Vec<_>>(),
|
revealed_spks,
|
||||||
vec![(index, spk_at_index(&external_descriptor, index))],
|
vec![(index, spk_at_index(&external_descriptor, index))],
|
||||||
);
|
);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
@@ -206,25 +161,25 @@ fn test_lookahead() {
|
|||||||
);
|
);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
txout_index
|
txout_index
|
||||||
.revealed_keychain_spks(&TestKeychain::External)
|
.revealed_keychain_spks(TestKeychain::External)
|
||||||
.count(),
|
.count(),
|
||||||
index as usize + 1,
|
index as usize + 1,
|
||||||
);
|
);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
txout_index
|
txout_index
|
||||||
.revealed_keychain_spks(&TestKeychain::Internal)
|
.revealed_keychain_spks(TestKeychain::Internal)
|
||||||
.count(),
|
.count(),
|
||||||
0,
|
0,
|
||||||
);
|
);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
txout_index
|
txout_index
|
||||||
.unused_keychain_spks(&TestKeychain::External)
|
.unused_keychain_spks(TestKeychain::External)
|
||||||
.count(),
|
.count(),
|
||||||
index as usize + 1,
|
index as usize + 1,
|
||||||
);
|
);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
txout_index
|
txout_index
|
||||||
.unused_keychain_spks(&TestKeychain::Internal)
|
.unused_keychain_spks(TestKeychain::Internal)
|
||||||
.count(),
|
.count(),
|
||||||
0,
|
0,
|
||||||
);
|
);
|
||||||
@@ -238,10 +193,10 @@ fn test_lookahead() {
|
|||||||
// expect:
|
// expect:
|
||||||
// - scripts cached in spk_txout_index should increase correctly, a.k.a. no scripts are skipped
|
// - scripts cached in spk_txout_index should increase correctly, a.k.a. no scripts are skipped
|
||||||
let (revealed_spks, revealed_changeset) = txout_index
|
let (revealed_spks, revealed_changeset) = txout_index
|
||||||
.reveal_to_target(&TestKeychain::Internal, 24)
|
.reveal_to_target(TestKeychain::Internal, 24)
|
||||||
.unwrap();
|
.unwrap();
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
revealed_spks.collect::<Vec<_>>(),
|
revealed_spks,
|
||||||
(0..=24)
|
(0..=24)
|
||||||
.map(|index| (index, spk_at_index(&internal_descriptor, index)))
|
.map(|index| (index, spk_at_index(&internal_descriptor, index)))
|
||||||
.collect::<Vec<_>>(),
|
.collect::<Vec<_>>(),
|
||||||
@@ -259,17 +214,17 @@ fn test_lookahead() {
|
|||||||
);
|
);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
txout_index
|
txout_index
|
||||||
.revealed_keychain_spks(&TestKeychain::Internal)
|
.revealed_keychain_spks(TestKeychain::Internal)
|
||||||
.count(),
|
.count(),
|
||||||
25,
|
25,
|
||||||
);
|
);
|
||||||
|
|
||||||
// ensure derivation indices are expected for each keychain
|
// ensure derivation indices are expected for each keychain
|
||||||
let last_external_index = txout_index
|
let last_external_index = txout_index
|
||||||
.last_revealed_index(&TestKeychain::External)
|
.last_revealed_index(TestKeychain::External)
|
||||||
.expect("already derived");
|
.expect("already derived");
|
||||||
let last_internal_index = txout_index
|
let last_internal_index = txout_index
|
||||||
.last_revealed_index(&TestKeychain::Internal)
|
.last_revealed_index(TestKeychain::Internal)
|
||||||
.expect("already derived");
|
.expect("already derived");
|
||||||
assert_eq!(last_external_index, 19);
|
assert_eq!(last_external_index, 19);
|
||||||
assert_eq!(last_internal_index, 24);
|
assert_eq!(last_internal_index, 24);
|
||||||
@@ -300,24 +255,24 @@ fn test_lookahead() {
|
|||||||
],
|
],
|
||||||
..common::new_tx(external_index)
|
..common::new_tx(external_index)
|
||||||
};
|
};
|
||||||
assert_eq!(txout_index.index_tx(&tx), keychain::ChangeSet::default());
|
assert_eq!(txout_index.index_tx(&tx), ChangeSet::default());
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
txout_index.last_revealed_index(&TestKeychain::External),
|
txout_index.last_revealed_index(TestKeychain::External),
|
||||||
Some(last_external_index)
|
Some(last_external_index)
|
||||||
);
|
);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
txout_index.last_revealed_index(&TestKeychain::Internal),
|
txout_index.last_revealed_index(TestKeychain::Internal),
|
||||||
Some(last_internal_index)
|
Some(last_internal_index)
|
||||||
);
|
);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
txout_index
|
txout_index
|
||||||
.revealed_keychain_spks(&TestKeychain::External)
|
.revealed_keychain_spks(TestKeychain::External)
|
||||||
.count(),
|
.count(),
|
||||||
last_external_index as usize + 1,
|
last_external_index as usize + 1,
|
||||||
);
|
);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
txout_index
|
txout_index
|
||||||
.revealed_keychain_spks(&TestKeychain::Internal)
|
.revealed_keychain_spks(TestKeychain::Internal)
|
||||||
.count(),
|
.count(),
|
||||||
last_internal_index as usize + 1,
|
last_internal_index as usize + 1,
|
||||||
);
|
);
|
||||||
@@ -362,11 +317,11 @@ fn test_scan_with_lookahead() {
|
|||||||
&[(external_descriptor.descriptor_id(), spk_i)].into()
|
&[(external_descriptor.descriptor_id(), spk_i)].into()
|
||||||
);
|
);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
txout_index.last_revealed_index(&TestKeychain::External),
|
txout_index.last_revealed_index(TestKeychain::External),
|
||||||
Some(spk_i)
|
Some(spk_i)
|
||||||
);
|
);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
txout_index.last_used_index(&TestKeychain::External),
|
txout_index.last_used_index(TestKeychain::External),
|
||||||
Some(spk_i)
|
Some(spk_i)
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
@@ -402,12 +357,12 @@ fn test_wildcard_derivations() {
|
|||||||
// - next_derivation_index() == (0, true)
|
// - next_derivation_index() == (0, true)
|
||||||
// - derive_new() == ((0, <spk>), keychain::ChangeSet)
|
// - derive_new() == ((0, <spk>), keychain::ChangeSet)
|
||||||
// - next_unused() == ((0, <spk>), keychain::ChangeSet:is_empty())
|
// - next_unused() == ((0, <spk>), keychain::ChangeSet:is_empty())
|
||||||
assert_eq!(txout_index.next_index(&TestKeychain::External).unwrap(), (0, true));
|
assert_eq!(txout_index.next_index(TestKeychain::External).unwrap(), (0, true));
|
||||||
let (spk, changeset) = txout_index.reveal_next_spk(&TestKeychain::External).unwrap();
|
let (spk, changeset) = txout_index.reveal_next_spk(TestKeychain::External).unwrap();
|
||||||
assert_eq!(spk, (0_u32, external_spk_0.as_script()));
|
assert_eq!(spk, (0_u32, external_spk_0.clone()));
|
||||||
assert_eq!(&changeset.last_revealed, &[(external_descriptor.descriptor_id(), 0)].into());
|
assert_eq!(&changeset.last_revealed, &[(external_descriptor.descriptor_id(), 0)].into());
|
||||||
let (spk, changeset) = txout_index.next_unused_spk(&TestKeychain::External).unwrap();
|
let (spk, changeset) = txout_index.next_unused_spk(TestKeychain::External).unwrap();
|
||||||
assert_eq!(spk, (0_u32, external_spk_0.as_script()));
|
assert_eq!(spk, (0_u32, external_spk_0.clone()));
|
||||||
assert_eq!(&changeset.last_revealed, &[].into());
|
assert_eq!(&changeset.last_revealed, &[].into());
|
||||||
|
|
||||||
// - derived till 25
|
// - derived till 25
|
||||||
@@ -418,21 +373,21 @@ fn test_wildcard_derivations() {
|
|||||||
// - next_derivation_index() = (26, true)
|
// - next_derivation_index() = (26, true)
|
||||||
// - derive_new() = ((26, <spk>), keychain::ChangeSet)
|
// - derive_new() = ((26, <spk>), keychain::ChangeSet)
|
||||||
// - next_unused() == ((16, <spk>), keychain::ChangeSet::is_empty())
|
// - next_unused() == ((16, <spk>), keychain::ChangeSet::is_empty())
|
||||||
let _ = txout_index.reveal_to_target(&TestKeychain::External, 25);
|
let _ = txout_index.reveal_to_target(TestKeychain::External, 25);
|
||||||
|
|
||||||
(0..=15)
|
(0..=15)
|
||||||
.chain([17, 20, 23])
|
.chain([17, 20, 23])
|
||||||
.for_each(|index| assert!(txout_index.mark_used(TestKeychain::External, index)));
|
.for_each(|index| assert!(txout_index.mark_used(TestKeychain::External, index)));
|
||||||
|
|
||||||
assert_eq!(txout_index.next_index(&TestKeychain::External).unwrap(), (26, true));
|
assert_eq!(txout_index.next_index(TestKeychain::External).unwrap(), (26, true));
|
||||||
|
|
||||||
let (spk, changeset) = txout_index.reveal_next_spk(&TestKeychain::External).unwrap();
|
let (spk, changeset) = txout_index.reveal_next_spk(TestKeychain::External).unwrap();
|
||||||
assert_eq!(spk, (26, external_spk_26.as_script()));
|
assert_eq!(spk, (26, external_spk_26));
|
||||||
|
|
||||||
assert_eq!(&changeset.last_revealed, &[(external_descriptor.descriptor_id(), 26)].into());
|
assert_eq!(&changeset.last_revealed, &[(external_descriptor.descriptor_id(), 26)].into());
|
||||||
|
|
||||||
let (spk, changeset) = txout_index.next_unused_spk(&TestKeychain::External).unwrap();
|
let (spk, changeset) = txout_index.next_unused_spk(TestKeychain::External).unwrap();
|
||||||
assert_eq!(spk, (16, external_spk_16.as_script()));
|
assert_eq!(spk, (16, external_spk_16));
|
||||||
assert_eq!(&changeset.last_revealed, &[].into());
|
assert_eq!(&changeset.last_revealed, &[].into());
|
||||||
|
|
||||||
// - Use all the derived till 26.
|
// - Use all the derived till 26.
|
||||||
@@ -441,8 +396,8 @@ fn test_wildcard_derivations() {
|
|||||||
txout_index.mark_used(TestKeychain::External, index);
|
txout_index.mark_used(TestKeychain::External, index);
|
||||||
});
|
});
|
||||||
|
|
||||||
let (spk, changeset) = txout_index.next_unused_spk(&TestKeychain::External).unwrap();
|
let (spk, changeset) = txout_index.next_unused_spk(TestKeychain::External).unwrap();
|
||||||
assert_eq!(spk, (27, external_spk_27.as_script()));
|
assert_eq!(spk, (27, external_spk_27));
|
||||||
assert_eq!(&changeset.last_revealed, &[(external_descriptor.descriptor_id(), 27)].into());
|
assert_eq!(&changeset.last_revealed, &[(external_descriptor.descriptor_id(), 27)].into());
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -458,7 +413,9 @@ fn test_non_wildcard_derivations() {
|
|||||||
.unwrap()
|
.unwrap()
|
||||||
.script_pubkey();
|
.script_pubkey();
|
||||||
|
|
||||||
let _ = txout_index.insert_descriptor(TestKeychain::External, no_wildcard_descriptor.clone());
|
let _ = txout_index
|
||||||
|
.insert_descriptor(TestKeychain::External, no_wildcard_descriptor.clone())
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
// given:
|
// given:
|
||||||
// - `txout_index` with no stored scripts
|
// - `txout_index` with no stored scripts
|
||||||
@@ -467,22 +424,18 @@ fn test_non_wildcard_derivations() {
|
|||||||
// - when we derive a new script, script @ index 0
|
// - when we derive a new script, script @ index 0
|
||||||
// - when we get the next unused script, script @ index 0
|
// - when we get the next unused script, script @ index 0
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
txout_index.next_index(&TestKeychain::External).unwrap(),
|
txout_index.next_index(TestKeychain::External).unwrap(),
|
||||||
(0, true)
|
(0, true)
|
||||||
);
|
);
|
||||||
let (spk, changeset) = txout_index
|
let (spk, changeset) = txout_index.reveal_next_spk(TestKeychain::External).unwrap();
|
||||||
.reveal_next_spk(&TestKeychain::External)
|
assert_eq!(spk, (0, external_spk.clone()));
|
||||||
.unwrap();
|
|
||||||
assert_eq!(spk, (0, external_spk.as_script()));
|
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
&changeset.last_revealed,
|
&changeset.last_revealed,
|
||||||
&[(no_wildcard_descriptor.descriptor_id(), 0)].into()
|
&[(no_wildcard_descriptor.descriptor_id(), 0)].into()
|
||||||
);
|
);
|
||||||
|
|
||||||
let (spk, changeset) = txout_index
|
let (spk, changeset) = txout_index.next_unused_spk(TestKeychain::External).unwrap();
|
||||||
.next_unused_spk(&TestKeychain::External)
|
assert_eq!(spk, (0, external_spk.clone()));
|
||||||
.unwrap();
|
|
||||||
assert_eq!(spk, (0, external_spk.as_script()));
|
|
||||||
assert_eq!(&changeset.last_revealed, &[].into());
|
assert_eq!(&changeset.last_revealed, &[].into());
|
||||||
|
|
||||||
// given:
|
// given:
|
||||||
@@ -492,32 +445,28 @@ fn test_non_wildcard_derivations() {
|
|||||||
// - derive new and next unused should return the old script
|
// - derive new and next unused should return the old script
|
||||||
// - store_up_to should not panic and return empty changeset
|
// - store_up_to should not panic and return empty changeset
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
txout_index.next_index(&TestKeychain::External).unwrap(),
|
txout_index.next_index(TestKeychain::External).unwrap(),
|
||||||
(0, false)
|
(0, false)
|
||||||
);
|
);
|
||||||
txout_index.mark_used(TestKeychain::External, 0);
|
txout_index.mark_used(TestKeychain::External, 0);
|
||||||
|
|
||||||
let (spk, changeset) = txout_index
|
let (spk, changeset) = txout_index.reveal_next_spk(TestKeychain::External).unwrap();
|
||||||
.reveal_next_spk(&TestKeychain::External)
|
assert_eq!(spk, (0, external_spk.clone()));
|
||||||
.unwrap();
|
|
||||||
assert_eq!(spk, (0, external_spk.as_script()));
|
|
||||||
assert_eq!(&changeset.last_revealed, &[].into());
|
assert_eq!(&changeset.last_revealed, &[].into());
|
||||||
|
|
||||||
let (spk, changeset) = txout_index
|
let (spk, changeset) = txout_index.next_unused_spk(TestKeychain::External).unwrap();
|
||||||
.next_unused_spk(&TestKeychain::External)
|
assert_eq!(spk, (0, external_spk.clone()));
|
||||||
.unwrap();
|
|
||||||
assert_eq!(spk, (0, external_spk.as_script()));
|
|
||||||
assert_eq!(&changeset.last_revealed, &[].into());
|
assert_eq!(&changeset.last_revealed, &[].into());
|
||||||
let (revealed_spks, revealed_changeset) = txout_index
|
let (revealed_spks, revealed_changeset) = txout_index
|
||||||
.reveal_to_target(&TestKeychain::External, 200)
|
.reveal_to_target(TestKeychain::External, 200)
|
||||||
.unwrap();
|
.unwrap();
|
||||||
assert_eq!(revealed_spks.count(), 0);
|
assert_eq!(revealed_spks.len(), 0);
|
||||||
assert!(revealed_changeset.is_empty());
|
assert!(revealed_changeset.is_empty());
|
||||||
|
|
||||||
// we check that spks_of_keychain returns a SpkIterator with just one element
|
// we check that spks_of_keychain returns a SpkIterator with just one element
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
txout_index
|
txout_index
|
||||||
.revealed_keychain_spks(&TestKeychain::External)
|
.revealed_keychain_spks(TestKeychain::External)
|
||||||
.count(),
|
.count(),
|
||||||
1,
|
1,
|
||||||
);
|
);
|
||||||
@@ -583,27 +532,25 @@ fn lookahead_to_target() {
|
|||||||
);
|
);
|
||||||
|
|
||||||
if let Some(last_revealed) = t.external_last_revealed {
|
if let Some(last_revealed) = t.external_last_revealed {
|
||||||
let _ = index.reveal_to_target(&TestKeychain::External, last_revealed);
|
let _ = index.reveal_to_target(TestKeychain::External, last_revealed);
|
||||||
}
|
}
|
||||||
if let Some(last_revealed) = t.internal_last_revealed {
|
if let Some(last_revealed) = t.internal_last_revealed {
|
||||||
let _ = index.reveal_to_target(&TestKeychain::Internal, last_revealed);
|
let _ = index.reveal_to_target(TestKeychain::Internal, last_revealed);
|
||||||
}
|
}
|
||||||
|
|
||||||
let keychain_test_cases = [
|
let keychain_test_cases = [
|
||||||
(
|
(
|
||||||
external_descriptor.descriptor_id(),
|
|
||||||
TestKeychain::External,
|
TestKeychain::External,
|
||||||
t.external_last_revealed,
|
t.external_last_revealed,
|
||||||
t.external_target,
|
t.external_target,
|
||||||
),
|
),
|
||||||
(
|
(
|
||||||
internal_descriptor.descriptor_id(),
|
|
||||||
TestKeychain::Internal,
|
TestKeychain::Internal,
|
||||||
t.internal_last_revealed,
|
t.internal_last_revealed,
|
||||||
t.internal_target,
|
t.internal_target,
|
||||||
),
|
),
|
||||||
];
|
];
|
||||||
for (descriptor_id, keychain, last_revealed, target) in keychain_test_cases {
|
for (keychain, last_revealed, target) in keychain_test_cases {
|
||||||
if let Some(target) = target {
|
if let Some(target) = target {
|
||||||
let original_last_stored_index = match last_revealed {
|
let original_last_stored_index = match last_revealed {
|
||||||
Some(last_revealed) => Some(last_revealed + t.lookahead),
|
Some(last_revealed) => Some(last_revealed + t.lookahead),
|
||||||
@@ -615,14 +562,14 @@ fn lookahead_to_target() {
|
|||||||
}
|
}
|
||||||
None => target,
|
None => target,
|
||||||
};
|
};
|
||||||
index.lookahead_to_target(&keychain, target);
|
index.lookahead_to_target(keychain.clone(), target);
|
||||||
let keys = index
|
let keys = index
|
||||||
.inner()
|
.inner()
|
||||||
.all_spks()
|
.all_spks()
|
||||||
.range((descriptor_id, 0)..=(descriptor_id, u32::MAX))
|
.range((keychain.clone(), 0)..=(keychain.clone(), u32::MAX))
|
||||||
.map(|(k, _)| *k)
|
.map(|(k, _)| k.clone())
|
||||||
.collect::<Vec<_>>();
|
.collect::<Vec<_>>();
|
||||||
let exp_keys = core::iter::repeat(descriptor_id)
|
let exp_keys = core::iter::repeat(keychain)
|
||||||
.zip(0_u32..=exp_last_stored_index)
|
.zip(0_u32..=exp_last_stored_index)
|
||||||
.collect::<Vec<_>>();
|
.collect::<Vec<_>>();
|
||||||
assert_eq!(keys, exp_keys);
|
assert_eq!(keys, exp_keys);
|
||||||
@@ -631,95 +578,35 @@ fn lookahead_to_target() {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// `::index_txout` should still index txouts with spks derived from descriptors without keychains.
|
|
||||||
/// This includes properly refilling the lookahead for said descriptors.
|
|
||||||
#[test]
|
|
||||||
fn index_txout_after_changing_descriptor_under_keychain() {
|
|
||||||
let secp = bdk_chain::bitcoin::secp256k1::Secp256k1::signing_only();
|
|
||||||
let (desc_a, _) = Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, DESCRIPTORS[0])
|
|
||||||
.expect("descriptor 0 must be valid");
|
|
||||||
let (desc_b, _) = Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, DESCRIPTORS[1])
|
|
||||||
.expect("descriptor 1 must be valid");
|
|
||||||
let desc_id_a = desc_a.descriptor_id();
|
|
||||||
|
|
||||||
let mut txout_index = bdk_chain::keychain::KeychainTxOutIndex::<()>::new(10);
|
|
||||||
|
|
||||||
// Introduce `desc_a` under keychain `()` and replace the descriptor.
|
|
||||||
let _ = txout_index.insert_descriptor((), desc_a.clone());
|
|
||||||
let _ = txout_index.insert_descriptor((), desc_b.clone());
|
|
||||||
|
|
||||||
// Loop through spks in intervals of `lookahead` to create outputs with. We should always be
|
|
||||||
// able to index these outputs if `lookahead` is respected.
|
|
||||||
let spk_indices = [9, 19, 29, 39];
|
|
||||||
for i in spk_indices {
|
|
||||||
let spk_at_index = desc_a
|
|
||||||
.at_derivation_index(i)
|
|
||||||
.expect("must derive")
|
|
||||||
.script_pubkey();
|
|
||||||
let index_changeset = txout_index.index_txout(
|
|
||||||
// Use spk derivation index as vout as we just want an unique outpoint.
|
|
||||||
OutPoint::new(h!("mock_tx"), i as _),
|
|
||||||
&TxOut {
|
|
||||||
value: Amount::from_sat(10_000),
|
|
||||||
script_pubkey: spk_at_index,
|
|
||||||
},
|
|
||||||
);
|
|
||||||
assert_eq!(
|
|
||||||
index_changeset,
|
|
||||||
bdk_chain::keychain::ChangeSet {
|
|
||||||
keychains_added: BTreeMap::default(),
|
|
||||||
last_revealed: [(desc_id_a, i)].into(),
|
|
||||||
},
|
|
||||||
"must always increase last active if impl respects lookahead"
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn insert_descriptor_no_change() {
|
|
||||||
let secp = Secp256k1::signing_only();
|
|
||||||
let (desc, _) =
|
|
||||||
Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, DESCRIPTORS[0]).unwrap();
|
|
||||||
let mut txout_index = KeychainTxOutIndex::<()>::default();
|
|
||||||
assert_eq!(
|
|
||||||
txout_index.insert_descriptor((), desc.clone()),
|
|
||||||
keychain::ChangeSet {
|
|
||||||
keychains_added: [((), desc.clone())].into(),
|
|
||||||
last_revealed: Default::default()
|
|
||||||
},
|
|
||||||
);
|
|
||||||
assert_eq!(
|
|
||||||
txout_index.insert_descriptor((), desc.clone()),
|
|
||||||
keychain::ChangeSet::default(),
|
|
||||||
"inserting the same descriptor for keychain should return an empty changeset",
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn applying_changesets_one_by_one_vs_aggregate_must_have_same_result() {
|
fn applying_changesets_one_by_one_vs_aggregate_must_have_same_result() {
|
||||||
let desc = parse_descriptor(DESCRIPTORS[0]);
|
let desc = parse_descriptor(DESCRIPTORS[0]);
|
||||||
let changesets: &[ChangeSet<TestKeychain>] = &[
|
let changesets: &[ChangeSet] = &[
|
||||||
ChangeSet {
|
ChangeSet {
|
||||||
keychains_added: [(TestKeychain::Internal, desc.clone())].into(),
|
last_revealed: [(desc.descriptor_id(), 10)].into(),
|
||||||
last_revealed: [].into(),
|
|
||||||
},
|
},
|
||||||
ChangeSet {
|
ChangeSet {
|
||||||
keychains_added: [(TestKeychain::External, desc.clone())].into(),
|
|
||||||
last_revealed: [(desc.descriptor_id(), 12)].into(),
|
last_revealed: [(desc.descriptor_id(), 12)].into(),
|
||||||
},
|
},
|
||||||
];
|
];
|
||||||
|
|
||||||
let mut indexer_a = KeychainTxOutIndex::<TestKeychain>::new(0);
|
let mut indexer_a = KeychainTxOutIndex::<TestKeychain>::new(0);
|
||||||
|
indexer_a
|
||||||
|
.insert_descriptor(TestKeychain::External, desc.clone())
|
||||||
|
.expect("must insert keychain");
|
||||||
for changeset in changesets {
|
for changeset in changesets {
|
||||||
indexer_a.apply_changeset(changeset.clone());
|
indexer_a.apply_changeset(changeset.clone());
|
||||||
}
|
}
|
||||||
|
|
||||||
let mut indexer_b = KeychainTxOutIndex::<TestKeychain>::new(0);
|
let mut indexer_b = KeychainTxOutIndex::<TestKeychain>::new(0);
|
||||||
|
indexer_b
|
||||||
|
.insert_descriptor(TestKeychain::External, desc.clone())
|
||||||
|
.expect("must insert keychain");
|
||||||
let aggregate_changesets = changesets
|
let aggregate_changesets = changesets
|
||||||
.iter()
|
.iter()
|
||||||
.cloned()
|
.cloned()
|
||||||
.reduce(|mut agg, cs| {
|
.reduce(|mut agg, cs| {
|
||||||
agg.append(cs);
|
agg.merge(cs);
|
||||||
agg
|
agg
|
||||||
})
|
})
|
||||||
.expect("must aggregate changesets");
|
.expect("must aggregate changesets");
|
||||||
@@ -743,37 +630,60 @@ fn applying_changesets_one_by_one_vs_aggregate_must_have_same_result() {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
// When the same descriptor is associated with various keychains,
|
|
||||||
// index methods only return the highest keychain by Ord
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_only_highest_ord_keychain_is_returned() {
|
fn assigning_same_descriptor_to_multiple_keychains_should_error() {
|
||||||
let desc = parse_descriptor(DESCRIPTORS[0]);
|
let desc = parse_descriptor(DESCRIPTORS[0]);
|
||||||
|
|
||||||
let mut indexer = KeychainTxOutIndex::<TestKeychain>::new(0);
|
let mut indexer = KeychainTxOutIndex::<TestKeychain>::new(0);
|
||||||
let _ = indexer.insert_descriptor(TestKeychain::Internal, desc.clone());
|
let _ = indexer
|
||||||
let _ = indexer.insert_descriptor(TestKeychain::External, desc);
|
.insert_descriptor(TestKeychain::Internal, desc.clone())
|
||||||
|
.unwrap();
|
||||||
|
assert!(indexer
|
||||||
|
.insert_descriptor(TestKeychain::External, desc)
|
||||||
|
.is_err())
|
||||||
|
}
|
||||||
|
|
||||||
// reveal_next_spk will work with either keychain
|
#[test]
|
||||||
let spk0: ScriptBuf = indexer
|
fn reassigning_keychain_to_a_new_descriptor_should_error() {
|
||||||
.reveal_next_spk(&TestKeychain::External)
|
let desc1 = parse_descriptor(DESCRIPTORS[0]);
|
||||||
.unwrap()
|
let desc2 = parse_descriptor(DESCRIPTORS[1]);
|
||||||
.0
|
let mut indexer = KeychainTxOutIndex::<TestKeychain>::new(0);
|
||||||
.1
|
let _ = indexer.insert_descriptor(TestKeychain::Internal, desc1);
|
||||||
.into();
|
assert!(indexer
|
||||||
let spk1: ScriptBuf = indexer
|
.insert_descriptor(TestKeychain::Internal, desc2)
|
||||||
.reveal_next_spk(&TestKeychain::Internal)
|
.is_err());
|
||||||
.unwrap()
|
}
|
||||||
.0
|
|
||||||
.1
|
|
||||||
.into();
|
|
||||||
|
|
||||||
// index_of_spk will always return External
|
#[test]
|
||||||
|
fn when_querying_over_a_range_of_keychains_the_utxos_should_show_up() {
|
||||||
|
let mut indexer = KeychainTxOutIndex::<usize>::new(0);
|
||||||
|
let mut tx = common::new_tx(0);
|
||||||
|
|
||||||
|
for (i, descriptor) in DESCRIPTORS.iter().enumerate() {
|
||||||
|
let descriptor = parse_descriptor(descriptor);
|
||||||
|
let _ = indexer.insert_descriptor(i, descriptor.clone()).unwrap();
|
||||||
|
if i != 4 {
|
||||||
|
// skip one in the middle to see if uncovers any bugs
|
||||||
|
indexer.reveal_next_spk(i);
|
||||||
|
}
|
||||||
|
tx.output.push(TxOut {
|
||||||
|
script_pubkey: descriptor.at_derivation_index(0).unwrap().script_pubkey(),
|
||||||
|
value: Amount::from_sat(10_000),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
let n_spks = DESCRIPTORS.len() - /*we skipped one*/ 1;
|
||||||
|
|
||||||
|
let _ = indexer.index_tx(&tx);
|
||||||
|
assert_eq!(indexer.outpoints().len(), n_spks);
|
||||||
|
|
||||||
|
assert_eq!(indexer.revealed_spks(0..DESCRIPTORS.len()).count(), n_spks);
|
||||||
|
assert_eq!(indexer.revealed_spks(1..4).count(), 4 - 1);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
indexer.index_of_spk(&spk0),
|
indexer.net_value(&tx, 0..DESCRIPTORS.len()).to_sat(),
|
||||||
Some((TestKeychain::External, 0))
|
(10_000 * n_spks) as i64
|
||||||
);
|
);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
indexer.index_of_spk(&spk1),
|
indexer.net_value(&tx, 3..6).to_sat(),
|
||||||
Some((TestKeychain::External, 1))
|
(10_000 * (6 - 3 - /*the skipped one*/ 1)) as i64
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
use bdk_chain::{indexed_tx_graph::Indexer, SpkTxOutIndex};
|
use bdk_chain::{spk_txout::SpkTxOutIndex, Indexer};
|
||||||
use bitcoin::{
|
use bitcoin::{
|
||||||
absolute, transaction, Amount, OutPoint, ScriptBuf, SignedAmount, Transaction, TxIn, TxOut,
|
absolute, transaction, Amount, OutPoint, ScriptBuf, SignedAmount, Transaction, TxIn, TxOut,
|
||||||
};
|
};
|
||||||
@@ -47,7 +47,7 @@ fn spk_txout_sent_and_received() {
|
|||||||
lock_time: absolute::LockTime::ZERO,
|
lock_time: absolute::LockTime::ZERO,
|
||||||
input: vec![TxIn {
|
input: vec![TxIn {
|
||||||
previous_output: OutPoint {
|
previous_output: OutPoint {
|
||||||
txid: tx1.txid(),
|
txid: tx1.compute_txid(),
|
||||||
vout: 0,
|
vout: 0,
|
||||||
},
|
},
|
||||||
..Default::default()
|
..Default::default()
|
||||||
|
|||||||
@@ -7,11 +7,11 @@ use bdk_chain::{
|
|||||||
collections::*,
|
collections::*,
|
||||||
local_chain::LocalChain,
|
local_chain::LocalChain,
|
||||||
tx_graph::{ChangeSet, TxGraph},
|
tx_graph::{ChangeSet, TxGraph},
|
||||||
Anchor, Append, BlockId, ChainOracle, ChainPosition, ConfirmationHeightAnchor,
|
Anchor, BlockId, ChainOracle, ChainPosition, ConfirmationBlockTime, Merge,
|
||||||
};
|
};
|
||||||
use bitcoin::{
|
use bitcoin::{
|
||||||
absolute, hashes::Hash, transaction, Amount, BlockHash, OutPoint, ScriptBuf, Transaction, TxIn,
|
absolute, hashes::Hash, transaction, Amount, BlockHash, OutPoint, ScriptBuf, SignedAmount,
|
||||||
TxOut, Txid,
|
Transaction, TxIn, TxOut, Txid,
|
||||||
};
|
};
|
||||||
use common::*;
|
use common::*;
|
||||||
use core::iter;
|
use core::iter;
|
||||||
@@ -130,11 +130,11 @@ fn insert_txouts() {
|
|||||||
|
|
||||||
// Mark it as confirmed.
|
// Mark it as confirmed.
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
graph.insert_anchor(update_txs.txid(), conf_anchor),
|
graph.insert_anchor(update_txs.compute_txid(), conf_anchor),
|
||||||
ChangeSet {
|
ChangeSet {
|
||||||
txs: [].into(),
|
txs: [].into(),
|
||||||
txouts: [].into(),
|
txouts: [].into(),
|
||||||
anchors: [(conf_anchor, update_txs.txid())].into(),
|
anchors: [(conf_anchor, update_txs.compute_txid())].into(),
|
||||||
last_seen: [].into()
|
last_seen: [].into()
|
||||||
}
|
}
|
||||||
);
|
);
|
||||||
@@ -149,7 +149,11 @@ fn insert_txouts() {
|
|||||||
ChangeSet {
|
ChangeSet {
|
||||||
txs: [Arc::new(update_txs.clone())].into(),
|
txs: [Arc::new(update_txs.clone())].into(),
|
||||||
txouts: update_ops.clone().into(),
|
txouts: update_ops.clone().into(),
|
||||||
anchors: [(conf_anchor, update_txs.txid()), (unconf_anchor, h!("tx2"))].into(),
|
anchors: [
|
||||||
|
(conf_anchor, update_txs.compute_txid()),
|
||||||
|
(unconf_anchor, h!("tx2"))
|
||||||
|
]
|
||||||
|
.into(),
|
||||||
last_seen: [(h!("tx2"), 1000000)].into()
|
last_seen: [(h!("tx2"), 1000000)].into()
|
||||||
}
|
}
|
||||||
);
|
);
|
||||||
@@ -183,7 +187,9 @@ fn insert_txouts() {
|
|||||||
);
|
);
|
||||||
|
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
graph.tx_outputs(update_txs.txid()).expect("should exists"),
|
graph
|
||||||
|
.tx_outputs(update_txs.compute_txid())
|
||||||
|
.expect("should exists"),
|
||||||
[(
|
[(
|
||||||
0u32,
|
0u32,
|
||||||
&TxOut {
|
&TxOut {
|
||||||
@@ -200,7 +206,11 @@ fn insert_txouts() {
|
|||||||
ChangeSet {
|
ChangeSet {
|
||||||
txs: [Arc::new(update_txs.clone())].into(),
|
txs: [Arc::new(update_txs.clone())].into(),
|
||||||
txouts: update_ops.into_iter().chain(original_ops).collect(),
|
txouts: update_ops.into_iter().chain(original_ops).collect(),
|
||||||
anchors: [(conf_anchor, update_txs.txid()), (unconf_anchor, h!("tx2"))].into(),
|
anchors: [
|
||||||
|
(conf_anchor, update_txs.compute_txid()),
|
||||||
|
(unconf_anchor, h!("tx2"))
|
||||||
|
]
|
||||||
|
.into(),
|
||||||
last_seen: [(h!("tx2"), 1000000)].into()
|
last_seen: [(h!("tx2"), 1000000)].into()
|
||||||
}
|
}
|
||||||
);
|
);
|
||||||
@@ -235,7 +245,7 @@ fn insert_tx_graph_keeps_track_of_spend() {
|
|||||||
};
|
};
|
||||||
|
|
||||||
let op = OutPoint {
|
let op = OutPoint {
|
||||||
txid: tx1.txid(),
|
txid: tx1.compute_txid(),
|
||||||
vout: 0,
|
vout: 0,
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -261,7 +271,7 @@ fn insert_tx_graph_keeps_track_of_spend() {
|
|||||||
|
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
graph1.outspends(op),
|
graph1.outspends(op),
|
||||||
&iter::once(tx2.txid()).collect::<HashSet<_>>()
|
&iter::once(tx2.compute_txid()).collect::<HashSet<_>>()
|
||||||
);
|
);
|
||||||
assert_eq!(graph2.outspends(op), graph1.outspends(op));
|
assert_eq!(graph2.outspends(op), graph1.outspends(op));
|
||||||
}
|
}
|
||||||
@@ -281,7 +291,9 @@ fn insert_tx_can_retrieve_full_tx_from_graph() {
|
|||||||
let mut graph = TxGraph::<()>::default();
|
let mut graph = TxGraph::<()>::default();
|
||||||
let _ = graph.insert_tx(tx.clone());
|
let _ = graph.insert_tx(tx.clone());
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
graph.get_tx(tx.txid()).map(|tx| tx.as_ref().clone()),
|
graph
|
||||||
|
.get_tx(tx.compute_txid())
|
||||||
|
.map(|tx| tx.as_ref().clone()),
|
||||||
Some(tx)
|
Some(tx)
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
@@ -301,7 +313,7 @@ fn insert_tx_displaces_txouts() {
|
|||||||
|
|
||||||
let changeset = tx_graph.insert_txout(
|
let changeset = tx_graph.insert_txout(
|
||||||
OutPoint {
|
OutPoint {
|
||||||
txid: tx.txid(),
|
txid: tx.compute_txid(),
|
||||||
vout: 0,
|
vout: 0,
|
||||||
},
|
},
|
||||||
TxOut {
|
TxOut {
|
||||||
@@ -314,7 +326,7 @@ fn insert_tx_displaces_txouts() {
|
|||||||
|
|
||||||
let _ = tx_graph.insert_txout(
|
let _ = tx_graph.insert_txout(
|
||||||
OutPoint {
|
OutPoint {
|
||||||
txid: tx.txid(),
|
txid: tx.compute_txid(),
|
||||||
vout: 0,
|
vout: 0,
|
||||||
},
|
},
|
||||||
TxOut {
|
TxOut {
|
||||||
@@ -328,7 +340,7 @@ fn insert_tx_displaces_txouts() {
|
|||||||
assert_eq!(
|
assert_eq!(
|
||||||
tx_graph
|
tx_graph
|
||||||
.get_txout(OutPoint {
|
.get_txout(OutPoint {
|
||||||
txid: tx.txid(),
|
txid: tx.compute_txid(),
|
||||||
vout: 0
|
vout: 0
|
||||||
})
|
})
|
||||||
.unwrap()
|
.unwrap()
|
||||||
@@ -337,7 +349,7 @@ fn insert_tx_displaces_txouts() {
|
|||||||
);
|
);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
tx_graph.get_txout(OutPoint {
|
tx_graph.get_txout(OutPoint {
|
||||||
txid: tx.txid(),
|
txid: tx.compute_txid(),
|
||||||
vout: 1
|
vout: 1
|
||||||
}),
|
}),
|
||||||
None
|
None
|
||||||
@@ -361,7 +373,7 @@ fn insert_txout_does_not_displace_tx() {
|
|||||||
|
|
||||||
let _ = tx_graph.insert_txout(
|
let _ = tx_graph.insert_txout(
|
||||||
OutPoint {
|
OutPoint {
|
||||||
txid: tx.txid(),
|
txid: tx.compute_txid(),
|
||||||
vout: 0,
|
vout: 0,
|
||||||
},
|
},
|
||||||
TxOut {
|
TxOut {
|
||||||
@@ -372,7 +384,7 @@ fn insert_txout_does_not_displace_tx() {
|
|||||||
|
|
||||||
let _ = tx_graph.insert_txout(
|
let _ = tx_graph.insert_txout(
|
||||||
OutPoint {
|
OutPoint {
|
||||||
txid: tx.txid(),
|
txid: tx.compute_txid(),
|
||||||
vout: 0,
|
vout: 0,
|
||||||
},
|
},
|
||||||
TxOut {
|
TxOut {
|
||||||
@@ -384,7 +396,7 @@ fn insert_txout_does_not_displace_tx() {
|
|||||||
assert_eq!(
|
assert_eq!(
|
||||||
tx_graph
|
tx_graph
|
||||||
.get_txout(OutPoint {
|
.get_txout(OutPoint {
|
||||||
txid: tx.txid(),
|
txid: tx.compute_txid(),
|
||||||
vout: 0
|
vout: 0
|
||||||
})
|
})
|
||||||
.unwrap()
|
.unwrap()
|
||||||
@@ -393,7 +405,7 @@ fn insert_txout_does_not_displace_tx() {
|
|||||||
);
|
);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
tx_graph.get_txout(OutPoint {
|
tx_graph.get_txout(OutPoint {
|
||||||
txid: tx.txid(),
|
txid: tx.compute_txid(),
|
||||||
vout: 1
|
vout: 1
|
||||||
}),
|
}),
|
||||||
None
|
None
|
||||||
@@ -443,14 +455,14 @@ fn test_calculate_fee() {
|
|||||||
input: vec![
|
input: vec![
|
||||||
TxIn {
|
TxIn {
|
||||||
previous_output: OutPoint {
|
previous_output: OutPoint {
|
||||||
txid: intx1.txid(),
|
txid: intx1.compute_txid(),
|
||||||
vout: 0,
|
vout: 0,
|
||||||
},
|
},
|
||||||
..Default::default()
|
..Default::default()
|
||||||
},
|
},
|
||||||
TxIn {
|
TxIn {
|
||||||
previous_output: OutPoint {
|
previous_output: OutPoint {
|
||||||
txid: intx2.txid(),
|
txid: intx2.compute_txid(),
|
||||||
vout: 0,
|
vout: 0,
|
||||||
},
|
},
|
||||||
..Default::default()
|
..Default::default()
|
||||||
@@ -466,14 +478,14 @@ fn test_calculate_fee() {
|
|||||||
}],
|
}],
|
||||||
};
|
};
|
||||||
|
|
||||||
assert_eq!(graph.calculate_fee(&tx), Ok(100));
|
assert_eq!(graph.calculate_fee(&tx), Ok(Amount::from_sat(100)));
|
||||||
|
|
||||||
tx.input.remove(2);
|
tx.input.remove(2);
|
||||||
|
|
||||||
// fee would be negative, should return CalculateFeeError::NegativeFee
|
// fee would be negative, should return CalculateFeeError::NegativeFee
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
graph.calculate_fee(&tx),
|
graph.calculate_fee(&tx),
|
||||||
Err(CalculateFeeError::NegativeFee(-200))
|
Err(CalculateFeeError::NegativeFee(SignedAmount::from_sat(-200)))
|
||||||
);
|
);
|
||||||
|
|
||||||
// If we have an unknown outpoint, fee should return CalculateFeeError::MissingTxOut.
|
// If we have an unknown outpoint, fee should return CalculateFeeError::MissingTxOut.
|
||||||
@@ -505,7 +517,7 @@ fn test_calculate_fee_on_coinbase() {
|
|||||||
|
|
||||||
let graph = TxGraph::<()>::default();
|
let graph = TxGraph::<()>::default();
|
||||||
|
|
||||||
assert_eq!(graph.calculate_fee(&tx), Ok(0));
|
assert_eq!(graph.calculate_fee(&tx), Ok(Amount::ZERO));
|
||||||
}
|
}
|
||||||
|
|
||||||
// `test_walk_ancestors` uses the following transaction structure:
|
// `test_walk_ancestors` uses the following transaction structure:
|
||||||
@@ -543,7 +555,7 @@ fn test_walk_ancestors() {
|
|||||||
// tx_b0 spends tx_a0
|
// tx_b0 spends tx_a0
|
||||||
let tx_b0 = Transaction {
|
let tx_b0 = Transaction {
|
||||||
input: vec![TxIn {
|
input: vec![TxIn {
|
||||||
previous_output: OutPoint::new(tx_a0.txid(), 0),
|
previous_output: OutPoint::new(tx_a0.compute_txid(), 0),
|
||||||
..TxIn::default()
|
..TxIn::default()
|
||||||
}],
|
}],
|
||||||
output: vec![TxOut::NULL, TxOut::NULL],
|
output: vec![TxOut::NULL, TxOut::NULL],
|
||||||
@@ -553,7 +565,7 @@ fn test_walk_ancestors() {
|
|||||||
// tx_b1 spends tx_a0
|
// tx_b1 spends tx_a0
|
||||||
let tx_b1 = Transaction {
|
let tx_b1 = Transaction {
|
||||||
input: vec![TxIn {
|
input: vec![TxIn {
|
||||||
previous_output: OutPoint::new(tx_a0.txid(), 1),
|
previous_output: OutPoint::new(tx_a0.compute_txid(), 1),
|
||||||
..TxIn::default()
|
..TxIn::default()
|
||||||
}],
|
}],
|
||||||
output: vec![TxOut::NULL],
|
output: vec![TxOut::NULL],
|
||||||
@@ -572,7 +584,7 @@ fn test_walk_ancestors() {
|
|||||||
// tx_c0 spends tx_b0
|
// tx_c0 spends tx_b0
|
||||||
let tx_c0 = Transaction {
|
let tx_c0 = Transaction {
|
||||||
input: vec![TxIn {
|
input: vec![TxIn {
|
||||||
previous_output: OutPoint::new(tx_b0.txid(), 0),
|
previous_output: OutPoint::new(tx_b0.compute_txid(), 0),
|
||||||
..TxIn::default()
|
..TxIn::default()
|
||||||
}],
|
}],
|
||||||
output: vec![TxOut::NULL],
|
output: vec![TxOut::NULL],
|
||||||
@@ -582,7 +594,7 @@ fn test_walk_ancestors() {
|
|||||||
// tx_c1 spends tx_b0
|
// tx_c1 spends tx_b0
|
||||||
let tx_c1 = Transaction {
|
let tx_c1 = Transaction {
|
||||||
input: vec![TxIn {
|
input: vec![TxIn {
|
||||||
previous_output: OutPoint::new(tx_b0.txid(), 1),
|
previous_output: OutPoint::new(tx_b0.compute_txid(), 1),
|
||||||
..TxIn::default()
|
..TxIn::default()
|
||||||
}],
|
}],
|
||||||
output: vec![TxOut::NULL],
|
output: vec![TxOut::NULL],
|
||||||
@@ -593,11 +605,11 @@ fn test_walk_ancestors() {
|
|||||||
let tx_c2 = Transaction {
|
let tx_c2 = Transaction {
|
||||||
input: vec![
|
input: vec![
|
||||||
TxIn {
|
TxIn {
|
||||||
previous_output: OutPoint::new(tx_b1.txid(), 0),
|
previous_output: OutPoint::new(tx_b1.compute_txid(), 0),
|
||||||
..TxIn::default()
|
..TxIn::default()
|
||||||
},
|
},
|
||||||
TxIn {
|
TxIn {
|
||||||
previous_output: OutPoint::new(tx_b2.txid(), 0),
|
previous_output: OutPoint::new(tx_b2.compute_txid(), 0),
|
||||||
..TxIn::default()
|
..TxIn::default()
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
@@ -617,7 +629,7 @@ fn test_walk_ancestors() {
|
|||||||
// tx_d0 spends tx_c1
|
// tx_d0 spends tx_c1
|
||||||
let tx_d0 = Transaction {
|
let tx_d0 = Transaction {
|
||||||
input: vec![TxIn {
|
input: vec![TxIn {
|
||||||
previous_output: OutPoint::new(tx_c1.txid(), 0),
|
previous_output: OutPoint::new(tx_c1.compute_txid(), 0),
|
||||||
..TxIn::default()
|
..TxIn::default()
|
||||||
}],
|
}],
|
||||||
output: vec![TxOut::NULL],
|
output: vec![TxOut::NULL],
|
||||||
@@ -628,11 +640,11 @@ fn test_walk_ancestors() {
|
|||||||
let tx_d1 = Transaction {
|
let tx_d1 = Transaction {
|
||||||
input: vec![
|
input: vec![
|
||||||
TxIn {
|
TxIn {
|
||||||
previous_output: OutPoint::new(tx_c2.txid(), 0),
|
previous_output: OutPoint::new(tx_c2.compute_txid(), 0),
|
||||||
..TxIn::default()
|
..TxIn::default()
|
||||||
},
|
},
|
||||||
TxIn {
|
TxIn {
|
||||||
previous_output: OutPoint::new(tx_c3.txid(), 0),
|
previous_output: OutPoint::new(tx_c3.compute_txid(), 0),
|
||||||
..TxIn::default()
|
..TxIn::default()
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
@@ -643,7 +655,7 @@ fn test_walk_ancestors() {
|
|||||||
// tx_e0 spends tx_d1
|
// tx_e0 spends tx_d1
|
||||||
let tx_e0 = Transaction {
|
let tx_e0 = Transaction {
|
||||||
input: vec![TxIn {
|
input: vec![TxIn {
|
||||||
previous_output: OutPoint::new(tx_d1.txid(), 0),
|
previous_output: OutPoint::new(tx_d1.compute_txid(), 0),
|
||||||
..TxIn::default()
|
..TxIn::default()
|
||||||
}],
|
}],
|
||||||
output: vec![TxOut::NULL],
|
output: vec![TxOut::NULL],
|
||||||
@@ -665,7 +677,7 @@ fn test_walk_ancestors() {
|
|||||||
]);
|
]);
|
||||||
|
|
||||||
[&tx_a0, &tx_b1].iter().for_each(|&tx| {
|
[&tx_a0, &tx_b1].iter().for_each(|&tx| {
|
||||||
let changeset = graph.insert_anchor(tx.txid(), tip.block_id());
|
let changeset = graph.insert_anchor(tx.compute_txid(), tip.block_id());
|
||||||
assert!(!changeset.is_empty());
|
assert!(!changeset.is_empty());
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -682,7 +694,7 @@ fn test_walk_ancestors() {
|
|||||||
// Only traverse unconfirmed ancestors of tx_e0 this time
|
// Only traverse unconfirmed ancestors of tx_e0 this time
|
||||||
graph
|
graph
|
||||||
.walk_ancestors(tx_e0.clone(), |depth, tx| {
|
.walk_ancestors(tx_e0.clone(), |depth, tx| {
|
||||||
let tx_node = graph.get_tx_node(tx.txid())?;
|
let tx_node = graph.get_tx_node(tx.compute_txid())?;
|
||||||
for block in tx_node.anchors {
|
for block in tx_node.anchors {
|
||||||
match local_chain.is_block_in_chain(block.anchor_block(), tip.block_id()) {
|
match local_chain.is_block_in_chain(block.anchor_block(), tip.block_id()) {
|
||||||
Ok(Some(true)) => return None,
|
Ok(Some(true)) => return None,
|
||||||
@@ -746,15 +758,15 @@ fn test_conflicting_descendants() {
|
|||||||
// tx_b spends tx_a
|
// tx_b spends tx_a
|
||||||
let tx_b = Transaction {
|
let tx_b = Transaction {
|
||||||
input: vec![TxIn {
|
input: vec![TxIn {
|
||||||
previous_output: OutPoint::new(tx_a.txid(), 0),
|
previous_output: OutPoint::new(tx_a.compute_txid(), 0),
|
||||||
..TxIn::default()
|
..TxIn::default()
|
||||||
}],
|
}],
|
||||||
output: vec![TxOut::NULL],
|
output: vec![TxOut::NULL],
|
||||||
..common::new_tx(2)
|
..common::new_tx(2)
|
||||||
};
|
};
|
||||||
|
|
||||||
let txid_a = tx_a.txid();
|
let txid_a = tx_a.compute_txid();
|
||||||
let txid_b = tx_b.txid();
|
let txid_b = tx_b.compute_txid();
|
||||||
|
|
||||||
let mut graph = TxGraph::<()>::default();
|
let mut graph = TxGraph::<()>::default();
|
||||||
let _ = graph.insert_tx(tx_a);
|
let _ = graph.insert_tx(tx_a);
|
||||||
@@ -778,7 +790,7 @@ fn test_descendants_no_repeat() {
|
|||||||
let txs_b = (0..3)
|
let txs_b = (0..3)
|
||||||
.map(|vout| Transaction {
|
.map(|vout| Transaction {
|
||||||
input: vec![TxIn {
|
input: vec![TxIn {
|
||||||
previous_output: OutPoint::new(tx_a.txid(), vout),
|
previous_output: OutPoint::new(tx_a.compute_txid(), vout),
|
||||||
..TxIn::default()
|
..TxIn::default()
|
||||||
}],
|
}],
|
||||||
output: vec![TxOut::NULL],
|
output: vec![TxOut::NULL],
|
||||||
@@ -789,7 +801,7 @@ fn test_descendants_no_repeat() {
|
|||||||
let txs_c = (0..2)
|
let txs_c = (0..2)
|
||||||
.map(|vout| Transaction {
|
.map(|vout| Transaction {
|
||||||
input: vec![TxIn {
|
input: vec![TxIn {
|
||||||
previous_output: OutPoint::new(txs_b[vout as usize].txid(), vout),
|
previous_output: OutPoint::new(txs_b[vout as usize].compute_txid(), vout),
|
||||||
..TxIn::default()
|
..TxIn::default()
|
||||||
}],
|
}],
|
||||||
output: vec![TxOut::NULL],
|
output: vec![TxOut::NULL],
|
||||||
@@ -800,11 +812,11 @@ fn test_descendants_no_repeat() {
|
|||||||
let tx_d = Transaction {
|
let tx_d = Transaction {
|
||||||
input: vec![
|
input: vec![
|
||||||
TxIn {
|
TxIn {
|
||||||
previous_output: OutPoint::new(txs_c[0].txid(), 0),
|
previous_output: OutPoint::new(txs_c[0].compute_txid(), 0),
|
||||||
..TxIn::default()
|
..TxIn::default()
|
||||||
},
|
},
|
||||||
TxIn {
|
TxIn {
|
||||||
previous_output: OutPoint::new(txs_c[1].txid(), 0),
|
previous_output: OutPoint::new(txs_c[1].compute_txid(), 0),
|
||||||
..TxIn::default()
|
..TxIn::default()
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
@@ -814,7 +826,7 @@ fn test_descendants_no_repeat() {
|
|||||||
|
|
||||||
let tx_e = Transaction {
|
let tx_e = Transaction {
|
||||||
input: vec![TxIn {
|
input: vec![TxIn {
|
||||||
previous_output: OutPoint::new(tx_d.txid(), 0),
|
previous_output: OutPoint::new(tx_d.compute_txid(), 0),
|
||||||
..TxIn::default()
|
..TxIn::default()
|
||||||
}],
|
}],
|
||||||
output: vec![TxOut::NULL],
|
output: vec![TxOut::NULL],
|
||||||
@@ -848,11 +860,11 @@ fn test_descendants_no_repeat() {
|
|||||||
.chain(core::iter::once(&tx_e))
|
.chain(core::iter::once(&tx_e))
|
||||||
{
|
{
|
||||||
let _ = graph.insert_tx(tx.clone());
|
let _ = graph.insert_tx(tx.clone());
|
||||||
expected_txids.push(tx.txid());
|
expected_txids.push(tx.compute_txid());
|
||||||
}
|
}
|
||||||
|
|
||||||
let descendants = graph
|
let descendants = graph
|
||||||
.walk_descendants(tx_a.txid(), |_, txid| Some(txid))
|
.walk_descendants(tx_a.compute_txid(), |_, txid| Some(txid))
|
||||||
.collect::<Vec<_>>();
|
.collect::<Vec<_>>();
|
||||||
|
|
||||||
assert_eq!(descendants, expected_txids);
|
assert_eq!(descendants, expected_txids);
|
||||||
@@ -888,7 +900,7 @@ fn test_chain_spends() {
|
|||||||
// The first confirmed transaction spends vout: 0. And is confirmed at block 98.
|
// The first confirmed transaction spends vout: 0. And is confirmed at block 98.
|
||||||
let tx_1 = Transaction {
|
let tx_1 = Transaction {
|
||||||
input: vec![TxIn {
|
input: vec![TxIn {
|
||||||
previous_output: OutPoint::new(tx_0.txid(), 0),
|
previous_output: OutPoint::new(tx_0.compute_txid(), 0),
|
||||||
..TxIn::default()
|
..TxIn::default()
|
||||||
}],
|
}],
|
||||||
output: vec![
|
output: vec![
|
||||||
@@ -907,7 +919,7 @@ fn test_chain_spends() {
|
|||||||
// The second transactions spends vout:1, and is unconfirmed.
|
// The second transactions spends vout:1, and is unconfirmed.
|
||||||
let tx_2 = Transaction {
|
let tx_2 = Transaction {
|
||||||
input: vec![TxIn {
|
input: vec![TxIn {
|
||||||
previous_output: OutPoint::new(tx_0.txid(), 1),
|
previous_output: OutPoint::new(tx_0.compute_txid(), 1),
|
||||||
..TxIn::default()
|
..TxIn::default()
|
||||||
}],
|
}],
|
||||||
output: vec![
|
output: vec![
|
||||||
@@ -923,7 +935,7 @@ fn test_chain_spends() {
|
|||||||
..common::new_tx(0)
|
..common::new_tx(0)
|
||||||
};
|
};
|
||||||
|
|
||||||
let mut graph = TxGraph::<ConfirmationHeightAnchor>::default();
|
let mut graph = TxGraph::<ConfirmationBlockTime>::default();
|
||||||
|
|
||||||
let _ = graph.insert_tx(tx_0.clone());
|
let _ = graph.insert_tx(tx_0.clone());
|
||||||
let _ = graph.insert_tx(tx_1.clone());
|
let _ = graph.insert_tx(tx_1.clone());
|
||||||
@@ -931,57 +943,65 @@ fn test_chain_spends() {
|
|||||||
|
|
||||||
for (ht, tx) in [(95, &tx_0), (98, &tx_1)] {
|
for (ht, tx) in [(95, &tx_0), (98, &tx_1)] {
|
||||||
let _ = graph.insert_anchor(
|
let _ = graph.insert_anchor(
|
||||||
tx.txid(),
|
tx.compute_txid(),
|
||||||
ConfirmationHeightAnchor {
|
ConfirmationBlockTime {
|
||||||
anchor_block: tip.block_id(),
|
block_id: tip.get(ht).unwrap().block_id(),
|
||||||
confirmation_height: ht,
|
confirmation_time: 100,
|
||||||
},
|
},
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Assert that confirmed spends are returned correctly.
|
// Assert that confirmed spends are returned correctly.
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
graph.get_chain_spend(&local_chain, tip.block_id(), OutPoint::new(tx_0.txid(), 0)),
|
graph.get_chain_spend(
|
||||||
|
&local_chain,
|
||||||
|
tip.block_id(),
|
||||||
|
OutPoint::new(tx_0.compute_txid(), 0)
|
||||||
|
),
|
||||||
Some((
|
Some((
|
||||||
ChainPosition::Confirmed(&ConfirmationHeightAnchor {
|
ChainPosition::Confirmed(&ConfirmationBlockTime {
|
||||||
anchor_block: tip.block_id(),
|
block_id: BlockId {
|
||||||
confirmation_height: 98
|
hash: tip.get(98).unwrap().hash(),
|
||||||
|
height: 98,
|
||||||
|
},
|
||||||
|
confirmation_time: 100
|
||||||
}),
|
}),
|
||||||
tx_1.txid(),
|
tx_1.compute_txid(),
|
||||||
)),
|
)),
|
||||||
);
|
);
|
||||||
|
|
||||||
// Check if chain position is returned correctly.
|
// Check if chain position is returned correctly.
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
graph.get_chain_position(&local_chain, tip.block_id(), tx_0.txid()),
|
graph.get_chain_position(&local_chain, tip.block_id(), tx_0.compute_txid()),
|
||||||
// Some(ObservedAs::Confirmed(&local_chain.get_block(95).expect("block expected"))),
|
// Some(ObservedAs::Confirmed(&local_chain.get_block(95).expect("block expected"))),
|
||||||
Some(ChainPosition::Confirmed(&ConfirmationHeightAnchor {
|
Some(ChainPosition::Confirmed(&ConfirmationBlockTime {
|
||||||
anchor_block: tip.block_id(),
|
block_id: BlockId {
|
||||||
confirmation_height: 95
|
hash: tip.get(95).unwrap().hash(),
|
||||||
|
height: 95,
|
||||||
|
},
|
||||||
|
confirmation_time: 100
|
||||||
}))
|
}))
|
||||||
);
|
);
|
||||||
|
|
||||||
// Even if unconfirmed tx has a last_seen of 0, it can still be part of a chain spend.
|
|
||||||
assert_eq!(
|
|
||||||
graph.get_chain_spend(&local_chain, tip.block_id(), OutPoint::new(tx_0.txid(), 1)),
|
|
||||||
Some((ChainPosition::Unconfirmed(0), tx_2.txid())),
|
|
||||||
);
|
|
||||||
|
|
||||||
// Mark the unconfirmed as seen and check correct ObservedAs status is returned.
|
// Mark the unconfirmed as seen and check correct ObservedAs status is returned.
|
||||||
let _ = graph.insert_seen_at(tx_2.txid(), 1234567);
|
let _ = graph.insert_seen_at(tx_2.compute_txid(), 1234567);
|
||||||
|
|
||||||
// Check chain spend returned correctly.
|
// Check chain spend returned correctly.
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
graph
|
graph
|
||||||
.get_chain_spend(&local_chain, tip.block_id(), OutPoint::new(tx_0.txid(), 1))
|
.get_chain_spend(
|
||||||
|
&local_chain,
|
||||||
|
tip.block_id(),
|
||||||
|
OutPoint::new(tx_0.compute_txid(), 1)
|
||||||
|
)
|
||||||
.unwrap(),
|
.unwrap(),
|
||||||
(ChainPosition::Unconfirmed(1234567), tx_2.txid())
|
(ChainPosition::Unconfirmed(1234567), tx_2.compute_txid())
|
||||||
);
|
);
|
||||||
|
|
||||||
// A conflicting transaction that conflicts with tx_1.
|
// A conflicting transaction that conflicts with tx_1.
|
||||||
let tx_1_conflict = Transaction {
|
let tx_1_conflict = Transaction {
|
||||||
input: vec![TxIn {
|
input: vec![TxIn {
|
||||||
previous_output: OutPoint::new(tx_0.txid(), 0),
|
previous_output: OutPoint::new(tx_0.compute_txid(), 0),
|
||||||
..Default::default()
|
..Default::default()
|
||||||
}],
|
}],
|
||||||
..common::new_tx(0)
|
..common::new_tx(0)
|
||||||
@@ -990,13 +1010,13 @@ fn test_chain_spends() {
|
|||||||
|
|
||||||
// Because this tx conflicts with an already confirmed transaction, chain position should return none.
|
// Because this tx conflicts with an already confirmed transaction, chain position should return none.
|
||||||
assert!(graph
|
assert!(graph
|
||||||
.get_chain_position(&local_chain, tip.block_id(), tx_1_conflict.txid())
|
.get_chain_position(&local_chain, tip.block_id(), tx_1_conflict.compute_txid())
|
||||||
.is_none());
|
.is_none());
|
||||||
|
|
||||||
// Another conflicting tx that conflicts with tx_2.
|
// Another conflicting tx that conflicts with tx_2.
|
||||||
let tx_2_conflict = Transaction {
|
let tx_2_conflict = Transaction {
|
||||||
input: vec![TxIn {
|
input: vec![TxIn {
|
||||||
previous_output: OutPoint::new(tx_0.txid(), 1),
|
previous_output: OutPoint::new(tx_0.compute_txid(), 1),
|
||||||
..Default::default()
|
..Default::default()
|
||||||
}],
|
}],
|
||||||
..common::new_tx(0)
|
..common::new_tx(0)
|
||||||
@@ -1004,12 +1024,12 @@ fn test_chain_spends() {
|
|||||||
|
|
||||||
// Insert in graph and mark it as seen.
|
// Insert in graph and mark it as seen.
|
||||||
let _ = graph.insert_tx(tx_2_conflict.clone());
|
let _ = graph.insert_tx(tx_2_conflict.clone());
|
||||||
let _ = graph.insert_seen_at(tx_2_conflict.txid(), 1234568);
|
let _ = graph.insert_seen_at(tx_2_conflict.compute_txid(), 1234568);
|
||||||
|
|
||||||
// This should return a valid observation with correct last seen.
|
// This should return a valid observation with correct last seen.
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
graph
|
graph
|
||||||
.get_chain_position(&local_chain, tip.block_id(), tx_2_conflict.txid())
|
.get_chain_position(&local_chain, tip.block_id(), tx_2_conflict.compute_txid())
|
||||||
.expect("position expected"),
|
.expect("position expected"),
|
||||||
ChainPosition::Unconfirmed(1234568)
|
ChainPosition::Unconfirmed(1234568)
|
||||||
);
|
);
|
||||||
@@ -1017,20 +1037,27 @@ fn test_chain_spends() {
|
|||||||
// Chain_spend now catches the new transaction as the spend.
|
// Chain_spend now catches the new transaction as the spend.
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
graph
|
graph
|
||||||
.get_chain_spend(&local_chain, tip.block_id(), OutPoint::new(tx_0.txid(), 1))
|
.get_chain_spend(
|
||||||
|
&local_chain,
|
||||||
|
tip.block_id(),
|
||||||
|
OutPoint::new(tx_0.compute_txid(), 1)
|
||||||
|
)
|
||||||
.expect("expect observation"),
|
.expect("expect observation"),
|
||||||
(ChainPosition::Unconfirmed(1234568), tx_2_conflict.txid())
|
(
|
||||||
|
ChainPosition::Unconfirmed(1234568),
|
||||||
|
tx_2_conflict.compute_txid()
|
||||||
|
)
|
||||||
);
|
);
|
||||||
|
|
||||||
// Chain position of the `tx_2` is now none, as it is older than `tx_2_conflict`
|
// Chain position of the `tx_2` is now none, as it is older than `tx_2_conflict`
|
||||||
assert!(graph
|
assert!(graph
|
||||||
.get_chain_position(&local_chain, tip.block_id(), tx_2.txid())
|
.get_chain_position(&local_chain, tip.block_id(), tx_2.compute_txid())
|
||||||
.is_none());
|
.is_none());
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Ensure that `last_seen` values only increase during [`Append::append`].
|
/// Ensure that `last_seen` values only increase during [`Merge::merge`].
|
||||||
#[test]
|
#[test]
|
||||||
fn test_changeset_last_seen_append() {
|
fn test_changeset_last_seen_merge() {
|
||||||
let txid: Txid = h!("test txid");
|
let txid: Txid = h!("test txid");
|
||||||
|
|
||||||
let test_cases: &[(Option<u64>, Option<u64>)] = &[
|
let test_cases: &[(Option<u64>, Option<u64>)] = &[
|
||||||
@@ -1053,7 +1080,7 @@ fn test_changeset_last_seen_append() {
|
|||||||
};
|
};
|
||||||
assert!(!update.is_empty() || update_ls.is_none());
|
assert!(!update.is_empty() || update_ls.is_none());
|
||||||
|
|
||||||
original.append(update);
|
original.merge(update);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
&original.last_seen.get(&txid).cloned(),
|
&original.last_seen.get(&txid).cloned(),
|
||||||
Ord::max(original_ls, update_ls),
|
Ord::max(original_ls, update_ls),
|
||||||
@@ -1065,13 +1092,13 @@ fn test_changeset_last_seen_append() {
|
|||||||
fn update_last_seen_unconfirmed() {
|
fn update_last_seen_unconfirmed() {
|
||||||
let mut graph = TxGraph::<()>::default();
|
let mut graph = TxGraph::<()>::default();
|
||||||
let tx = new_tx(0);
|
let tx = new_tx(0);
|
||||||
let txid = tx.txid();
|
let txid = tx.compute_txid();
|
||||||
|
|
||||||
// insert a new tx
|
// insert a new tx
|
||||||
// initially we have a last_seen of 0, and no anchors
|
// initially we have a last_seen of None and no anchors
|
||||||
let _ = graph.insert_tx(tx);
|
let _ = graph.insert_tx(tx);
|
||||||
let tx = graph.full_txs().next().unwrap();
|
let tx = graph.full_txs().next().unwrap();
|
||||||
assert_eq!(tx.last_seen_unconfirmed, 0);
|
assert_eq!(tx.last_seen_unconfirmed, None);
|
||||||
assert!(tx.anchors.is_empty());
|
assert!(tx.anchors.is_empty());
|
||||||
|
|
||||||
// higher timestamp should update last seen
|
// higher timestamp should update last seen
|
||||||
@@ -1086,7 +1113,56 @@ fn update_last_seen_unconfirmed() {
|
|||||||
let _ = graph.insert_anchor(txid, ());
|
let _ = graph.insert_anchor(txid, ());
|
||||||
let changeset = graph.update_last_seen_unconfirmed(4);
|
let changeset = graph.update_last_seen_unconfirmed(4);
|
||||||
assert!(changeset.is_empty());
|
assert!(changeset.is_empty());
|
||||||
assert_eq!(graph.full_txs().next().unwrap().last_seen_unconfirmed, 2);
|
assert_eq!(
|
||||||
|
graph
|
||||||
|
.full_txs()
|
||||||
|
.next()
|
||||||
|
.unwrap()
|
||||||
|
.last_seen_unconfirmed
|
||||||
|
.unwrap(),
|
||||||
|
2
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn transactions_inserted_into_tx_graph_are_not_canonical_until_they_have_an_anchor_in_best_chain() {
|
||||||
|
let txs = vec![new_tx(0), new_tx(1)];
|
||||||
|
let txids: Vec<Txid> = txs.iter().map(Transaction::compute_txid).collect();
|
||||||
|
|
||||||
|
// graph
|
||||||
|
let mut graph = TxGraph::<BlockId>::new(txs);
|
||||||
|
let full_txs: Vec<_> = graph.full_txs().collect();
|
||||||
|
assert_eq!(full_txs.len(), 2);
|
||||||
|
let unseen_txs: Vec<_> = graph.txs_with_no_anchor_or_last_seen().collect();
|
||||||
|
assert_eq!(unseen_txs.len(), 2);
|
||||||
|
|
||||||
|
// chain
|
||||||
|
let blocks: BTreeMap<u32, BlockHash> = [(0, h!("g")), (1, h!("A")), (2, h!("B"))]
|
||||||
|
.into_iter()
|
||||||
|
.collect();
|
||||||
|
let chain = LocalChain::from_blocks(blocks).unwrap();
|
||||||
|
let canonical_txs: Vec<_> = graph
|
||||||
|
.list_canonical_txs(&chain, chain.tip().block_id())
|
||||||
|
.collect();
|
||||||
|
assert!(canonical_txs.is_empty());
|
||||||
|
|
||||||
|
// tx0 with seen_at should be returned by canonical txs
|
||||||
|
let _ = graph.insert_seen_at(txids[0], 2);
|
||||||
|
let mut canonical_txs = graph.list_canonical_txs(&chain, chain.tip().block_id());
|
||||||
|
assert_eq!(
|
||||||
|
canonical_txs.next().map(|tx| tx.tx_node.txid).unwrap(),
|
||||||
|
txids[0]
|
||||||
|
);
|
||||||
|
drop(canonical_txs);
|
||||||
|
|
||||||
|
// tx1 with anchor is also canonical
|
||||||
|
let _ = graph.insert_anchor(txids[1], block_id!(2, "B"));
|
||||||
|
let canonical_txids: Vec<_> = graph
|
||||||
|
.list_canonical_txs(&chain, chain.tip().block_id())
|
||||||
|
.map(|tx| tx.tx_node.txid)
|
||||||
|
.collect();
|
||||||
|
assert!(canonical_txids.contains(&txids[1]));
|
||||||
|
assert!(graph.txs_with_no_anchor_or_last_seen().next().is_none());
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
|
|||||||
@@ -5,8 +5,8 @@ mod common;
|
|||||||
|
|
||||||
use std::collections::{BTreeSet, HashSet};
|
use std::collections::{BTreeSet, HashSet};
|
||||||
|
|
||||||
use bdk_chain::{keychain::Balance, BlockId};
|
use bdk_chain::{Balance, BlockId};
|
||||||
use bitcoin::{Amount, OutPoint, Script};
|
use bitcoin::{Amount, OutPoint, ScriptBuf};
|
||||||
use common::*;
|
use common::*;
|
||||||
|
|
||||||
#[allow(dead_code)]
|
#[allow(dead_code)]
|
||||||
@@ -15,7 +15,7 @@ struct Scenario<'a> {
|
|||||||
name: &'a str,
|
name: &'a str,
|
||||||
/// Transaction templates
|
/// Transaction templates
|
||||||
tx_templates: &'a [TxTemplate<'a, BlockId>],
|
tx_templates: &'a [TxTemplate<'a, BlockId>],
|
||||||
/// Names of txs that must exist in the output of `list_chain_txs`
|
/// Names of txs that must exist in the output of `list_canonical_txs`
|
||||||
exp_chain_txs: HashSet<&'a str>,
|
exp_chain_txs: HashSet<&'a str>,
|
||||||
/// Outpoints that must exist in the output of `filter_chain_txouts`
|
/// Outpoints that must exist in the output of `filter_chain_txouts`
|
||||||
exp_chain_txouts: HashSet<(&'a str, u32)>,
|
exp_chain_txouts: HashSet<(&'a str, u32)>,
|
||||||
@@ -27,7 +27,7 @@ struct Scenario<'a> {
|
|||||||
|
|
||||||
/// This test ensures that [`TxGraph`] will reliably filter out irrelevant transactions when
|
/// This test ensures that [`TxGraph`] will reliably filter out irrelevant transactions when
|
||||||
/// presented with multiple conflicting transaction scenarios using the [`TxTemplate`] structure.
|
/// presented with multiple conflicting transaction scenarios using the [`TxTemplate`] structure.
|
||||||
/// This test also checks that [`TxGraph::list_chain_txs`], [`TxGraph::filter_chain_txouts`],
|
/// This test also checks that [`TxGraph::list_canonical_txs`], [`TxGraph::filter_chain_txouts`],
|
||||||
/// [`TxGraph::filter_chain_unspents`], and [`TxGraph::balance`] return correct data.
|
/// [`TxGraph::filter_chain_unspents`], and [`TxGraph::balance`] return correct data.
|
||||||
#[test]
|
#[test]
|
||||||
fn test_tx_conflict_handling() {
|
fn test_tx_conflict_handling() {
|
||||||
@@ -597,7 +597,7 @@ fn test_tx_conflict_handling() {
|
|||||||
let (tx_graph, spk_index, exp_tx_ids) = init_graph(scenario.tx_templates.iter());
|
let (tx_graph, spk_index, exp_tx_ids) = init_graph(scenario.tx_templates.iter());
|
||||||
|
|
||||||
let txs = tx_graph
|
let txs = tx_graph
|
||||||
.list_chain_txs(&local_chain, chain_tip)
|
.list_canonical_txs(&local_chain, chain_tip)
|
||||||
.map(|tx| tx.tx_node.txid)
|
.map(|tx| tx.tx_node.txid)
|
||||||
.collect::<BTreeSet<_>>();
|
.collect::<BTreeSet<_>>();
|
||||||
let exp_txs = scenario
|
let exp_txs = scenario
|
||||||
@@ -607,7 +607,7 @@ fn test_tx_conflict_handling() {
|
|||||||
.collect::<BTreeSet<_>>();
|
.collect::<BTreeSet<_>>();
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
txs, exp_txs,
|
txs, exp_txs,
|
||||||
"\n[{}] 'list_chain_txs' failed",
|
"\n[{}] 'list_canonical_txs' failed",
|
||||||
scenario.name
|
scenario.name
|
||||||
);
|
);
|
||||||
|
|
||||||
@@ -659,7 +659,7 @@ fn test_tx_conflict_handling() {
|
|||||||
&local_chain,
|
&local_chain,
|
||||||
chain_tip,
|
chain_tip,
|
||||||
spk_index.outpoints().iter().cloned(),
|
spk_index.outpoints().iter().cloned(),
|
||||||
|_, spk: &Script| spk_index.index_of_spk(spk).is_some(),
|
|_, spk: ScriptBuf| spk_index.index_of_spk(spk).is_some(),
|
||||||
);
|
);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
balance, scenario.exp_balance,
|
balance, scenario.exp_balance,
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
[package]
|
[package]
|
||||||
name = "bdk_electrum"
|
name = "bdk_electrum"
|
||||||
version = "0.13.0"
|
version = "0.16.0"
|
||||||
edition = "2021"
|
edition = "2021"
|
||||||
homepage = "https://bitcoindevkit.org"
|
homepage = "https://bitcoindevkit.org"
|
||||||
repository = "https://github.com/bitcoindevkit/bdk"
|
repository = "https://github.com/bitcoindevkit/bdk"
|
||||||
@@ -12,9 +12,9 @@ readme = "README.md"
|
|||||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
bdk_chain = { path = "../chain", version = "0.14.0" }
|
bdk_chain = { path = "../chain", version = "0.17.0" }
|
||||||
electrum-client = { version = "0.19" }
|
electrum-client = { version = "0.20" }
|
||||||
#rustls = { version = "=0.21.1", optional = true, features = ["dangerous_configuration"] }
|
#rustls = { version = "=0.21.1", optional = true, features = ["dangerous_configuration"] }
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
bdk_testenv = { path = "../testenv", default-features = false }
|
bdk_testenv = { path = "../testenv", default-features = false }
|
||||||
|
|||||||
491
crates/electrum/src/bdk_electrum_client.rs
Normal file
491
crates/electrum/src/bdk_electrum_client.rs
Normal file
@@ -0,0 +1,491 @@
|
|||||||
|
use bdk_chain::{
|
||||||
|
bitcoin::{block::Header, BlockHash, OutPoint, ScriptBuf, Transaction, Txid},
|
||||||
|
collections::{BTreeMap, HashMap},
|
||||||
|
local_chain::CheckPoint,
|
||||||
|
spk_client::{FullScanRequest, FullScanResult, SyncRequest, SyncResult},
|
||||||
|
tx_graph::TxGraph,
|
||||||
|
Anchor, BlockId, ConfirmationBlockTime,
|
||||||
|
};
|
||||||
|
use electrum_client::{ElectrumApi, Error, HeaderNotification};
|
||||||
|
use std::{
|
||||||
|
collections::BTreeSet,
|
||||||
|
sync::{Arc, Mutex},
|
||||||
|
};
|
||||||
|
|
||||||
|
/// We include a chain suffix of a certain length for the purpose of robustness.
|
||||||
|
const CHAIN_SUFFIX_LENGTH: u32 = 8;
|
||||||
|
|
||||||
|
/// Wrapper around an [`electrum_client::ElectrumApi`] which includes an internal in-memory
|
||||||
|
/// transaction cache to avoid re-fetching already downloaded transactions.
|
||||||
|
#[derive(Debug)]
|
||||||
|
pub struct BdkElectrumClient<E> {
|
||||||
|
/// The internal [`electrum_client::ElectrumApi`]
|
||||||
|
pub inner: E,
|
||||||
|
/// The transaction cache
|
||||||
|
tx_cache: Mutex<HashMap<Txid, Arc<Transaction>>>,
|
||||||
|
/// The header cache
|
||||||
|
block_header_cache: Mutex<HashMap<u32, Header>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<E: ElectrumApi> BdkElectrumClient<E> {
|
||||||
|
/// Creates a new bdk client from a [`electrum_client::ElectrumApi`]
|
||||||
|
pub fn new(client: E) -> Self {
|
||||||
|
Self {
|
||||||
|
inner: client,
|
||||||
|
tx_cache: Default::default(),
|
||||||
|
block_header_cache: Default::default(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Inserts transactions into the transaction cache so that the client will not fetch these
|
||||||
|
/// transactions.
|
||||||
|
pub fn populate_tx_cache<A>(&self, tx_graph: impl AsRef<TxGraph<A>>) {
|
||||||
|
let txs = tx_graph
|
||||||
|
.as_ref()
|
||||||
|
.full_txs()
|
||||||
|
.map(|tx_node| (tx_node.txid, tx_node.tx));
|
||||||
|
|
||||||
|
let mut tx_cache = self.tx_cache.lock().unwrap();
|
||||||
|
for (txid, tx) in txs {
|
||||||
|
tx_cache.insert(txid, tx);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Fetch transaction of given `txid`.
|
||||||
|
///
|
||||||
|
/// If it hits the cache it will return the cached version and avoid making the request.
|
||||||
|
pub fn fetch_tx(&self, txid: Txid) -> Result<Arc<Transaction>, Error> {
|
||||||
|
let tx_cache = self.tx_cache.lock().unwrap();
|
||||||
|
|
||||||
|
if let Some(tx) = tx_cache.get(&txid) {
|
||||||
|
return Ok(Arc::clone(tx));
|
||||||
|
}
|
||||||
|
|
||||||
|
drop(tx_cache);
|
||||||
|
|
||||||
|
let tx = Arc::new(self.inner.transaction_get(&txid)?);
|
||||||
|
|
||||||
|
self.tx_cache.lock().unwrap().insert(txid, Arc::clone(&tx));
|
||||||
|
|
||||||
|
Ok(tx)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Fetch block header of given `height`.
|
||||||
|
///
|
||||||
|
/// If it hits the cache it will return the cached version and avoid making the request.
|
||||||
|
fn fetch_header(&self, height: u32) -> Result<Header, Error> {
|
||||||
|
let block_header_cache = self.block_header_cache.lock().unwrap();
|
||||||
|
|
||||||
|
if let Some(header) = block_header_cache.get(&height) {
|
||||||
|
return Ok(*header);
|
||||||
|
}
|
||||||
|
|
||||||
|
drop(block_header_cache);
|
||||||
|
|
||||||
|
self.update_header(height)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Update a block header at given `height`. Returns the updated header.
|
||||||
|
fn update_header(&self, height: u32) -> Result<Header, Error> {
|
||||||
|
let header = self.inner.block_header(height as usize)?;
|
||||||
|
|
||||||
|
self.block_header_cache
|
||||||
|
.lock()
|
||||||
|
.unwrap()
|
||||||
|
.insert(height, header);
|
||||||
|
|
||||||
|
Ok(header)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Broadcasts a transaction to the network.
|
||||||
|
///
|
||||||
|
/// This is a re-export of [`ElectrumApi::transaction_broadcast`].
|
||||||
|
pub fn transaction_broadcast(&self, tx: &Transaction) -> Result<Txid, Error> {
|
||||||
|
self.inner.transaction_broadcast(tx)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Full scan the keychain scripts specified with the blockchain (via an Electrum client) and
|
||||||
|
/// returns updates for [`bdk_chain`] data structures.
|
||||||
|
///
|
||||||
|
/// - `request`: struct with data required to perform a spk-based blockchain client full scan,
|
||||||
|
/// see [`FullScanRequest`]
|
||||||
|
/// - `stop_gap`: the full scan for each keychain stops after a gap of script pubkeys with no
|
||||||
|
/// associated transactions
|
||||||
|
/// - `batch_size`: specifies the max number of script pubkeys to request for in a single batch
|
||||||
|
/// request
|
||||||
|
/// - `fetch_prev_txouts`: specifies whether or not we want previous `TxOut`s for fee
|
||||||
|
pub fn full_scan<K: Ord + Clone>(
|
||||||
|
&self,
|
||||||
|
request: FullScanRequest<K>,
|
||||||
|
stop_gap: usize,
|
||||||
|
batch_size: usize,
|
||||||
|
fetch_prev_txouts: bool,
|
||||||
|
) -> Result<FullScanResult<K>, Error> {
|
||||||
|
let (tip, latest_blocks) =
|
||||||
|
fetch_tip_and_latest_blocks(&self.inner, request.chain_tip.clone())?;
|
||||||
|
let mut graph_update = TxGraph::<ConfirmationBlockTime>::default();
|
||||||
|
let mut last_active_indices = BTreeMap::<K, u32>::new();
|
||||||
|
|
||||||
|
for (keychain, spks) in request.spks_by_keychain {
|
||||||
|
if let Some(last_active_index) =
|
||||||
|
self.populate_with_spks(&mut graph_update, spks, stop_gap, batch_size)?
|
||||||
|
{
|
||||||
|
last_active_indices.insert(keychain, last_active_index);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let chain_update = chain_update(tip, &latest_blocks, graph_update.all_anchors())?;
|
||||||
|
|
||||||
|
// Fetch previous `TxOut`s for fee calculation if flag is enabled.
|
||||||
|
if fetch_prev_txouts {
|
||||||
|
self.fetch_prev_txout(&mut graph_update)?;
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(FullScanResult {
|
||||||
|
graph_update,
|
||||||
|
chain_update,
|
||||||
|
last_active_indices,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Sync a set of scripts with the blockchain (via an Electrum client) for the data specified
|
||||||
|
/// and returns updates for [`bdk_chain`] data structures.
|
||||||
|
///
|
||||||
|
/// - `request`: struct with data required to perform a spk-based blockchain client sync,
|
||||||
|
/// see [`SyncRequest`]
|
||||||
|
/// - `batch_size`: specifies the max number of script pubkeys to request for in a single batch
|
||||||
|
/// request
|
||||||
|
/// - `fetch_prev_txouts`: specifies whether or not we want previous `TxOut`s for fee
|
||||||
|
/// calculation
|
||||||
|
///
|
||||||
|
/// If the scripts to sync are unknown, such as when restoring or importing a keychain that
|
||||||
|
/// may include scripts that have been used, use [`full_scan`] with the keychain.
|
||||||
|
///
|
||||||
|
/// [`full_scan`]: Self::full_scan
|
||||||
|
pub fn sync(
|
||||||
|
&self,
|
||||||
|
request: SyncRequest,
|
||||||
|
batch_size: usize,
|
||||||
|
fetch_prev_txouts: bool,
|
||||||
|
) -> Result<SyncResult, Error> {
|
||||||
|
let full_scan_req = FullScanRequest::from_chain_tip(request.chain_tip.clone())
|
||||||
|
.set_spks_for_keychain((), request.spks.enumerate().map(|(i, spk)| (i as u32, spk)));
|
||||||
|
let mut full_scan_res = self.full_scan(full_scan_req, usize::MAX, batch_size, false)?;
|
||||||
|
let (tip, latest_blocks) =
|
||||||
|
fetch_tip_and_latest_blocks(&self.inner, request.chain_tip.clone())?;
|
||||||
|
|
||||||
|
self.populate_with_txids(&mut full_scan_res.graph_update, request.txids)?;
|
||||||
|
self.populate_with_outpoints(&mut full_scan_res.graph_update, request.outpoints)?;
|
||||||
|
|
||||||
|
let chain_update = chain_update(
|
||||||
|
tip,
|
||||||
|
&latest_blocks,
|
||||||
|
full_scan_res.graph_update.all_anchors(),
|
||||||
|
)?;
|
||||||
|
|
||||||
|
// Fetch previous `TxOut`s for fee calculation if flag is enabled.
|
||||||
|
if fetch_prev_txouts {
|
||||||
|
self.fetch_prev_txout(&mut full_scan_res.graph_update)?;
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(SyncResult {
|
||||||
|
chain_update,
|
||||||
|
graph_update: full_scan_res.graph_update,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Populate the `graph_update` with transactions/anchors associated with the given `spks`.
///
/// Transactions that contains an output with requested spk, or spends from an output with
/// requested spk will be added to `graph_update`. Anchors of the aforementioned transactions are
/// also included.
///
/// Returns the index of the last spk observed with tx history (the "last active index"), or
/// `None` if no requested spk had any history. Scanning stops early once `stop_gap` consecutive
/// spks without history have been seen.
fn populate_with_spks(
    &self,
    graph_update: &mut TxGraph<ConfirmationBlockTime>,
    mut spks: impl Iterator<Item = (u32, ScriptBuf)>,
    stop_gap: usize,
    batch_size: usize,
) -> Result<Option<u32>, Error> {
    // Count of consecutive spks with no tx history; reset whenever an active spk is found.
    let mut unused_spk_count = 0_usize;
    let mut last_active_index = Option::<u32>::None;

    loop {
        // Drain up to `batch_size` spks from the (possibly unbounded) iterator.
        let spks = (0..batch_size)
            .map_while(|_| spks.next())
            .collect::<Vec<_>>();
        if spks.is_empty() {
            return Ok(last_active_index);
        }

        // One batched Electrum request covers the whole chunk.
        let spk_histories = self
            .inner
            .batch_script_get_history(spks.iter().map(|(_, s)| s.as_script()))?;

        for ((spk_index, _spk), spk_history) in spks.into_iter().zip(spk_histories) {
            if spk_history.is_empty() {
                // `saturating_add` keeps the counter well-defined even for extreme
                // `stop_gap` values (e.g. `usize::MAX` to effectively disable the gap).
                unused_spk_count = unused_spk_count.saturating_add(1);
                if unused_spk_count >= stop_gap {
                    return Ok(last_active_index);
                }
                continue;
            } else {
                last_active_index = Some(spk_index);
                unused_spk_count = 0;
            }

            for tx_res in spk_history {
                let _ = graph_update.insert_tx(self.fetch_tx(tx_res.tx_hash)?);
                // Anchor the tx only if its merkle proof validates against a confirmed block.
                self.validate_merkle_for_anchor(graph_update, tx_res.tx_hash, tx_res.height)?;
            }
        }
    }
}
|
||||||
|
|
||||||
|
/// Populate the `graph_update` with associated transactions/anchors of `outpoints`.
///
/// Transactions in which the outpoint resides, and transactions that spend from the outpoint are
/// included. Anchors of the aforementioned transactions are included.
fn populate_with_outpoints(
    &self,
    graph_update: &mut TxGraph<ConfirmationBlockTime>,
    outpoints: impl IntoIterator<Item = OutPoint>,
) -> Result<(), Error> {
    for outpoint in outpoints {
        let op_txid = outpoint.txid;
        let op_tx = self.fetch_tx(op_txid)?;
        // Silently skip outpoints whose `vout` does not exist in the residing tx.
        let op_txout = match op_tx.output.get(outpoint.vout as usize) {
            Some(txout) => txout,
            None => continue,
        };
        debug_assert_eq!(op_tx.compute_txid(), op_txid);

        // attempt to find the following transactions (alongside their chain positions), and
        // add to our sparsechain `update`:
        let mut has_residing = false; // tx in which the outpoint resides
        let mut has_spending = false; // tx that spends the outpoint
        for res in self.inner.script_get_history(&op_txout.script_pubkey)? {
            // Both txs of interest found — no need to scan the rest of the history.
            if has_residing && has_spending {
                break;
            }

            if !has_residing && res.tx_hash == op_txid {
                has_residing = true;
                let _ = graph_update.insert_tx(Arc::clone(&op_tx));
                self.validate_merkle_for_anchor(graph_update, res.tx_hash, res.height)?;
            }

            if !has_spending && res.tx_hash != op_txid {
                let res_tx = self.fetch_tx(res.tx_hash)?;
                // we exclude txs/anchors that do not spend our specified outpoint(s)
                has_spending = res_tx
                    .input
                    .iter()
                    .any(|txin| txin.previous_output == outpoint);
                if !has_spending {
                    continue;
                }
                let _ = graph_update.insert_tx(Arc::clone(&res_tx));
                self.validate_merkle_for_anchor(graph_update, res.tx_hash, res.height)?;
            }
        }
    }
    Ok(())
}
|
||||||
|
|
||||||
|
/// Populate the `graph_update` with transactions/anchors of the provided `txids`.
///
/// Txids the Electrum server does not know about are silently skipped; any other server error
/// aborts the whole operation.
fn populate_with_txids(
    &self,
    graph_update: &mut TxGraph<ConfirmationBlockTime>,
    txids: impl IntoIterator<Item = Txid>,
) -> Result<(), Error> {
    for txid in txids {
        // A protocol error means the server cannot produce this tx — skip it rather than
        // failing the entire sync.
        let tx = match self.fetch_tx(txid) {
            Ok(tx) => tx,
            Err(electrum_client::Error::Protocol(_)) => continue,
            Err(other_err) => return Err(other_err),
        };

        // NOTE(review): this panics on a tx with no outputs. Consensus-valid txs always have
        // at least one output, but a graceful skip may be preferable — confirm.
        let spk = tx
            .output
            .first()
            .map(|txo| &txo.script_pubkey)
            .expect("tx must have an output");

        // because of restrictions of the Electrum API, we have to use the `script_get_history`
        // call to get confirmation status of our transaction
        if let Some(r) = self
            .inner
            .script_get_history(spk)?
            .into_iter()
            .find(|r| r.tx_hash == txid)
        {
            self.validate_merkle_for_anchor(graph_update, txid, r.height)?;
        }

        // The tx is inserted regardless of whether a confirmation anchor was found.
        let _ = graph_update.insert_tx(tx);
    }
    Ok(())
}
|
||||||
|
|
||||||
|
// Helper function which checks if a transaction is confirmed by validating the merkle proof.
// An anchor is inserted if the transaction is validated to be in a confirmed block.
//
// `confirmation_height` is the raw height reported by Electrum's history calls. For unconfirmed
// txs the `transaction_get_merkle` call fails, which we treat as "no anchor" rather than an
// error.
fn validate_merkle_for_anchor(
    &self,
    graph_update: &mut TxGraph<ConfirmationBlockTime>,
    txid: Txid,
    confirmation_height: i32,
) -> Result<(), Error> {
    // Any failure to obtain a merkle proof simply results in no anchor being inserted.
    if let Ok(merkle_res) = self
        .inner
        .transaction_get_merkle(&txid, confirmation_height as usize)
    {
        let mut header = self.fetch_header(merkle_res.block_height as u32)?;
        let mut is_confirmed_tx = electrum_client::utils::validate_merkle_proof(
            &txid,
            &header.merkle_root,
            &merkle_res,
        );

        // Merkle validation will fail if the header in `block_header_cache` is outdated, so we
        // want to check if there is a new header and validate against the new one.
        if !is_confirmed_tx {
            header = self.update_header(merkle_res.block_height as u32)?;
            is_confirmed_tx = electrum_client::utils::validate_merkle_proof(
                &txid,
                &header.merkle_root,
                &merkle_res,
            );
        }

        if is_confirmed_tx {
            let _ = graph_update.insert_anchor(
                txid,
                ConfirmationBlockTime {
                    confirmation_time: header.time as u64,
                    block_id: BlockId {
                        height: merkle_res.block_height as u32,
                        hash: header.block_hash(),
                    },
                },
            );
        }
    }
    Ok(())
}
|
||||||
|
|
||||||
|
// Helper function which fetches the `TxOut`s of our relevant transactions' previous transactions,
|
||||||
|
// which we do not have by default. This data is needed to calculate the transaction fee.
|
||||||
|
fn fetch_prev_txout(
|
||||||
|
&self,
|
||||||
|
graph_update: &mut TxGraph<ConfirmationBlockTime>,
|
||||||
|
) -> Result<(), Error> {
|
||||||
|
let full_txs: Vec<Arc<Transaction>> =
|
||||||
|
graph_update.full_txs().map(|tx_node| tx_node.tx).collect();
|
||||||
|
for tx in full_txs {
|
||||||
|
for vin in &tx.input {
|
||||||
|
let outpoint = vin.previous_output;
|
||||||
|
let vout = outpoint.vout;
|
||||||
|
let prev_tx = self.fetch_tx(outpoint.txid)?;
|
||||||
|
let txout = prev_tx.output[vout as usize].clone();
|
||||||
|
let _ = graph_update.insert_txout(outpoint, txout);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Return a [`CheckPoint`] of the latest tip, that connects with `prev_tip`. The latest blocks are
|
||||||
|
/// fetched to construct checkpoint updates with the proper [`BlockHash`] in case of re-org.
|
||||||
|
fn fetch_tip_and_latest_blocks(
|
||||||
|
client: &impl ElectrumApi,
|
||||||
|
prev_tip: CheckPoint,
|
||||||
|
) -> Result<(CheckPoint, BTreeMap<u32, BlockHash>), Error> {
|
||||||
|
let HeaderNotification { height, .. } = client.block_headers_subscribe()?;
|
||||||
|
let new_tip_height = height as u32;
|
||||||
|
|
||||||
|
// If electrum returns a tip height that is lower than our previous tip, then checkpoints do
|
||||||
|
// not need updating. We just return the previous tip and use that as the point of agreement.
|
||||||
|
if new_tip_height < prev_tip.height() {
|
||||||
|
return Ok((prev_tip, BTreeMap::new()));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Atomically fetch the latest `CHAIN_SUFFIX_LENGTH` count of blocks from Electrum. We use this
|
||||||
|
// to construct our checkpoint update.
|
||||||
|
let mut new_blocks = {
|
||||||
|
let start_height = new_tip_height.saturating_sub(CHAIN_SUFFIX_LENGTH - 1);
|
||||||
|
let hashes = client
|
||||||
|
.block_headers(start_height as _, CHAIN_SUFFIX_LENGTH as _)?
|
||||||
|
.headers
|
||||||
|
.into_iter()
|
||||||
|
.map(|h| h.block_hash());
|
||||||
|
(start_height..).zip(hashes).collect::<BTreeMap<u32, _>>()
|
||||||
|
};
|
||||||
|
|
||||||
|
// Find the "point of agreement" (if any).
|
||||||
|
let agreement_cp = {
|
||||||
|
let mut agreement_cp = Option::<CheckPoint>::None;
|
||||||
|
for cp in prev_tip.iter() {
|
||||||
|
let cp_block = cp.block_id();
|
||||||
|
let hash = match new_blocks.get(&cp_block.height) {
|
||||||
|
Some(&hash) => hash,
|
||||||
|
None => {
|
||||||
|
assert!(
|
||||||
|
new_tip_height >= cp_block.height,
|
||||||
|
"already checked that electrum's tip cannot be smaller"
|
||||||
|
);
|
||||||
|
let hash = client.block_header(cp_block.height as _)?.block_hash();
|
||||||
|
new_blocks.insert(cp_block.height, hash);
|
||||||
|
hash
|
||||||
|
}
|
||||||
|
};
|
||||||
|
if hash == cp_block.hash {
|
||||||
|
agreement_cp = Some(cp);
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
agreement_cp
|
||||||
|
};
|
||||||
|
|
||||||
|
let agreement_height = agreement_cp.as_ref().map(CheckPoint::height);
|
||||||
|
|
||||||
|
let new_tip = new_blocks
|
||||||
|
.iter()
|
||||||
|
// Prune `new_blocks` to only include blocks that are actually new.
|
||||||
|
.filter(|(height, _)| Some(*<&u32>::clone(height)) > agreement_height)
|
||||||
|
.map(|(height, hash)| BlockId {
|
||||||
|
height: *height,
|
||||||
|
hash: *hash,
|
||||||
|
})
|
||||||
|
.fold(agreement_cp, |prev_cp, block| {
|
||||||
|
Some(match prev_cp {
|
||||||
|
Some(cp) => cp.push(block).expect("must extend checkpoint"),
|
||||||
|
None => CheckPoint::new(block),
|
||||||
|
})
|
||||||
|
})
|
||||||
|
.expect("must have at least one checkpoint");
|
||||||
|
|
||||||
|
Ok((new_tip, new_blocks))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add a corresponding checkpoint per anchor height if it does not yet exist. Checkpoints should not
|
||||||
|
// surpass `latest_blocks`.
|
||||||
|
fn chain_update<A: Anchor>(
|
||||||
|
mut tip: CheckPoint,
|
||||||
|
latest_blocks: &BTreeMap<u32, BlockHash>,
|
||||||
|
anchors: &BTreeSet<(A, Txid)>,
|
||||||
|
) -> Result<CheckPoint, Error> {
|
||||||
|
for anchor in anchors {
|
||||||
|
let height = anchor.0.anchor_block().height;
|
||||||
|
|
||||||
|
// Checkpoint uses the `BlockHash` from `latest_blocks` so that the hash will be consistent
|
||||||
|
// in case of a re-org.
|
||||||
|
if tip.get(height).is_none() && height <= tip.height() {
|
||||||
|
let hash = match latest_blocks.get(&height) {
|
||||||
|
Some(&hash) => hash,
|
||||||
|
None => anchor.0.anchor_block().hash,
|
||||||
|
};
|
||||||
|
tip = tip.insert(BlockId { hash, height });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Ok(tip)
|
||||||
|
}
|
||||||
@@ -1,586 +0,0 @@
|
|||||||
use bdk_chain::{
|
|
||||||
bitcoin::{OutPoint, ScriptBuf, Transaction, Txid},
|
|
||||||
collections::{BTreeMap, HashMap, HashSet},
|
|
||||||
local_chain::CheckPoint,
|
|
||||||
spk_client::{FullScanRequest, FullScanResult, SyncRequest, SyncResult, TxCache},
|
|
||||||
tx_graph::TxGraph,
|
|
||||||
BlockId, ConfirmationHeightAnchor, ConfirmationTimeHeightAnchor,
|
|
||||||
};
|
|
||||||
use core::str::FromStr;
|
|
||||||
use electrum_client::{ElectrumApi, Error, HeaderNotification};
|
|
||||||
use std::sync::Arc;
|
|
||||||
|
|
||||||
/// We include a chain suffix of a certain length for the purpose of robustness.
|
|
||||||
const CHAIN_SUFFIX_LENGTH: u32 = 8;
|
|
||||||
|
|
||||||
/// Trait to extend [`electrum_client::Client`] functionality.
|
|
||||||
pub trait ElectrumExt {
|
|
||||||
/// Full scan the keychain scripts specified with the blockchain (via an Electrum client) and
|
|
||||||
/// returns updates for [`bdk_chain`] data structures.
|
|
||||||
///
|
|
||||||
/// - `request`: struct with data required to perform a spk-based blockchain client full scan,
|
|
||||||
/// see [`FullScanRequest`]
|
|
||||||
/// - `stop_gap`: the full scan for each keychain stops after a gap of script pubkeys with no
|
|
||||||
/// associated transactions
|
|
||||||
/// - `batch_size`: specifies the max number of script pubkeys to request for in a single batch
|
|
||||||
/// request
|
|
||||||
/// - `fetch_prev_txouts`: specifies whether or not we want previous `TxOut`s for fee
|
|
||||||
/// calculation
|
|
||||||
fn full_scan<K: Ord + Clone>(
|
|
||||||
&self,
|
|
||||||
request: FullScanRequest<K>,
|
|
||||||
stop_gap: usize,
|
|
||||||
batch_size: usize,
|
|
||||||
fetch_prev_txouts: bool,
|
|
||||||
) -> Result<ElectrumFullScanResult<K>, Error>;
|
|
||||||
|
|
||||||
/// Sync a set of scripts with the blockchain (via an Electrum client) for the data specified
|
|
||||||
/// and returns updates for [`bdk_chain`] data structures.
|
|
||||||
///
|
|
||||||
/// - `request`: struct with data required to perform a spk-based blockchain client sync,
|
|
||||||
/// see [`SyncRequest`]
|
|
||||||
/// - `batch_size`: specifies the max number of script pubkeys to request for in a single batch
|
|
||||||
/// request
|
|
||||||
/// - `fetch_prev_txouts`: specifies whether or not we want previous `TxOut`s for fee
|
|
||||||
/// calculation
|
|
||||||
///
|
|
||||||
/// If the scripts to sync are unknown, such as when restoring or importing a keychain that
|
|
||||||
/// may include scripts that have been used, use [`full_scan`] with the keychain.
|
|
||||||
///
|
|
||||||
/// [`full_scan`]: ElectrumExt::full_scan
|
|
||||||
fn sync(
|
|
||||||
&self,
|
|
||||||
request: SyncRequest,
|
|
||||||
batch_size: usize,
|
|
||||||
fetch_prev_txouts: bool,
|
|
||||||
) -> Result<ElectrumSyncResult, Error>;
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<E: ElectrumApi> ElectrumExt for E {
|
|
||||||
fn full_scan<K: Ord + Clone>(
|
|
||||||
&self,
|
|
||||||
mut request: FullScanRequest<K>,
|
|
||||||
stop_gap: usize,
|
|
||||||
batch_size: usize,
|
|
||||||
fetch_prev_txouts: bool,
|
|
||||||
) -> Result<ElectrumFullScanResult<K>, Error> {
|
|
||||||
let mut request_spks = request.spks_by_keychain;
|
|
||||||
|
|
||||||
// We keep track of already-scanned spks just in case a reorg happens and we need to do a
|
|
||||||
// rescan. We need to keep track of this as iterators in `keychain_spks` are "unbounded" so
|
|
||||||
// cannot be collected. In addition, we keep track of whether an spk has an active tx
|
|
||||||
// history for determining the `last_active_index`.
|
|
||||||
// * key: (keychain, spk_index) that identifies the spk.
|
|
||||||
// * val: (script_pubkey, has_tx_history).
|
|
||||||
let mut scanned_spks = BTreeMap::<(K, u32), (ScriptBuf, bool)>::new();
|
|
||||||
|
|
||||||
let update = loop {
|
|
||||||
let (tip, _) = construct_update_tip(self, request.chain_tip.clone())?;
|
|
||||||
let mut graph_update = TxGraph::<ConfirmationHeightAnchor>::default();
|
|
||||||
let cps = tip
|
|
||||||
.iter()
|
|
||||||
.take(10)
|
|
||||||
.map(|cp| (cp.height(), cp))
|
|
||||||
.collect::<BTreeMap<u32, CheckPoint>>();
|
|
||||||
|
|
||||||
if !request_spks.is_empty() {
|
|
||||||
if !scanned_spks.is_empty() {
|
|
||||||
scanned_spks.append(&mut populate_with_spks(
|
|
||||||
self,
|
|
||||||
&cps,
|
|
||||||
&mut request.tx_cache,
|
|
||||||
&mut graph_update,
|
|
||||||
&mut scanned_spks
|
|
||||||
.iter()
|
|
||||||
.map(|(i, (spk, _))| (i.clone(), spk.clone())),
|
|
||||||
stop_gap,
|
|
||||||
batch_size,
|
|
||||||
)?);
|
|
||||||
}
|
|
||||||
for (keychain, keychain_spks) in &mut request_spks {
|
|
||||||
scanned_spks.extend(
|
|
||||||
populate_with_spks(
|
|
||||||
self,
|
|
||||||
&cps,
|
|
||||||
&mut request.tx_cache,
|
|
||||||
&mut graph_update,
|
|
||||||
keychain_spks,
|
|
||||||
stop_gap,
|
|
||||||
batch_size,
|
|
||||||
)?
|
|
||||||
.into_iter()
|
|
||||||
.map(|(spk_i, spk)| ((keychain.clone(), spk_i), spk)),
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// check for reorgs during scan process
|
|
||||||
let server_blockhash = self.block_header(tip.height() as usize)?.block_hash();
|
|
||||||
if tip.hash() != server_blockhash {
|
|
||||||
continue; // reorg
|
|
||||||
}
|
|
||||||
|
|
||||||
// Fetch previous `TxOut`s for fee calculation if flag is enabled.
|
|
||||||
if fetch_prev_txouts {
|
|
||||||
fetch_prev_txout(self, &mut request.tx_cache, &mut graph_update)?;
|
|
||||||
}
|
|
||||||
|
|
||||||
let chain_update = tip;
|
|
||||||
|
|
||||||
let keychain_update = request_spks
|
|
||||||
.into_keys()
|
|
||||||
.filter_map(|k| {
|
|
||||||
scanned_spks
|
|
||||||
.range((k.clone(), u32::MIN)..=(k.clone(), u32::MAX))
|
|
||||||
.rev()
|
|
||||||
.find(|(_, (_, active))| *active)
|
|
||||||
.map(|((_, i), _)| (k, *i))
|
|
||||||
})
|
|
||||||
.collect::<BTreeMap<_, _>>();
|
|
||||||
|
|
||||||
break FullScanResult {
|
|
||||||
graph_update,
|
|
||||||
chain_update,
|
|
||||||
last_active_indices: keychain_update,
|
|
||||||
};
|
|
||||||
};
|
|
||||||
|
|
||||||
Ok(ElectrumFullScanResult(update))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn sync(
|
|
||||||
&self,
|
|
||||||
request: SyncRequest,
|
|
||||||
batch_size: usize,
|
|
||||||
fetch_prev_txouts: bool,
|
|
||||||
) -> Result<ElectrumSyncResult, Error> {
|
|
||||||
let mut tx_cache = request.tx_cache.clone();
|
|
||||||
|
|
||||||
let full_scan_req = FullScanRequest::from_chain_tip(request.chain_tip.clone())
|
|
||||||
.cache_txs(request.tx_cache)
|
|
||||||
.set_spks_for_keychain((), request.spks.enumerate().map(|(i, spk)| (i as u32, spk)));
|
|
||||||
let mut full_scan_res = self
|
|
||||||
.full_scan(full_scan_req, usize::MAX, batch_size, false)?
|
|
||||||
.with_confirmation_height_anchor();
|
|
||||||
|
|
||||||
let (tip, _) = construct_update_tip(self, request.chain_tip)?;
|
|
||||||
let cps = tip
|
|
||||||
.iter()
|
|
||||||
.take(10)
|
|
||||||
.map(|cp| (cp.height(), cp))
|
|
||||||
.collect::<BTreeMap<u32, CheckPoint>>();
|
|
||||||
|
|
||||||
populate_with_txids(
|
|
||||||
self,
|
|
||||||
&cps,
|
|
||||||
&mut tx_cache,
|
|
||||||
&mut full_scan_res.graph_update,
|
|
||||||
request.txids,
|
|
||||||
)?;
|
|
||||||
populate_with_outpoints(
|
|
||||||
self,
|
|
||||||
&cps,
|
|
||||||
&mut tx_cache,
|
|
||||||
&mut full_scan_res.graph_update,
|
|
||||||
request.outpoints,
|
|
||||||
)?;
|
|
||||||
|
|
||||||
// Fetch previous `TxOut`s for fee calculation if flag is enabled.
|
|
||||||
if fetch_prev_txouts {
|
|
||||||
fetch_prev_txout(self, &mut tx_cache, &mut full_scan_res.graph_update)?;
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(ElectrumSyncResult(SyncResult {
|
|
||||||
chain_update: full_scan_res.chain_update,
|
|
||||||
graph_update: full_scan_res.graph_update,
|
|
||||||
}))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// The result of [`ElectrumExt::full_scan`].
|
|
||||||
///
|
|
||||||
/// This can be transformed into a [`FullScanResult`] with either [`ConfirmationHeightAnchor`] or
|
|
||||||
/// [`ConfirmationTimeHeightAnchor`] anchor types.
|
|
||||||
pub struct ElectrumFullScanResult<K>(FullScanResult<K, ConfirmationHeightAnchor>);
|
|
||||||
|
|
||||||
impl<K> ElectrumFullScanResult<K> {
|
|
||||||
/// Return [`FullScanResult`] with [`ConfirmationHeightAnchor`].
|
|
||||||
pub fn with_confirmation_height_anchor(self) -> FullScanResult<K, ConfirmationHeightAnchor> {
|
|
||||||
self.0
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Return [`FullScanResult`] with [`ConfirmationTimeHeightAnchor`].
|
|
||||||
///
|
|
||||||
/// This requires additional calls to the Electrum server.
|
|
||||||
pub fn with_confirmation_time_height_anchor(
|
|
||||||
self,
|
|
||||||
client: &impl ElectrumApi,
|
|
||||||
) -> Result<FullScanResult<K, ConfirmationTimeHeightAnchor>, Error> {
|
|
||||||
let res = self.0;
|
|
||||||
Ok(FullScanResult {
|
|
||||||
graph_update: try_into_confirmation_time_result(res.graph_update, client)?,
|
|
||||||
chain_update: res.chain_update,
|
|
||||||
last_active_indices: res.last_active_indices,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// The result of [`ElectrumExt::sync`].
|
|
||||||
///
|
|
||||||
/// This can be transformed into a [`SyncResult`] with either [`ConfirmationHeightAnchor`] or
|
|
||||||
/// [`ConfirmationTimeHeightAnchor`] anchor types.
|
|
||||||
pub struct ElectrumSyncResult(SyncResult<ConfirmationHeightAnchor>);
|
|
||||||
|
|
||||||
impl ElectrumSyncResult {
|
|
||||||
/// Return [`SyncResult`] with [`ConfirmationHeightAnchor`].
|
|
||||||
pub fn with_confirmation_height_anchor(self) -> SyncResult<ConfirmationHeightAnchor> {
|
|
||||||
self.0
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Return [`SyncResult`] with [`ConfirmationTimeHeightAnchor`].
|
|
||||||
///
|
|
||||||
/// This requires additional calls to the Electrum server.
|
|
||||||
pub fn with_confirmation_time_height_anchor(
|
|
||||||
self,
|
|
||||||
client: &impl ElectrumApi,
|
|
||||||
) -> Result<SyncResult<ConfirmationTimeHeightAnchor>, Error> {
|
|
||||||
let res = self.0;
|
|
||||||
Ok(SyncResult {
|
|
||||||
graph_update: try_into_confirmation_time_result(res.graph_update, client)?,
|
|
||||||
chain_update: res.chain_update,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn try_into_confirmation_time_result(
|
|
||||||
graph_update: TxGraph<ConfirmationHeightAnchor>,
|
|
||||||
client: &impl ElectrumApi,
|
|
||||||
) -> Result<TxGraph<ConfirmationTimeHeightAnchor>, Error> {
|
|
||||||
let relevant_heights = graph_update
|
|
||||||
.all_anchors()
|
|
||||||
.iter()
|
|
||||||
.map(|(a, _)| a.confirmation_height)
|
|
||||||
.collect::<HashSet<_>>();
|
|
||||||
|
|
||||||
let height_to_time = relevant_heights
|
|
||||||
.clone()
|
|
||||||
.into_iter()
|
|
||||||
.zip(
|
|
||||||
client
|
|
||||||
.batch_block_header(relevant_heights)?
|
|
||||||
.into_iter()
|
|
||||||
.map(|bh| bh.time as u64),
|
|
||||||
)
|
|
||||||
.collect::<HashMap<u32, u64>>();
|
|
||||||
|
|
||||||
Ok(graph_update.map_anchors(|a| ConfirmationTimeHeightAnchor {
|
|
||||||
anchor_block: a.anchor_block,
|
|
||||||
confirmation_height: a.confirmation_height,
|
|
||||||
confirmation_time: height_to_time[&a.confirmation_height],
|
|
||||||
}))
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Return a [`CheckPoint`] of the latest tip, that connects with `prev_tip`.
|
|
||||||
fn construct_update_tip(
|
|
||||||
client: &impl ElectrumApi,
|
|
||||||
prev_tip: CheckPoint,
|
|
||||||
) -> Result<(CheckPoint, Option<u32>), Error> {
|
|
||||||
let HeaderNotification { height, .. } = client.block_headers_subscribe()?;
|
|
||||||
let new_tip_height = height as u32;
|
|
||||||
|
|
||||||
// If electrum returns a tip height that is lower than our previous tip, then checkpoints do
|
|
||||||
// not need updating. We just return the previous tip and use that as the point of agreement.
|
|
||||||
if new_tip_height < prev_tip.height() {
|
|
||||||
return Ok((prev_tip.clone(), Some(prev_tip.height())));
|
|
||||||
}
|
|
||||||
|
|
||||||
// Atomically fetch the latest `CHAIN_SUFFIX_LENGTH` count of blocks from Electrum. We use this
|
|
||||||
// to construct our checkpoint update.
|
|
||||||
let mut new_blocks = {
|
|
||||||
let start_height = new_tip_height.saturating_sub(CHAIN_SUFFIX_LENGTH - 1);
|
|
||||||
let hashes = client
|
|
||||||
.block_headers(start_height as _, CHAIN_SUFFIX_LENGTH as _)?
|
|
||||||
.headers
|
|
||||||
.into_iter()
|
|
||||||
.map(|h| h.block_hash());
|
|
||||||
(start_height..).zip(hashes).collect::<BTreeMap<u32, _>>()
|
|
||||||
};
|
|
||||||
|
|
||||||
// Find the "point of agreement" (if any).
|
|
||||||
let agreement_cp = {
|
|
||||||
let mut agreement_cp = Option::<CheckPoint>::None;
|
|
||||||
for cp in prev_tip.iter() {
|
|
||||||
let cp_block = cp.block_id();
|
|
||||||
let hash = match new_blocks.get(&cp_block.height) {
|
|
||||||
Some(&hash) => hash,
|
|
||||||
None => {
|
|
||||||
assert!(
|
|
||||||
new_tip_height >= cp_block.height,
|
|
||||||
"already checked that electrum's tip cannot be smaller"
|
|
||||||
);
|
|
||||||
let hash = client.block_header(cp_block.height as _)?.block_hash();
|
|
||||||
new_blocks.insert(cp_block.height, hash);
|
|
||||||
hash
|
|
||||||
}
|
|
||||||
};
|
|
||||||
if hash == cp_block.hash {
|
|
||||||
agreement_cp = Some(cp);
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
agreement_cp
|
|
||||||
};
|
|
||||||
|
|
||||||
let agreement_height = agreement_cp.as_ref().map(CheckPoint::height);
|
|
||||||
|
|
||||||
let new_tip = new_blocks
|
|
||||||
.into_iter()
|
|
||||||
// Prune `new_blocks` to only include blocks that are actually new.
|
|
||||||
.filter(|(height, _)| Some(*height) > agreement_height)
|
|
||||||
.map(|(height, hash)| BlockId { height, hash })
|
|
||||||
.fold(agreement_cp, |prev_cp, block| {
|
|
||||||
Some(match prev_cp {
|
|
||||||
Some(cp) => cp.push(block).expect("must extend checkpoint"),
|
|
||||||
None => CheckPoint::new(block),
|
|
||||||
})
|
|
||||||
})
|
|
||||||
.expect("must have at least one checkpoint");
|
|
||||||
|
|
||||||
Ok((new_tip, agreement_height))
|
|
||||||
}
|
|
||||||
|
|
||||||
/// A [tx status] comprises of a concatenation of `tx_hash:height:`s. We transform a single one of
|
|
||||||
/// these concatenations into a [`ConfirmationHeightAnchor`] if possible.
|
|
||||||
///
|
|
||||||
/// We use the lowest possible checkpoint as the anchor block (from `cps`). If an anchor block
|
|
||||||
/// cannot be found, or the transaction is unconfirmed, [`None`] is returned.
|
|
||||||
///
|
|
||||||
/// [tx status](https://electrumx-spesmilo.readthedocs.io/en/latest/protocol-basics.html#status)
|
|
||||||
fn determine_tx_anchor(
|
|
||||||
cps: &BTreeMap<u32, CheckPoint>,
|
|
||||||
raw_height: i32,
|
|
||||||
txid: Txid,
|
|
||||||
) -> Option<ConfirmationHeightAnchor> {
|
|
||||||
// The electrum API has a weird quirk where an unconfirmed transaction is presented with a
|
|
||||||
// height of 0. To avoid invalid representation in our data structures, we manually set
|
|
||||||
// transactions residing in the genesis block to have height 0, then interpret a height of 0 as
|
|
||||||
// unconfirmed for all other transactions.
|
|
||||||
if txid
|
|
||||||
== Txid::from_str("4a5e1e4baab89f3a32518a88c31bc87f618f76673e2cc77ab2127b7afdeda33b")
|
|
||||||
.expect("must deserialize genesis coinbase txid")
|
|
||||||
{
|
|
||||||
let anchor_block = cps.values().next()?.block_id();
|
|
||||||
return Some(ConfirmationHeightAnchor {
|
|
||||||
anchor_block,
|
|
||||||
confirmation_height: 0,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
match raw_height {
|
|
||||||
h if h <= 0 => {
|
|
||||||
debug_assert!(h == 0 || h == -1, "unexpected height ({}) from electrum", h);
|
|
||||||
None
|
|
||||||
}
|
|
||||||
h => {
|
|
||||||
let h = h as u32;
|
|
||||||
let anchor_block = cps.range(h..).next().map(|(_, cp)| cp.block_id())?;
|
|
||||||
if h > anchor_block.height {
|
|
||||||
None
|
|
||||||
} else {
|
|
||||||
Some(ConfirmationHeightAnchor {
|
|
||||||
anchor_block,
|
|
||||||
confirmation_height: h,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Populate the `graph_update` with associated transactions/anchors of `outpoints`.
|
|
||||||
///
|
|
||||||
/// Transactions in which the outpoint resides, and transactions that spend from the outpoint are
|
|
||||||
/// included. Anchors of the aforementioned transactions are included.
|
|
||||||
///
|
|
||||||
/// Checkpoints (in `cps`) are used to create anchors. The `tx_cache` is self-explanatory.
|
|
||||||
fn populate_with_outpoints(
|
|
||||||
client: &impl ElectrumApi,
|
|
||||||
cps: &BTreeMap<u32, CheckPoint>,
|
|
||||||
tx_cache: &mut TxCache,
|
|
||||||
graph_update: &mut TxGraph<ConfirmationHeightAnchor>,
|
|
||||||
outpoints: impl IntoIterator<Item = OutPoint>,
|
|
||||||
) -> Result<(), Error> {
|
|
||||||
for outpoint in outpoints {
|
|
||||||
let op_txid = outpoint.txid;
|
|
||||||
let op_tx = fetch_tx(client, tx_cache, op_txid)?;
|
|
||||||
let op_txout = match op_tx.output.get(outpoint.vout as usize) {
|
|
||||||
Some(txout) => txout,
|
|
||||||
None => continue,
|
|
||||||
};
|
|
||||||
debug_assert_eq!(op_tx.txid(), op_txid);
|
|
||||||
|
|
||||||
// attempt to find the following transactions (alongside their chain positions), and
|
|
||||||
// add to our sparsechain `update`:
|
|
||||||
let mut has_residing = false; // tx in which the outpoint resides
|
|
||||||
let mut has_spending = false; // tx that spends the outpoint
|
|
||||||
for res in client.script_get_history(&op_txout.script_pubkey)? {
|
|
||||||
if has_residing && has_spending {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
|
|
||||||
if !has_residing && res.tx_hash == op_txid {
|
|
||||||
has_residing = true;
|
|
||||||
let _ = graph_update.insert_tx(Arc::clone(&op_tx));
|
|
||||||
if let Some(anchor) = determine_tx_anchor(cps, res.height, res.tx_hash) {
|
|
||||||
let _ = graph_update.insert_anchor(res.tx_hash, anchor);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if !has_spending && res.tx_hash != op_txid {
|
|
||||||
let res_tx = fetch_tx(client, tx_cache, res.tx_hash)?;
|
|
||||||
// we exclude txs/anchors that do not spend our specified outpoint(s)
|
|
||||||
has_spending = res_tx
|
|
||||||
.input
|
|
||||||
.iter()
|
|
||||||
.any(|txin| txin.previous_output == outpoint);
|
|
||||||
if !has_spending {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
let _ = graph_update.insert_tx(Arc::clone(&res_tx));
|
|
||||||
if let Some(anchor) = determine_tx_anchor(cps, res.height, res.tx_hash) {
|
|
||||||
let _ = graph_update.insert_anchor(res.tx_hash, anchor);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Populate the `graph_update` with transactions/anchors of the provided `txids`.
|
|
||||||
fn populate_with_txids(
|
|
||||||
client: &impl ElectrumApi,
|
|
||||||
cps: &BTreeMap<u32, CheckPoint>,
|
|
||||||
tx_cache: &mut TxCache,
|
|
||||||
graph_update: &mut TxGraph<ConfirmationHeightAnchor>,
|
|
||||||
txids: impl IntoIterator<Item = Txid>,
|
|
||||||
) -> Result<(), Error> {
|
|
||||||
for txid in txids {
|
|
||||||
let tx = match fetch_tx(client, tx_cache, txid) {
|
|
||||||
Ok(tx) => tx,
|
|
||||||
Err(electrum_client::Error::Protocol(_)) => continue,
|
|
||||||
Err(other_err) => return Err(other_err),
|
|
||||||
};
|
|
||||||
|
|
||||||
let spk = tx
|
|
||||||
.output
|
|
||||||
.first()
|
|
||||||
.map(|txo| &txo.script_pubkey)
|
|
||||||
.expect("tx must have an output");
|
|
||||||
|
|
||||||
// because of restrictions of the Electrum API, we have to use the `script_get_history`
|
|
||||||
// call to get confirmation status of our transaction
|
|
||||||
let anchor = match client
|
|
||||||
.script_get_history(spk)?
|
|
||||||
.into_iter()
|
|
||||||
.find(|r| r.tx_hash == txid)
|
|
||||||
{
|
|
||||||
Some(r) => determine_tx_anchor(cps, r.height, txid),
|
|
||||||
None => continue,
|
|
||||||
};
|
|
||||||
|
|
||||||
let _ = graph_update.insert_tx(tx);
|
|
||||||
if let Some(anchor) = anchor {
|
|
||||||
let _ = graph_update.insert_anchor(txid, anchor);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Fetch transaction of given `txid`.
|
|
||||||
///
|
|
||||||
/// We maintain a `tx_cache` so that we won't need to fetch from Electrum with every call.
|
|
||||||
fn fetch_tx<C: ElectrumApi>(
|
|
||||||
client: &C,
|
|
||||||
tx_cache: &mut TxCache,
|
|
||||||
txid: Txid,
|
|
||||||
) -> Result<Arc<Transaction>, Error> {
|
|
||||||
use bdk_chain::collections::hash_map::Entry;
|
|
||||||
Ok(match tx_cache.entry(txid) {
|
|
||||||
Entry::Occupied(entry) => entry.get().clone(),
|
|
||||||
Entry::Vacant(entry) => entry
|
|
||||||
.insert(Arc::new(client.transaction_get(&txid)?))
|
|
||||||
.clone(),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
// Helper function which fetches the `TxOut`s of our relevant transactions' previous transactions,
|
|
||||||
// which we do not have by default. This data is needed to calculate the transaction fee.
|
|
||||||
fn fetch_prev_txout<C: ElectrumApi>(
|
|
||||||
client: &C,
|
|
||||||
tx_cache: &mut TxCache,
|
|
||||||
graph_update: &mut TxGraph<ConfirmationHeightAnchor>,
|
|
||||||
) -> Result<(), Error> {
|
|
||||||
let full_txs: Vec<Arc<Transaction>> =
|
|
||||||
graph_update.full_txs().map(|tx_node| tx_node.tx).collect();
|
|
||||||
for tx in full_txs {
|
|
||||||
for vin in &tx.input {
|
|
||||||
let outpoint = vin.previous_output;
|
|
||||||
let prev_tx = fetch_tx(client, tx_cache, outpoint.txid)?;
|
|
||||||
for txout in prev_tx.output.clone() {
|
|
||||||
let _ = graph_update.insert_txout(outpoint, txout);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Populate the `graph_update` with transactions/anchors associated with the given `spks`.
|
|
||||||
///
|
|
||||||
/// Transactions that contains an output with requested spk, or spends form an output with
|
|
||||||
/// requested spk will be added to `graph_update`. Anchors of the aforementioned transactions are
|
|
||||||
/// also included.
|
|
||||||
///
|
|
||||||
/// Checkpoints (in `cps`) are used to create anchors. The `tx_cache` is self-explanatory.
|
|
||||||
fn populate_with_spks<I: Ord + Clone>(
|
|
||||||
client: &impl ElectrumApi,
|
|
||||||
cps: &BTreeMap<u32, CheckPoint>,
|
|
||||||
tx_cache: &mut TxCache,
|
|
||||||
graph_update: &mut TxGraph<ConfirmationHeightAnchor>,
|
|
||||||
spks: &mut impl Iterator<Item = (I, ScriptBuf)>,
|
|
||||||
stop_gap: usize,
|
|
||||||
batch_size: usize,
|
|
||||||
) -> Result<BTreeMap<I, (ScriptBuf, bool)>, Error> {
|
|
||||||
let mut unused_spk_count = 0_usize;
|
|
||||||
let mut scanned_spks = BTreeMap::new();
|
|
||||||
|
|
||||||
loop {
|
|
||||||
let spks = (0..batch_size)
|
|
||||||
.map_while(|_| spks.next())
|
|
||||||
.collect::<Vec<_>>();
|
|
||||||
if spks.is_empty() {
|
|
||||||
return Ok(scanned_spks);
|
|
||||||
}
|
|
||||||
|
|
||||||
let spk_histories =
|
|
||||||
client.batch_script_get_history(spks.iter().map(|(_, s)| s.as_script()))?;
|
|
||||||
|
|
||||||
for ((spk_index, spk), spk_history) in spks.into_iter().zip(spk_histories) {
|
|
||||||
if spk_history.is_empty() {
|
|
||||||
scanned_spks.insert(spk_index, (spk, false));
|
|
||||||
unused_spk_count += 1;
|
|
||||||
if unused_spk_count > stop_gap {
|
|
||||||
return Ok(scanned_spks);
|
|
||||||
}
|
|
||||||
continue;
|
|
||||||
} else {
|
|
||||||
scanned_spks.insert(spk_index, (spk, true));
|
|
||||||
unused_spk_count = 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
for tx_res in spk_history {
|
|
||||||
let _ = graph_update.insert_tx(fetch_tx(client, tx_cache, tx_res.tx_hash)?);
|
|
||||||
if let Some(anchor) = determine_tx_anchor(cps, tx_res.height, tx_res.tx_hash) {
|
|
||||||
let _ = graph_update.insert_anchor(tx_res.tx_hash, anchor);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,9 +1,9 @@
|
|||||||
//! This crate is used for updating structures of [`bdk_chain`] with data from an Electrum server.
|
//! This crate is used for updating structures of [`bdk_chain`] with data from an Electrum server.
|
||||||
//!
|
//!
|
||||||
//! The two primary methods are [`ElectrumExt::sync`] and [`ElectrumExt::full_scan`]. In most cases
|
//! The two primary methods are [`BdkElectrumClient::sync`] and [`BdkElectrumClient::full_scan`]. In most cases
|
||||||
//! [`ElectrumExt::sync`] is used to sync the transaction histories of scripts that the application
|
//! [`BdkElectrumClient::sync`] is used to sync the transaction histories of scripts that the application
|
||||||
//! cares about, for example the scripts for all the receive addresses of a Wallet's keychain that it
|
//! cares about, for example the scripts for all the receive addresses of a Wallet's keychain that it
|
||||||
//! has shown a user. [`ElectrumExt::full_scan`] is meant to be used when importing or restoring a
|
//! has shown a user. [`BdkElectrumClient::full_scan`] is meant to be used when importing or restoring a
|
||||||
//! keychain where the range of possibly used scripts is not known. In this case it is necessary to
|
//! keychain where the range of possibly used scripts is not known. In this case it is necessary to
|
||||||
//! scan all keychain scripts until a number (the "stop gap") of unused scripts is discovered. For a
|
//! scan all keychain scripts until a number (the "stop gap") of unused scripts is discovered. For a
|
||||||
//! sync or full scan the user receives relevant blockchain data and output updates for
|
//! sync or full scan the user receives relevant blockchain data and output updates for
|
||||||
@@ -15,7 +15,8 @@
|
|||||||
|
|
||||||
#![warn(missing_docs)]
|
#![warn(missing_docs)]
|
||||||
|
|
||||||
mod electrum_ext;
|
mod bdk_electrum_client;
|
||||||
|
pub use bdk_electrum_client::*;
|
||||||
|
|
||||||
pub use bdk_chain;
|
pub use bdk_chain;
|
||||||
pub use electrum_client;
|
pub use electrum_client;
|
||||||
pub use electrum_ext::*;
|
|
||||||
|
|||||||
@@ -1,16 +1,18 @@
|
|||||||
use bdk_chain::{
|
use bdk_chain::{
|
||||||
bitcoin::{hashes::Hash, Address, Amount, ScriptBuf, WScriptHash},
|
bitcoin::{hashes::Hash, Address, Amount, ScriptBuf, Txid, WScriptHash},
|
||||||
keychain::Balance,
|
|
||||||
local_chain::LocalChain,
|
local_chain::LocalChain,
|
||||||
spk_client::SyncRequest,
|
spk_client::{FullScanRequest, SyncRequest},
|
||||||
ConfirmationTimeHeightAnchor, IndexedTxGraph, SpkTxOutIndex,
|
spk_txout::SpkTxOutIndex,
|
||||||
|
Balance, ConfirmationBlockTime, IndexedTxGraph,
|
||||||
};
|
};
|
||||||
use bdk_electrum::ElectrumExt;
|
use bdk_electrum::BdkElectrumClient;
|
||||||
use bdk_testenv::{anyhow, bitcoincore_rpc::RpcApi, TestEnv};
|
use bdk_testenv::{anyhow, bitcoincore_rpc::RpcApi, TestEnv};
|
||||||
|
use std::collections::{BTreeSet, HashSet};
|
||||||
|
use std::str::FromStr;
|
||||||
|
|
||||||
fn get_balance(
|
fn get_balance(
|
||||||
recv_chain: &LocalChain,
|
recv_chain: &LocalChain,
|
||||||
recv_graph: &IndexedTxGraph<ConfirmationTimeHeightAnchor, SpkTxOutIndex<()>>,
|
recv_graph: &IndexedTxGraph<ConfirmationBlockTime, SpkTxOutIndex<()>>,
|
||||||
) -> anyhow::Result<Balance> {
|
) -> anyhow::Result<Balance> {
|
||||||
let chain_tip = recv_chain.tip().block_id();
|
let chain_tip = recv_chain.tip().block_id();
|
||||||
let outpoints = recv_graph.index.outpoints().clone();
|
let outpoints = recv_graph.index.outpoints().clone();
|
||||||
@@ -20,6 +22,222 @@ fn get_balance(
|
|||||||
Ok(balance)
|
Ok(balance)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
pub fn test_update_tx_graph_without_keychain() -> anyhow::Result<()> {
|
||||||
|
let env = TestEnv::new()?;
|
||||||
|
let electrum_client = electrum_client::Client::new(env.electrsd.electrum_url.as_str())?;
|
||||||
|
let client = BdkElectrumClient::new(electrum_client);
|
||||||
|
|
||||||
|
let receive_address0 =
|
||||||
|
Address::from_str("bcrt1qc6fweuf4xjvz4x3gx3t9e0fh4hvqyu2qw4wvxm")?.assume_checked();
|
||||||
|
let receive_address1 =
|
||||||
|
Address::from_str("bcrt1qfjg5lv3dvc9az8patec8fjddrs4aqtauadnagr")?.assume_checked();
|
||||||
|
|
||||||
|
let misc_spks = [
|
||||||
|
receive_address0.script_pubkey(),
|
||||||
|
receive_address1.script_pubkey(),
|
||||||
|
];
|
||||||
|
|
||||||
|
let _block_hashes = env.mine_blocks(101, None)?;
|
||||||
|
let txid1 = env.bitcoind.client.send_to_address(
|
||||||
|
&receive_address1,
|
||||||
|
Amount::from_sat(10000),
|
||||||
|
None,
|
||||||
|
None,
|
||||||
|
None,
|
||||||
|
None,
|
||||||
|
Some(1),
|
||||||
|
None,
|
||||||
|
)?;
|
||||||
|
let txid2 = env.bitcoind.client.send_to_address(
|
||||||
|
&receive_address0,
|
||||||
|
Amount::from_sat(20000),
|
||||||
|
None,
|
||||||
|
None,
|
||||||
|
None,
|
||||||
|
None,
|
||||||
|
Some(1),
|
||||||
|
None,
|
||||||
|
)?;
|
||||||
|
env.mine_blocks(1, None)?;
|
||||||
|
env.wait_until_electrum_sees_block()?;
|
||||||
|
|
||||||
|
// use a full checkpoint linked list (since this is not what we are testing)
|
||||||
|
let cp_tip = env.make_checkpoint_tip();
|
||||||
|
|
||||||
|
let sync_update = {
|
||||||
|
let request = SyncRequest::from_chain_tip(cp_tip.clone()).set_spks(misc_spks);
|
||||||
|
client.sync(request, 1, true)?
|
||||||
|
};
|
||||||
|
|
||||||
|
assert!(
|
||||||
|
{
|
||||||
|
let update_cps = sync_update
|
||||||
|
.chain_update
|
||||||
|
.iter()
|
||||||
|
.map(|cp| cp.block_id())
|
||||||
|
.collect::<BTreeSet<_>>();
|
||||||
|
let superset_cps = cp_tip
|
||||||
|
.iter()
|
||||||
|
.map(|cp| cp.block_id())
|
||||||
|
.collect::<BTreeSet<_>>();
|
||||||
|
superset_cps.is_superset(&update_cps)
|
||||||
|
},
|
||||||
|
"update should not alter original checkpoint tip since we already started with all checkpoints",
|
||||||
|
);
|
||||||
|
|
||||||
|
let graph_update = sync_update.graph_update;
|
||||||
|
// Check to see if we have the floating txouts available from our two created transactions'
|
||||||
|
// previous outputs in order to calculate transaction fees.
|
||||||
|
for tx in graph_update.full_txs() {
|
||||||
|
// Retrieve the calculated fee from `TxGraph`, which will panic if we do not have the
|
||||||
|
// floating txouts available from the transactions' previous outputs.
|
||||||
|
let fee = graph_update.calculate_fee(&tx.tx).expect("Fee must exist");
|
||||||
|
|
||||||
|
// Retrieve the fee in the transaction data from `bitcoind`.
|
||||||
|
let tx_fee = env
|
||||||
|
.bitcoind
|
||||||
|
.client
|
||||||
|
.get_transaction(&tx.txid, None)
|
||||||
|
.expect("Tx must exist")
|
||||||
|
.fee
|
||||||
|
.expect("Fee must exist")
|
||||||
|
.abs()
|
||||||
|
.to_unsigned()
|
||||||
|
.expect("valid `Amount`");
|
||||||
|
|
||||||
|
// Check that the calculated fee matches the fee from the transaction data.
|
||||||
|
assert_eq!(fee, tx_fee);
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut graph_update_txids: Vec<Txid> = graph_update.full_txs().map(|tx| tx.txid).collect();
|
||||||
|
graph_update_txids.sort();
|
||||||
|
let mut expected_txids = vec![txid1, txid2];
|
||||||
|
expected_txids.sort();
|
||||||
|
assert_eq!(graph_update_txids, expected_txids);
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Test the bounds of the address scan depending on the `stop_gap`.
|
||||||
|
#[test]
|
||||||
|
pub fn test_update_tx_graph_stop_gap() -> anyhow::Result<()> {
|
||||||
|
let env = TestEnv::new()?;
|
||||||
|
let electrum_client = electrum_client::Client::new(env.electrsd.electrum_url.as_str())?;
|
||||||
|
let client = BdkElectrumClient::new(electrum_client);
|
||||||
|
let _block_hashes = env.mine_blocks(101, None)?;
|
||||||
|
|
||||||
|
// Now let's test the gap limit. First of all get a chain of 10 addresses.
|
||||||
|
let addresses = [
|
||||||
|
"bcrt1qj9f7r8r3p2y0sqf4r3r62qysmkuh0fzep473d2ar7rcz64wqvhssjgf0z4",
|
||||||
|
"bcrt1qmm5t0ch7vh2hryx9ctq3mswexcugqe4atkpkl2tetm8merqkthas3w7q30",
|
||||||
|
"bcrt1qut9p7ej7l7lhyvekj28xknn8gnugtym4d5qvnp5shrsr4nksmfqsmyn87g",
|
||||||
|
"bcrt1qqz0xtn3m235p2k96f5wa2dqukg6shxn9n3txe8arlrhjh5p744hsd957ww",
|
||||||
|
"bcrt1q9c0t62a8l6wfytmf2t9lfj35avadk3mm8g4p3l84tp6rl66m48sqrme7wu",
|
||||||
|
"bcrt1qkmh8yrk2v47cklt8dytk8f3ammcwa4q7dzattedzfhqzvfwwgyzsg59zrh",
|
||||||
|
"bcrt1qvgrsrzy07gjkkfr5luplt0azxtfwmwq5t62gum5jr7zwcvep2acs8hhnp2",
|
||||||
|
"bcrt1qw57edarcg50ansq8mk3guyrk78rk0fwvrds5xvqeupteu848zayq549av8",
|
||||||
|
"bcrt1qvtve5ekf6e5kzs68knvnt2phfw6a0yjqrlgat392m6zt9jsvyxhqfx67ef",
|
||||||
|
"bcrt1qw03ddumfs9z0kcu76ln7jrjfdwam20qtffmkcral3qtza90sp9kqm787uk",
|
||||||
|
];
|
||||||
|
let addresses: Vec<_> = addresses
|
||||||
|
.into_iter()
|
||||||
|
.map(|s| Address::from_str(s).unwrap().assume_checked())
|
||||||
|
.collect();
|
||||||
|
let spks: Vec<_> = addresses
|
||||||
|
.iter()
|
||||||
|
.enumerate()
|
||||||
|
.map(|(i, addr)| (i as u32, addr.script_pubkey()))
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
// Then receive coins on the 4th address.
|
||||||
|
let txid_4th_addr = env.bitcoind.client.send_to_address(
|
||||||
|
&addresses[3],
|
||||||
|
Amount::from_sat(10000),
|
||||||
|
None,
|
||||||
|
None,
|
||||||
|
None,
|
||||||
|
None,
|
||||||
|
Some(1),
|
||||||
|
None,
|
||||||
|
)?;
|
||||||
|
env.mine_blocks(1, None)?;
|
||||||
|
env.wait_until_electrum_sees_block()?;
|
||||||
|
|
||||||
|
// use a full checkpoint linked list (since this is not what we are testing)
|
||||||
|
let cp_tip = env.make_checkpoint_tip();
|
||||||
|
|
||||||
|
// A scan with a stop_gap of 3 won't find the transaction, but a scan with a gap limit of 4
|
||||||
|
// will.
|
||||||
|
let full_scan_update = {
|
||||||
|
let request =
|
||||||
|
FullScanRequest::from_chain_tip(cp_tip.clone()).set_spks_for_keychain(0, spks.clone());
|
||||||
|
client.full_scan(request, 3, 1, false)?
|
||||||
|
};
|
||||||
|
assert!(full_scan_update.graph_update.full_txs().next().is_none());
|
||||||
|
assert!(full_scan_update.last_active_indices.is_empty());
|
||||||
|
let full_scan_update = {
|
||||||
|
let request =
|
||||||
|
FullScanRequest::from_chain_tip(cp_tip.clone()).set_spks_for_keychain(0, spks.clone());
|
||||||
|
client.full_scan(request, 4, 1, false)?
|
||||||
|
};
|
||||||
|
assert_eq!(
|
||||||
|
full_scan_update
|
||||||
|
.graph_update
|
||||||
|
.full_txs()
|
||||||
|
.next()
|
||||||
|
.unwrap()
|
||||||
|
.txid,
|
||||||
|
txid_4th_addr
|
||||||
|
);
|
||||||
|
assert_eq!(full_scan_update.last_active_indices[&0], 3);
|
||||||
|
|
||||||
|
// Now receive a coin on the last address.
|
||||||
|
let txid_last_addr = env.bitcoind.client.send_to_address(
|
||||||
|
&addresses[addresses.len() - 1],
|
||||||
|
Amount::from_sat(10000),
|
||||||
|
None,
|
||||||
|
None,
|
||||||
|
None,
|
||||||
|
None,
|
||||||
|
Some(1),
|
||||||
|
None,
|
||||||
|
)?;
|
||||||
|
env.mine_blocks(1, None)?;
|
||||||
|
env.wait_until_electrum_sees_block()?;
|
||||||
|
|
||||||
|
// A scan with gap limit 5 won't find the second transaction, but a scan with gap limit 6 will.
|
||||||
|
// The last active index won't be updated in the first case but will be in the second one.
|
||||||
|
let full_scan_update = {
|
||||||
|
let request =
|
||||||
|
FullScanRequest::from_chain_tip(cp_tip.clone()).set_spks_for_keychain(0, spks.clone());
|
||||||
|
client.full_scan(request, 5, 1, false)?
|
||||||
|
};
|
||||||
|
let txs: HashSet<_> = full_scan_update
|
||||||
|
.graph_update
|
||||||
|
.full_txs()
|
||||||
|
.map(|tx| tx.txid)
|
||||||
|
.collect();
|
||||||
|
assert_eq!(txs.len(), 1);
|
||||||
|
assert!(txs.contains(&txid_4th_addr));
|
||||||
|
assert_eq!(full_scan_update.last_active_indices[&0], 3);
|
||||||
|
let full_scan_update = {
|
||||||
|
let request =
|
||||||
|
FullScanRequest::from_chain_tip(cp_tip.clone()).set_spks_for_keychain(0, spks.clone());
|
||||||
|
client.full_scan(request, 6, 1, false)?
|
||||||
|
};
|
||||||
|
let txs: HashSet<_> = full_scan_update
|
||||||
|
.graph_update
|
||||||
|
.full_txs()
|
||||||
|
.map(|tx| tx.txid)
|
||||||
|
.collect();
|
||||||
|
assert_eq!(txs.len(), 2);
|
||||||
|
assert!(txs.contains(&txid_4th_addr) && txs.contains(&txid_last_addr));
|
||||||
|
assert_eq!(full_scan_update.last_active_indices[&0], 9);
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
/// Ensure that [`ElectrumExt`] can sync properly.
|
/// Ensure that [`ElectrumExt`] can sync properly.
|
||||||
///
|
///
|
||||||
/// 1. Mine 101 blocks.
|
/// 1. Mine 101 blocks.
|
||||||
@@ -31,7 +249,8 @@ fn scan_detects_confirmed_tx() -> anyhow::Result<()> {
|
|||||||
const SEND_AMOUNT: Amount = Amount::from_sat(10_000);
|
const SEND_AMOUNT: Amount = Amount::from_sat(10_000);
|
||||||
|
|
||||||
let env = TestEnv::new()?;
|
let env = TestEnv::new()?;
|
||||||
let client = electrum_client::Client::new(env.electrsd.electrum_url.as_str())?;
|
let electrum_client = electrum_client::Client::new(env.electrsd.electrum_url.as_str())?;
|
||||||
|
let client = BdkElectrumClient::new(electrum_client);
|
||||||
|
|
||||||
// Setup addresses.
|
// Setup addresses.
|
||||||
let addr_to_mine = env
|
let addr_to_mine = env
|
||||||
@@ -44,7 +263,7 @@ fn scan_detects_confirmed_tx() -> anyhow::Result<()> {
|
|||||||
|
|
||||||
// Setup receiver.
|
// Setup receiver.
|
||||||
let (mut recv_chain, _) = LocalChain::from_genesis_hash(env.bitcoind.client.get_block_hash(0)?);
|
let (mut recv_chain, _) = LocalChain::from_genesis_hash(env.bitcoind.client.get_block_hash(0)?);
|
||||||
let mut recv_graph = IndexedTxGraph::<ConfirmationTimeHeightAnchor, _>::new({
|
let mut recv_graph = IndexedTxGraph::<ConfirmationBlockTime, _>::new({
|
||||||
let mut recv_index = SpkTxOutIndex::default();
|
let mut recv_index = SpkTxOutIndex::default();
|
||||||
recv_index.insert_spk((), spk_to_track.clone());
|
recv_index.insert_spk((), spk_to_track.clone());
|
||||||
recv_index
|
recv_index
|
||||||
@@ -61,14 +280,11 @@ fn scan_detects_confirmed_tx() -> anyhow::Result<()> {
|
|||||||
|
|
||||||
// Sync up to tip.
|
// Sync up to tip.
|
||||||
env.wait_until_electrum_sees_block()?;
|
env.wait_until_electrum_sees_block()?;
|
||||||
let update = client
|
let update = client.sync(
|
||||||
.sync(
|
SyncRequest::from_chain_tip(recv_chain.tip()).chain_spks(core::iter::once(spk_to_track)),
|
||||||
SyncRequest::from_chain_tip(recv_chain.tip())
|
5,
|
||||||
.chain_spks(core::iter::once(spk_to_track)),
|
true,
|
||||||
5,
|
)?;
|
||||||
true,
|
|
||||||
)?
|
|
||||||
.with_confirmation_time_height_anchor(&client)?;
|
|
||||||
|
|
||||||
let _ = recv_chain
|
let _ = recv_chain
|
||||||
.apply_update(update.chain_update)
|
.apply_update(update.chain_update)
|
||||||
@@ -101,7 +317,8 @@ fn scan_detects_confirmed_tx() -> anyhow::Result<()> {
|
|||||||
.fee
|
.fee
|
||||||
.expect("Fee must exist")
|
.expect("Fee must exist")
|
||||||
.abs()
|
.abs()
|
||||||
.to_sat() as u64;
|
.to_unsigned()
|
||||||
|
.expect("valid `Amount`");
|
||||||
|
|
||||||
// Check that the calculated fee matches the fee from the transaction data.
|
// Check that the calculated fee matches the fee from the transaction data.
|
||||||
assert_eq!(fee, tx_fee);
|
assert_eq!(fee, tx_fee);
|
||||||
@@ -122,7 +339,8 @@ fn tx_can_become_unconfirmed_after_reorg() -> anyhow::Result<()> {
|
|||||||
const SEND_AMOUNT: Amount = Amount::from_sat(10_000);
|
const SEND_AMOUNT: Amount = Amount::from_sat(10_000);
|
||||||
|
|
||||||
let env = TestEnv::new()?;
|
let env = TestEnv::new()?;
|
||||||
let client = electrum_client::Client::new(env.electrsd.electrum_url.as_str())?;
|
let electrum_client = electrum_client::Client::new(env.electrsd.electrum_url.as_str())?;
|
||||||
|
let client = BdkElectrumClient::new(electrum_client);
|
||||||
|
|
||||||
// Setup addresses.
|
// Setup addresses.
|
||||||
let addr_to_mine = env
|
let addr_to_mine = env
|
||||||
@@ -135,7 +353,7 @@ fn tx_can_become_unconfirmed_after_reorg() -> anyhow::Result<()> {
|
|||||||
|
|
||||||
// Setup receiver.
|
// Setup receiver.
|
||||||
let (mut recv_chain, _) = LocalChain::from_genesis_hash(env.bitcoind.client.get_block_hash(0)?);
|
let (mut recv_chain, _) = LocalChain::from_genesis_hash(env.bitcoind.client.get_block_hash(0)?);
|
||||||
let mut recv_graph = IndexedTxGraph::<ConfirmationTimeHeightAnchor, _>::new({
|
let mut recv_graph = IndexedTxGraph::<ConfirmationBlockTime, _>::new({
|
||||||
let mut recv_index = SpkTxOutIndex::default();
|
let mut recv_index = SpkTxOutIndex::default();
|
||||||
recv_index.insert_spk((), spk_to_track.clone());
|
recv_index.insert_spk((), spk_to_track.clone());
|
||||||
recv_index
|
recv_index
|
||||||
@@ -145,20 +363,20 @@ fn tx_can_become_unconfirmed_after_reorg() -> anyhow::Result<()> {
|
|||||||
env.mine_blocks(101, Some(addr_to_mine))?;
|
env.mine_blocks(101, Some(addr_to_mine))?;
|
||||||
|
|
||||||
// Create transactions that are tracked by our receiver.
|
// Create transactions that are tracked by our receiver.
|
||||||
|
let mut txids = vec![];
|
||||||
|
let mut hashes = vec![];
|
||||||
for _ in 0..REORG_COUNT {
|
for _ in 0..REORG_COUNT {
|
||||||
env.send(&addr_to_track, SEND_AMOUNT)?;
|
txids.push(env.send(&addr_to_track, SEND_AMOUNT)?);
|
||||||
env.mine_blocks(1, None)?;
|
hashes.extend(env.mine_blocks(1, None)?);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Sync up to tip.
|
// Sync up to tip.
|
||||||
env.wait_until_electrum_sees_block()?;
|
env.wait_until_electrum_sees_block()?;
|
||||||
let update = client
|
let update = client.sync(
|
||||||
.sync(
|
SyncRequest::from_chain_tip(recv_chain.tip()).chain_spks([spk_to_track.clone()]),
|
||||||
SyncRequest::from_chain_tip(recv_chain.tip()).chain_spks([spk_to_track.clone()]),
|
5,
|
||||||
5,
|
false,
|
||||||
false,
|
)?;
|
||||||
)?
|
|
||||||
.with_confirmation_time_height_anchor(&client)?;
|
|
||||||
|
|
||||||
let _ = recv_chain
|
let _ = recv_chain
|
||||||
.apply_update(update.chain_update)
|
.apply_update(update.chain_update)
|
||||||
@@ -167,6 +385,13 @@ fn tx_can_become_unconfirmed_after_reorg() -> anyhow::Result<()> {
|
|||||||
|
|
||||||
// Retain a snapshot of all anchors before reorg process.
|
// Retain a snapshot of all anchors before reorg process.
|
||||||
let initial_anchors = update.graph_update.all_anchors();
|
let initial_anchors = update.graph_update.all_anchors();
|
||||||
|
let anchors: Vec<_> = initial_anchors.iter().cloned().collect();
|
||||||
|
assert_eq!(anchors.len(), REORG_COUNT);
|
||||||
|
for i in 0..REORG_COUNT {
|
||||||
|
let (anchor, txid) = anchors[i];
|
||||||
|
assert_eq!(anchor.block_id.hash, hashes[i]);
|
||||||
|
assert_eq!(txid, txids[i]);
|
||||||
|
}
|
||||||
|
|
||||||
// Check if initial balance is correct.
|
// Check if initial balance is correct.
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
@@ -183,29 +408,24 @@ fn tx_can_become_unconfirmed_after_reorg() -> anyhow::Result<()> {
|
|||||||
env.reorg_empty_blocks(depth)?;
|
env.reorg_empty_blocks(depth)?;
|
||||||
|
|
||||||
env.wait_until_electrum_sees_block()?;
|
env.wait_until_electrum_sees_block()?;
|
||||||
let update = client
|
let update = client.sync(
|
||||||
.sync(
|
SyncRequest::from_chain_tip(recv_chain.tip()).chain_spks([spk_to_track.clone()]),
|
||||||
SyncRequest::from_chain_tip(recv_chain.tip()).chain_spks([spk_to_track.clone()]),
|
5,
|
||||||
5,
|
false,
|
||||||
false,
|
)?;
|
||||||
)?
|
|
||||||
.with_confirmation_time_height_anchor(&client)?;
|
|
||||||
|
|
||||||
let _ = recv_chain
|
let _ = recv_chain
|
||||||
.apply_update(update.chain_update)
|
.apply_update(update.chain_update)
|
||||||
.map_err(|err| anyhow::anyhow!("LocalChain update error: {:?}", err))?;
|
.map_err(|err| anyhow::anyhow!("LocalChain update error: {:?}", err))?;
|
||||||
|
|
||||||
// Check to see if a new anchor is added during current reorg.
|
// Check that no new anchors are added during current reorg.
|
||||||
if !initial_anchors.is_superset(update.graph_update.all_anchors()) {
|
assert!(initial_anchors.is_superset(update.graph_update.all_anchors()));
|
||||||
println!("New anchor added at reorg depth {}", depth);
|
|
||||||
}
|
|
||||||
let _ = recv_graph.apply_update(update.graph_update);
|
let _ = recv_graph.apply_update(update.graph_update);
|
||||||
|
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
get_balance(&recv_chain, &recv_graph)?,
|
get_balance(&recv_chain, &recv_graph)?,
|
||||||
Balance {
|
Balance {
|
||||||
confirmed: SEND_AMOUNT * (REORG_COUNT - depth) as u64,
|
confirmed: SEND_AMOUNT * (REORG_COUNT - depth) as u64,
|
||||||
trusted_pending: SEND_AMOUNT * depth as u64,
|
|
||||||
..Balance::default()
|
..Balance::default()
|
||||||
},
|
},
|
||||||
"reorg_count: {}",
|
"reorg_count: {}",
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
[package]
|
[package]
|
||||||
name = "bdk_esplora"
|
name = "bdk_esplora"
|
||||||
version = "0.13.0"
|
version = "0.16.0"
|
||||||
edition = "2021"
|
edition = "2021"
|
||||||
homepage = "https://bitcoindevkit.org"
|
homepage = "https://bitcoindevkit.org"
|
||||||
repository = "https://github.com/bitcoindevkit/bdk"
|
repository = "https://github.com/bitcoindevkit/bdk"
|
||||||
@@ -12,22 +12,21 @@ readme = "README.md"
|
|||||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
bdk_chain = { path = "../chain", version = "0.14.0", default-features = false }
|
bdk_chain = { path = "../chain", version = "0.17.0", default-features = false }
|
||||||
esplora-client = { version = "0.7.0", default-features = false }
|
esplora-client = { version = "0.8.0", default-features = false }
|
||||||
async-trait = { version = "0.1.66", optional = true }
|
async-trait = { version = "0.1.66", optional = true }
|
||||||
futures = { version = "0.3.26", optional = true }
|
futures = { version = "0.3.26", optional = true }
|
||||||
|
|
||||||
# use these dependencies if you need to enable their /no-std features
|
bitcoin = { version = "0.32.0", optional = true, default-features = false }
|
||||||
bitcoin = { version = "0.31.0", optional = true, default-features = false }
|
miniscript = { version = "12.0.0", optional = true, default-features = false }
|
||||||
miniscript = { version = "11.0.0", optional = true, default-features = false }
|
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
bdk_testenv = { path = "../testenv", default_features = false }
|
bdk_testenv = { path = "../testenv", default-features = false }
|
||||||
tokio = { version = "1", features = ["rt", "rt-multi-thread", "macros"] }
|
tokio = { version = "1", features = ["rt", "rt-multi-thread", "macros"] }
|
||||||
|
|
||||||
[features]
|
[features]
|
||||||
default = ["std", "async-https", "blocking-https-rustls"]
|
default = ["std", "async-https", "blocking-https-rustls"]
|
||||||
std = ["bdk_chain/std"]
|
std = ["bdk_chain/std", "miniscript?/std"]
|
||||||
async = ["async-trait", "futures", "esplora-client/async"]
|
async = ["async-trait", "futures", "esplora-client/async"]
|
||||||
async-https = ["async", "esplora-client/async-https"]
|
async-https = ["async", "esplora-client/async-https"]
|
||||||
async-https-rustls = ["async", "esplora-client/async-https-rustls"]
|
async-https-rustls = ["async", "esplora-client/async-https-rustls"]
|
||||||
|
|||||||
@@ -2,13 +2,13 @@ use std::collections::BTreeSet;
|
|||||||
|
|
||||||
use async_trait::async_trait;
|
use async_trait::async_trait;
|
||||||
use bdk_chain::spk_client::{FullScanRequest, FullScanResult, SyncRequest, SyncResult};
|
use bdk_chain::spk_client::{FullScanRequest, FullScanResult, SyncRequest, SyncResult};
|
||||||
use bdk_chain::Anchor;
|
|
||||||
use bdk_chain::{
|
use bdk_chain::{
|
||||||
bitcoin::{BlockHash, OutPoint, ScriptBuf, TxOut, Txid},
|
bitcoin::{BlockHash, OutPoint, ScriptBuf, TxOut, Txid},
|
||||||
collections::BTreeMap,
|
collections::BTreeMap,
|
||||||
local_chain::CheckPoint,
|
local_chain::CheckPoint,
|
||||||
BlockId, ConfirmationTimeHeightAnchor, TxGraph,
|
BlockId, ConfirmationBlockTime, TxGraph,
|
||||||
};
|
};
|
||||||
|
use bdk_chain::{Anchor, Indexed};
|
||||||
use esplora_client::{Amount, TxStatus};
|
use esplora_client::{Amount, TxStatus};
|
||||||
use futures::{stream::FuturesOrdered, TryStreamExt};
|
use futures::{stream::FuturesOrdered, TryStreamExt};
|
||||||
|
|
||||||
@@ -231,19 +231,19 @@ async fn chain_update<A: Anchor>(
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// This performs a full scan to get an update for the [`TxGraph`] and
|
/// This performs a full scan to get an update for the [`TxGraph`] and
|
||||||
/// [`KeychainTxOutIndex`](bdk_chain::keychain::KeychainTxOutIndex).
|
/// [`KeychainTxOutIndex`](bdk_chain::indexer::keychain_txout::KeychainTxOutIndex).
|
||||||
async fn full_scan_for_index_and_graph<K: Ord + Clone + Send>(
|
async fn full_scan_for_index_and_graph<K: Ord + Clone + Send>(
|
||||||
client: &esplora_client::AsyncClient,
|
client: &esplora_client::AsyncClient,
|
||||||
keychain_spks: BTreeMap<
|
keychain_spks: BTreeMap<
|
||||||
K,
|
K,
|
||||||
impl IntoIterator<IntoIter = impl Iterator<Item = (u32, ScriptBuf)> + Send> + Send,
|
impl IntoIterator<IntoIter = impl Iterator<Item = Indexed<ScriptBuf>> + Send> + Send,
|
||||||
>,
|
>,
|
||||||
stop_gap: usize,
|
stop_gap: usize,
|
||||||
parallel_requests: usize,
|
parallel_requests: usize,
|
||||||
) -> Result<(TxGraph<ConfirmationTimeHeightAnchor>, BTreeMap<K, u32>), Error> {
|
) -> Result<(TxGraph<ConfirmationBlockTime>, BTreeMap<K, u32>), Error> {
|
||||||
type TxsOfSpkIndex = (u32, Vec<esplora_client::Tx>);
|
type TxsOfSpkIndex = (u32, Vec<esplora_client::Tx>);
|
||||||
let parallel_requests = Ord::max(parallel_requests, 1);
|
let parallel_requests = Ord::max(parallel_requests, 1);
|
||||||
let mut graph = TxGraph::<ConfirmationTimeHeightAnchor>::default();
|
let mut graph = TxGraph::<ConfirmationBlockTime>::default();
|
||||||
let mut last_active_indexes = BTreeMap::<K, u32>::new();
|
let mut last_active_indexes = BTreeMap::<K, u32>::new();
|
||||||
|
|
||||||
for (keychain, spks) in keychain_spks {
|
for (keychain, spks) in keychain_spks {
|
||||||
@@ -333,7 +333,7 @@ async fn sync_for_index_and_graph(
|
|||||||
txids: impl IntoIterator<IntoIter = impl Iterator<Item = Txid> + Send> + Send,
|
txids: impl IntoIterator<IntoIter = impl Iterator<Item = Txid> + Send> + Send,
|
||||||
outpoints: impl IntoIterator<IntoIter = impl Iterator<Item = OutPoint> + Send> + Send,
|
outpoints: impl IntoIterator<IntoIter = impl Iterator<Item = OutPoint> + Send> + Send,
|
||||||
parallel_requests: usize,
|
parallel_requests: usize,
|
||||||
) -> Result<TxGraph<ConfirmationTimeHeightAnchor>, Error> {
|
) -> Result<TxGraph<ConfirmationBlockTime>, Error> {
|
||||||
let mut graph = full_scan_for_index_and_graph(
|
let mut graph = full_scan_for_index_and_graph(
|
||||||
client,
|
client,
|
||||||
[(
|
[(
|
||||||
|
|||||||
@@ -1,15 +1,14 @@
|
|||||||
use std::collections::BTreeSet;
|
use std::collections::BTreeSet;
|
||||||
use std::thread::JoinHandle;
|
use std::thread::JoinHandle;
|
||||||
use std::usize;
|
|
||||||
|
|
||||||
use bdk_chain::collections::BTreeMap;
|
use bdk_chain::collections::BTreeMap;
|
||||||
use bdk_chain::spk_client::{FullScanRequest, FullScanResult, SyncRequest, SyncResult};
|
use bdk_chain::spk_client::{FullScanRequest, FullScanResult, SyncRequest, SyncResult};
|
||||||
use bdk_chain::Anchor;
|
|
||||||
use bdk_chain::{
|
use bdk_chain::{
|
||||||
bitcoin::{Amount, BlockHash, OutPoint, ScriptBuf, TxOut, Txid},
|
bitcoin::{Amount, BlockHash, OutPoint, ScriptBuf, TxOut, Txid},
|
||||||
local_chain::CheckPoint,
|
local_chain::CheckPoint,
|
||||||
BlockId, ConfirmationTimeHeightAnchor, TxGraph,
|
BlockId, ConfirmationBlockTime, TxGraph,
|
||||||
};
|
};
|
||||||
|
use bdk_chain::{Anchor, Indexed};
|
||||||
use esplora_client::TxStatus;
|
use esplora_client::TxStatus;
|
||||||
|
|
||||||
use crate::anchor_from_status;
|
use crate::anchor_from_status;
|
||||||
@@ -214,16 +213,16 @@ fn chain_update<A: Anchor>(
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// This performs a full scan to get an update for the [`TxGraph`] and
|
/// This performs a full scan to get an update for the [`TxGraph`] and
|
||||||
/// [`KeychainTxOutIndex`](bdk_chain::keychain::KeychainTxOutIndex).
|
/// [`KeychainTxOutIndex`](bdk_chain::indexer::keychain_txout::KeychainTxOutIndex).
|
||||||
fn full_scan_for_index_and_graph_blocking<K: Ord + Clone>(
|
fn full_scan_for_index_and_graph_blocking<K: Ord + Clone>(
|
||||||
client: &esplora_client::BlockingClient,
|
client: &esplora_client::BlockingClient,
|
||||||
keychain_spks: BTreeMap<K, impl IntoIterator<Item = (u32, ScriptBuf)>>,
|
keychain_spks: BTreeMap<K, impl IntoIterator<Item = Indexed<ScriptBuf>>>,
|
||||||
stop_gap: usize,
|
stop_gap: usize,
|
||||||
parallel_requests: usize,
|
parallel_requests: usize,
|
||||||
) -> Result<(TxGraph<ConfirmationTimeHeightAnchor>, BTreeMap<K, u32>), Error> {
|
) -> Result<(TxGraph<ConfirmationBlockTime>, BTreeMap<K, u32>), Error> {
|
||||||
type TxsOfSpkIndex = (u32, Vec<esplora_client::Tx>);
|
type TxsOfSpkIndex = (u32, Vec<esplora_client::Tx>);
|
||||||
let parallel_requests = Ord::max(parallel_requests, 1);
|
let parallel_requests = Ord::max(parallel_requests, 1);
|
||||||
let mut tx_graph = TxGraph::<ConfirmationTimeHeightAnchor>::default();
|
let mut tx_graph = TxGraph::<ConfirmationBlockTime>::default();
|
||||||
let mut last_active_indices = BTreeMap::<K, u32>::new();
|
let mut last_active_indices = BTreeMap::<K, u32>::new();
|
||||||
|
|
||||||
for (keychain, spks) in keychain_spks {
|
for (keychain, spks) in keychain_spks {
|
||||||
@@ -316,7 +315,7 @@ fn sync_for_index_and_graph_blocking(
|
|||||||
txids: impl IntoIterator<Item = Txid>,
|
txids: impl IntoIterator<Item = Txid>,
|
||||||
outpoints: impl IntoIterator<Item = OutPoint>,
|
outpoints: impl IntoIterator<Item = OutPoint>,
|
||||||
parallel_requests: usize,
|
parallel_requests: usize,
|
||||||
) -> Result<TxGraph<ConfirmationTimeHeightAnchor>, Error> {
|
) -> Result<TxGraph<ConfirmationBlockTime>, Error> {
|
||||||
let (mut tx_graph, _) = full_scan_for_index_and_graph_blocking(
|
let (mut tx_graph, _) = full_scan_for_index_and_graph_blocking(
|
||||||
client,
|
client,
|
||||||
{
|
{
|
||||||
|
|||||||
@@ -16,7 +16,7 @@
|
|||||||
//! [`TxGraph`]: bdk_chain::tx_graph::TxGraph
|
//! [`TxGraph`]: bdk_chain::tx_graph::TxGraph
|
||||||
//! [`example_esplora`]: https://github.com/bitcoindevkit/bdk/tree/master/example-crates/example_esplora
|
//! [`example_esplora`]: https://github.com/bitcoindevkit/bdk/tree/master/example-crates/example_esplora
|
||||||
|
|
||||||
use bdk_chain::{BlockId, ConfirmationTimeHeightAnchor};
|
use bdk_chain::{BlockId, ConfirmationBlockTime};
|
||||||
use esplora_client::TxStatus;
|
use esplora_client::TxStatus;
|
||||||
|
|
||||||
pub use esplora_client;
|
pub use esplora_client;
|
||||||
@@ -31,7 +31,7 @@ mod async_ext;
|
|||||||
#[cfg(feature = "async")]
|
#[cfg(feature = "async")]
|
||||||
pub use async_ext::*;
|
pub use async_ext::*;
|
||||||
|
|
||||||
fn anchor_from_status(status: &TxStatus) -> Option<ConfirmationTimeHeightAnchor> {
|
fn anchor_from_status(status: &TxStatus) -> Option<ConfirmationBlockTime> {
|
||||||
if let TxStatus {
|
if let TxStatus {
|
||||||
block_height: Some(height),
|
block_height: Some(height),
|
||||||
block_hash: Some(hash),
|
block_hash: Some(hash),
|
||||||
@@ -39,9 +39,8 @@ fn anchor_from_status(status: &TxStatus) -> Option<ConfirmationTimeHeightAnchor>
|
|||||||
..
|
..
|
||||||
} = status.clone()
|
} = status.clone()
|
||||||
{
|
{
|
||||||
Some(ConfirmationTimeHeightAnchor {
|
Some(ConfirmationBlockTime {
|
||||||
anchor_block: BlockId { height, hash },
|
block_id: BlockId { height, hash },
|
||||||
confirmation_height: height,
|
|
||||||
confirmation_time: time,
|
confirmation_time: time,
|
||||||
})
|
})
|
||||||
} else {
|
} else {
|
||||||
|
|||||||
@@ -92,7 +92,8 @@ pub async fn test_update_tx_graph_without_keychain() -> anyhow::Result<()> {
|
|||||||
.fee
|
.fee
|
||||||
.expect("Fee must exist")
|
.expect("Fee must exist")
|
||||||
.abs()
|
.abs()
|
||||||
.to_sat() as u64;
|
.to_unsigned()
|
||||||
|
.expect("valid `Amount`");
|
||||||
|
|
||||||
// Check that the calculated fee matches the fee from the transaction data.
|
// Check that the calculated fee matches the fee from the transaction data.
|
||||||
assert_eq!(fee, tx_fee);
|
assert_eq!(fee, tx_fee);
|
||||||
|
|||||||
@@ -92,7 +92,8 @@ pub fn test_update_tx_graph_without_keychain() -> anyhow::Result<()> {
|
|||||||
.fee
|
.fee
|
||||||
.expect("Fee must exist")
|
.expect("Fee must exist")
|
||||||
.abs()
|
.abs()
|
||||||
.to_sat() as u64;
|
.to_unsigned()
|
||||||
|
.expect("valid `Amount`");
|
||||||
|
|
||||||
// Check that the calculated fee matches the fee from the transaction data.
|
// Check that the calculated fee matches the fee from the transaction data.
|
||||||
assert_eq!(fee, tx_fee);
|
assert_eq!(fee, tx_fee);
|
||||||
|
|||||||
@@ -1,19 +1,17 @@
|
|||||||
[package]
|
[package]
|
||||||
name = "bdk_file_store"
|
name = "bdk_file_store"
|
||||||
version = "0.11.0"
|
version = "0.14.0"
|
||||||
edition = "2021"
|
edition = "2021"
|
||||||
license = "MIT OR Apache-2.0"
|
license = "MIT OR Apache-2.0"
|
||||||
repository = "https://github.com/bitcoindevkit/bdk"
|
repository = "https://github.com/bitcoindevkit/bdk"
|
||||||
documentation = "https://docs.rs/bdk_file_store"
|
documentation = "https://docs.rs/bdk_file_store"
|
||||||
description = "A simple append-only flat file implementation of Persist for Bitcoin Dev Kit."
|
description = "A simple append-only flat file database for persisting bdk_chain data."
|
||||||
keywords = ["bitcoin", "persist", "persistence", "bdk", "file"]
|
keywords = ["bitcoin", "persist", "persistence", "bdk", "file"]
|
||||||
authors = ["Bitcoin Dev Kit Developers"]
|
authors = ["Bitcoin Dev Kit Developers"]
|
||||||
readme = "README.md"
|
readme = "README.md"
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
anyhow = { version = "1", default-features = false }
|
bdk_chain = { path = "../chain", version = "0.17.0", features = [ "serde", "miniscript" ] }
|
||||||
bdk_chain = { path = "../chain", version = "0.14.0", features = [ "serde", "miniscript" ] }
|
|
||||||
bdk_persist = { path = "../persist", version = "0.2.0"}
|
|
||||||
bincode = { version = "1" }
|
bincode = { version = "1" }
|
||||||
serde = { version = "1", features = ["derive"] }
|
serde = { version = "1", features = ["derive"] }
|
||||||
|
|
||||||
|
|||||||
@@ -1,10 +1,7 @@
|
|||||||
# BDK File Store
|
# BDK File Store
|
||||||
|
|
||||||
This is a simple append-only flat file implementation of
|
This is a simple append-only flat file database for persisting [`bdk_chain`] changesets.
|
||||||
[`PersistBackend`](bdk_persist::PersistBackend).
|
|
||||||
|
|
||||||
The main structure is [`Store`](crate::Store), which can be used with [`bdk`]'s
|
The main structure is [`Store`] which works with any [`bdk_chain`] based changesets to persist data into a flat file.
|
||||||
`Wallet` to persist wallet data into a flat file.
|
|
||||||
|
|
||||||
[`bdk`]: https://docs.rs/bdk/latest
|
[`bdk_chain`]:https://docs.rs/bdk_chain/latest/bdk_chain/
|
||||||
[`bdk_persist`]: https://docs.rs/bdk_persist/latest
|
|
||||||
|
|||||||
@@ -1,7 +1,5 @@
|
|||||||
use crate::{bincode_options, EntryIter, FileError, IterError};
|
use crate::{bincode_options, EntryIter, FileError, IterError};
|
||||||
use anyhow::anyhow;
|
use bdk_chain::Merge;
|
||||||
use bdk_chain::Append;
|
|
||||||
use bdk_persist::PersistBackend;
|
|
||||||
use bincode::Options;
|
use bincode::Options;
|
||||||
use std::{
|
use std::{
|
||||||
fmt::{self, Debug},
|
fmt::{self, Debug},
|
||||||
@@ -22,28 +20,9 @@ where
|
|||||||
marker: PhantomData<C>,
|
marker: PhantomData<C>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<C> PersistBackend<C> for Store<C>
|
|
||||||
where
|
|
||||||
C: Append
|
|
||||||
+ serde::Serialize
|
|
||||||
+ serde::de::DeserializeOwned
|
|
||||||
+ core::marker::Send
|
|
||||||
+ core::marker::Sync,
|
|
||||||
{
|
|
||||||
fn write_changes(&mut self, changeset: &C) -> anyhow::Result<()> {
|
|
||||||
self.append_changeset(changeset)
|
|
||||||
.map_err(|e| anyhow!(e).context("failed to write changes to persistence backend"))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn load_from_persistence(&mut self) -> anyhow::Result<Option<C>> {
|
|
||||||
self.aggregate_changesets()
|
|
||||||
.map_err(|e| anyhow!(e.iter_error).context("error loading from persistence backend"))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<C> Store<C>
|
impl<C> Store<C>
|
||||||
where
|
where
|
||||||
C: Append
|
C: Merge
|
||||||
+ serde::Serialize
|
+ serde::Serialize
|
||||||
+ serde::de::DeserializeOwned
|
+ serde::de::DeserializeOwned
|
||||||
+ core::marker::Send
|
+ core::marker::Send
|
||||||
@@ -168,7 +147,7 @@ where
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
match &mut changeset {
|
match &mut changeset {
|
||||||
Some(changeset) => changeset.append(next_changeset),
|
Some(changeset) => changeset.merge(next_changeset),
|
||||||
changeset => *changeset = Some(next_changeset),
|
changeset => *changeset = Some(next_changeset),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -386,7 +365,7 @@ mod test {
|
|||||||
assert_eq!(
|
assert_eq!(
|
||||||
err.changeset,
|
err.changeset,
|
||||||
changesets.iter().cloned().reduce(|mut acc, cs| {
|
changesets.iter().cloned().reduce(|mut acc, cs| {
|
||||||
Append::append(&mut acc, cs);
|
Merge::merge(&mut acc, cs);
|
||||||
acc
|
acc
|
||||||
}),
|
}),
|
||||||
"should recover all changesets that are written in full",
|
"should recover all changesets that are written in full",
|
||||||
@@ -407,7 +386,7 @@ mod test {
|
|||||||
.cloned()
|
.cloned()
|
||||||
.chain(core::iter::once(last_changeset.clone()))
|
.chain(core::iter::once(last_changeset.clone()))
|
||||||
.reduce(|mut acc, cs| {
|
.reduce(|mut acc, cs| {
|
||||||
Append::append(&mut acc, cs);
|
Merge::merge(&mut acc, cs);
|
||||||
acc
|
acc
|
||||||
}),
|
}),
|
||||||
"should recover all changesets",
|
"should recover all changesets",
|
||||||
@@ -443,13 +422,13 @@ mod test {
|
|||||||
.take(read_count)
|
.take(read_count)
|
||||||
.map(|r| r.expect("must read valid changeset"))
|
.map(|r| r.expect("must read valid changeset"))
|
||||||
.fold(TestChangeSet::default(), |mut acc, v| {
|
.fold(TestChangeSet::default(), |mut acc, v| {
|
||||||
Append::append(&mut acc, v);
|
Merge::merge(&mut acc, v);
|
||||||
acc
|
acc
|
||||||
});
|
});
|
||||||
// We write after a short read.
|
// We write after a short read.
|
||||||
db.write_changes(&last_changeset)
|
db.append_changeset(&last_changeset)
|
||||||
.expect("last write must succeed");
|
.expect("last write must succeed");
|
||||||
Append::append(&mut exp_aggregation, last_changeset.clone());
|
Merge::merge(&mut exp_aggregation, last_changeset.clone());
|
||||||
drop(db);
|
drop(db);
|
||||||
|
|
||||||
// We open the file again and check whether aggregate changeset is expected.
|
// We open the file again and check whether aggregate changeset is expected.
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
[package]
|
[package]
|
||||||
name = "bdk_hwi"
|
name = "bdk_hwi"
|
||||||
version = "0.2.0"
|
version = "0.4.0"
|
||||||
edition = "2021"
|
edition = "2021"
|
||||||
homepage = "https://bitcoindevkit.org"
|
homepage = "https://bitcoindevkit.org"
|
||||||
repository = "https://github.com/bitcoindevkit/bdk"
|
repository = "https://github.com/bitcoindevkit/bdk"
|
||||||
@@ -9,5 +9,5 @@ license = "MIT OR Apache-2.0"
|
|||||||
readme = "README.md"
|
readme = "README.md"
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
bdk = { path = "../bdk" }
|
bdk_wallet = { path = "../wallet", version = "1.0.0-beta.1" }
|
||||||
hwi = { version = "0.8.0", features = [ "miniscript"] }
|
hwi = { version = "0.9.0", features = [ "miniscript"] }
|
||||||
|
|||||||
3
crates/hwi/README.md
Normal file
3
crates/hwi/README.md
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
# BDK HWI Signer
|
||||||
|
|
||||||
|
This crate contains `HWISigner`, an implementation of a `TransactionSigner` to be used with hardware wallets.
|
||||||
@@ -3,12 +3,14 @@
|
|||||||
//! This crate contains HWISigner, an implementation of a [`TransactionSigner`] to be
|
//! This crate contains HWISigner, an implementation of a [`TransactionSigner`] to be
|
||||||
//! used with hardware wallets.
|
//! used with hardware wallets.
|
||||||
//! ```no_run
|
//! ```no_run
|
||||||
//! # use bdk::bitcoin::Network;
|
//! # use bdk_wallet::bitcoin::Network;
|
||||||
//! # use bdk::signer::SignerOrdering;
|
//! # use bdk_wallet::descriptor::Descriptor;
|
||||||
|
//! # use bdk_wallet::signer::SignerOrdering;
|
||||||
//! # use bdk_hwi::HWISigner;
|
//! # use bdk_hwi::HWISigner;
|
||||||
//! # use bdk::{KeychainKind, SignOptions, Wallet};
|
//! # use bdk_wallet::{KeychainKind, SignOptions, Wallet};
|
||||||
//! # use hwi::HWIClient;
|
//! # use hwi::HWIClient;
|
||||||
//! # use std::sync::Arc;
|
//! # use std::sync::Arc;
|
||||||
|
//! # use std::str::FromStr;
|
||||||
//! #
|
//! #
|
||||||
//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
|
//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||||
//! let mut devices = HWIClient::enumerate()?;
|
//! let mut devices = HWIClient::enumerate()?;
|
||||||
@@ -18,11 +20,7 @@
|
|||||||
//! let first_device = devices.remove(0)?;
|
//! let first_device = devices.remove(0)?;
|
||||||
//! let custom_signer = HWISigner::from_device(&first_device, Network::Testnet.into())?;
|
//! let custom_signer = HWISigner::from_device(&first_device, Network::Testnet.into())?;
|
||||||
//!
|
//!
|
||||||
//! # let mut wallet = Wallet::new_no_persist(
|
//! # let mut wallet = Wallet::create("", "").network(Network::Testnet).create_wallet_no_persist()?;
|
||||||
//! # "",
|
|
||||||
//! # None,
|
|
||||||
//! # Network::Testnet,
|
|
||||||
//! # )?;
|
|
||||||
//! #
|
//! #
|
||||||
//! // Adding the hardware signer to the BDK wallet
|
//! // Adding the hardware signer to the BDK wallet
|
||||||
//! wallet.add_signer(
|
//! wallet.add_signer(
|
||||||
@@ -35,7 +33,7 @@
|
|||||||
//! # }
|
//! # }
|
||||||
//! ```
|
//! ```
|
||||||
//!
|
//!
|
||||||
//! [`TransactionSigner`]: bdk::wallet::signer::TransactionSigner
|
//! [`TransactionSigner`]: bdk_wallet::signer::TransactionSigner
|
||||||
|
|
||||||
mod signer;
|
mod signer;
|
||||||
pub use signer::*;
|
pub use signer::*;
|
||||||
|
|||||||
@@ -1,12 +1,12 @@
|
|||||||
use bdk::bitcoin::bip32::Fingerprint;
|
use bdk_wallet::bitcoin::bip32::Fingerprint;
|
||||||
use bdk::bitcoin::secp256k1::{All, Secp256k1};
|
use bdk_wallet::bitcoin::secp256k1::{All, Secp256k1};
|
||||||
use bdk::bitcoin::Psbt;
|
use bdk_wallet::bitcoin::Psbt;
|
||||||
|
|
||||||
use hwi::error::Error;
|
use hwi::error::Error;
|
||||||
use hwi::types::{HWIChain, HWIDevice};
|
use hwi::types::{HWIChain, HWIDevice};
|
||||||
use hwi::HWIClient;
|
use hwi::HWIClient;
|
||||||
|
|
||||||
use bdk::signer::{SignerCommon, SignerError, SignerId, TransactionSigner};
|
use bdk_wallet::signer::{SignerCommon, SignerError, SignerId, TransactionSigner};
|
||||||
|
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
/// Custom signer for Hardware Wallets
|
/// Custom signer for Hardware Wallets
|
||||||
@@ -38,7 +38,7 @@ impl TransactionSigner for HWISigner {
|
|||||||
fn sign_transaction(
|
fn sign_transaction(
|
||||||
&self,
|
&self,
|
||||||
psbt: &mut Psbt,
|
psbt: &mut Psbt,
|
||||||
_sign_options: &bdk::SignOptions,
|
_sign_options: &bdk_wallet::SignOptions,
|
||||||
_secp: &Secp256k1<All>,
|
_secp: &Secp256k1<All>,
|
||||||
) -> Result<(), SignerError> {
|
) -> Result<(), SignerError> {
|
||||||
psbt.combine(
|
psbt.combine(
|
||||||
@@ -61,9 +61,9 @@ impl TransactionSigner for HWISigner {
|
|||||||
// fn test_hardware_signer() {
|
// fn test_hardware_signer() {
|
||||||
// use std::sync::Arc;
|
// use std::sync::Arc;
|
||||||
//
|
//
|
||||||
// use bdk::tests::get_funded_wallet;
|
// use bdk_wallet::tests::get_funded_wallet;
|
||||||
// use bdk::signer::SignerOrdering;
|
// use bdk_wallet::signer::SignerOrdering;
|
||||||
// use bdk::bitcoin::Network;
|
// use bdk_wallet::bitcoin::Network;
|
||||||
// use crate::HWISigner;
|
// use crate::HWISigner;
|
||||||
// use hwi::HWIClient;
|
// use hwi::HWIClient;
|
||||||
//
|
//
|
||||||
@@ -78,12 +78,12 @@ impl TransactionSigner for HWISigner {
|
|||||||
//
|
//
|
||||||
// let (mut wallet, _) = get_funded_wallet(&descriptors.internal[0]);
|
// let (mut wallet, _) = get_funded_wallet(&descriptors.internal[0]);
|
||||||
// wallet.add_signer(
|
// wallet.add_signer(
|
||||||
// bdk::KeychainKind::External,
|
// bdk_wallet::KeychainKind::External,
|
||||||
// SignerOrdering(200),
|
// SignerOrdering(200),
|
||||||
// Arc::new(custom_signer),
|
// Arc::new(custom_signer),
|
||||||
// );
|
// );
|
||||||
//
|
//
|
||||||
// let addr = wallet.get_address(bdk::wallet::AddressIndex::LastUnused);
|
// let addr = wallet.get_address(bdk_wallet::AddressIndex::LastUnused);
|
||||||
// let mut builder = wallet.build_tx();
|
// let mut builder = wallet.build_tx();
|
||||||
// builder.drain_to(addr.script_pubkey()).drain_wallet();
|
// builder.drain_to(addr.script_pubkey()).drain_wallet();
|
||||||
// let (mut psbt, _) = builder.finish().unwrap();
|
// let (mut psbt, _) = builder.finish().unwrap();
|
||||||
|
|||||||
@@ -1,22 +0,0 @@
|
|||||||
[package]
|
|
||||||
name = "bdk_persist"
|
|
||||||
homepage = "https://bitcoindevkit.org"
|
|
||||||
version = "0.2.0"
|
|
||||||
repository = "https://github.com/bitcoindevkit/bdk"
|
|
||||||
documentation = "https://docs.rs/bdk_persist"
|
|
||||||
description = "Types that define data persistence of a BDK wallet"
|
|
||||||
keywords = ["bitcoin", "wallet", "persistence", "database"]
|
|
||||||
readme = "README.md"
|
|
||||||
license = "MIT OR Apache-2.0"
|
|
||||||
authors = ["Bitcoin Dev Kit Developers"]
|
|
||||||
edition = "2021"
|
|
||||||
rust-version = "1.63"
|
|
||||||
|
|
||||||
[dependencies]
|
|
||||||
anyhow = { version = "1", default-features = false }
|
|
||||||
bdk_chain = { path = "../chain", version = "0.14.0", default-features = false }
|
|
||||||
|
|
||||||
[features]
|
|
||||||
default = ["bdk_chain/std"]
|
|
||||||
|
|
||||||
|
|
||||||
@@ -1,3 +0,0 @@
|
|||||||
# BDK Persist
|
|
||||||
|
|
||||||
This crate is home to the [`PersistBackend`](crate::PersistBackend) trait which defines the behavior of a database to perform the task of persisting changes made to BDK data structures. The [`Persist`](crate::Persist) type provides a convenient wrapper around a `PersistBackend` that allows staging changes before committing them.
|
|
||||||
@@ -1,5 +0,0 @@
|
|||||||
#![doc = include_str!("../README.md")]
|
|
||||||
#![no_std]
|
|
||||||
#![warn(missing_docs)]
|
|
||||||
mod persist;
|
|
||||||
pub use persist::*;
|
|
||||||
@@ -1,106 +0,0 @@
|
|||||||
extern crate alloc;
|
|
||||||
use alloc::boxed::Box;
|
|
||||||
use bdk_chain::Append;
|
|
||||||
use core::fmt;
|
|
||||||
|
|
||||||
/// `Persist` wraps a [`PersistBackend`] to create a convenient staging area for changes (`C`)
|
|
||||||
/// before they are persisted.
|
|
||||||
///
|
|
||||||
/// Not all changes to the in-memory representation needs to be written to disk right away, so
|
|
||||||
/// [`Persist::stage`] can be used to *stage* changes first and then [`Persist::commit`] can be used
|
|
||||||
/// to write changes to disk.
|
|
||||||
pub struct Persist<C> {
|
|
||||||
backend: Box<dyn PersistBackend<C> + Send + Sync>,
|
|
||||||
stage: C,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<C: fmt::Debug> fmt::Debug for Persist<C> {
|
|
||||||
fn fmt(&self, fmt: &mut core::fmt::Formatter<'_>) -> Result<(), core::fmt::Error> {
|
|
||||||
write!(fmt, "{:?}", self.stage)?;
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<C> Persist<C>
|
|
||||||
where
|
|
||||||
C: Default + Append,
|
|
||||||
{
|
|
||||||
/// Create a new [`Persist`] from [`PersistBackend`].
|
|
||||||
pub fn new(backend: impl PersistBackend<C> + Send + Sync + 'static) -> Self {
|
|
||||||
let backend = Box::new(backend);
|
|
||||||
Self {
|
|
||||||
backend,
|
|
||||||
stage: Default::default(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Stage a `changeset` to be committed later with [`commit`].
|
|
||||||
///
|
|
||||||
/// [`commit`]: Self::commit
|
|
||||||
pub fn stage(&mut self, changeset: C) {
|
|
||||||
self.stage.append(changeset)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Get the changes that have not been committed yet.
|
|
||||||
pub fn staged(&self) -> &C {
|
|
||||||
&self.stage
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Commit the staged changes to the underlying persistence backend.
|
|
||||||
///
|
|
||||||
/// Changes that are committed (if any) are returned.
|
|
||||||
///
|
|
||||||
/// # Error
|
|
||||||
///
|
|
||||||
/// Returns a backend-defined error if this fails.
|
|
||||||
pub fn commit(&mut self) -> anyhow::Result<Option<C>> {
|
|
||||||
if self.stage.is_empty() {
|
|
||||||
return Ok(None);
|
|
||||||
}
|
|
||||||
self.backend
|
|
||||||
.write_changes(&self.stage)
|
|
||||||
// if written successfully, take and return `self.stage`
|
|
||||||
.map(|_| Some(core::mem::take(&mut self.stage)))
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Stages a new changeset and commits it (along with any other previously staged changes) to
|
|
||||||
/// the persistence backend
|
|
||||||
///
|
|
||||||
/// Convenience method for calling [`stage`] and then [`commit`].
|
|
||||||
///
|
|
||||||
/// [`stage`]: Self::stage
|
|
||||||
/// [`commit`]: Self::commit
|
|
||||||
pub fn stage_and_commit(&mut self, changeset: C) -> anyhow::Result<Option<C>> {
|
|
||||||
self.stage(changeset);
|
|
||||||
self.commit()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// A persistence backend for [`Persist`].
|
|
||||||
///
|
|
||||||
/// `C` represents the changeset; a datatype that records changes made to in-memory data structures
|
|
||||||
/// that are to be persisted, or retrieved from persistence.
|
|
||||||
pub trait PersistBackend<C> {
|
|
||||||
/// Writes a changeset to the persistence backend.
|
|
||||||
///
|
|
||||||
/// It is up to the backend what it does with this. It could store every changeset in a list or
|
|
||||||
/// it inserts the actual changes into a more structured database. All it needs to guarantee is
|
|
||||||
/// that [`load_from_persistence`] restores a keychain tracker to what it should be if all
|
|
||||||
/// changesets had been applied sequentially.
|
|
||||||
///
|
|
||||||
/// [`load_from_persistence`]: Self::load_from_persistence
|
|
||||||
fn write_changes(&mut self, changeset: &C) -> anyhow::Result<()>;
|
|
||||||
|
|
||||||
/// Return the aggregate changeset `C` from persistence.
|
|
||||||
fn load_from_persistence(&mut self) -> anyhow::Result<Option<C>>;
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<C> PersistBackend<C> for () {
|
|
||||||
fn write_changes(&mut self, _changeset: &C) -> anyhow::Result<()> {
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
fn load_from_persistence(&mut self) -> anyhow::Result<Option<C>> {
|
|
||||||
Ok(None)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,6 +1,6 @@
|
|||||||
[package]
|
[package]
|
||||||
name = "bdk_testenv"
|
name = "bdk_testenv"
|
||||||
version = "0.4.0"
|
version = "0.7.0"
|
||||||
edition = "2021"
|
edition = "2021"
|
||||||
rust-version = "1.63"
|
rust-version = "1.63"
|
||||||
homepage = "https://bitcoindevkit.org"
|
homepage = "https://bitcoindevkit.org"
|
||||||
@@ -13,8 +13,8 @@ readme = "README.md"
|
|||||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
bdk_chain = { path = "../chain", version = "0.14", default-features = false }
|
bdk_chain = { path = "../chain", version = "0.17", default-features = false }
|
||||||
electrsd = { version= "0.27.1", features = ["bitcoind_25_0", "esplora_a33e97e1", "legacy"] }
|
electrsd = { version = "0.28.0", features = ["bitcoind_25_0", "esplora_a33e97e1", "legacy"] }
|
||||||
|
|
||||||
[features]
|
[features]
|
||||||
default = ["std"]
|
default = ["std"]
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
[package]
|
[package]
|
||||||
name = "bdk"
|
name = "bdk_wallet"
|
||||||
homepage = "https://bitcoindevkit.org"
|
homepage = "https://bitcoindevkit.org"
|
||||||
version = "1.0.0-alpha.11"
|
version = "1.0.0-beta.1"
|
||||||
repository = "https://github.com/bitcoindevkit/bdk"
|
repository = "https://github.com/bitcoindevkit/bdk"
|
||||||
documentation = "https://docs.rs/bdk"
|
documentation = "https://docs.rs/bdk"
|
||||||
description = "A modern, lightweight, descriptor-based wallet library"
|
description = "A modern, lightweight, descriptor-based wallet library"
|
||||||
@@ -13,40 +13,35 @@ edition = "2021"
|
|||||||
rust-version = "1.63"
|
rust-version = "1.63"
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
anyhow = { version = "1", default-features = false }
|
rand_core = { version = "0.6.0" }
|
||||||
rand = "^0.8"
|
miniscript = { version = "12.0.0", features = ["serde"], default-features = false }
|
||||||
miniscript = { version = "11.0.0", features = ["serde"], default-features = false }
|
bitcoin = { version = "0.32.0", features = ["serde", "base64"], default-features = false }
|
||||||
bitcoin = { version = "0.31.0", features = ["serde", "base64", "rand-std"], default-features = false }
|
|
||||||
serde = { version = "^1.0", features = ["derive"] }
|
serde = { version = "^1.0", features = ["derive"] }
|
||||||
serde_json = { version = "^1.0" }
|
serde_json = { version = "^1.0" }
|
||||||
bdk_chain = { path = "../chain", version = "0.14.0", features = ["miniscript", "serde"], default-features = false }
|
bdk_chain = { path = "../chain", version = "0.17.0", features = ["miniscript", "serde"], default-features = false }
|
||||||
bdk_persist = { path = "../persist", version = "0.2.0" }
|
bdk_file_store = { path = "../file_store", version = "0.14.0", optional = true }
|
||||||
|
|
||||||
# Optional dependencies
|
# Optional dependencies
|
||||||
bip39 = { version = "2.0", optional = true }
|
bip39 = { version = "2.0", optional = true }
|
||||||
|
|
||||||
[target.'cfg(target_arch = "wasm32")'.dependencies]
|
|
||||||
getrandom = "0.2"
|
|
||||||
js-sys = "0.3"
|
|
||||||
|
|
||||||
[features]
|
[features]
|
||||||
default = ["std"]
|
default = ["std"]
|
||||||
std = ["bitcoin/std", "miniscript/std", "bdk_chain/std"]
|
std = ["bitcoin/std", "bitcoin/rand-std", "miniscript/std", "bdk_chain/std"]
|
||||||
compiler = ["miniscript/compiler"]
|
compiler = ["miniscript/compiler"]
|
||||||
all-keys = ["keys-bip39"]
|
all-keys = ["keys-bip39"]
|
||||||
keys-bip39 = ["bip39"]
|
keys-bip39 = ["bip39"]
|
||||||
|
rusqlite = ["bdk_chain/rusqlite"]
|
||||||
# This feature is used to run `cargo check` in our CI targeting wasm. It's not recommended
|
file_store = ["bdk_file_store"]
|
||||||
# for libraries to explicitly include the "getrandom/js" feature, so we only do it when
|
|
||||||
# necessary for running our CI. See: https://docs.rs/getrandom/0.2.8/getrandom/#webassembly-support
|
|
||||||
dev-getrandom-wasm = ["getrandom/js"]
|
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
lazy_static = "1.4"
|
lazy_static = "1.4"
|
||||||
assert_matches = "1.5.0"
|
assert_matches = "1.5.0"
|
||||||
tempfile = "3"
|
tempfile = "3"
|
||||||
|
bdk_chain = { path = "../chain", features = ["rusqlite"] }
|
||||||
|
bdk_wallet = { path = ".", features = ["rusqlite", "file_store"] }
|
||||||
bdk_file_store = { path = "../file_store" }
|
bdk_file_store = { path = "../file_store" }
|
||||||
anyhow = "1"
|
anyhow = "1"
|
||||||
|
rand = "^0.8"
|
||||||
|
|
||||||
[package.metadata.docs.rs]
|
[package.metadata.docs.rs]
|
||||||
all-features = true
|
all-features = true
|
||||||
@@ -8,11 +8,11 @@
|
|||||||
</p>
|
</p>
|
||||||
|
|
||||||
<p>
|
<p>
|
||||||
<a href="https://crates.io/crates/bdk"><img alt="Crate Info" src="https://img.shields.io/crates/v/bdk.svg"/></a>
|
<a href="https://crates.io/crates/bdk_wallet"><img alt="Crate Info" src="https://img.shields.io/crates/v/bdk_wallet.svg"/></a>
|
||||||
<a href="https://github.com/bitcoindevkit/bdk/blob/master/LICENSE"><img alt="MIT or Apache-2.0 Licensed" src="https://img.shields.io/badge/license-MIT%2FApache--2.0-blue.svg"/></a>
|
<a href="https://github.com/bitcoindevkit/bdk/blob/master/LICENSE"><img alt="MIT or Apache-2.0 Licensed" src="https://img.shields.io/badge/license-MIT%2FApache--2.0-blue.svg"/></a>
|
||||||
<a href="https://github.com/bitcoindevkit/bdk/actions?query=workflow%3ACI"><img alt="CI Status" src="https://github.com/bitcoindevkit/bdk/workflows/CI/badge.svg"></a>
|
<a href="https://github.com/bitcoindevkit/bdk/actions?query=workflow%3ACI"><img alt="CI Status" src="https://github.com/bitcoindevkit/bdk/workflows/CI/badge.svg"></a>
|
||||||
<a href="https://coveralls.io/github/bitcoindevkit/bdk?branch=master"><img src="https://coveralls.io/repos/github/bitcoindevkit/bdk/badge.svg?branch=master"/></a>
|
<a href="https://coveralls.io/github/bitcoindevkit/bdk?branch=master"><img src="https://coveralls.io/repos/github/bitcoindevkit/bdk/badge.svg?branch=master"/></a>
|
||||||
<a href="https://docs.rs/bdk"><img alt="API Docs" src="https://img.shields.io/badge/docs.rs-bdk-green"/></a>
|
<a href="https://docs.rs/bdk_wallet"><img alt="API Docs" src="https://img.shields.io/badge/docs.rs-bdk_wallet-green"/></a>
|
||||||
<a href="https://blog.rust-lang.org/2022/08/11/Rust-1.63.0.html"><img alt="Rustc Version 1.63.0+" src="https://img.shields.io/badge/rustc-1.63.0%2B-lightgrey.svg"/></a>
|
<a href="https://blog.rust-lang.org/2022/08/11/Rust-1.63.0.html"><img alt="Rustc Version 1.63.0+" src="https://img.shields.io/badge/rustc-1.63.0%2B-lightgrey.svg"/></a>
|
||||||
<a href="https://discord.gg/d7NkDKm"><img alt="Chat on Discord" src="https://img.shields.io/discord/753336465005608961?logo=discord"></a>
|
<a href="https://discord.gg/d7NkDKm"><img alt="Chat on Discord" src="https://img.shields.io/discord/753336465005608961?logo=discord"></a>
|
||||||
</p>
|
</p>
|
||||||
@@ -20,13 +20,13 @@
|
|||||||
<h4>
|
<h4>
|
||||||
<a href="https://bitcoindevkit.org">Project Homepage</a>
|
<a href="https://bitcoindevkit.org">Project Homepage</a>
|
||||||
<span> | </span>
|
<span> | </span>
|
||||||
<a href="https://docs.rs/bdk">Documentation</a>
|
<a href="https://docs.rs/bdk_wallet">Documentation</a>
|
||||||
</h4>
|
</h4>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
## `bdk`
|
# BDK Wallet
|
||||||
|
|
||||||
The `bdk` crate provides the [`Wallet`] type which is a simple, high-level
|
The `bdk_wallet` crate provides the [`Wallet`] type which is a simple, high-level
|
||||||
interface built from the low-level components of [`bdk_chain`]. `Wallet` is a good starting point
|
interface built from the low-level components of [`bdk_chain`]. `Wallet` is a good starting point
|
||||||
for many simple applications as well as a good demonstration of how to use the other mechanisms to
|
for many simple applications as well as a good demonstration of how to use the other mechanisms to
|
||||||
construct a wallet. It has two keychains (external and internal) which are defined by
|
construct a wallet. It has two keychains (external and internal) which are defined by
|
||||||
@@ -36,7 +36,7 @@ can create and sign transactions.
|
|||||||
|
|
||||||
For details about the API of `Wallet` see the [module-level documentation][`Wallet`].
|
For details about the API of `Wallet` see the [module-level documentation][`Wallet`].
|
||||||
|
|
||||||
### Blockchain data
|
## Blockchain data
|
||||||
|
|
||||||
In order to get blockchain data for `Wallet` to consume, you should configure a client from
|
In order to get blockchain data for `Wallet` to consume, you should configure a client from
|
||||||
an available chain source. Typically you make a request to the chain source and get a response
|
an available chain source. Typically you make a request to the chain source and get a response
|
||||||
@@ -55,43 +55,59 @@ that the `Wallet` can use to update its view of the chain.
|
|||||||
* [`example-crates/wallet_electrum`](https://github.com/bitcoindevkit/bdk/tree/master/example-crates/wallet_electrum)
|
* [`example-crates/wallet_electrum`](https://github.com/bitcoindevkit/bdk/tree/master/example-crates/wallet_electrum)
|
||||||
* [`example-crates/wallet_rpc`](https://github.com/bitcoindevkit/bdk/tree/master/example-crates/wallet_rpc)
|
* [`example-crates/wallet_rpc`](https://github.com/bitcoindevkit/bdk/tree/master/example-crates/wallet_rpc)
|
||||||
|
|
||||||
### Persistence
|
## Persistence
|
||||||
|
|
||||||
To persist the `Wallet` on disk, it must be constructed with a [`PersistBackend`] implementation.
|
To persist `Wallet` state data use a data store crate that reads and writes [`ChangeSet`].
|
||||||
|
|
||||||
**Implementations**
|
**Implementations**
|
||||||
|
|
||||||
* [`bdk_file_store`]: A simple flat-file implementation of [`PersistBackend`].
|
* [`bdk_file_store`]: Stores wallet changes in a simple flat file.
|
||||||
|
|
||||||
**Example**
|
**Example**
|
||||||
|
|
||||||
<!-- compile_fail because outpoint and txout are fake variables -->
|
<!-- compile_fail because outpoint and txout are fake variables -->
|
||||||
```rust,compile_fail
|
```rust,no_run
|
||||||
use bdk::{bitcoin::Network, wallet::{ChangeSet, Wallet}};
|
use bdk_wallet::{bitcoin::Network, KeychainKind, ChangeSet, Wallet};
|
||||||
|
|
||||||
fn main() {
|
// Open or create a new file store for wallet data.
|
||||||
// Create a new file `Store`.
|
let mut db =
|
||||||
let db = bdk_file_store::Store::<ChangeSet>::open_or_create_new(b"magic_bytes", "path/to/my_wallet.db").expect("create store");
|
bdk_file_store::Store::<ChangeSet>::open_or_create_new(b"magic_bytes", "/tmp/my_wallet.db")
|
||||||
|
.expect("create store");
|
||||||
|
|
||||||
let descriptor = "wpkh(tprv8ZgxMBicQKsPdcAqYBpzAFwU5yxBUo88ggoBqu1qPcHUfSbKK1sKMLmC7EAk438btHQrSdu3jGGQa6PA71nvH5nkDexhLteJqkM4dQmWF9g/84'/1'/0'/0/*)";
|
// Create a wallet with initial wallet data read from the file store.
|
||||||
let mut wallet = Wallet::new_or_load(descriptor, None, db, Network::Testnet).expect("create or load wallet");
|
let network = Network::Testnet;
|
||||||
|
let descriptor = "wpkh(tprv8ZgxMBicQKsPdcAqYBpzAFwU5yxBUo88ggoBqu1qPcHUfSbKK1sKMLmC7EAk438btHQrSdu3jGGQa6PA71nvH5nkDexhLteJqkM4dQmWF9g/84'/1'/0'/0/*)";
|
||||||
|
let change_descriptor = "wpkh(tprv8ZgxMBicQKsPdcAqYBpzAFwU5yxBUo88ggoBqu1qPcHUfSbKK1sKMLmC7EAk438btHQrSdu3jGGQa6PA71nvH5nkDexhLteJqkM4dQmWF9g/84'/1'/0'/1/*)";
|
||||||
|
let wallet_opt = Wallet::load()
|
||||||
|
.descriptors(descriptor, change_descriptor)
|
||||||
|
.network(network)
|
||||||
|
.load_wallet(&mut db)
|
||||||
|
.expect("wallet");
|
||||||
|
let mut wallet = match wallet_opt {
|
||||||
|
Some(wallet) => wallet,
|
||||||
|
None => Wallet::create(descriptor, change_descriptor)
|
||||||
|
.network(network)
|
||||||
|
.create_wallet(&mut db)
|
||||||
|
.expect("wallet"),
|
||||||
|
};
|
||||||
|
|
||||||
// Insert a single `TxOut` at `OutPoint` into the wallet.
|
// Get a new address to receive bitcoin.
|
||||||
let _ = wallet.insert_txout(outpoint, txout);
|
let receive_address = wallet.reveal_next_address(KeychainKind::External);
|
||||||
wallet.commit().expect("must write to database");
|
// Persist staged wallet data changes to the file store.
|
||||||
}
|
wallet.persist(&mut db).expect("persist");
|
||||||
|
println!("Your new receive address is: {}", receive_address.address);
|
||||||
```
|
```
|
||||||
|
|
||||||
<!-- ### Sync the balance of a descriptor -->
|
<!-- ### Sync the balance of a descriptor -->
|
||||||
|
|
||||||
<!-- ```rust,no_run -->
|
<!-- ```rust,no_run -->
|
||||||
<!-- use bdk::Wallet; -->
|
<!-- use bdk_wallet::Wallet; -->
|
||||||
<!-- use bdk::blockchain::ElectrumBlockchain; -->
|
<!-- use bdk_wallet::blockchain::ElectrumBlockchain; -->
|
||||||
<!-- use bdk::SyncOptions; -->
|
<!-- use bdk_wallet::SyncOptions; -->
|
||||||
<!-- use bdk::electrum_client::Client; -->
|
<!-- use bdk_wallet::electrum_client::Client; -->
|
||||||
<!-- use bdk::bitcoin::Network; -->
|
<!-- use bdk_wallet::bitcoin::Network; -->
|
||||||
|
|
||||||
<!-- fn main() -> Result<(), bdk::Error> { -->
|
<!-- fn main() -> Result<(), bdk_wallet::Error> { -->
|
||||||
<!-- let blockchain = ElectrumBlockchain::from(Client::new("ssl://electrum.blockstream.info:60002")?); -->
|
<!-- let blockchain = ElectrumBlockchain::from(Client::new("ssl://electrum.blockstream.info:60002")?); -->
|
||||||
<!-- let wallet = Wallet::new( -->
|
<!-- let wallet = Wallet::new( -->
|
||||||
<!-- "wpkh([c258d2e4/84h/1h/0h]tpubDDYkZojQFQjht8Tm4jsS3iuEmKjTiEGjG6KnuFNKKJb5A6ZUCUZKdvLdSDWofKi4ToRCwb9poe1XdqfUnP4jaJjCB2Zwv11ZLgSbnZSNecE/0/*)", -->
|
<!-- "wpkh([c258d2e4/84h/1h/0h]tpubDDYkZojQFQjht8Tm4jsS3iuEmKjTiEGjG6KnuFNKKJb5A6ZUCUZKdvLdSDWofKi4ToRCwb9poe1XdqfUnP4jaJjCB2Zwv11ZLgSbnZSNecE/0/*)", -->
|
||||||
@@ -101,7 +117,7 @@ fn main() {
|
|||||||
|
|
||||||
<!-- wallet.sync(&blockchain, SyncOptions::default())?; -->
|
<!-- wallet.sync(&blockchain, SyncOptions::default())?; -->
|
||||||
|
|
||||||
<!-- println!("Descriptor balance: {} SAT", wallet.get_balance()?); -->
|
<!-- println!("Descriptor balance: {} SAT", wallet.balance()?); -->
|
||||||
|
|
||||||
<!-- Ok(()) -->
|
<!-- Ok(()) -->
|
||||||
<!-- } -->
|
<!-- } -->
|
||||||
@@ -109,12 +125,12 @@ fn main() {
|
|||||||
<!-- ### Generate a few addresses -->
|
<!-- ### Generate a few addresses -->
|
||||||
|
|
||||||
<!-- ```rust -->
|
<!-- ```rust -->
|
||||||
<!-- use bdk::Wallet; -->
|
<!-- use bdk_wallet::Wallet; -->
|
||||||
<!-- use bdk::wallet::AddressIndex::New; -->
|
<!-- use bdk_wallet::AddressIndex::New; -->
|
||||||
<!-- use bdk::bitcoin::Network; -->
|
<!-- use bdk_wallet::bitcoin::Network; -->
|
||||||
|
|
||||||
<!-- fn main() -> Result<(), bdk::Error> { -->
|
<!-- fn main() -> Result<(), bdk_wallet::Error> { -->
|
||||||
<!-- let wallet = Wallet::new_no_persist( -->
|
<!-- let wallet = Wallet::new( -->
|
||||||
<!-- "wpkh([c258d2e4/84h/1h/0h]tpubDDYkZojQFQjht8Tm4jsS3iuEmKjTiEGjG6KnuFNKKJb5A6ZUCUZKdvLdSDWofKi4ToRCwb9poe1XdqfUnP4jaJjCB2Zwv11ZLgSbnZSNecE/0/*)", -->
|
<!-- "wpkh([c258d2e4/84h/1h/0h]tpubDDYkZojQFQjht8Tm4jsS3iuEmKjTiEGjG6KnuFNKKJb5A6ZUCUZKdvLdSDWofKi4ToRCwb9poe1XdqfUnP4jaJjCB2Zwv11ZLgSbnZSNecE/0/*)", -->
|
||||||
<!-- Some("wpkh([c258d2e4/84h/1h/0h]tpubDDYkZojQFQjht8Tm4jsS3iuEmKjTiEGjG6KnuFNKKJb5A6ZUCUZKdvLdSDWofKi4ToRCwb9poe1XdqfUnP4jaJjCB2Zwv11ZLgSbnZSNecE/1/*)"), -->
|
<!-- Some("wpkh([c258d2e4/84h/1h/0h]tpubDDYkZojQFQjht8Tm4jsS3iuEmKjTiEGjG6KnuFNKKJb5A6ZUCUZKdvLdSDWofKi4ToRCwb9poe1XdqfUnP4jaJjCB2Zwv11ZLgSbnZSNecE/1/*)"), -->
|
||||||
<!-- Network::Testnet, -->
|
<!-- Network::Testnet, -->
|
||||||
@@ -131,19 +147,19 @@ fn main() {
|
|||||||
<!-- ### Create a transaction -->
|
<!-- ### Create a transaction -->
|
||||||
|
|
||||||
<!-- ```rust,no_run -->
|
<!-- ```rust,no_run -->
|
||||||
<!-- use bdk::{FeeRate, Wallet, SyncOptions}; -->
|
<!-- use bdk_wallet::{FeeRate, Wallet, SyncOptions}; -->
|
||||||
<!-- use bdk::blockchain::ElectrumBlockchain; -->
|
<!-- use bdk_wallet::blockchain::ElectrumBlockchain; -->
|
||||||
|
|
||||||
<!-- use bdk::electrum_client::Client; -->
|
<!-- use bdk_wallet::electrum_client::Client; -->
|
||||||
<!-- use bdk::wallet::AddressIndex::New; -->
|
<!-- use bdk_wallet::AddressIndex::New; -->
|
||||||
|
|
||||||
<!-- use bitcoin::base64; -->
|
<!-- use bitcoin::base64; -->
|
||||||
<!-- use bdk::bitcoin::consensus::serialize; -->
|
<!-- use bdk_wallet::bitcoin::consensus::serialize; -->
|
||||||
<!-- use bdk::bitcoin::Network; -->
|
<!-- use bdk_wallet::bitcoin::Network; -->
|
||||||
|
|
||||||
<!-- fn main() -> Result<(), bdk::Error> { -->
|
<!-- fn main() -> Result<(), bdk_wallet::Error> { -->
|
||||||
<!-- let blockchain = ElectrumBlockchain::from(Client::new("ssl://electrum.blockstream.info:60002")?); -->
|
<!-- let blockchain = ElectrumBlockchain::from(Client::new("ssl://electrum.blockstream.info:60002")?); -->
|
||||||
<!-- let wallet = Wallet::new_no_persist( -->
|
<!-- let wallet = Wallet::new( -->
|
||||||
<!-- "wpkh([c258d2e4/84h/1h/0h]tpubDDYkZojQFQjht8Tm4jsS3iuEmKjTiEGjG6KnuFNKKJb5A6ZUCUZKdvLdSDWofKi4ToRCwb9poe1XdqfUnP4jaJjCB2Zwv11ZLgSbnZSNecE/0/*)", -->
|
<!-- "wpkh([c258d2e4/84h/1h/0h]tpubDDYkZojQFQjht8Tm4jsS3iuEmKjTiEGjG6KnuFNKKJb5A6ZUCUZKdvLdSDWofKi4ToRCwb9poe1XdqfUnP4jaJjCB2Zwv11ZLgSbnZSNecE/0/*)", -->
|
||||||
<!-- Some("wpkh([c258d2e4/84h/1h/0h]tpubDDYkZojQFQjht8Tm4jsS3iuEmKjTiEGjG6KnuFNKKJb5A6ZUCUZKdvLdSDWofKi4ToRCwb9poe1XdqfUnP4jaJjCB2Zwv11ZLgSbnZSNecE/1/*)"), -->
|
<!-- Some("wpkh([c258d2e4/84h/1h/0h]tpubDDYkZojQFQjht8Tm4jsS3iuEmKjTiEGjG6KnuFNKKJb5A6ZUCUZKdvLdSDWofKi4ToRCwb9poe1XdqfUnP4jaJjCB2Zwv11ZLgSbnZSNecE/1/*)"), -->
|
||||||
<!-- Network::Testnet, -->
|
<!-- Network::Testnet, -->
|
||||||
@@ -172,14 +188,14 @@ fn main() {
|
|||||||
<!-- ### Sign a transaction -->
|
<!-- ### Sign a transaction -->
|
||||||
|
|
||||||
<!-- ```rust,no_run -->
|
<!-- ```rust,no_run -->
|
||||||
<!-- use bdk::{Wallet, SignOptions}; -->
|
<!-- use bdk_wallet::{Wallet, SignOptions}; -->
|
||||||
|
|
||||||
<!-- use bitcoin::base64; -->
|
<!-- use bitcoin::base64; -->
|
||||||
<!-- use bdk::bitcoin::consensus::deserialize; -->
|
<!-- use bdk_wallet::bitcoin::consensus::deserialize; -->
|
||||||
<!-- use bdk::bitcoin::Network; -->
|
<!-- use bdk_wallet::bitcoin::Network; -->
|
||||||
|
|
||||||
<!-- fn main() -> Result<(), bdk::Error> { -->
|
<!-- fn main() -> Result<(), bdk_wallet::Error> { -->
|
||||||
<!-- let wallet = Wallet::new_no_persist( -->
|
<!-- let wallet = Wallet::new( -->
|
||||||
<!-- "wpkh([c258d2e4/84h/1h/0h]tprv8griRPhA7342zfRyB6CqeKF8CJDXYu5pgnj1cjL1u2ngKcJha5jjTRimG82ABzJQ4MQe71CV54xfn25BbhCNfEGGJZnxvCDQCd6JkbvxW6h/0/*)", -->
|
<!-- "wpkh([c258d2e4/84h/1h/0h]tprv8griRPhA7342zfRyB6CqeKF8CJDXYu5pgnj1cjL1u2ngKcJha5jjTRimG82ABzJQ4MQe71CV54xfn25BbhCNfEGGJZnxvCDQCd6JkbvxW6h/0/*)", -->
|
||||||
<!-- Some("wpkh([c258d2e4/84h/1h/0h]tprv8griRPhA7342zfRyB6CqeKF8CJDXYu5pgnj1cjL1u2ngKcJha5jjTRimG82ABzJQ4MQe71CV54xfn25BbhCNfEGGJZnxvCDQCd6JkbvxW6h/1/*)"), -->
|
<!-- Some("wpkh([c258d2e4/84h/1h/0h]tprv8griRPhA7342zfRyB6CqeKF8CJDXYu5pgnj1cjL1u2ngKcJha5jjTRimG82ABzJQ4MQe71CV54xfn25BbhCNfEGGJZnxvCDQCd6JkbvxW6h/1/*)"), -->
|
||||||
<!-- Network::Testnet, -->
|
<!-- Network::Testnet, -->
|
||||||
@@ -202,7 +218,7 @@ fn main() {
|
|||||||
cargo test
|
cargo test
|
||||||
```
|
```
|
||||||
|
|
||||||
## License
|
# License
|
||||||
|
|
||||||
Licensed under either of
|
Licensed under either of
|
||||||
|
|
||||||
@@ -211,15 +227,14 @@ Licensed under either of
|
|||||||
|
|
||||||
at your option.
|
at your option.
|
||||||
|
|
||||||
### Contribution
|
# Contribution
|
||||||
|
|
||||||
Unless you explicitly state otherwise, any contribution intentionally
|
Unless you explicitly state otherwise, any contribution intentionally
|
||||||
submitted for inclusion in the work by you, as defined in the Apache-2.0
|
submitted for inclusion in the work by you, as defined in the Apache-2.0
|
||||||
license, shall be dual licensed as above, without any additional terms or
|
license, shall be dual licensed as above, without any additional terms or
|
||||||
conditions.
|
conditions.
|
||||||
|
|
||||||
[`Wallet`]: https://docs.rs/bdk/1.0.0-alpha.7/bdk/wallet/struct.Wallet.html
|
[`Wallet`]: https://docs.rs/bdk_wallet/latest/bdk_wallet/wallet/struct.Wallet.html
|
||||||
[`PersistBackend`]: https://docs.rs/bdk_persist/latest/bdk_persist/trait.PersistBackend.html
|
|
||||||
[`bdk_chain`]: https://docs.rs/bdk_chain/latest
|
[`bdk_chain`]: https://docs.rs/bdk_chain/latest
|
||||||
[`bdk_file_store`]: https://docs.rs/bdk_file_store/latest
|
[`bdk_file_store`]: https://docs.rs/bdk_file_store/latest
|
||||||
[`bdk_electrum`]: https://docs.rs/bdk_electrum/latest
|
[`bdk_electrum`]: https://docs.rs/bdk_electrum/latest
|
||||||
98
crates/wallet/examples/compiler.rs
Normal file
98
crates/wallet/examples/compiler.rs
Normal file
@@ -0,0 +1,98 @@
|
|||||||
|
// Bitcoin Dev Kit
|
||||||
|
// Written in 2020 by Alekos Filini <alekos.filini@gmail.com>
|
||||||
|
//
|
||||||
|
// Copyright (c) 2020-2021 Bitcoin Dev Kit Developers
|
||||||
|
//
|
||||||
|
// This file is licensed under the Apache License, Version 2.0 <LICENSE-APACHE
|
||||||
|
// or http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||||
|
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your option.
|
||||||
|
// You may not use this file except in accordance with one or both of these
|
||||||
|
// licenses.
|
||||||
|
|
||||||
|
extern crate bdk_wallet;
|
||||||
|
extern crate bitcoin;
|
||||||
|
extern crate miniscript;
|
||||||
|
extern crate serde_json;
|
||||||
|
|
||||||
|
use std::error::Error;
|
||||||
|
use std::str::FromStr;
|
||||||
|
|
||||||
|
use bitcoin::Network;
|
||||||
|
use miniscript::policy::Concrete;
|
||||||
|
use miniscript::Descriptor;
|
||||||
|
|
||||||
|
use bdk_wallet::{KeychainKind, Wallet};
|
||||||
|
|
||||||
|
/// Miniscript policy is a high level abstraction of spending conditions. Defined in the
|
||||||
|
/// rust-miniscript library here https://docs.rs/miniscript/7.0.0/miniscript/policy/index.html
|
||||||
|
/// rust-miniscript provides a `compile()` function that can be used to compile any miniscript policy
|
||||||
|
/// into a descriptor. This descriptor then in turn can be used in bdk a fully functioning wallet
|
||||||
|
/// can be derived from the policy.
|
||||||
|
///
|
||||||
|
/// This example demonstrates the interaction between a bdk wallet and miniscript policy.
|
||||||
|
|
||||||
|
fn main() -> Result<(), Box<dyn Error>> {
|
||||||
|
// We start with a miniscript policy string
|
||||||
|
let policy_str = "or(
|
||||||
|
10@thresh(4,
|
||||||
|
pk(029ffbe722b147f3035c87cb1c60b9a5947dd49c774cc31e94773478711a929ac0),pk(025f05815e3a1a8a83bfbb03ce016c9a2ee31066b98f567f6227df1d76ec4bd143),pk(025625f41e4a065efc06d5019cbbd56fe8c07595af1231e7cbc03fafb87ebb71ec),pk(02a27c8b850a00f67da3499b60562673dcf5fdfb82b7e17652a7ac54416812aefd),pk(03e618ec5f384d6e19ca9ebdb8e2119e5bef978285076828ce054e55c4daf473e2)
|
||||||
|
),1@and(
|
||||||
|
older(4209713),
|
||||||
|
thresh(2,
|
||||||
|
pk(03deae92101c790b12653231439f27b8897264125ecb2f46f48278603102573165),pk(033841045a531e1adf9910a6ec279589a90b3b8a904ee64ffd692bd08a8996c1aa),pk(02aebf2d10b040eb936a6f02f44ee82f8b34f5c1ccb20ff3949c2b28206b7c1068)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)"
|
||||||
|
.replace(&[' ', '\n', '\t'][..], "");
|
||||||
|
|
||||||
|
println!("Compiling policy: \n{}", policy_str);
|
||||||
|
|
||||||
|
// Parse the string as a [`Concrete`] type miniscript policy.
|
||||||
|
let policy = Concrete::<String>::from_str(&policy_str)?;
|
||||||
|
|
||||||
|
// Create a `wsh` type descriptor from the policy.
|
||||||
|
// `policy.compile()` returns the resulting miniscript from the policy.
|
||||||
|
let descriptor = Descriptor::new_wsh(policy.compile()?)?.to_string();
|
||||||
|
|
||||||
|
println!("Compiled into Descriptor: \n{}", descriptor);
|
||||||
|
|
||||||
|
// Do the same for another (internal) keychain
|
||||||
|
let policy_str = "or(
|
||||||
|
10@thresh(2,
|
||||||
|
pk(029ffbe722b147f3035c87cb1c60b9a5947dd49c774cc31e94773478711a929ac0),pk(025f05815e3a1a8a83bfbb03ce016c9a2ee31066b98f567f6227df1d76ec4bd143),pk(025625f41e4a065efc06d5019cbbd56fe8c07595af1231e7cbc03fafb87ebb71ec)
|
||||||
|
),1@and(
|
||||||
|
pk(03deae92101c790b12653231439f27b8897264125ecb2f46f48278603102573165),
|
||||||
|
older(12960)
|
||||||
|
)
|
||||||
|
)"
|
||||||
|
.replace(&[' ', '\n', '\t'][..], "");
|
||||||
|
|
||||||
|
println!("Compiling internal policy: \n{}", policy_str);
|
||||||
|
|
||||||
|
let policy = Concrete::<String>::from_str(&policy_str)?;
|
||||||
|
let internal_descriptor = Descriptor::new_wsh(policy.compile()?)?.to_string();
|
||||||
|
println!(
|
||||||
|
"Compiled into internal Descriptor: \n{}",
|
||||||
|
internal_descriptor
|
||||||
|
);
|
||||||
|
|
||||||
|
// Create a new wallet from descriptors
|
||||||
|
let mut wallet = Wallet::create(descriptor, internal_descriptor)
|
||||||
|
.network(Network::Regtest)
|
||||||
|
.create_wallet_no_persist()?;
|
||||||
|
|
||||||
|
println!(
|
||||||
|
"First derived address from the descriptor: \n{}",
|
||||||
|
wallet.next_unused_address(KeychainKind::External),
|
||||||
|
);
|
||||||
|
|
||||||
|
// BDK also has it's own `Policy` structure to represent the spending condition in a more
|
||||||
|
// human readable json format.
|
||||||
|
let spending_policy = wallet.policies(KeychainKind::External)?;
|
||||||
|
println!(
|
||||||
|
"The BDK spending policy: \n{}",
|
||||||
|
serde_json::to_string_pretty(&spending_policy)?
|
||||||
|
);
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
@@ -7,14 +7,14 @@
|
|||||||
// licenses.
|
// licenses.
|
||||||
|
|
||||||
use anyhow::anyhow;
|
use anyhow::anyhow;
|
||||||
use bdk::bitcoin::bip32::DerivationPath;
|
use bdk_wallet::bitcoin::bip32::DerivationPath;
|
||||||
use bdk::bitcoin::secp256k1::Secp256k1;
|
use bdk_wallet::bitcoin::secp256k1::Secp256k1;
|
||||||
use bdk::bitcoin::Network;
|
use bdk_wallet::bitcoin::Network;
|
||||||
use bdk::descriptor;
|
use bdk_wallet::descriptor;
|
||||||
use bdk::descriptor::IntoWalletDescriptor;
|
use bdk_wallet::descriptor::IntoWalletDescriptor;
|
||||||
use bdk::keys::bip39::{Language, Mnemonic, WordCount};
|
use bdk_wallet::keys::bip39::{Language, Mnemonic, WordCount};
|
||||||
use bdk::keys::{GeneratableKey, GeneratedKey};
|
use bdk_wallet::keys::{GeneratableKey, GeneratedKey};
|
||||||
use bdk::miniscript::Tap;
|
use bdk_wallet::miniscript::Tap;
|
||||||
use std::str::FromStr;
|
use std::str::FromStr;
|
||||||
|
|
||||||
/// This example demonstrates how to generate a mnemonic phrase
|
/// This example demonstrates how to generate a mnemonic phrase
|
||||||
@@ -9,14 +9,14 @@
|
|||||||
// You may not use this file except in accordance with one or both of these
|
// You may not use this file except in accordance with one or both of these
|
||||||
// licenses.
|
// licenses.
|
||||||
|
|
||||||
extern crate bdk;
|
extern crate bdk_wallet;
|
||||||
use std::error::Error;
|
use std::error::Error;
|
||||||
|
|
||||||
use bdk::bitcoin::Network;
|
use bdk_wallet::bitcoin::Network;
|
||||||
use bdk::descriptor::{policy::BuildSatisfaction, ExtractPolicy, IntoWalletDescriptor};
|
use bdk_wallet::descriptor::{policy::BuildSatisfaction, ExtractPolicy, IntoWalletDescriptor};
|
||||||
use bdk::wallet::signer::SignersContainer;
|
use bdk_wallet::signer::SignersContainer;
|
||||||
|
|
||||||
/// This example describes the use of the BDK's [`bdk::descriptor::policy`] module.
|
/// This example describes the use of the BDK's [`bdk_wallet::descriptor::policy`] module.
|
||||||
///
|
///
|
||||||
/// Policy is higher abstraction representation of the wallet descriptor spending condition.
|
/// Policy is higher abstraction representation of the wallet descriptor spending condition.
|
||||||
/// This is useful to express complex miniscript spending conditions into more human readable form.
|
/// This is useful to express complex miniscript spending conditions into more human readable form.
|
||||||
@@ -34,11 +34,11 @@ fn main() -> Result<(), Box<dyn Error>> {
|
|||||||
let desc = "wsh(multi(2,tprv8ZgxMBicQKsPdpkqS7Eair4YxjcuuvDPNYmKX3sCniCf16tHEVrjjiSXEkFRnUH77yXc6ZcwHHcLNfjdi5qUvw3VDfgYiH5mNsj5izuiu2N/1/*,tpubD6NzVbkrYhZ4XHndKkuB8FifXm8r5FQHwrN6oZuWCz13qb93rtgKvD4PQsqC4HP4yhV3tA2fqr2RbY5mNXfM7RxXUoeABoDtsFUq2zJq6YK/1/*))";
|
let desc = "wsh(multi(2,tprv8ZgxMBicQKsPdpkqS7Eair4YxjcuuvDPNYmKX3sCniCf16tHEVrjjiSXEkFRnUH77yXc6ZcwHHcLNfjdi5qUvw3VDfgYiH5mNsj5izuiu2N/1/*,tpubD6NzVbkrYhZ4XHndKkuB8FifXm8r5FQHwrN6oZuWCz13qb93rtgKvD4PQsqC4HP4yhV3tA2fqr2RbY5mNXfM7RxXUoeABoDtsFUq2zJq6YK/1/*))";
|
||||||
|
|
||||||
// Use the descriptor string to derive the full descriptor and a keymap.
|
// Use the descriptor string to derive the full descriptor and a keymap.
|
||||||
// The wallet descriptor can be used to create a new bdk::wallet.
|
// The wallet descriptor can be used to create a new bdk_wallet::wallet.
|
||||||
// While the `keymap` can be used to create a `SignerContainer`.
|
// While the `keymap` can be used to create a `SignerContainer`.
|
||||||
//
|
//
|
||||||
// The `SignerContainer` can sign for `PSBT`s.
|
// The `SignerContainer` can sign for `PSBT`s.
|
||||||
// a bdk::wallet internally uses these to handle transaction signing.
|
// a `bdk_wallet::Wallet` internally uses these to handle transaction signing.
|
||||||
// But they can be used as independent tools also.
|
// But they can be used as independent tools also.
|
||||||
let (wallet_desc, keymap) = desc.into_wallet_descriptor(&secp, Network::Testnet)?;
|
let (wallet_desc, keymap) = desc.into_wallet_descriptor(&secp, Network::Testnet)?;
|
||||||
|
|
||||||
@@ -423,7 +423,7 @@ macro_rules! apply_modifier {
|
|||||||
///
|
///
|
||||||
/// ```
|
/// ```
|
||||||
/// # use std::str::FromStr;
|
/// # use std::str::FromStr;
|
||||||
/// let (my_descriptor, my_keys_map, networks) = bdk::descriptor!(sh(wsh(and_v(v:pk("cVt4o7BGAig1UXywgGSmARhxMdzP5qvQsxKkSsc1XEkw3tDTQFpy"),older(50)))))?;
|
/// let (my_descriptor, my_keys_map, networks) = bdk_wallet::descriptor!(sh(wsh(and_v(v:pk("cVt4o7BGAig1UXywgGSmARhxMdzP5qvQsxKkSsc1XEkw3tDTQFpy"),older(50)))))?;
|
||||||
/// # Ok::<(), Box<dyn std::error::Error>>(())
|
/// # Ok::<(), Box<dyn std::error::Error>>(())
|
||||||
/// ```
|
/// ```
|
||||||
///
|
///
|
||||||
@@ -444,7 +444,7 @@ macro_rules! apply_modifier {
|
|||||||
/// bitcoin::PrivateKey::from_wif("cVt4o7BGAig1UXywgGSmARhxMdzP5qvQsxKkSsc1XEkw3tDTQFpy")?;
|
/// bitcoin::PrivateKey::from_wif("cVt4o7BGAig1UXywgGSmARhxMdzP5qvQsxKkSsc1XEkw3tDTQFpy")?;
|
||||||
/// let my_timelock = 50;
|
/// let my_timelock = 50;
|
||||||
///
|
///
|
||||||
/// let (descriptor_a, key_map_a, networks) = bdk::descriptor! {
|
/// let (descriptor_a, key_map_a, networks) = bdk_wallet::descriptor! {
|
||||||
/// wsh (
|
/// wsh (
|
||||||
/// thresh(2, pk(my_key_1), s:pk(my_key_2), s:n:d:v:older(my_timelock))
|
/// thresh(2, pk(my_key_1), s:pk(my_key_2), s:n:d:v:older(my_timelock))
|
||||||
/// )
|
/// )
|
||||||
@@ -452,11 +452,12 @@ macro_rules! apply_modifier {
|
|||||||
///
|
///
|
||||||
/// #[rustfmt::skip]
|
/// #[rustfmt::skip]
|
||||||
/// let b_items = vec![
|
/// let b_items = vec![
|
||||||
/// bdk::fragment!(pk(my_key_1))?,
|
/// bdk_wallet::fragment!(pk(my_key_1))?,
|
||||||
/// bdk::fragment!(s:pk(my_key_2))?,
|
/// bdk_wallet::fragment!(s:pk(my_key_2))?,
|
||||||
/// bdk::fragment!(s:n:d:v:older(my_timelock))?,
|
/// bdk_wallet::fragment!(s:n:d:v:older(my_timelock))?,
|
||||||
/// ];
|
/// ];
|
||||||
/// let (descriptor_b, mut key_map_b, networks) = bdk::descriptor!(wsh(thresh_vec(2, b_items)))?;
|
/// let (descriptor_b, mut key_map_b, networks) =
|
||||||
|
/// bdk_wallet::descriptor!(wsh(thresh_vec(2, b_items)))?;
|
||||||
///
|
///
|
||||||
/// assert_eq!(descriptor_a, descriptor_b);
|
/// assert_eq!(descriptor_a, descriptor_b);
|
||||||
/// assert_eq!(key_map_a.len(), key_map_b.len());
|
/// assert_eq!(key_map_a.len(), key_map_b.len());
|
||||||
@@ -475,7 +476,7 @@ macro_rules! apply_modifier {
|
|||||||
/// let my_key_2 =
|
/// let my_key_2 =
|
||||||
/// bitcoin::PrivateKey::from_wif("cVt4o7BGAig1UXywgGSmARhxMdzP5qvQsxKkSsc1XEkw3tDTQFpy")?;
|
/// bitcoin::PrivateKey::from_wif("cVt4o7BGAig1UXywgGSmARhxMdzP5qvQsxKkSsc1XEkw3tDTQFpy")?;
|
||||||
///
|
///
|
||||||
/// let (descriptor, key_map, networks) = bdk::descriptor! {
|
/// let (descriptor, key_map, networks) = bdk_wallet::descriptor! {
|
||||||
/// wsh (
|
/// wsh (
|
||||||
/// multi(2, my_key_1, my_key_2)
|
/// multi(2, my_key_1, my_key_2)
|
||||||
/// )
|
/// )
|
||||||
@@ -491,7 +492,7 @@ macro_rules! apply_modifier {
|
|||||||
/// let my_key =
|
/// let my_key =
|
||||||
/// bitcoin::PrivateKey::from_wif("cVt4o7BGAig1UXywgGSmARhxMdzP5qvQsxKkSsc1XEkw3tDTQFpy")?;
|
/// bitcoin::PrivateKey::from_wif("cVt4o7BGAig1UXywgGSmARhxMdzP5qvQsxKkSsc1XEkw3tDTQFpy")?;
|
||||||
///
|
///
|
||||||
/// let (descriptor, key_map, networks) = bdk::descriptor!(wpkh(my_key))?;
|
/// let (descriptor, key_map, networks) = bdk_wallet::descriptor!(wpkh(my_key))?;
|
||||||
/// # Ok::<(), Box<dyn std::error::Error>>(())
|
/// # Ok::<(), Box<dyn std::error::Error>>(())
|
||||||
/// ```
|
/// ```
|
||||||
///
|
///
|
||||||
@@ -702,10 +703,10 @@ macro_rules! fragment {
|
|||||||
$crate::keys::make_pkh($key, &secp)
|
$crate::keys::make_pkh($key, &secp)
|
||||||
});
|
});
|
||||||
( after ( $value:expr ) ) => ({
|
( after ( $value:expr ) ) => ({
|
||||||
$crate::impl_leaf_opcode_value!(After, $crate::miniscript::AbsLockTime::from_consensus($value))
|
$crate::impl_leaf_opcode_value!(After, $crate::miniscript::AbsLockTime::from_consensus($value).expect("valid `AbsLockTime`"))
|
||||||
});
|
});
|
||||||
( older ( $value:expr ) ) => ({
|
( older ( $value:expr ) ) => ({
|
||||||
$crate::impl_leaf_opcode_value!(Older, $crate::bitcoin::Sequence($value)) // TODO!!
|
$crate::impl_leaf_opcode_value!(Older, $crate::miniscript::RelLockTime::from_consensus($value).expect("valid `RelLockTime`")) // TODO!!
|
||||||
});
|
});
|
||||||
( sha256 ( $hash:expr ) ) => ({
|
( sha256 ( $hash:expr ) ) => ({
|
||||||
$crate::impl_leaf_opcode_value!(Sha256, $hash)
|
$crate::impl_leaf_opcode_value!(Sha256, $hash)
|
||||||
@@ -756,7 +757,8 @@ macro_rules! fragment {
|
|||||||
(keys_acc, net_acc)
|
(keys_acc, net_acc)
|
||||||
});
|
});
|
||||||
|
|
||||||
$crate::impl_leaf_opcode_value_two!(Thresh, $thresh, items)
|
let thresh = $crate::miniscript::Threshold::new($thresh, items).expect("valid threshold and pks collection");
|
||||||
|
$crate::impl_leaf_opcode_value!(Thresh, thresh)
|
||||||
.map(|(minisc, _, _)| (minisc, key_maps, valid_networks))
|
.map(|(minisc, _, _)| (minisc, key_maps, valid_networks))
|
||||||
});
|
});
|
||||||
( thresh ( $thresh:expr, $( $inner:tt )* ) ) => ({
|
( thresh ( $thresh:expr, $( $inner:tt )* ) ) => ({
|
||||||
@@ -768,7 +770,12 @@ macro_rules! fragment {
|
|||||||
( multi_vec ( $thresh:expr, $keys:expr ) ) => ({
|
( multi_vec ( $thresh:expr, $keys:expr ) ) => ({
|
||||||
let secp = $crate::bitcoin::secp256k1::Secp256k1::new();
|
let secp = $crate::bitcoin::secp256k1::Secp256k1::new();
|
||||||
|
|
||||||
$crate::keys::make_multi($thresh, $crate::miniscript::Terminal::Multi, $keys, &secp)
|
let fun = |k, pks| {
|
||||||
|
let thresh = $crate::miniscript::Threshold::new(k, pks).expect("valid threshold and pks collection");
|
||||||
|
$crate::miniscript::Terminal::Multi(thresh)
|
||||||
|
};
|
||||||
|
|
||||||
|
$crate::keys::make_multi($thresh, fun, $keys, &secp)
|
||||||
});
|
});
|
||||||
( multi ( $thresh:expr $(, $key:expr )+ ) ) => ({
|
( multi ( $thresh:expr $(, $key:expr )+ ) ) => ({
|
||||||
$crate::group_multi_keys!( $( $key ),* )
|
$crate::group_multi_keys!( $( $key ),* )
|
||||||
@@ -777,7 +784,12 @@ macro_rules! fragment {
|
|||||||
( multi_a_vec ( $thresh:expr, $keys:expr ) ) => ({
|
( multi_a_vec ( $thresh:expr, $keys:expr ) ) => ({
|
||||||
let secp = $crate::bitcoin::secp256k1::Secp256k1::new();
|
let secp = $crate::bitcoin::secp256k1::Secp256k1::new();
|
||||||
|
|
||||||
$crate::keys::make_multi($thresh, $crate::miniscript::Terminal::MultiA, $keys, &secp)
|
let fun = |k, pks| {
|
||||||
|
let thresh = $crate::miniscript::Threshold::new(k, pks).expect("valid threshold and pks collection");
|
||||||
|
$crate::miniscript::Terminal::MultiA(thresh)
|
||||||
|
};
|
||||||
|
|
||||||
|
$crate::keys::make_multi($thresh, fun, $keys, &secp)
|
||||||
});
|
});
|
||||||
( multi_a ( $thresh:expr $(, $key:expr )+ ) ) => ({
|
( multi_a ( $thresh:expr $(, $key:expr )+ ) ) => ({
|
||||||
$crate::group_multi_keys!( $( $key ),* )
|
$crate::group_multi_keys!( $( $key ),* )
|
||||||
@@ -13,7 +13,7 @@
|
|||||||
use core::fmt;
|
use core::fmt;
|
||||||
|
|
||||||
/// Errors related to the parsing and usage of descriptors
|
/// Errors related to the parsing and usage of descriptors
|
||||||
#[derive(Debug)]
|
#[derive(Debug, PartialEq)]
|
||||||
pub enum Error {
|
pub enum Error {
|
||||||
/// Invalid HD Key path, such as having a wildcard but a length != 1
|
/// Invalid HD Key path, such as having a wildcard but a length != 1
|
||||||
InvalidHdKeyPath,
|
InvalidHdKeyPath,
|
||||||
@@ -23,7 +23,6 @@ pub enum Error {
|
|||||||
HardenedDerivationXpub,
|
HardenedDerivationXpub,
|
||||||
/// The descriptor contains multipath keys
|
/// The descriptor contains multipath keys
|
||||||
MultiPath,
|
MultiPath,
|
||||||
|
|
||||||
/// Error thrown while working with [`keys`](crate::keys)
|
/// Error thrown while working with [`keys`](crate::keys)
|
||||||
Key(crate::keys::KeyError),
|
Key(crate::keys::KeyError),
|
||||||
/// Error while extracting and manipulating policies
|
/// Error while extracting and manipulating policies
|
||||||
@@ -37,11 +36,13 @@ pub enum Error {
|
|||||||
/// Error during base58 decoding
|
/// Error during base58 decoding
|
||||||
Base58(bitcoin::base58::Error),
|
Base58(bitcoin::base58::Error),
|
||||||
/// Key-related error
|
/// Key-related error
|
||||||
Pk(bitcoin::key::Error),
|
Pk(bitcoin::key::ParsePublicKeyError),
|
||||||
/// Miniscript error
|
/// Miniscript error
|
||||||
Miniscript(miniscript::Error),
|
Miniscript(miniscript::Error),
|
||||||
/// Hex decoding error
|
/// Hex decoding error
|
||||||
Hex(bitcoin::hex::HexToBytesError),
|
Hex(bitcoin::hex::HexToBytesError),
|
||||||
|
/// The provided wallet descriptors are identical
|
||||||
|
ExternalAndInternalAreTheSame,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl From<crate::keys::KeyError> for Error {
|
impl From<crate::keys::KeyError> for Error {
|
||||||
@@ -79,6 +80,9 @@ impl fmt::Display for Error {
|
|||||||
Self::Pk(err) => write!(f, "Key-related error: {}", err),
|
Self::Pk(err) => write!(f, "Key-related error: {}", err),
|
||||||
Self::Miniscript(err) => write!(f, "Miniscript error: {}", err),
|
Self::Miniscript(err) => write!(f, "Miniscript error: {}", err),
|
||||||
Self::Hex(err) => write!(f, "Hex decoding error: {}", err),
|
Self::Hex(err) => write!(f, "Hex decoding error: {}", err),
|
||||||
|
Self::ExternalAndInternalAreTheSame => {
|
||||||
|
write!(f, "External and internal descriptors are the same")
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -98,8 +102,8 @@ impl From<bitcoin::base58::Error> for Error {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl From<bitcoin::key::Error> for Error {
|
impl From<bitcoin::key::ParsePublicKeyError> for Error {
|
||||||
fn from(err: bitcoin::key::Error) -> Self {
|
fn from(err: bitcoin::key::ParsePublicKeyError) -> Self {
|
||||||
Error::Pk(err)
|
Error::Pk(err)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -112,6 +112,16 @@ impl IntoWalletDescriptor for &String {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl IntoWalletDescriptor for String {
|
||||||
|
fn into_wallet_descriptor(
|
||||||
|
self,
|
||||||
|
secp: &SecpCtx,
|
||||||
|
network: Network,
|
||||||
|
) -> Result<(ExtendedDescriptor, KeyMap), DescriptorError> {
|
||||||
|
self.as_str().into_wallet_descriptor(secp, network)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
impl IntoWalletDescriptor for ExtendedDescriptor {
|
impl IntoWalletDescriptor for ExtendedDescriptor {
|
||||||
fn into_wallet_descriptor(
|
fn into_wallet_descriptor(
|
||||||
self,
|
self,
|
||||||
@@ -229,7 +239,7 @@ impl IntoWalletDescriptor for DescriptorTemplateOut {
|
|||||||
let pk = match pk {
|
let pk = match pk {
|
||||||
DescriptorPublicKey::XPub(ref xpub) => {
|
DescriptorPublicKey::XPub(ref xpub) => {
|
||||||
let mut xpub = xpub.clone();
|
let mut xpub = xpub.clone();
|
||||||
xpub.xkey.network = self.network;
|
xpub.xkey.network = self.network.into();
|
||||||
|
|
||||||
DescriptorPublicKey::XPub(xpub)
|
DescriptorPublicKey::XPub(xpub)
|
||||||
}
|
}
|
||||||
@@ -264,11 +274,11 @@ impl IntoWalletDescriptor for DescriptorTemplateOut {
|
|||||||
.map(|(mut k, mut v)| {
|
.map(|(mut k, mut v)| {
|
||||||
match (&mut k, &mut v) {
|
match (&mut k, &mut v) {
|
||||||
(DescriptorPublicKey::XPub(xpub), DescriptorSecretKey::XPrv(xprv)) => {
|
(DescriptorPublicKey::XPub(xpub), DescriptorSecretKey::XPrv(xprv)) => {
|
||||||
xpub.xkey.network = network;
|
xpub.xkey.network = network.into();
|
||||||
xprv.xkey.network = network;
|
xprv.xkey.network = network.into();
|
||||||
}
|
}
|
||||||
(_, DescriptorSecretKey::Single(key)) => {
|
(_, DescriptorSecretKey::Single(key)) => {
|
||||||
key.key.network = network;
|
key.key.network = network.into();
|
||||||
}
|
}
|
||||||
_ => {}
|
_ => {}
|
||||||
}
|
}
|
||||||
@@ -281,15 +291,10 @@ impl IntoWalletDescriptor for DescriptorTemplateOut {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Wrapper for `IntoWalletDescriptor` that performs additional checks on the keys contained in the
|
/// Extra checks for [`ExtendedDescriptor`].
|
||||||
/// descriptor
|
pub(crate) fn check_wallet_descriptor(
|
||||||
pub(crate) fn into_wallet_descriptor_checked<T: IntoWalletDescriptor>(
|
descriptor: &Descriptor<DescriptorPublicKey>,
|
||||||
inner: T,
|
) -> Result<(), DescriptorError> {
|
||||||
secp: &SecpCtx,
|
|
||||||
network: Network,
|
|
||||||
) -> Result<(ExtendedDescriptor, KeyMap), DescriptorError> {
|
|
||||||
let (descriptor, keymap) = inner.into_wallet_descriptor(secp, network)?;
|
|
||||||
|
|
||||||
// Ensure the keys don't contain any hardened derivation steps or hardened wildcards
|
// Ensure the keys don't contain any hardened derivation steps or hardened wildcards
|
||||||
let descriptor_contains_hardened_steps = descriptor.for_any_key(|k| {
|
let descriptor_contains_hardened_steps = descriptor.for_any_key(|k| {
|
||||||
if let DescriptorPublicKey::XPub(DescriptorXKey {
|
if let DescriptorPublicKey::XPub(DescriptorXKey {
|
||||||
@@ -316,7 +321,7 @@ pub(crate) fn into_wallet_descriptor_checked<T: IntoWalletDescriptor>(
|
|||||||
// issues
|
// issues
|
||||||
descriptor.sanity_check()?;
|
descriptor.sanity_check()?;
|
||||||
|
|
||||||
Ok((descriptor, keymap))
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
#[doc(hidden)]
|
#[doc(hidden)]
|
||||||
@@ -606,8 +611,8 @@ mod test {
|
|||||||
use assert_matches::assert_matches;
|
use assert_matches::assert_matches;
|
||||||
use bitcoin::hex::FromHex;
|
use bitcoin::hex::FromHex;
|
||||||
use bitcoin::secp256k1::Secp256k1;
|
use bitcoin::secp256k1::Secp256k1;
|
||||||
use bitcoin::ScriptBuf;
|
|
||||||
use bitcoin::{bip32, Psbt};
|
use bitcoin::{bip32, Psbt};
|
||||||
|
use bitcoin::{NetworkKind, ScriptBuf};
|
||||||
|
|
||||||
use super::*;
|
use super::*;
|
||||||
use crate::psbt::PsbtUtils;
|
use crate::psbt::PsbtUtils;
|
||||||
@@ -743,7 +748,7 @@ mod test {
|
|||||||
.unwrap();
|
.unwrap();
|
||||||
|
|
||||||
let mut xprv_testnet = xprv;
|
let mut xprv_testnet = xprv;
|
||||||
xprv_testnet.network = Network::Testnet;
|
xprv_testnet.network = NetworkKind::Test;
|
||||||
|
|
||||||
let xpub_testnet = bip32::Xpub::from_priv(&secp, &xprv_testnet);
|
let xpub_testnet = bip32::Xpub::from_priv(&secp, &xprv_testnet);
|
||||||
let desc_pubkey = DescriptorPublicKey::XPub(DescriptorXKey {
|
let desc_pubkey = DescriptorPublicKey::XPub(DescriptorXKey {
|
||||||
@@ -855,22 +860,31 @@ mod test {
|
|||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_into_wallet_descriptor_checked() {
|
fn test_check_wallet_descriptor() {
|
||||||
let secp = Secp256k1::new();
|
let secp = Secp256k1::new();
|
||||||
|
|
||||||
let descriptor = "wpkh(tpubD6NzVbkrYhZ4XHndKkuB8FifXm8r5FQHwrN6oZuWCz13qb93rtgKvD4PQsqC4HP4yhV3tA2fqr2RbY5mNXfM7RxXUoeABoDtsFUq2zJq6YK/0'/1/2/*)";
|
let descriptor = "wpkh(tpubD6NzVbkrYhZ4XHndKkuB8FifXm8r5FQHwrN6oZuWCz13qb93rtgKvD4PQsqC4HP4yhV3tA2fqr2RbY5mNXfM7RxXUoeABoDtsFUq2zJq6YK/0'/1/2/*)";
|
||||||
let result = into_wallet_descriptor_checked(descriptor, &secp, Network::Testnet);
|
let (descriptor, _) = descriptor
|
||||||
|
.into_wallet_descriptor(&secp, Network::Testnet)
|
||||||
|
.expect("must parse");
|
||||||
|
let result = check_wallet_descriptor(&descriptor);
|
||||||
|
|
||||||
assert_matches!(result, Err(DescriptorError::HardenedDerivationXpub));
|
assert_matches!(result, Err(DescriptorError::HardenedDerivationXpub));
|
||||||
|
|
||||||
let descriptor = "wpkh(tpubD6NzVbkrYhZ4XHndKkuB8FifXm8r5FQHwrN6oZuWCz13qb93rtgKvD4PQsqC4HP4yhV3tA2fqr2RbY5mNXfM7RxXUoeABoDtsFUq2zJq6YK/<0;1>/*)";
|
let descriptor = "wpkh(tpubD6NzVbkrYhZ4XHndKkuB8FifXm8r5FQHwrN6oZuWCz13qb93rtgKvD4PQsqC4HP4yhV3tA2fqr2RbY5mNXfM7RxXUoeABoDtsFUq2zJq6YK/<0;1>/*)";
|
||||||
let result = into_wallet_descriptor_checked(descriptor, &secp, Network::Testnet);
|
let (descriptor, _) = descriptor
|
||||||
|
.into_wallet_descriptor(&secp, Network::Testnet)
|
||||||
|
.expect("must parse");
|
||||||
|
let result = check_wallet_descriptor(&descriptor);
|
||||||
|
|
||||||
assert_matches!(result, Err(DescriptorError::MultiPath));
|
assert_matches!(result, Err(DescriptorError::MultiPath));
|
||||||
|
|
||||||
// repeated pubkeys
|
// repeated pubkeys
|
||||||
let descriptor = "wsh(multi(2,tpubD6NzVbkrYhZ4XHndKkuB8FifXm8r5FQHwrN6oZuWCz13qb93rtgKvD4PQsqC4HP4yhV3tA2fqr2RbY5mNXfM7RxXUoeABoDtsFUq2zJq6YK/0/*,tpubD6NzVbkrYhZ4XHndKkuB8FifXm8r5FQHwrN6oZuWCz13qb93rtgKvD4PQsqC4HP4yhV3tA2fqr2RbY5mNXfM7RxXUoeABoDtsFUq2zJq6YK/0/*))";
|
let descriptor = "wsh(multi(2,tpubD6NzVbkrYhZ4XHndKkuB8FifXm8r5FQHwrN6oZuWCz13qb93rtgKvD4PQsqC4HP4yhV3tA2fqr2RbY5mNXfM7RxXUoeABoDtsFUq2zJq6YK/0/*,tpubD6NzVbkrYhZ4XHndKkuB8FifXm8r5FQHwrN6oZuWCz13qb93rtgKvD4PQsqC4HP4yhV3tA2fqr2RbY5mNXfM7RxXUoeABoDtsFUq2zJq6YK/0/*))";
|
||||||
let result = into_wallet_descriptor_checked(descriptor, &secp, Network::Testnet);
|
let (descriptor, _) = descriptor
|
||||||
|
.into_wallet_descriptor(&secp, Network::Testnet)
|
||||||
|
.expect("must parse");
|
||||||
|
let result = check_wallet_descriptor(&descriptor);
|
||||||
|
|
||||||
assert!(result.is_err());
|
assert!(result.is_err());
|
||||||
}
|
}
|
||||||
@@ -882,8 +896,10 @@ mod test {
|
|||||||
let secp = Secp256k1::new();
|
let secp = Secp256k1::new();
|
||||||
|
|
||||||
let descriptor = "sh(wsh(sortedmulti(3,tpubDEsqS36T4DVsKJd9UH8pAKzrkGBYPLEt9jZMwpKtzh1G6mgYehfHt9WCgk7MJG5QGSFWf176KaBNoXbcuFcuadAFKxDpUdMDKGBha7bY3QM/0/*,tpubDF3cpwfs7fMvXXuoQbohXtLjNM6ehwYT287LWtmLsd4r77YLg6MZg4vTETx5MSJ2zkfigbYWu31VA2Z2Vc1cZugCYXgS7FQu6pE8V6TriEH/0/*,tpubDE1SKfcW76Tb2AASv5bQWMuScYNAdoqLHoexw13sNDXwmUhQDBbCD3QAedKGLhxMrWQdMDKENzYtnXPDRvexQPNuDrLj52wAjHhNEm8sJ4p/0/*,tpubDFLc6oXwJmhm3FGGzXkfJNTh2KitoY3WhmmQvuAjMhD8YbyWn5mAqckbxXfm2etM3p5J6JoTpSrMqRSTfMLtNW46poDaEZJ1kjd3csRSjwH/0/*,tpubDEWD9NBeWP59xXmdqSNt4VYdtTGwbpyP8WS962BuqpQeMZmX9Pur14dhXdZT5a7wR1pK6dPtZ9fP5WR493hPzemnBvkfLLYxnUjAKj1JCQV/0/*,tpubDEHyZkkwd7gZWCTgQuYQ9C4myF2hMEmyHsBCCmLssGqoqUxeT3gzohF5uEVURkf9TtmeepJgkSUmteac38FwZqirjApzNX59XSHLcwaTZCH/0/*,tpubDEqLouCekwnMUWN486kxGzD44qVgeyuqHyxUypNEiQt5RnUZNJe386TKPK99fqRV1vRkZjYAjtXGTECz98MCsdLcnkM67U6KdYRzVubeCgZ/0/*)))";
|
let descriptor = "sh(wsh(sortedmulti(3,tpubDEsqS36T4DVsKJd9UH8pAKzrkGBYPLEt9jZMwpKtzh1G6mgYehfHt9WCgk7MJG5QGSFWf176KaBNoXbcuFcuadAFKxDpUdMDKGBha7bY3QM/0/*,tpubDF3cpwfs7fMvXXuoQbohXtLjNM6ehwYT287LWtmLsd4r77YLg6MZg4vTETx5MSJ2zkfigbYWu31VA2Z2Vc1cZugCYXgS7FQu6pE8V6TriEH/0/*,tpubDE1SKfcW76Tb2AASv5bQWMuScYNAdoqLHoexw13sNDXwmUhQDBbCD3QAedKGLhxMrWQdMDKENzYtnXPDRvexQPNuDrLj52wAjHhNEm8sJ4p/0/*,tpubDFLc6oXwJmhm3FGGzXkfJNTh2KitoY3WhmmQvuAjMhD8YbyWn5mAqckbxXfm2etM3p5J6JoTpSrMqRSTfMLtNW46poDaEZJ1kjd3csRSjwH/0/*,tpubDEWD9NBeWP59xXmdqSNt4VYdtTGwbpyP8WS962BuqpQeMZmX9Pur14dhXdZT5a7wR1pK6dPtZ9fP5WR493hPzemnBvkfLLYxnUjAKj1JCQV/0/*,tpubDEHyZkkwd7gZWCTgQuYQ9C4myF2hMEmyHsBCCmLssGqoqUxeT3gzohF5uEVURkf9TtmeepJgkSUmteac38FwZqirjApzNX59XSHLcwaTZCH/0/*,tpubDEqLouCekwnMUWN486kxGzD44qVgeyuqHyxUypNEiQt5RnUZNJe386TKPK99fqRV1vRkZjYAjtXGTECz98MCsdLcnkM67U6KdYRzVubeCgZ/0/*)))";
|
||||||
let (descriptor, _) =
|
let (descriptor, _) = descriptor
|
||||||
into_wallet_descriptor_checked(descriptor, &secp, Network::Testnet).unwrap();
|
.into_wallet_descriptor(&secp, Network::Testnet)
|
||||||
|
.unwrap();
|
||||||
|
check_wallet_descriptor(&descriptor).expect("descriptor");
|
||||||
|
|
||||||
let descriptor = descriptor.at_derivation_index(0).unwrap();
|
let descriptor = descriptor.at_derivation_index(0).unwrap();
|
||||||
|
|
||||||
@@ -20,10 +20,10 @@
|
|||||||
//!
|
//!
|
||||||
//! ```
|
//! ```
|
||||||
//! # use std::sync::Arc;
|
//! # use std::sync::Arc;
|
||||||
//! # use bdk::descriptor::*;
|
//! # use bdk_wallet::descriptor::*;
|
||||||
//! # use bdk::wallet::signer::*;
|
//! # use bdk_wallet::signer::*;
|
||||||
//! # use bdk::bitcoin::secp256k1::Secp256k1;
|
//! # use bdk_wallet::bitcoin::secp256k1::Secp256k1;
|
||||||
//! use bdk::descriptor::policy::BuildSatisfaction;
|
//! use bdk_wallet::descriptor::policy::BuildSatisfaction;
|
||||||
//! let secp = Secp256k1::new();
|
//! let secp = Secp256k1::new();
|
||||||
//! let desc = "wsh(and_v(v:pk(cV3oCth6zxZ1UVsHLnGothsWNsaoxRhC6aeNi5VbSdFpwUkgkEci),or_d(pk(cVMTy7uebJgvFaSBwcgvwk8qn8xSLc97dKow4MBetjrrahZoimm2),older(12960))))";
|
//! let desc = "wsh(and_v(v:pk(cV3oCth6zxZ1UVsHLnGothsWNsaoxRhC6aeNi5VbSdFpwUkgkEci),or_d(pk(cVMTy7uebJgvFaSBwcgvwk8qn8xSLc97dKow4MBetjrrahZoimm2),older(12960))))";
|
||||||
//!
|
//!
|
||||||
@@ -40,6 +40,7 @@ use crate::collections::{BTreeMap, HashSet, VecDeque};
|
|||||||
use alloc::string::String;
|
use alloc::string::String;
|
||||||
use alloc::vec::Vec;
|
use alloc::vec::Vec;
|
||||||
use core::cmp::max;
|
use core::cmp::max;
|
||||||
|
use miniscript::miniscript::limits::{MAX_PUBKEYS_IN_CHECKSIGADD, MAX_PUBKEYS_PER_MULTISIG};
|
||||||
|
|
||||||
use core::fmt;
|
use core::fmt;
|
||||||
|
|
||||||
@@ -48,12 +49,12 @@ use serde::{Serialize, Serializer};
|
|||||||
|
|
||||||
use bitcoin::bip32::Fingerprint;
|
use bitcoin::bip32::Fingerprint;
|
||||||
use bitcoin::hashes::{hash160, ripemd160, sha256};
|
use bitcoin::hashes::{hash160, ripemd160, sha256};
|
||||||
use bitcoin::{absolute, key::XOnlyPublicKey, PublicKey, Sequence};
|
use bitcoin::{absolute, key::XOnlyPublicKey, relative, PublicKey, Sequence};
|
||||||
|
|
||||||
use miniscript::descriptor::{
|
use miniscript::descriptor::{
|
||||||
DescriptorPublicKey, ShInner, SinglePub, SinglePubKey, SortedMultiVec, WshInner,
|
DescriptorPublicKey, ShInner, SinglePub, SinglePubKey, SortedMultiVec, WshInner,
|
||||||
};
|
};
|
||||||
use miniscript::hash256;
|
use miniscript::{hash256, Threshold};
|
||||||
use miniscript::{
|
use miniscript::{
|
||||||
Descriptor, Miniscript, Satisfier, ScriptContext, SigType, Terminal, ToPublicKey,
|
Descriptor, Miniscript, Satisfier, ScriptContext, SigType, Terminal, ToPublicKey,
|
||||||
};
|
};
|
||||||
@@ -137,7 +138,7 @@ pub enum SatisfiableItem {
|
|||||||
/// Relative timelock locktime
|
/// Relative timelock locktime
|
||||||
RelativeTimelock {
|
RelativeTimelock {
|
||||||
/// The timelock value
|
/// The timelock value
|
||||||
value: Sequence,
|
value: relative::LockTime,
|
||||||
},
|
},
|
||||||
/// Multi-signature public keys with threshold count
|
/// Multi-signature public keys with threshold count
|
||||||
Multisig {
|
Multisig {
|
||||||
@@ -586,30 +587,25 @@ impl Policy {
|
|||||||
Ok(Some(policy))
|
Ok(Some(policy))
|
||||||
}
|
}
|
||||||
|
|
||||||
fn make_multisig<Ctx: ScriptContext + 'static>(
|
fn make_multi<Ctx: ScriptContext + 'static, const MAX: usize>(
|
||||||
keys: &[DescriptorPublicKey],
|
threshold: &Threshold<DescriptorPublicKey, MAX>,
|
||||||
signers: &SignersContainer,
|
signers: &SignersContainer,
|
||||||
build_sat: BuildSatisfaction,
|
build_sat: BuildSatisfaction,
|
||||||
threshold: usize,
|
|
||||||
sorted: bool,
|
sorted: bool,
|
||||||
secp: &SecpCtx,
|
secp: &SecpCtx,
|
||||||
) -> Result<Option<Policy>, PolicyError> {
|
) -> Result<Option<Policy>, PolicyError> {
|
||||||
if threshold == 0 {
|
let parsed_keys = threshold.iter().map(|k| PkOrF::from_key(k, secp)).collect();
|
||||||
return Ok(None);
|
|
||||||
}
|
|
||||||
|
|
||||||
let parsed_keys = keys.iter().map(|k| PkOrF::from_key(k, secp)).collect();
|
|
||||||
|
|
||||||
let mut contribution = Satisfaction::Partial {
|
let mut contribution = Satisfaction::Partial {
|
||||||
n: keys.len(),
|
n: threshold.n(),
|
||||||
m: threshold,
|
m: threshold.k(),
|
||||||
items: vec![],
|
items: vec![],
|
||||||
conditions: Default::default(),
|
conditions: Default::default(),
|
||||||
sorted: Some(sorted),
|
sorted: Some(sorted),
|
||||||
};
|
};
|
||||||
let mut satisfaction = contribution.clone();
|
let mut satisfaction = contribution.clone();
|
||||||
|
|
||||||
for (index, key) in keys.iter().enumerate() {
|
for (index, key) in threshold.iter().enumerate() {
|
||||||
if signers.find(signer_id(key, secp)).is_some() {
|
if signers.find(signer_id(key, secp)).is_some() {
|
||||||
contribution.add(
|
contribution.add(
|
||||||
&Satisfaction::Complete {
|
&Satisfaction::Complete {
|
||||||
@@ -618,7 +614,6 @@ impl Policy {
|
|||||||
index,
|
index,
|
||||||
)?;
|
)?;
|
||||||
}
|
}
|
||||||
|
|
||||||
if let Some(psbt) = build_sat.psbt() {
|
if let Some(psbt) = build_sat.psbt() {
|
||||||
if Ctx::find_signature(psbt, key, secp) {
|
if Ctx::find_signature(psbt, key, secp) {
|
||||||
satisfaction.add(
|
satisfaction.add(
|
||||||
@@ -635,12 +630,11 @@ impl Policy {
|
|||||||
|
|
||||||
let mut policy: Policy = SatisfiableItem::Multisig {
|
let mut policy: Policy = SatisfiableItem::Multisig {
|
||||||
keys: parsed_keys,
|
keys: parsed_keys,
|
||||||
threshold,
|
threshold: threshold.k(),
|
||||||
}
|
}
|
||||||
.into();
|
.into();
|
||||||
policy.contribution = contribution;
|
policy.contribution = contribution;
|
||||||
policy.satisfaction = satisfaction;
|
policy.satisfaction = satisfaction;
|
||||||
|
|
||||||
Ok(Some(policy))
|
Ok(Some(policy))
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -725,7 +719,7 @@ impl Policy {
|
|||||||
timelock: Some(*value),
|
timelock: Some(*value),
|
||||||
}),
|
}),
|
||||||
SatisfiableItem::RelativeTimelock { value } => Ok(Condition {
|
SatisfiableItem::RelativeTimelock { value } => Ok(Condition {
|
||||||
csv: Some(*value),
|
csv: Some((*value).into()),
|
||||||
timelock: None,
|
timelock: None,
|
||||||
}),
|
}),
|
||||||
_ => Ok(Condition::default()),
|
_ => Ok(Condition::default()),
|
||||||
@@ -952,11 +946,14 @@ impl<Ctx: ScriptContext + 'static> ExtractPolicy for Miniscript<DescriptorPublic
|
|||||||
Some(policy)
|
Some(policy)
|
||||||
}
|
}
|
||||||
Terminal::Older(value) => {
|
Terminal::Older(value) => {
|
||||||
let mut policy: Policy = SatisfiableItem::RelativeTimelock { value: *value }.into();
|
let mut policy: Policy = SatisfiableItem::RelativeTimelock {
|
||||||
|
value: (*value).into(),
|
||||||
|
}
|
||||||
|
.into();
|
||||||
policy.contribution = Satisfaction::Complete {
|
policy.contribution = Satisfaction::Complete {
|
||||||
condition: Condition {
|
condition: Condition {
|
||||||
timelock: None,
|
timelock: None,
|
||||||
csv: Some(*value),
|
csv: Some((*value).into()),
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
if let BuildSatisfaction::PsbtTimelocks {
|
if let BuildSatisfaction::PsbtTimelocks {
|
||||||
@@ -966,9 +963,11 @@ impl<Ctx: ScriptContext + 'static> ExtractPolicy for Miniscript<DescriptorPublic
|
|||||||
} = build_sat
|
} = build_sat
|
||||||
{
|
{
|
||||||
let older = Older::new(Some(current_height), Some(input_max_height), false);
|
let older = Older::new(Some(current_height), Some(input_max_height), false);
|
||||||
let older_sat = Satisfier::<bitcoin::PublicKey>::check_older(&older, *value);
|
let older_sat =
|
||||||
let inputs_sat = psbt_inputs_sat(psbt)
|
Satisfier::<bitcoin::PublicKey>::check_older(&older, (*value).into());
|
||||||
.all(|sat| Satisfier::<bitcoin::PublicKey>::check_older(&sat, *value));
|
let inputs_sat = psbt_inputs_sat(psbt).all(|sat| {
|
||||||
|
Satisfier::<bitcoin::PublicKey>::check_older(&sat, (*value).into())
|
||||||
|
});
|
||||||
if older_sat && inputs_sat {
|
if older_sat && inputs_sat {
|
||||||
policy.satisfaction = policy.contribution.clone();
|
policy.satisfaction = policy.contribution.clone();
|
||||||
}
|
}
|
||||||
@@ -986,9 +985,12 @@ impl<Ctx: ScriptContext + 'static> ExtractPolicy for Miniscript<DescriptorPublic
|
|||||||
Terminal::Hash160(hash) => {
|
Terminal::Hash160(hash) => {
|
||||||
Some(SatisfiableItem::Hash160Preimage { hash: *hash }.into())
|
Some(SatisfiableItem::Hash160Preimage { hash: *hash }.into())
|
||||||
}
|
}
|
||||||
Terminal::Multi(k, pks) | Terminal::MultiA(k, pks) => {
|
Terminal::Multi(threshold) => Policy::make_multi::<Ctx, MAX_PUBKEYS_PER_MULTISIG>(
|
||||||
Policy::make_multisig::<Ctx>(pks, signers, build_sat, *k, false, secp)?
|
threshold, signers, build_sat, false, secp,
|
||||||
}
|
)?,
|
||||||
|
Terminal::MultiA(threshold) => Policy::make_multi::<Ctx, MAX_PUBKEYS_IN_CHECKSIGADD>(
|
||||||
|
threshold, signers, build_sat, false, secp,
|
||||||
|
)?,
|
||||||
// Identities
|
// Identities
|
||||||
Terminal::Alt(inner)
|
Terminal::Alt(inner)
|
||||||
| Terminal::Swap(inner)
|
| Terminal::Swap(inner)
|
||||||
@@ -1016,8 +1018,9 @@ impl<Ctx: ScriptContext + 'static> ExtractPolicy for Miniscript<DescriptorPublic
|
|||||||
a.extract_policy(signers, build_sat, secp)?,
|
a.extract_policy(signers, build_sat, secp)?,
|
||||||
b.extract_policy(signers, build_sat, secp)?,
|
b.extract_policy(signers, build_sat, secp)?,
|
||||||
)?,
|
)?,
|
||||||
Terminal::Thresh(k, nodes) => {
|
Terminal::Thresh(threshold) => {
|
||||||
let mut threshold = *k;
|
let mut k = threshold.k();
|
||||||
|
let nodes = threshold.data();
|
||||||
let mapped: Vec<_> = nodes
|
let mapped: Vec<_> = nodes
|
||||||
.iter()
|
.iter()
|
||||||
.map(|n| n.extract_policy(signers, build_sat, secp))
|
.map(|n| n.extract_policy(signers, build_sat, secp))
|
||||||
@@ -1027,13 +1030,13 @@ impl<Ctx: ScriptContext + 'static> ExtractPolicy for Miniscript<DescriptorPublic
|
|||||||
.collect();
|
.collect();
|
||||||
|
|
||||||
if mapped.len() < nodes.len() {
|
if mapped.len() < nodes.len() {
|
||||||
threshold = match threshold.checked_sub(nodes.len() - mapped.len()) {
|
k = match k.checked_sub(nodes.len() - mapped.len()) {
|
||||||
None => return Ok(None),
|
None => return Ok(None),
|
||||||
Some(x) => x,
|
Some(x) => x,
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
Policy::make_thresh(mapped, threshold)?
|
Policy::make_thresh(mapped, k)?
|
||||||
}
|
}
|
||||||
|
|
||||||
// Unsupported
|
// Unsupported
|
||||||
@@ -1087,13 +1090,10 @@ impl ExtractPolicy for Descriptor<DescriptorPublicKey> {
|
|||||||
build_sat: BuildSatisfaction,
|
build_sat: BuildSatisfaction,
|
||||||
secp: &SecpCtx,
|
secp: &SecpCtx,
|
||||||
) -> Result<Option<Policy>, Error> {
|
) -> Result<Option<Policy>, Error> {
|
||||||
Ok(Policy::make_multisig::<Ctx>(
|
let threshold = Threshold::new(keys.k(), keys.pks().to_vec())
|
||||||
keys.pks.as_ref(),
|
.expect("valid threshold and pks collection");
|
||||||
signers,
|
Ok(Policy::make_multi::<Ctx, MAX_PUBKEYS_PER_MULTISIG>(
|
||||||
build_sat,
|
&threshold, signers, build_sat, true, secp,
|
||||||
keys.k,
|
|
||||||
true,
|
|
||||||
secp,
|
|
||||||
)?)
|
)?)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -36,17 +36,17 @@ pub type DescriptorTemplateOut = (ExtendedDescriptor, KeyMap, ValidNetworks);
|
|||||||
/// ## Example
|
/// ## Example
|
||||||
///
|
///
|
||||||
/// ```
|
/// ```
|
||||||
/// use bdk::descriptor::error::Error as DescriptorError;
|
/// use bdk_wallet::descriptor::error::Error as DescriptorError;
|
||||||
/// use bdk::keys::{IntoDescriptorKey, KeyError};
|
/// use bdk_wallet::keys::{IntoDescriptorKey, KeyError};
|
||||||
/// use bdk::miniscript::Legacy;
|
/// use bdk_wallet::miniscript::Legacy;
|
||||||
/// use bdk::template::{DescriptorTemplate, DescriptorTemplateOut};
|
/// use bdk_wallet::template::{DescriptorTemplate, DescriptorTemplateOut};
|
||||||
/// use bitcoin::Network;
|
/// use bitcoin::Network;
|
||||||
///
|
///
|
||||||
/// struct MyP2PKH<K: IntoDescriptorKey<Legacy>>(K);
|
/// struct MyP2PKH<K: IntoDescriptorKey<Legacy>>(K);
|
||||||
///
|
///
|
||||||
/// impl<K: IntoDescriptorKey<Legacy>> DescriptorTemplate for MyP2PKH<K> {
|
/// impl<K: IntoDescriptorKey<Legacy>> DescriptorTemplate for MyP2PKH<K> {
|
||||||
/// fn build(self, network: Network) -> Result<DescriptorTemplateOut, DescriptorError> {
|
/// fn build(self, network: Network) -> Result<DescriptorTemplateOut, DescriptorError> {
|
||||||
/// Ok(bdk::descriptor!(pkh(self.0))?)
|
/// Ok(bdk_wallet::descriptor!(pkh(self.0))?)
|
||||||
/// }
|
/// }
|
||||||
/// }
|
/// }
|
||||||
/// ```
|
/// ```
|
||||||
@@ -72,23 +72,28 @@ impl<T: DescriptorTemplate> IntoWalletDescriptor for T {
|
|||||||
/// ## Example
|
/// ## Example
|
||||||
///
|
///
|
||||||
/// ```
|
/// ```
|
||||||
/// # use bdk::bitcoin::{PrivateKey, Network};
|
/// # use bdk_wallet::bitcoin::{PrivateKey, Network};
|
||||||
/// # use bdk::Wallet;
|
/// # use bdk_wallet::Wallet;
|
||||||
/// # use bdk::KeychainKind;
|
/// # use bdk_wallet::KeychainKind;
|
||||||
/// use bdk::template::P2Pkh;
|
/// use bdk_wallet::template::P2Pkh;
|
||||||
///
|
///
|
||||||
/// let key =
|
/// let key_external =
|
||||||
/// bitcoin::PrivateKey::from_wif("cTc4vURSzdx6QE6KVynWGomDbLaA75dNALMNyfjh3p8DRRar84Um")?;
|
/// bitcoin::PrivateKey::from_wif("cTc4vURSzdx6QE6KVynWGomDbLaA75dNALMNyfjh3p8DRRar84Um")?;
|
||||||
/// let mut wallet = Wallet::new_no_persist(P2Pkh(key), None, Network::Testnet)?;
|
/// let key_internal =
|
||||||
|
/// bitcoin::PrivateKey::from_wif("cVpPVruEDdmutPzisEsYvtST1usBR3ntr8pXSyt6D2YYqXRyPcFW")?;
|
||||||
|
/// let mut wallet = Wallet::create(P2Pkh(key_external), P2Pkh(key_internal))
|
||||||
|
/// .network(Network::Testnet)
|
||||||
|
/// .create_wallet_no_persist()?;
|
||||||
///
|
///
|
||||||
/// assert_eq!(
|
/// assert_eq!(
|
||||||
/// wallet
|
/// wallet
|
||||||
/// .next_unused_address(KeychainKind::External)?
|
/// .next_unused_address(KeychainKind::External)
|
||||||
/// .to_string(),
|
/// .to_string(),
|
||||||
/// "mwJ8hxFYW19JLuc65RCTaP4v1rzVU8cVMT"
|
/// "mwJ8hxFYW19JLuc65RCTaP4v1rzVU8cVMT"
|
||||||
/// );
|
/// );
|
||||||
/// # Ok::<_, Box<dyn std::error::Error>>(())
|
/// # Ok::<_, Box<dyn std::error::Error>>(())
|
||||||
/// ```
|
/// ```
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
pub struct P2Pkh<K: IntoDescriptorKey<Legacy>>(pub K);
|
pub struct P2Pkh<K: IntoDescriptorKey<Legacy>>(pub K);
|
||||||
|
|
||||||
impl<K: IntoDescriptorKey<Legacy>> DescriptorTemplate for P2Pkh<K> {
|
impl<K: IntoDescriptorKey<Legacy>> DescriptorTemplate for P2Pkh<K> {
|
||||||
@@ -102,24 +107,29 @@ impl<K: IntoDescriptorKey<Legacy>> DescriptorTemplate for P2Pkh<K> {
|
|||||||
/// ## Example
|
/// ## Example
|
||||||
///
|
///
|
||||||
/// ```
|
/// ```
|
||||||
/// # use bdk::bitcoin::{PrivateKey, Network};
|
/// # use bdk_wallet::bitcoin::{PrivateKey, Network};
|
||||||
/// # use bdk::Wallet;
|
/// # use bdk_wallet::Wallet;
|
||||||
/// # use bdk::KeychainKind;
|
/// # use bdk_wallet::KeychainKind;
|
||||||
/// use bdk::template::P2Wpkh_P2Sh;
|
/// use bdk_wallet::template::P2Wpkh_P2Sh;
|
||||||
///
|
///
|
||||||
/// let key =
|
/// let key_external =
|
||||||
/// bitcoin::PrivateKey::from_wif("cTc4vURSzdx6QE6KVynWGomDbLaA75dNALMNyfjh3p8DRRar84Um")?;
|
/// bitcoin::PrivateKey::from_wif("cTc4vURSzdx6QE6KVynWGomDbLaA75dNALMNyfjh3p8DRRar84Um")?;
|
||||||
/// let mut wallet = Wallet::new_no_persist(P2Wpkh_P2Sh(key), None, Network::Testnet)?;
|
/// let key_internal =
|
||||||
|
/// bitcoin::PrivateKey::from_wif("cVpPVruEDdmutPzisEsYvtST1usBR3ntr8pXSyt6D2YYqXRyPcFW")?;
|
||||||
|
/// let mut wallet = Wallet::create(P2Wpkh_P2Sh(key_external), P2Wpkh_P2Sh(key_internal))
|
||||||
|
/// .network(Network::Testnet)
|
||||||
|
/// .create_wallet_no_persist()?;
|
||||||
///
|
///
|
||||||
/// assert_eq!(
|
/// assert_eq!(
|
||||||
/// wallet
|
/// wallet
|
||||||
/// .next_unused_address(KeychainKind::External)?
|
/// .next_unused_address(KeychainKind::External)
|
||||||
/// .to_string(),
|
/// .to_string(),
|
||||||
/// "2NB4ox5VDRw1ecUv6SnT3VQHPXveYztRqk5"
|
/// "2NB4ox5VDRw1ecUv6SnT3VQHPXveYztRqk5"
|
||||||
/// );
|
/// );
|
||||||
/// # Ok::<_, Box<dyn std::error::Error>>(())
|
/// # Ok::<_, Box<dyn std::error::Error>>(())
|
||||||
/// ```
|
/// ```
|
||||||
#[allow(non_camel_case_types)]
|
#[allow(non_camel_case_types)]
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
pub struct P2Wpkh_P2Sh<K: IntoDescriptorKey<Segwitv0>>(pub K);
|
pub struct P2Wpkh_P2Sh<K: IntoDescriptorKey<Segwitv0>>(pub K);
|
||||||
|
|
||||||
impl<K: IntoDescriptorKey<Segwitv0>> DescriptorTemplate for P2Wpkh_P2Sh<K> {
|
impl<K: IntoDescriptorKey<Segwitv0>> DescriptorTemplate for P2Wpkh_P2Sh<K> {
|
||||||
@@ -133,23 +143,28 @@ impl<K: IntoDescriptorKey<Segwitv0>> DescriptorTemplate for P2Wpkh_P2Sh<K> {
|
|||||||
/// ## Example
|
/// ## Example
|
||||||
///
|
///
|
||||||
/// ```
|
/// ```
|
||||||
/// # use bdk::bitcoin::{PrivateKey, Network};
|
/// # use bdk_wallet::bitcoin::{PrivateKey, Network};
|
||||||
/// # use bdk::{Wallet};
|
/// # use bdk_wallet::Wallet;
|
||||||
/// # use bdk::KeychainKind;
|
/// # use bdk_wallet::KeychainKind;
|
||||||
/// use bdk::template::P2Wpkh;
|
/// use bdk_wallet::template::P2Wpkh;
|
||||||
///
|
///
|
||||||
/// let key =
|
/// let key_external =
|
||||||
/// bitcoin::PrivateKey::from_wif("cTc4vURSzdx6QE6KVynWGomDbLaA75dNALMNyfjh3p8DRRar84Um")?;
|
/// bitcoin::PrivateKey::from_wif("cTc4vURSzdx6QE6KVynWGomDbLaA75dNALMNyfjh3p8DRRar84Um")?;
|
||||||
/// let mut wallet = Wallet::new_no_persist(P2Wpkh(key), None, Network::Testnet)?;
|
/// let key_internal =
|
||||||
|
/// bitcoin::PrivateKey::from_wif("cVpPVruEDdmutPzisEsYvtST1usBR3ntr8pXSyt6D2YYqXRyPcFW")?;
|
||||||
|
/// let mut wallet = Wallet::create(P2Wpkh(key_external), P2Wpkh(key_internal))
|
||||||
|
/// .network(Network::Testnet)
|
||||||
|
/// .create_wallet_no_persist()?;
|
||||||
///
|
///
|
||||||
/// assert_eq!(
|
/// assert_eq!(
|
||||||
/// wallet
|
/// wallet
|
||||||
/// .next_unused_address(KeychainKind::External)?
|
/// .next_unused_address(KeychainKind::External)
|
||||||
/// .to_string(),
|
/// .to_string(),
|
||||||
/// "tb1q4525hmgw265tl3drrl8jjta7ayffu6jf68ltjd"
|
/// "tb1q4525hmgw265tl3drrl8jjta7ayffu6jf68ltjd"
|
||||||
/// );
|
/// );
|
||||||
/// # Ok::<_, Box<dyn std::error::Error>>(())
|
/// # Ok::<_, Box<dyn std::error::Error>>(())
|
||||||
/// ```
|
/// ```
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
pub struct P2Wpkh<K: IntoDescriptorKey<Segwitv0>>(pub K);
|
pub struct P2Wpkh<K: IntoDescriptorKey<Segwitv0>>(pub K);
|
||||||
|
|
||||||
impl<K: IntoDescriptorKey<Segwitv0>> DescriptorTemplate for P2Wpkh<K> {
|
impl<K: IntoDescriptorKey<Segwitv0>> DescriptorTemplate for P2Wpkh<K> {
|
||||||
@@ -163,23 +178,28 @@ impl<K: IntoDescriptorKey<Segwitv0>> DescriptorTemplate for P2Wpkh<K> {
|
|||||||
/// ## Example
|
/// ## Example
|
||||||
///
|
///
|
||||||
/// ```
|
/// ```
|
||||||
/// # use bdk::bitcoin::{PrivateKey, Network};
|
/// # use bdk_wallet::bitcoin::{PrivateKey, Network};
|
||||||
/// # use bdk::Wallet;
|
/// # use bdk_wallet::Wallet;
|
||||||
/// # use bdk::KeychainKind;
|
/// # use bdk_wallet::KeychainKind;
|
||||||
/// use bdk::template::P2TR;
|
/// use bdk_wallet::template::P2TR;
|
||||||
///
|
///
|
||||||
/// let key =
|
/// let key_external =
|
||||||
/// bitcoin::PrivateKey::from_wif("cTc4vURSzdx6QE6KVynWGomDbLaA75dNALMNyfjh3p8DRRar84Um")?;
|
/// bitcoin::PrivateKey::from_wif("cTc4vURSzdx6QE6KVynWGomDbLaA75dNALMNyfjh3p8DRRar84Um")?;
|
||||||
/// let mut wallet = Wallet::new_no_persist(P2TR(key), None, Network::Testnet)?;
|
/// let key_internal =
|
||||||
|
/// bitcoin::PrivateKey::from_wif("cVpPVruEDdmutPzisEsYvtST1usBR3ntr8pXSyt6D2YYqXRyPcFW")?;
|
||||||
|
/// let mut wallet = Wallet::create(P2TR(key_external), P2TR(key_internal))
|
||||||
|
/// .network(Network::Testnet)
|
||||||
|
/// .create_wallet_no_persist()?;
|
||||||
///
|
///
|
||||||
/// assert_eq!(
|
/// assert_eq!(
|
||||||
/// wallet
|
/// wallet
|
||||||
/// .next_unused_address(KeychainKind::External)?
|
/// .next_unused_address(KeychainKind::External)
|
||||||
/// .to_string(),
|
/// .to_string(),
|
||||||
/// "tb1pvjf9t34fznr53u5tqhejz4nr69luzkhlvsdsdfq9pglutrpve2xq7hps46"
|
/// "tb1pvjf9t34fznr53u5tqhejz4nr69luzkhlvsdsdfq9pglutrpve2xq7hps46"
|
||||||
/// );
|
/// );
|
||||||
/// # Ok::<_, Box<dyn std::error::Error>>(())
|
/// # Ok::<_, Box<dyn std::error::Error>>(())
|
||||||
/// ```
|
/// ```
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
pub struct P2TR<K: IntoDescriptorKey<Tap>>(pub K);
|
pub struct P2TR<K: IntoDescriptorKey<Tap>>(pub K);
|
||||||
|
|
||||||
impl<K: IntoDescriptorKey<Tap>> DescriptorTemplate for P2TR<K> {
|
impl<K: IntoDescriptorKey<Tap>> DescriptorTemplate for P2TR<K> {
|
||||||
@@ -196,23 +216,22 @@ impl<K: IntoDescriptorKey<Tap>> DescriptorTemplate for P2TR<K> {
|
|||||||
///
|
///
|
||||||
/// ## Example
|
/// ## Example
|
||||||
///
|
///
|
||||||
/// ```
|
/// ```rust
|
||||||
/// # use std::str::FromStr;
|
/// # use std::str::FromStr;
|
||||||
/// # use bdk::bitcoin::{PrivateKey, Network};
|
/// # use bdk_wallet::bitcoin::{PrivateKey, Network};
|
||||||
/// # use bdk::{Wallet, KeychainKind};
|
/// # use bdk_wallet::{Wallet, KeychainKind};
|
||||||
/// use bdk::template::Bip44;
|
/// use bdk_wallet::template::Bip44;
|
||||||
///
|
///
|
||||||
/// let key = bitcoin::bip32::Xpriv::from_str("tprv8ZgxMBicQKsPeZRHk4rTG6orPS2CRNFX3njhUXx5vj9qGog5ZMH4uGReDWN5kCkY3jmWEtWause41CDvBRXD1shKknAMKxT99o9qUTRVC6m")?;
|
/// let key = bitcoin::bip32::Xpriv::from_str("tprv8ZgxMBicQKsPeZRHk4rTG6orPS2CRNFX3njhUXx5vj9qGog5ZMH4uGReDWN5kCkY3jmWEtWause41CDvBRXD1shKknAMKxT99o9qUTRVC6m")?;
|
||||||
/// let mut wallet = Wallet::new_no_persist(
|
/// let mut wallet = Wallet::create(Bip44(key.clone(), KeychainKind::External), Bip44(key, KeychainKind::Internal))
|
||||||
/// Bip44(key.clone(), KeychainKind::External),
|
/// .network(Network::Testnet)
|
||||||
/// Some(Bip44(key, KeychainKind::Internal)),
|
/// .create_wallet_no_persist()?;
|
||||||
/// Network::Testnet,
|
|
||||||
/// )?;
|
|
||||||
///
|
///
|
||||||
/// assert_eq!(wallet.next_unused_address(KeychainKind::External)?.to_string(), "mmogjc7HJEZkrLqyQYqJmxUqFaC7i4uf89");
|
/// assert_eq!(wallet.next_unused_address(KeychainKind::External).to_string(), "mmogjc7HJEZkrLqyQYqJmxUqFaC7i4uf89");
|
||||||
/// assert_eq!(wallet.public_descriptor(KeychainKind::External).unwrap().to_string(), "pkh([c55b303f/44'/1'/0']tpubDCuorCpzvYS2LCD75BR46KHE8GdDeg1wsAgNZeNr6DaB5gQK1o14uErKwKLuFmeemkQ6N2m3rNgvctdJLyr7nwu2yia7413Hhg8WWE44cgT/0/*)#5wrnv0xt");
|
/// assert_eq!(wallet.public_descriptor(KeychainKind::External).to_string(), "pkh([c55b303f/44'/1'/0']tpubDCuorCpzvYS2LCD75BR46KHE8GdDeg1wsAgNZeNr6DaB5gQK1o14uErKwKLuFmeemkQ6N2m3rNgvctdJLyr7nwu2yia7413Hhg8WWE44cgT/0/*)#5wrnv0xt");
|
||||||
/// # Ok::<_, Box<dyn std::error::Error>>(())
|
/// # Ok::<_, Box<dyn std::error::Error>>(())
|
||||||
/// ```
|
/// ```
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
pub struct Bip44<K: DerivableKey<Legacy>>(pub K, pub KeychainKind);
|
pub struct Bip44<K: DerivableKey<Legacy>>(pub K, pub KeychainKind);
|
||||||
|
|
||||||
impl<K: DerivableKey<Legacy>> DescriptorTemplate for Bip44<K> {
|
impl<K: DerivableKey<Legacy>> DescriptorTemplate for Bip44<K> {
|
||||||
@@ -234,22 +253,24 @@ impl<K: DerivableKey<Legacy>> DescriptorTemplate for Bip44<K> {
|
|||||||
///
|
///
|
||||||
/// ```
|
/// ```
|
||||||
/// # use std::str::FromStr;
|
/// # use std::str::FromStr;
|
||||||
/// # use bdk::bitcoin::{PrivateKey, Network};
|
/// # use bdk_wallet::bitcoin::{PrivateKey, Network};
|
||||||
/// # use bdk::{Wallet, KeychainKind};
|
/// # use bdk_wallet::{KeychainKind, Wallet};
|
||||||
/// use bdk::template::Bip44Public;
|
/// use bdk_wallet::template::Bip44Public;
|
||||||
///
|
///
|
||||||
/// let key = bitcoin::bip32::Xpub::from_str("tpubDDDzQ31JkZB7VxUr9bjvBivDdqoFLrDPyLWtLapArAi51ftfmCb2DPxwLQzX65iNcXz1DGaVvyvo6JQ6rTU73r2gqdEo8uov9QKRb7nKCSU")?;
|
/// let key = bitcoin::bip32::Xpub::from_str("tpubDDDzQ31JkZB7VxUr9bjvBivDdqoFLrDPyLWtLapArAi51ftfmCb2DPxwLQzX65iNcXz1DGaVvyvo6JQ6rTU73r2gqdEo8uov9QKRb7nKCSU")?;
|
||||||
/// let fingerprint = bitcoin::bip32::Fingerprint::from_str("c55b303f")?;
|
/// let fingerprint = bitcoin::bip32::Fingerprint::from_str("c55b303f")?;
|
||||||
/// let mut wallet = Wallet::new_no_persist(
|
/// let mut wallet = Wallet::create(
|
||||||
/// Bip44Public(key.clone(), fingerprint, KeychainKind::External),
|
/// Bip44Public(key.clone(), fingerprint, KeychainKind::External),
|
||||||
/// Some(Bip44Public(key, fingerprint, KeychainKind::Internal)),
|
/// Bip44Public(key, fingerprint, KeychainKind::Internal),
|
||||||
/// Network::Testnet,
|
/// )
|
||||||
/// )?;
|
/// .network(Network::Testnet)
|
||||||
|
/// .create_wallet_no_persist()?;
|
||||||
///
|
///
|
||||||
/// assert_eq!(wallet.next_unused_address(KeychainKind::External)?.to_string(), "miNG7dJTzJqNbFS19svRdTCisC65dsubtR");
|
/// assert_eq!(wallet.next_unused_address(KeychainKind::External).to_string(), "miNG7dJTzJqNbFS19svRdTCisC65dsubtR");
|
||||||
/// assert_eq!(wallet.public_descriptor(KeychainKind::External).unwrap().to_string(), "pkh([c55b303f/44'/1'/0']tpubDDDzQ31JkZB7VxUr9bjvBivDdqoFLrDPyLWtLapArAi51ftfmCb2DPxwLQzX65iNcXz1DGaVvyvo6JQ6rTU73r2gqdEo8uov9QKRb7nKCSU/0/*)#cfhumdqz");
|
/// assert_eq!(wallet.public_descriptor(KeychainKind::External).to_string(), "pkh([c55b303f/44'/1'/0']tpubDDDzQ31JkZB7VxUr9bjvBivDdqoFLrDPyLWtLapArAi51ftfmCb2DPxwLQzX65iNcXz1DGaVvyvo6JQ6rTU73r2gqdEo8uov9QKRb7nKCSU/0/*)#cfhumdqz");
|
||||||
/// # Ok::<_, Box<dyn std::error::Error>>(())
|
/// # Ok::<_, Box<dyn std::error::Error>>(())
|
||||||
/// ```
|
/// ```
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
pub struct Bip44Public<K: DerivableKey<Legacy>>(pub K, pub bip32::Fingerprint, pub KeychainKind);
|
pub struct Bip44Public<K: DerivableKey<Legacy>>(pub K, pub bip32::Fingerprint, pub KeychainKind);
|
||||||
|
|
||||||
impl<K: DerivableKey<Legacy>> DescriptorTemplate for Bip44Public<K> {
|
impl<K: DerivableKey<Legacy>> DescriptorTemplate for Bip44Public<K> {
|
||||||
@@ -271,21 +292,23 @@ impl<K: DerivableKey<Legacy>> DescriptorTemplate for Bip44Public<K> {
|
|||||||
///
|
///
|
||||||
/// ```
|
/// ```
|
||||||
/// # use std::str::FromStr;
|
/// # use std::str::FromStr;
|
||||||
/// # use bdk::bitcoin::{PrivateKey, Network};
|
/// # use bdk_wallet::bitcoin::{PrivateKey, Network};
|
||||||
/// # use bdk::{Wallet, KeychainKind};
|
/// # use bdk_wallet::{Wallet, KeychainKind};
|
||||||
/// use bdk::template::Bip49;
|
/// use bdk_wallet::template::Bip49;
|
||||||
///
|
///
|
||||||
/// let key = bitcoin::bip32::Xpriv::from_str("tprv8ZgxMBicQKsPeZRHk4rTG6orPS2CRNFX3njhUXx5vj9qGog5ZMH4uGReDWN5kCkY3jmWEtWause41CDvBRXD1shKknAMKxT99o9qUTRVC6m")?;
|
/// let key = bitcoin::bip32::Xpriv::from_str("tprv8ZgxMBicQKsPeZRHk4rTG6orPS2CRNFX3njhUXx5vj9qGog5ZMH4uGReDWN5kCkY3jmWEtWause41CDvBRXD1shKknAMKxT99o9qUTRVC6m")?;
|
||||||
/// let mut wallet = Wallet::new_no_persist(
|
/// let mut wallet = Wallet::create(
|
||||||
/// Bip49(key.clone(), KeychainKind::External),
|
/// Bip49(key.clone(), KeychainKind::External),
|
||||||
/// Some(Bip49(key, KeychainKind::Internal)),
|
/// Bip49(key, KeychainKind::Internal),
|
||||||
/// Network::Testnet,
|
/// )
|
||||||
/// )?;
|
/// .network(Network::Testnet)
|
||||||
|
/// .create_wallet_no_persist()?;
|
||||||
///
|
///
|
||||||
/// assert_eq!(wallet.next_unused_address(KeychainKind::External)?.to_string(), "2N4zkWAoGdUv4NXhSsU8DvS5MB36T8nKHEB");
|
/// assert_eq!(wallet.next_unused_address(KeychainKind::External).to_string(), "2N4zkWAoGdUv4NXhSsU8DvS5MB36T8nKHEB");
|
||||||
/// assert_eq!(wallet.public_descriptor(KeychainKind::External).unwrap().to_string(), "sh(wpkh([c55b303f/49'/1'/0']tpubDDYr4kdnZgjjShzYNjZUZXUUtpXaofdkMaipyS8ThEh45qFmhT4hKYways7UXmg6V7het1QiFo9kf4kYUXyDvV4rHEyvSpys9pjCB3pukxi/0/*))#s9vxlc8e");
|
/// assert_eq!(wallet.public_descriptor(KeychainKind::External).to_string(), "sh(wpkh([c55b303f/49'/1'/0']tpubDDYr4kdnZgjjShzYNjZUZXUUtpXaofdkMaipyS8ThEh45qFmhT4hKYways7UXmg6V7het1QiFo9kf4kYUXyDvV4rHEyvSpys9pjCB3pukxi/0/*))#s9vxlc8e");
|
||||||
/// # Ok::<_, Box<dyn std::error::Error>>(())
|
/// # Ok::<_, Box<dyn std::error::Error>>(())
|
||||||
/// ```
|
/// ```
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
pub struct Bip49<K: DerivableKey<Segwitv0>>(pub K, pub KeychainKind);
|
pub struct Bip49<K: DerivableKey<Segwitv0>>(pub K, pub KeychainKind);
|
||||||
|
|
||||||
impl<K: DerivableKey<Segwitv0>> DescriptorTemplate for Bip49<K> {
|
impl<K: DerivableKey<Segwitv0>> DescriptorTemplate for Bip49<K> {
|
||||||
@@ -307,22 +330,24 @@ impl<K: DerivableKey<Segwitv0>> DescriptorTemplate for Bip49<K> {
|
|||||||
///
|
///
|
||||||
/// ```
|
/// ```
|
||||||
/// # use std::str::FromStr;
|
/// # use std::str::FromStr;
|
||||||
/// # use bdk::bitcoin::{PrivateKey, Network};
|
/// # use bdk_wallet::bitcoin::{PrivateKey, Network};
|
||||||
/// # use bdk::{Wallet, KeychainKind};
|
/// # use bdk_wallet::{Wallet, KeychainKind};
|
||||||
/// use bdk::template::Bip49Public;
|
/// use bdk_wallet::template::Bip49Public;
|
||||||
///
|
///
|
||||||
/// let key = bitcoin::bip32::Xpub::from_str("tpubDC49r947KGK52X5rBWS4BLs5m9SRY3pYHnvRrm7HcybZ3BfdEsGFyzCMzayi1u58eT82ZeyFZwH7DD6Q83E3fM9CpfMtmnTygnLfP59jL9L")?;
|
/// let key = bitcoin::bip32::Xpub::from_str("tpubDC49r947KGK52X5rBWS4BLs5m9SRY3pYHnvRrm7HcybZ3BfdEsGFyzCMzayi1u58eT82ZeyFZwH7DD6Q83E3fM9CpfMtmnTygnLfP59jL9L")?;
|
||||||
/// let fingerprint = bitcoin::bip32::Fingerprint::from_str("c55b303f")?;
|
/// let fingerprint = bitcoin::bip32::Fingerprint::from_str("c55b303f")?;
|
||||||
/// let mut wallet = Wallet::new_no_persist(
|
/// let mut wallet = Wallet::create(
|
||||||
/// Bip49Public(key.clone(), fingerprint, KeychainKind::External),
|
/// Bip49Public(key.clone(), fingerprint, KeychainKind::External),
|
||||||
/// Some(Bip49Public(key, fingerprint, KeychainKind::Internal)),
|
/// Bip49Public(key, fingerprint, KeychainKind::Internal),
|
||||||
/// Network::Testnet,
|
/// )
|
||||||
/// )?;
|
/// .network(Network::Testnet)
|
||||||
|
/// .create_wallet_no_persist()?;
|
||||||
///
|
///
|
||||||
/// assert_eq!(wallet.next_unused_address(KeychainKind::External)?.to_string(), "2N3K4xbVAHoiTQSwxkZjWDfKoNC27pLkYnt");
|
/// assert_eq!(wallet.next_unused_address(KeychainKind::External).to_string(), "2N3K4xbVAHoiTQSwxkZjWDfKoNC27pLkYnt");
|
||||||
/// assert_eq!(wallet.public_descriptor(KeychainKind::External).unwrap().to_string(), "sh(wpkh([c55b303f/49'/1'/0']tpubDC49r947KGK52X5rBWS4BLs5m9SRY3pYHnvRrm7HcybZ3BfdEsGFyzCMzayi1u58eT82ZeyFZwH7DD6Q83E3fM9CpfMtmnTygnLfP59jL9L/0/*))#3tka9g0q");
|
/// assert_eq!(wallet.public_descriptor(KeychainKind::External).to_string(), "sh(wpkh([c55b303f/49'/1'/0']tpubDC49r947KGK52X5rBWS4BLs5m9SRY3pYHnvRrm7HcybZ3BfdEsGFyzCMzayi1u58eT82ZeyFZwH7DD6Q83E3fM9CpfMtmnTygnLfP59jL9L/0/*))#3tka9g0q");
|
||||||
/// # Ok::<_, Box<dyn std::error::Error>>(())
|
/// # Ok::<_, Box<dyn std::error::Error>>(())
|
||||||
/// ```
|
/// ```
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
pub struct Bip49Public<K: DerivableKey<Segwitv0>>(pub K, pub bip32::Fingerprint, pub KeychainKind);
|
pub struct Bip49Public<K: DerivableKey<Segwitv0>>(pub K, pub bip32::Fingerprint, pub KeychainKind);
|
||||||
|
|
||||||
impl<K: DerivableKey<Segwitv0>> DescriptorTemplate for Bip49Public<K> {
|
impl<K: DerivableKey<Segwitv0>> DescriptorTemplate for Bip49Public<K> {
|
||||||
@@ -344,21 +369,23 @@ impl<K: DerivableKey<Segwitv0>> DescriptorTemplate for Bip49Public<K> {
|
|||||||
///
|
///
|
||||||
/// ```
|
/// ```
|
||||||
/// # use std::str::FromStr;
|
/// # use std::str::FromStr;
|
||||||
/// # use bdk::bitcoin::{PrivateKey, Network};
|
/// # use bdk_wallet::bitcoin::{PrivateKey, Network};
|
||||||
/// # use bdk::{Wallet, KeychainKind};
|
/// # use bdk_wallet::{Wallet, KeychainKind};
|
||||||
/// use bdk::template::Bip84;
|
/// use bdk_wallet::template::Bip84;
|
||||||
///
|
///
|
||||||
/// let key = bitcoin::bip32::Xpriv::from_str("tprv8ZgxMBicQKsPeZRHk4rTG6orPS2CRNFX3njhUXx5vj9qGog5ZMH4uGReDWN5kCkY3jmWEtWause41CDvBRXD1shKknAMKxT99o9qUTRVC6m")?;
|
/// let key = bitcoin::bip32::Xpriv::from_str("tprv8ZgxMBicQKsPeZRHk4rTG6orPS2CRNFX3njhUXx5vj9qGog5ZMH4uGReDWN5kCkY3jmWEtWause41CDvBRXD1shKknAMKxT99o9qUTRVC6m")?;
|
||||||
/// let mut wallet = Wallet::new_no_persist(
|
/// let mut wallet = Wallet::create(
|
||||||
/// Bip84(key.clone(), KeychainKind::External),
|
/// Bip84(key.clone(), KeychainKind::External),
|
||||||
/// Some(Bip84(key, KeychainKind::Internal)),
|
/// Bip84(key, KeychainKind::Internal),
|
||||||
/// Network::Testnet,
|
/// )
|
||||||
/// )?;
|
/// .network(Network::Testnet)
|
||||||
|
/// .create_wallet_no_persist()?;
|
||||||
///
|
///
|
||||||
/// assert_eq!(wallet.next_unused_address(KeychainKind::External)?.to_string(), "tb1qhl85z42h7r4su5u37rvvw0gk8j2t3n9y7zsg4n");
|
/// assert_eq!(wallet.next_unused_address(KeychainKind::External).to_string(), "tb1qhl85z42h7r4su5u37rvvw0gk8j2t3n9y7zsg4n");
|
||||||
/// assert_eq!(wallet.public_descriptor(KeychainKind::External).unwrap().to_string(), "wpkh([c55b303f/84'/1'/0']tpubDDc5mum24DekpNw92t6fHGp8Gr2JjF9J7i4TZBtN6Vp8xpAULG5CFaKsfugWa5imhrQQUZKXe261asP5koDHo5bs3qNTmf3U3o4v9SaB8gg/0/*)#6kfecsmr");
|
/// assert_eq!(wallet.public_descriptor(KeychainKind::External).to_string(), "wpkh([c55b303f/84'/1'/0']tpubDDc5mum24DekpNw92t6fHGp8Gr2JjF9J7i4TZBtN6Vp8xpAULG5CFaKsfugWa5imhrQQUZKXe261asP5koDHo5bs3qNTmf3U3o4v9SaB8gg/0/*)#6kfecsmr");
|
||||||
/// # Ok::<_, Box<dyn std::error::Error>>(())
|
/// # Ok::<_, Box<dyn std::error::Error>>(())
|
||||||
/// ```
|
/// ```
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
pub struct Bip84<K: DerivableKey<Segwitv0>>(pub K, pub KeychainKind);
|
pub struct Bip84<K: DerivableKey<Segwitv0>>(pub K, pub KeychainKind);
|
||||||
|
|
||||||
impl<K: DerivableKey<Segwitv0>> DescriptorTemplate for Bip84<K> {
|
impl<K: DerivableKey<Segwitv0>> DescriptorTemplate for Bip84<K> {
|
||||||
@@ -380,22 +407,24 @@ impl<K: DerivableKey<Segwitv0>> DescriptorTemplate for Bip84<K> {
|
|||||||
///
|
///
|
||||||
/// ```
|
/// ```
|
||||||
/// # use std::str::FromStr;
|
/// # use std::str::FromStr;
|
||||||
/// # use bdk::bitcoin::{PrivateKey, Network};
|
/// # use bdk_wallet::bitcoin::{PrivateKey, Network};
|
||||||
/// # use bdk::{Wallet, KeychainKind};
|
/// # use bdk_wallet::{Wallet, KeychainKind};
|
||||||
/// use bdk::template::Bip84Public;
|
/// use bdk_wallet::template::Bip84Public;
|
||||||
///
|
///
|
||||||
/// let key = bitcoin::bip32::Xpub::from_str("tpubDC2Qwo2TFsaNC4ju8nrUJ9mqVT3eSgdmy1yPqhgkjwmke3PRXutNGRYAUo6RCHTcVQaDR3ohNU9we59brGHuEKPvH1ags2nevW5opEE9Z5Q")?;
|
/// let key = bitcoin::bip32::Xpub::from_str("tpubDC2Qwo2TFsaNC4ju8nrUJ9mqVT3eSgdmy1yPqhgkjwmke3PRXutNGRYAUo6RCHTcVQaDR3ohNU9we59brGHuEKPvH1ags2nevW5opEE9Z5Q")?;
|
||||||
/// let fingerprint = bitcoin::bip32::Fingerprint::from_str("c55b303f")?;
|
/// let fingerprint = bitcoin::bip32::Fingerprint::from_str("c55b303f")?;
|
||||||
/// let mut wallet = Wallet::new_no_persist(
|
/// let mut wallet = Wallet::create(
|
||||||
/// Bip84Public(key.clone(), fingerprint, KeychainKind::External),
|
/// Bip84Public(key.clone(), fingerprint, KeychainKind::External),
|
||||||
/// Some(Bip84Public(key, fingerprint, KeychainKind::Internal)),
|
/// Bip84Public(key, fingerprint, KeychainKind::Internal),
|
||||||
/// Network::Testnet,
|
/// )
|
||||||
/// )?;
|
/// .network(Network::Testnet)
|
||||||
|
/// .create_wallet_no_persist()?;
|
||||||
///
|
///
|
||||||
/// assert_eq!(wallet.next_unused_address(KeychainKind::External)?.to_string(), "tb1qedg9fdlf8cnnqfd5mks6uz5w4kgpk2pr6y4qc7");
|
/// assert_eq!(wallet.next_unused_address(KeychainKind::External).to_string(), "tb1qedg9fdlf8cnnqfd5mks6uz5w4kgpk2pr6y4qc7");
|
||||||
/// assert_eq!(wallet.public_descriptor(KeychainKind::External).unwrap().to_string(), "wpkh([c55b303f/84'/1'/0']tpubDC2Qwo2TFsaNC4ju8nrUJ9mqVT3eSgdmy1yPqhgkjwmke3PRXutNGRYAUo6RCHTcVQaDR3ohNU9we59brGHuEKPvH1ags2nevW5opEE9Z5Q/0/*)#dhu402yv");
|
/// assert_eq!(wallet.public_descriptor(KeychainKind::External).to_string(), "wpkh([c55b303f/84'/1'/0']tpubDC2Qwo2TFsaNC4ju8nrUJ9mqVT3eSgdmy1yPqhgkjwmke3PRXutNGRYAUo6RCHTcVQaDR3ohNU9we59brGHuEKPvH1ags2nevW5opEE9Z5Q/0/*)#dhu402yv");
|
||||||
/// # Ok::<_, Box<dyn std::error::Error>>(())
|
/// # Ok::<_, Box<dyn std::error::Error>>(())
|
||||||
/// ```
|
/// ```
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
pub struct Bip84Public<K: DerivableKey<Segwitv0>>(pub K, pub bip32::Fingerprint, pub KeychainKind);
|
pub struct Bip84Public<K: DerivableKey<Segwitv0>>(pub K, pub bip32::Fingerprint, pub KeychainKind);
|
||||||
|
|
||||||
impl<K: DerivableKey<Segwitv0>> DescriptorTemplate for Bip84Public<K> {
|
impl<K: DerivableKey<Segwitv0>> DescriptorTemplate for Bip84Public<K> {
|
||||||
@@ -417,21 +446,23 @@ impl<K: DerivableKey<Segwitv0>> DescriptorTemplate for Bip84Public<K> {
|
|||||||
///
|
///
|
||||||
/// ```
|
/// ```
|
||||||
/// # use std::str::FromStr;
|
/// # use std::str::FromStr;
|
||||||
/// # use bdk::bitcoin::{PrivateKey, Network};
|
/// # use bdk_wallet::bitcoin::{PrivateKey, Network};
|
||||||
/// # use bdk::{Wallet, KeychainKind};
|
/// # use bdk_wallet::{Wallet, KeychainKind};
|
||||||
/// use bdk::template::Bip86;
|
/// use bdk_wallet::template::Bip86;
|
||||||
///
|
///
|
||||||
/// let key = bitcoin::bip32::Xpriv::from_str("tprv8ZgxMBicQKsPeZRHk4rTG6orPS2CRNFX3njhUXx5vj9qGog5ZMH4uGReDWN5kCkY3jmWEtWause41CDvBRXD1shKknAMKxT99o9qUTRVC6m")?;
|
/// let key = bitcoin::bip32::Xpriv::from_str("tprv8ZgxMBicQKsPeZRHk4rTG6orPS2CRNFX3njhUXx5vj9qGog5ZMH4uGReDWN5kCkY3jmWEtWause41CDvBRXD1shKknAMKxT99o9qUTRVC6m")?;
|
||||||
/// let mut wallet = Wallet::new_no_persist(
|
/// let mut wallet = Wallet::create(
|
||||||
/// Bip86(key.clone(), KeychainKind::External),
|
/// Bip86(key.clone(), KeychainKind::External),
|
||||||
/// Some(Bip86(key, KeychainKind::Internal)),
|
/// Bip86(key, KeychainKind::Internal),
|
||||||
/// Network::Testnet,
|
/// )
|
||||||
/// )?;
|
/// .network(Network::Testnet)
|
||||||
|
/// .create_wallet_no_persist()?;
|
||||||
///
|
///
|
||||||
/// assert_eq!(wallet.next_unused_address(KeychainKind::External)?.to_string(), "tb1p5unlj09djx8xsjwe97269kqtxqpwpu2epeskgqjfk4lnf69v4tnqpp35qu");
|
/// assert_eq!(wallet.next_unused_address(KeychainKind::External).to_string(), "tb1p5unlj09djx8xsjwe97269kqtxqpwpu2epeskgqjfk4lnf69v4tnqpp35qu");
|
||||||
/// assert_eq!(wallet.public_descriptor(KeychainKind::External).unwrap().to_string(), "tr([c55b303f/86'/1'/0']tpubDCiHofpEs47kx358bPdJmTZHmCDqQ8qw32upCSxHrSEdeeBs2T5Mq6QMB2ukeMqhNBiyhosBvJErteVhfURPGXPv3qLJPw5MVpHUewsbP2m/0/*)#dkgvr5hm");
|
/// assert_eq!(wallet.public_descriptor(KeychainKind::External).to_string(), "tr([c55b303f/86'/1'/0']tpubDCiHofpEs47kx358bPdJmTZHmCDqQ8qw32upCSxHrSEdeeBs2T5Mq6QMB2ukeMqhNBiyhosBvJErteVhfURPGXPv3qLJPw5MVpHUewsbP2m/0/*)#dkgvr5hm");
|
||||||
/// # Ok::<_, Box<dyn std::error::Error>>(())
|
/// # Ok::<_, Box<dyn std::error::Error>>(())
|
||||||
/// ```
|
/// ```
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
pub struct Bip86<K: DerivableKey<Tap>>(pub K, pub KeychainKind);
|
pub struct Bip86<K: DerivableKey<Tap>>(pub K, pub KeychainKind);
|
||||||
|
|
||||||
impl<K: DerivableKey<Tap>> DescriptorTemplate for Bip86<K> {
|
impl<K: DerivableKey<Tap>> DescriptorTemplate for Bip86<K> {
|
||||||
@@ -453,22 +484,24 @@ impl<K: DerivableKey<Tap>> DescriptorTemplate for Bip86<K> {
|
|||||||
///
|
///
|
||||||
/// ```
|
/// ```
|
||||||
/// # use std::str::FromStr;
|
/// # use std::str::FromStr;
|
||||||
/// # use bdk::bitcoin::{PrivateKey, Network};
|
/// # use bdk_wallet::bitcoin::{PrivateKey, Network};
|
||||||
/// # use bdk::{Wallet, KeychainKind};
|
/// # use bdk_wallet::{Wallet, KeychainKind};
|
||||||
/// use bdk::template::Bip86Public;
|
/// use bdk_wallet::template::Bip86Public;
|
||||||
///
|
///
|
||||||
/// let key = bitcoin::bip32::Xpub::from_str("tpubDC2Qwo2TFsaNC4ju8nrUJ9mqVT3eSgdmy1yPqhgkjwmke3PRXutNGRYAUo6RCHTcVQaDR3ohNU9we59brGHuEKPvH1ags2nevW5opEE9Z5Q")?;
|
/// let key = bitcoin::bip32::Xpub::from_str("tpubDC2Qwo2TFsaNC4ju8nrUJ9mqVT3eSgdmy1yPqhgkjwmke3PRXutNGRYAUo6RCHTcVQaDR3ohNU9we59brGHuEKPvH1ags2nevW5opEE9Z5Q")?;
|
||||||
/// let fingerprint = bitcoin::bip32::Fingerprint::from_str("c55b303f")?;
|
/// let fingerprint = bitcoin::bip32::Fingerprint::from_str("c55b303f")?;
|
||||||
/// let mut wallet = Wallet::new_no_persist(
|
/// let mut wallet = Wallet::create(
|
||||||
/// Bip86Public(key.clone(), fingerprint, KeychainKind::External),
|
/// Bip86Public(key.clone(), fingerprint, KeychainKind::External),
|
||||||
/// Some(Bip86Public(key, fingerprint, KeychainKind::Internal)),
|
/// Bip86Public(key, fingerprint, KeychainKind::Internal),
|
||||||
/// Network::Testnet,
|
/// )
|
||||||
/// )?;
|
/// .network(Network::Testnet)
|
||||||
|
/// .create_wallet_no_persist()?;
|
||||||
///
|
///
|
||||||
/// assert_eq!(wallet.next_unused_address(KeychainKind::External)?.to_string(), "tb1pwjp9f2k5n0xq73ecuu0c5njvgqr3vkh7yaylmpqvsuuaafymh0msvcmh37");
|
/// assert_eq!(wallet.next_unused_address(KeychainKind::External).to_string(), "tb1pwjp9f2k5n0xq73ecuu0c5njvgqr3vkh7yaylmpqvsuuaafymh0msvcmh37");
|
||||||
/// assert_eq!(wallet.public_descriptor(KeychainKind::External).unwrap().to_string(), "tr([c55b303f/86'/1'/0']tpubDC2Qwo2TFsaNC4ju8nrUJ9mqVT3eSgdmy1yPqhgkjwmke3PRXutNGRYAUo6RCHTcVQaDR3ohNU9we59brGHuEKPvH1ags2nevW5opEE9Z5Q/0/*)#2p65srku");
|
/// assert_eq!(wallet.public_descriptor(KeychainKind::External).to_string(), "tr([c55b303f/86'/1'/0']tpubDC2Qwo2TFsaNC4ju8nrUJ9mqVT3eSgdmy1yPqhgkjwmke3PRXutNGRYAUo6RCHTcVQaDR3ohNU9we59brGHuEKPvH1ags2nevW5opEE9Z5Q/0/*)#2p65srku");
|
||||||
/// # Ok::<_, Box<dyn std::error::Error>>(())
|
/// # Ok::<_, Box<dyn std::error::Error>>(())
|
||||||
/// ```
|
/// ```
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
pub struct Bip86Public<K: DerivableKey<Tap>>(pub K, pub bip32::Fingerprint, pub KeychainKind);
|
pub struct Bip86Public<K: DerivableKey<Tap>>(pub K, pub bip32::Fingerprint, pub KeychainKind);
|
||||||
|
|
||||||
impl<K: DerivableKey<Tap>> DescriptorTemplate for Bip86Public<K> {
|
impl<K: DerivableKey<Tap>> DescriptorTemplate for Bip86Public<K> {
|
||||||
@@ -568,7 +601,7 @@ mod test {
|
|||||||
use bitcoin::bip32::ChildNumber::{self, Hardened};
|
use bitcoin::bip32::ChildNumber::{self, Hardened};
|
||||||
|
|
||||||
let xprvkey = bitcoin::bip32::Xpriv::from_str("xprv9s21ZrQH143K2fpbqApQL69a4oKdGVnVN52R82Ft7d1pSqgKmajF62acJo3aMszZb6qQ22QsVECSFxvf9uyxFUvFYQMq3QbtwtRSMjLAhMf").unwrap();
|
let xprvkey = bitcoin::bip32::Xpriv::from_str("xprv9s21ZrQH143K2fpbqApQL69a4oKdGVnVN52R82Ft7d1pSqgKmajF62acJo3aMszZb6qQ22QsVECSFxvf9uyxFUvFYQMq3QbtwtRSMjLAhMf").unwrap();
|
||||||
assert_eq!(Network::Bitcoin, xprvkey.network);
|
assert!(xprvkey.network.is_mainnet());
|
||||||
let xdesc = Bip44(xprvkey, KeychainKind::Internal)
|
let xdesc = Bip44(xprvkey, KeychainKind::Internal)
|
||||||
.build(Network::Bitcoin)
|
.build(Network::Bitcoin)
|
||||||
.unwrap();
|
.unwrap();
|
||||||
@@ -582,7 +615,7 @@ mod test {
|
|||||||
}
|
}
|
||||||
|
|
||||||
let tprvkey = bitcoin::bip32::Xpriv::from_str("tprv8ZgxMBicQKsPcx5nBGsR63Pe8KnRUqmbJNENAfGftF3yuXoMMoVJJcYeUw5eVkm9WBPjWYt6HMWYJNesB5HaNVBaFc1M6dRjWSYnmewUMYy").unwrap();
|
let tprvkey = bitcoin::bip32::Xpriv::from_str("tprv8ZgxMBicQKsPcx5nBGsR63Pe8KnRUqmbJNENAfGftF3yuXoMMoVJJcYeUw5eVkm9WBPjWYt6HMWYJNesB5HaNVBaFc1M6dRjWSYnmewUMYy").unwrap();
|
||||||
assert_eq!(Network::Testnet, tprvkey.network);
|
assert!(!tprvkey.network.is_mainnet());
|
||||||
let tdesc = Bip44(tprvkey, KeychainKind::Internal)
|
let tdesc = Bip44(tprvkey, KeychainKind::Internal)
|
||||||
.build(Network::Testnet)
|
.build(Network::Testnet)
|
||||||
.unwrap();
|
.unwrap();
|
||||||
@@ -20,6 +20,8 @@ use core::marker::PhantomData;
|
|||||||
use core::ops::Deref;
|
use core::ops::Deref;
|
||||||
use core::str::FromStr;
|
use core::str::FromStr;
|
||||||
|
|
||||||
|
use rand_core::{CryptoRng, RngCore};
|
||||||
|
|
||||||
use bitcoin::secp256k1::{self, Secp256k1, Signing};
|
use bitcoin::secp256k1::{self, Secp256k1, Signing};
|
||||||
|
|
||||||
use bitcoin::bip32;
|
use bitcoin::bip32;
|
||||||
@@ -97,7 +99,7 @@ impl<Ctx: ScriptContext> DescriptorKey<Ctx> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// This method is used internally by `bdk::fragment!` and `bdk::descriptor!`. It has to be
|
// This method is used internally by `bdk_wallet::fragment!` and `bdk_wallet::descriptor!`. It has to be
|
||||||
// public because it is effectively called by external crates once the macros are expanded,
|
// public because it is effectively called by external crates once the macros are expanded,
|
||||||
// but since it is not meant to be part of the public api we hide it from the docs.
|
// but since it is not meant to be part of the public api we hide it from the docs.
|
||||||
#[doc(hidden)]
|
#[doc(hidden)]
|
||||||
@@ -206,9 +208,9 @@ impl<Ctx: ScriptContext + 'static> ExtScriptContext for Ctx {
|
|||||||
/// Key type valid in any context:
|
/// Key type valid in any context:
|
||||||
///
|
///
|
||||||
/// ```
|
/// ```
|
||||||
/// use bdk::bitcoin::PublicKey;
|
/// use bdk_wallet::bitcoin::PublicKey;
|
||||||
///
|
///
|
||||||
/// use bdk::keys::{DescriptorKey, IntoDescriptorKey, KeyError, ScriptContext};
|
/// use bdk_wallet::keys::{DescriptorKey, IntoDescriptorKey, KeyError, ScriptContext};
|
||||||
///
|
///
|
||||||
/// pub struct MyKeyType {
|
/// pub struct MyKeyType {
|
||||||
/// pubkey: PublicKey,
|
/// pubkey: PublicKey,
|
||||||
@@ -224,9 +226,9 @@ impl<Ctx: ScriptContext + 'static> ExtScriptContext for Ctx {
|
|||||||
/// Key type that is only valid on mainnet:
|
/// Key type that is only valid on mainnet:
|
||||||
///
|
///
|
||||||
/// ```
|
/// ```
|
||||||
/// use bdk::bitcoin::PublicKey;
|
/// use bdk_wallet::bitcoin::PublicKey;
|
||||||
///
|
///
|
||||||
/// use bdk::keys::{
|
/// use bdk_wallet::keys::{
|
||||||
/// mainnet_network, DescriptorKey, DescriptorPublicKey, IntoDescriptorKey, KeyError,
|
/// mainnet_network, DescriptorKey, DescriptorPublicKey, IntoDescriptorKey, KeyError,
|
||||||
/// ScriptContext, SinglePub, SinglePubKey,
|
/// ScriptContext, SinglePub, SinglePubKey,
|
||||||
/// };
|
/// };
|
||||||
@@ -251,9 +253,11 @@ impl<Ctx: ScriptContext + 'static> ExtScriptContext for Ctx {
|
|||||||
/// Key type that internally encodes in which context it's valid. The context is checked at runtime:
|
/// Key type that internally encodes in which context it's valid. The context is checked at runtime:
|
||||||
///
|
///
|
||||||
/// ```
|
/// ```
|
||||||
/// use bdk::bitcoin::PublicKey;
|
/// use bdk_wallet::bitcoin::PublicKey;
|
||||||
///
|
///
|
||||||
/// use bdk::keys::{DescriptorKey, ExtScriptContext, IntoDescriptorKey, KeyError, ScriptContext};
|
/// use bdk_wallet::keys::{
|
||||||
|
/// DescriptorKey, ExtScriptContext, IntoDescriptorKey, KeyError, ScriptContext,
|
||||||
|
/// };
|
||||||
///
|
///
|
||||||
/// pub struct MyKeyType {
|
/// pub struct MyKeyType {
|
||||||
/// is_legacy: bool,
|
/// is_legacy: bool,
|
||||||
@@ -279,17 +283,17 @@ impl<Ctx: ScriptContext + 'static> ExtScriptContext for Ctx {
|
|||||||
/// makes the compiler (correctly) fail.
|
/// makes the compiler (correctly) fail.
|
||||||
///
|
///
|
||||||
/// ```compile_fail
|
/// ```compile_fail
|
||||||
/// use bdk::bitcoin::PublicKey;
|
/// use bdk_wallet::bitcoin::PublicKey;
|
||||||
/// use core::str::FromStr;
|
/// use core::str::FromStr;
|
||||||
///
|
///
|
||||||
/// use bdk::keys::{DescriptorKey, IntoDescriptorKey, KeyError};
|
/// use bdk_wallet::keys::{DescriptorKey, IntoDescriptorKey, KeyError};
|
||||||
///
|
///
|
||||||
/// pub struct MySegwitOnlyKeyType {
|
/// pub struct MySegwitOnlyKeyType {
|
||||||
/// pubkey: PublicKey,
|
/// pubkey: PublicKey,
|
||||||
/// }
|
/// }
|
||||||
///
|
///
|
||||||
/// impl IntoDescriptorKey<bdk::miniscript::Segwitv0> for MySegwitOnlyKeyType {
|
/// impl IntoDescriptorKey<bdk_wallet::miniscript::Segwitv0> for MySegwitOnlyKeyType {
|
||||||
/// fn into_descriptor_key(self) -> Result<DescriptorKey<bdk::miniscript::Segwitv0>, KeyError> {
|
/// fn into_descriptor_key(self) -> Result<DescriptorKey<bdk_wallet::miniscript::Segwitv0>, KeyError> {
|
||||||
/// self.pubkey.into_descriptor_key()
|
/// self.pubkey.into_descriptor_key()
|
||||||
/// }
|
/// }
|
||||||
/// }
|
/// }
|
||||||
@@ -297,8 +301,8 @@ impl<Ctx: ScriptContext + 'static> ExtScriptContext for Ctx {
|
|||||||
/// let key = MySegwitOnlyKeyType {
|
/// let key = MySegwitOnlyKeyType {
|
||||||
/// pubkey: PublicKey::from_str("...")?,
|
/// pubkey: PublicKey::from_str("...")?,
|
||||||
/// };
|
/// };
|
||||||
/// let (descriptor, _, _) = bdk::descriptor!(pkh(key))?;
|
/// let (descriptor, _, _) = bdk_wallet::descriptor!(pkh(key))?;
|
||||||
/// // ^^^^^ changing this to `wpkh` would make it compile
|
/// // ^^^^^ changing this to `wpkh` would make it compile
|
||||||
///
|
///
|
||||||
/// # Ok::<_, Box<dyn std::error::Error>>(())
|
/// # Ok::<_, Box<dyn std::error::Error>>(())
|
||||||
/// ```
|
/// ```
|
||||||
@@ -334,7 +338,7 @@ impl<Ctx: ScriptContext> ExtendedKey<Ctx> {
|
|||||||
pub fn into_xprv(self, network: Network) -> Option<bip32::Xpriv> {
|
pub fn into_xprv(self, network: Network) -> Option<bip32::Xpriv> {
|
||||||
match self {
|
match self {
|
||||||
ExtendedKey::Private((mut xprv, _)) => {
|
ExtendedKey::Private((mut xprv, _)) => {
|
||||||
xprv.network = network;
|
xprv.network = network.into();
|
||||||
Some(xprv)
|
Some(xprv)
|
||||||
}
|
}
|
||||||
ExtendedKey::Public(_) => None,
|
ExtendedKey::Public(_) => None,
|
||||||
@@ -353,7 +357,7 @@ impl<Ctx: ScriptContext> ExtendedKey<Ctx> {
|
|||||||
ExtendedKey::Public((xpub, _)) => xpub,
|
ExtendedKey::Public((xpub, _)) => xpub,
|
||||||
};
|
};
|
||||||
|
|
||||||
xpub.network = network;
|
xpub.network = network.into();
|
||||||
xpub
|
xpub
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -387,9 +391,9 @@ impl<Ctx: ScriptContext> From<bip32::Xpriv> for ExtendedKey<Ctx> {
|
|||||||
/// an [`Xpub`] can implement only the required `into_extended_key()` method.
|
/// an [`Xpub`] can implement only the required `into_extended_key()` method.
|
||||||
///
|
///
|
||||||
/// ```
|
/// ```
|
||||||
/// use bdk::bitcoin;
|
/// use bdk_wallet::bitcoin;
|
||||||
/// use bdk::bitcoin::bip32;
|
/// use bdk_wallet::bitcoin::bip32;
|
||||||
/// use bdk::keys::{DerivableKey, ExtendedKey, KeyError, ScriptContext};
|
/// use bdk_wallet::keys::{DerivableKey, ExtendedKey, KeyError, ScriptContext};
|
||||||
///
|
///
|
||||||
/// struct MyCustomKeyType {
|
/// struct MyCustomKeyType {
|
||||||
/// key_data: bitcoin::PrivateKey,
|
/// key_data: bitcoin::PrivateKey,
|
||||||
@@ -400,7 +404,7 @@ impl<Ctx: ScriptContext> From<bip32::Xpriv> for ExtendedKey<Ctx> {
|
|||||||
/// impl<Ctx: ScriptContext> DerivableKey<Ctx> for MyCustomKeyType {
|
/// impl<Ctx: ScriptContext> DerivableKey<Ctx> for MyCustomKeyType {
|
||||||
/// fn into_extended_key(self) -> Result<ExtendedKey<Ctx>, KeyError> {
|
/// fn into_extended_key(self) -> Result<ExtendedKey<Ctx>, KeyError> {
|
||||||
/// let xprv = bip32::Xpriv {
|
/// let xprv = bip32::Xpriv {
|
||||||
/// network: self.network,
|
/// network: self.network.into(),
|
||||||
/// depth: 0,
|
/// depth: 0,
|
||||||
/// parent_fingerprint: bip32::Fingerprint::default(),
|
/// parent_fingerprint: bip32::Fingerprint::default(),
|
||||||
/// private_key: self.key_data.inner,
|
/// private_key: self.key_data.inner,
|
||||||
@@ -418,9 +422,9 @@ impl<Ctx: ScriptContext> From<bip32::Xpriv> for ExtendedKey<Ctx> {
|
|||||||
/// [`Xpriv`] or [`Xpub`] will be considered valid.
|
/// [`Xpriv`] or [`Xpub`] will be considered valid.
|
||||||
///
|
///
|
||||||
/// ```
|
/// ```
|
||||||
/// use bdk::bitcoin;
|
/// use bdk_wallet::bitcoin;
|
||||||
/// use bdk::bitcoin::bip32;
|
/// use bdk_wallet::bitcoin::bip32;
|
||||||
/// use bdk::keys::{
|
/// use bdk_wallet::keys::{
|
||||||
/// any_network, DerivableKey, DescriptorKey, ExtendedKey, KeyError, ScriptContext,
|
/// any_network, DerivableKey, DescriptorKey, ExtendedKey, KeyError, ScriptContext,
|
||||||
/// };
|
/// };
|
||||||
///
|
///
|
||||||
@@ -432,7 +436,7 @@ impl<Ctx: ScriptContext> From<bip32::Xpriv> for ExtendedKey<Ctx> {
|
|||||||
/// impl<Ctx: ScriptContext> DerivableKey<Ctx> for MyCustomKeyType {
|
/// impl<Ctx: ScriptContext> DerivableKey<Ctx> for MyCustomKeyType {
|
||||||
/// fn into_extended_key(self) -> Result<ExtendedKey<Ctx>, KeyError> {
|
/// fn into_extended_key(self) -> Result<ExtendedKey<Ctx>, KeyError> {
|
||||||
/// let xprv = bip32::Xpriv {
|
/// let xprv = bip32::Xpriv {
|
||||||
/// network: bitcoin::Network::Bitcoin, // pick an arbitrary network here
|
/// network: bitcoin::Network::Bitcoin.into(), // pick an arbitrary network here
|
||||||
/// depth: 0,
|
/// depth: 0,
|
||||||
/// parent_fingerprint: bip32::Fingerprint::default(),
|
/// parent_fingerprint: bip32::Fingerprint::default(),
|
||||||
/// private_key: self.key_data.inner,
|
/// private_key: self.key_data.inner,
|
||||||
@@ -469,9 +473,9 @@ pub trait DerivableKey<Ctx: ScriptContext = miniscript::Legacy>: Sized {
|
|||||||
This can be used to get direct access to `xprv`s and `xpub`s for types that implement this trait,
|
This can be used to get direct access to `xprv`s and `xpub`s for types that implement this trait,
|
||||||
like [`Mnemonic`](bip39::Mnemonic) when the `keys-bip39` feature is enabled.
|
like [`Mnemonic`](bip39::Mnemonic) when the `keys-bip39` feature is enabled.
|
||||||
```rust
|
```rust
|
||||||
use bdk::bitcoin::Network;
|
use bdk_wallet::bitcoin::Network;
|
||||||
use bdk::keys::{DerivableKey, ExtendedKey};
|
use bdk_wallet::keys::{DerivableKey, ExtendedKey};
|
||||||
use bdk::keys::bip39::{Mnemonic, Language};
|
use bdk_wallet::keys::bip39::{Mnemonic, Language};
|
||||||
|
|
||||||
# fn main() -> Result<(), Box<dyn std::error::Error>> {
|
# fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||||
let xkey: ExtendedKey =
|
let xkey: ExtendedKey =
|
||||||
@@ -629,12 +633,23 @@ pub trait GeneratableKey<Ctx: ScriptContext>: Sized {
|
|||||||
entropy: Self::Entropy,
|
entropy: Self::Entropy,
|
||||||
) -> Result<GeneratedKey<Self, Ctx>, Self::Error>;
|
) -> Result<GeneratedKey<Self, Ctx>, Self::Error>;
|
||||||
|
|
||||||
/// Generate a key given the options with a random entropy
|
/// Generate a key given the options with random entropy.
|
||||||
|
///
|
||||||
|
/// Uses the thread-local random number generator.
|
||||||
|
#[cfg(feature = "std")]
|
||||||
fn generate(options: Self::Options) -> Result<GeneratedKey<Self, Ctx>, Self::Error> {
|
fn generate(options: Self::Options) -> Result<GeneratedKey<Self, Ctx>, Self::Error> {
|
||||||
use rand::{thread_rng, Rng};
|
Self::generate_with_aux_rand(options, &mut bitcoin::key::rand::thread_rng())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Generate a key given the options with random entropy.
|
||||||
|
///
|
||||||
|
/// Uses a provided random number generator (rng).
|
||||||
|
fn generate_with_aux_rand(
|
||||||
|
options: Self::Options,
|
||||||
|
rng: &mut (impl CryptoRng + RngCore),
|
||||||
|
) -> Result<GeneratedKey<Self, Ctx>, Self::Error> {
|
||||||
let mut entropy = Self::Entropy::default();
|
let mut entropy = Self::Entropy::default();
|
||||||
thread_rng().fill(entropy.as_mut());
|
rng.fill_bytes(entropy.as_mut());
|
||||||
Self::generate_with_entropy(options, entropy)
|
Self::generate_with_entropy(options, entropy)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -655,8 +670,20 @@ where
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// Generate a key with the default options and a random entropy
|
/// Generate a key with the default options and a random entropy
|
||||||
|
///
|
||||||
|
/// Uses the thread-local random number generator.
|
||||||
|
#[cfg(feature = "std")]
|
||||||
fn generate_default() -> Result<GeneratedKey<Self, Ctx>, Self::Error> {
|
fn generate_default() -> Result<GeneratedKey<Self, Ctx>, Self::Error> {
|
||||||
Self::generate(Default::default())
|
Self::generate_with_aux_rand(Default::default(), &mut bitcoin::key::rand::thread_rng())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Generate a key with the default options and a random entropy
|
||||||
|
///
|
||||||
|
/// Uses a provided random number generator (rng).
|
||||||
|
fn generate_default_with_aux_rand(
|
||||||
|
rng: &mut (impl CryptoRng + RngCore),
|
||||||
|
) -> Result<GeneratedKey<Self, Ctx>, Self::Error> {
|
||||||
|
Self::generate_with_aux_rand(Default::default(), rng)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -715,7 +742,7 @@ impl<Ctx: ScriptContext> GeneratableKey<Ctx> for PrivateKey {
|
|||||||
let inner = secp256k1::SecretKey::from_slice(&entropy)?;
|
let inner = secp256k1::SecretKey::from_slice(&entropy)?;
|
||||||
let private_key = PrivateKey {
|
let private_key = PrivateKey {
|
||||||
compressed: options.compressed,
|
compressed: options.compressed,
|
||||||
network: Network::Bitcoin,
|
network: Network::Bitcoin.into(),
|
||||||
inner,
|
inner,
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -764,7 +791,7 @@ fn expand_multi_keys<Pk: IntoDescriptorKey<Ctx>, Ctx: ScriptContext>(
|
|||||||
Ok((pks, key_map, valid_networks))
|
Ok((pks, key_map, valid_networks))
|
||||||
}
|
}
|
||||||
|
|
||||||
// Used internally by `bdk::fragment!` to build `pk_k()` fragments
|
// Used internally by `bdk_wallet::fragment!` to build `pk_k()` fragments
|
||||||
#[doc(hidden)]
|
#[doc(hidden)]
|
||||||
pub fn make_pk<Pk: IntoDescriptorKey<Ctx>, Ctx: ScriptContext>(
|
pub fn make_pk<Pk: IntoDescriptorKey<Ctx>, Ctx: ScriptContext>(
|
||||||
descriptor_key: Pk,
|
descriptor_key: Pk,
|
||||||
@@ -778,7 +805,7 @@ pub fn make_pk<Pk: IntoDescriptorKey<Ctx>, Ctx: ScriptContext>(
|
|||||||
Ok((minisc, key_map, valid_networks))
|
Ok((minisc, key_map, valid_networks))
|
||||||
}
|
}
|
||||||
|
|
||||||
// Used internally by `bdk::fragment!` to build `pk_h()` fragments
|
// Used internally by `bdk_wallet::fragment!` to build `pk_h()` fragments
|
||||||
#[doc(hidden)]
|
#[doc(hidden)]
|
||||||
pub fn make_pkh<Pk: IntoDescriptorKey<Ctx>, Ctx: ScriptContext>(
|
pub fn make_pkh<Pk: IntoDescriptorKey<Ctx>, Ctx: ScriptContext>(
|
||||||
descriptor_key: Pk,
|
descriptor_key: Pk,
|
||||||
@@ -792,7 +819,7 @@ pub fn make_pkh<Pk: IntoDescriptorKey<Ctx>, Ctx: ScriptContext>(
|
|||||||
Ok((minisc, key_map, valid_networks))
|
Ok((minisc, key_map, valid_networks))
|
||||||
}
|
}
|
||||||
|
|
||||||
// Used internally by `bdk::fragment!` to build `multi()` fragments
|
// Used internally by `bdk_wallet::fragment!` to build `multi()` fragments
|
||||||
#[doc(hidden)]
|
#[doc(hidden)]
|
||||||
pub fn make_multi<
|
pub fn make_multi<
|
||||||
Pk: IntoDescriptorKey<Ctx>,
|
Pk: IntoDescriptorKey<Ctx>,
|
||||||
@@ -812,7 +839,7 @@ pub fn make_multi<
|
|||||||
Ok((minisc, key_map, valid_networks))
|
Ok((minisc, key_map, valid_networks))
|
||||||
}
|
}
|
||||||
|
|
||||||
// Used internally by `bdk::descriptor!` to build `sortedmulti()` fragments
|
// Used internally by `bdk_wallet::descriptor!` to build `sortedmulti()` fragments
|
||||||
#[doc(hidden)]
|
#[doc(hidden)]
|
||||||
pub fn make_sortedmulti<Pk, Ctx, F>(
|
pub fn make_sortedmulti<Pk, Ctx, F>(
|
||||||
thresh: usize,
|
thresh: usize,
|
||||||
@@ -834,7 +861,7 @@ where
|
|||||||
Ok((descriptor, key_map, valid_networks))
|
Ok((descriptor, key_map, valid_networks))
|
||||||
}
|
}
|
||||||
|
|
||||||
/// The "identity" conversion is used internally by some `bdk::fragment`s
|
/// The "identity" conversion is used internally by some `bdk_wallet::fragment`s
|
||||||
impl<Ctx: ScriptContext> IntoDescriptorKey<Ctx> for DescriptorKey<Ctx> {
|
impl<Ctx: ScriptContext> IntoDescriptorKey<Ctx> for DescriptorKey<Ctx> {
|
||||||
fn into_descriptor_key(self) -> Result<DescriptorKey<Ctx>, KeyError> {
|
fn into_descriptor_key(self) -> Result<DescriptorKey<Ctx>, KeyError> {
|
||||||
Ok(self)
|
Ok(self)
|
||||||
@@ -845,9 +872,7 @@ impl<Ctx: ScriptContext> IntoDescriptorKey<Ctx> for DescriptorPublicKey {
|
|||||||
fn into_descriptor_key(self) -> Result<DescriptorKey<Ctx>, KeyError> {
|
fn into_descriptor_key(self) -> Result<DescriptorKey<Ctx>, KeyError> {
|
||||||
let networks = match self {
|
let networks = match self {
|
||||||
DescriptorPublicKey::Single(_) => any_network(),
|
DescriptorPublicKey::Single(_) => any_network(),
|
||||||
DescriptorPublicKey::XPub(DescriptorXKey { xkey, .. })
|
DescriptorPublicKey::XPub(DescriptorXKey { xkey, .. }) if xkey.network.is_mainnet() => {
|
||||||
if xkey.network == Network::Bitcoin =>
|
|
||||||
{
|
|
||||||
mainnet_network()
|
mainnet_network()
|
||||||
}
|
}
|
||||||
_ => test_networks(),
|
_ => test_networks(),
|
||||||
@@ -880,12 +905,8 @@ impl<Ctx: ScriptContext> IntoDescriptorKey<Ctx> for XOnlyPublicKey {
|
|||||||
impl<Ctx: ScriptContext> IntoDescriptorKey<Ctx> for DescriptorSecretKey {
|
impl<Ctx: ScriptContext> IntoDescriptorKey<Ctx> for DescriptorSecretKey {
|
||||||
fn into_descriptor_key(self) -> Result<DescriptorKey<Ctx>, KeyError> {
|
fn into_descriptor_key(self) -> Result<DescriptorKey<Ctx>, KeyError> {
|
||||||
let networks = match &self {
|
let networks = match &self {
|
||||||
DescriptorSecretKey::Single(sk) if sk.key.network == Network::Bitcoin => {
|
DescriptorSecretKey::Single(sk) if sk.key.network.is_mainnet() => mainnet_network(),
|
||||||
mainnet_network()
|
DescriptorSecretKey::XPrv(DescriptorXKey { xkey, .. }) if xkey.network.is_mainnet() => {
|
||||||
}
|
|
||||||
DescriptorSecretKey::XPrv(DescriptorXKey { xkey, .. })
|
|
||||||
if xkey.network == Network::Bitcoin =>
|
|
||||||
{
|
|
||||||
mainnet_network()
|
mainnet_network()
|
||||||
}
|
}
|
||||||
_ => test_networks(),
|
_ => test_networks(),
|
||||||
@@ -914,7 +935,7 @@ impl<Ctx: ScriptContext> IntoDescriptorKey<Ctx> for PrivateKey {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// Errors thrown while working with [`keys`](crate::keys)
|
/// Errors thrown while working with [`keys`](crate::keys)
|
||||||
#[derive(Debug)]
|
#[derive(Debug, PartialEq)]
|
||||||
pub enum KeyError {
|
pub enum KeyError {
|
||||||
/// The key cannot exist in the given script context
|
/// The key cannot exist in the given script context
|
||||||
InvalidScriptContext,
|
InvalidScriptContext,
|
||||||
@@ -1001,6 +1022,6 @@ pub mod test {
|
|||||||
.unwrap();
|
.unwrap();
|
||||||
let xprv = xkey.into_xprv(Network::Testnet).unwrap();
|
let xprv = xkey.into_xprv(Network::Testnet).unwrap();
|
||||||
|
|
||||||
assert_eq!(xprv.network, Network::Testnet);
|
assert_eq!(xprv.network, Network::Testnet.into());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -15,33 +15,36 @@ extern crate std;
|
|||||||
#[doc(hidden)]
|
#[doc(hidden)]
|
||||||
#[macro_use]
|
#[macro_use]
|
||||||
pub extern crate alloc;
|
pub extern crate alloc;
|
||||||
|
pub extern crate bdk_chain as chain;
|
||||||
|
#[cfg(feature = "file_store")]
|
||||||
|
pub extern crate bdk_file_store as file_store;
|
||||||
|
#[cfg(feature = "keys-bip39")]
|
||||||
|
pub extern crate bip39;
|
||||||
pub extern crate bitcoin;
|
pub extern crate bitcoin;
|
||||||
pub extern crate miniscript;
|
pub extern crate miniscript;
|
||||||
extern crate serde;
|
pub extern crate serde;
|
||||||
extern crate serde_json;
|
pub extern crate serde_json;
|
||||||
|
|
||||||
#[cfg(feature = "keys-bip39")]
|
|
||||||
extern crate bip39;
|
|
||||||
|
|
||||||
pub mod descriptor;
|
pub mod descriptor;
|
||||||
pub mod keys;
|
pub mod keys;
|
||||||
pub mod psbt;
|
pub mod psbt;
|
||||||
pub(crate) mod types;
|
mod types;
|
||||||
pub mod wallet;
|
mod wallet;
|
||||||
|
|
||||||
|
pub(crate) use bdk_chain::collections;
|
||||||
|
#[cfg(feature = "rusqlite")]
|
||||||
|
pub use bdk_chain::rusqlite;
|
||||||
|
#[cfg(feature = "rusqlite")]
|
||||||
|
pub use bdk_chain::rusqlite_impl;
|
||||||
pub use descriptor::template;
|
pub use descriptor::template;
|
||||||
pub use descriptor::HdKeyPaths;
|
pub use descriptor::HdKeyPaths;
|
||||||
|
pub use signer;
|
||||||
|
pub use signer::SignOptions;
|
||||||
|
pub use tx_builder::*;
|
||||||
pub use types::*;
|
pub use types::*;
|
||||||
pub use wallet::signer;
|
pub use wallet::*;
|
||||||
pub use wallet::signer::SignOptions;
|
|
||||||
pub use wallet::tx_builder::TxBuilder;
|
|
||||||
pub use wallet::Wallet;
|
|
||||||
|
|
||||||
/// Get the version of BDK at runtime
|
/// Get the version of [`bdk_wallet`](crate) at runtime.
|
||||||
pub fn version() -> &'static str {
|
pub fn version() -> &'static str {
|
||||||
env!("CARGO_PKG_VERSION", "unknown")
|
env!("CARGO_PKG_VERSION", "unknown")
|
||||||
}
|
}
|
||||||
|
|
||||||
pub use bdk_chain as chain;
|
|
||||||
pub(crate) use bdk_chain::collections;
|
|
||||||
@@ -26,7 +26,7 @@ pub trait PsbtUtils {
|
|||||||
|
|
||||||
/// The total transaction fee amount, sum of input amounts minus sum of output amounts, in sats.
|
/// The total transaction fee amount, sum of input amounts minus sum of output amounts, in sats.
|
||||||
/// If the PSBT is missing a TxOut for an input returns None.
|
/// If the PSBT is missing a TxOut for an input returns None.
|
||||||
fn fee_amount(&self) -> Option<u64>;
|
fn fee_amount(&self) -> Option<Amount>;
|
||||||
|
|
||||||
/// The transaction's fee rate. This value will only be accurate if calculated AFTER the
|
/// The transaction's fee rate. This value will only be accurate if calculated AFTER the
|
||||||
/// `Psbt` is finalized and all witness/signature data is added to the
|
/// `Psbt` is finalized and all witness/signature data is added to the
|
||||||
@@ -49,18 +49,13 @@ impl PsbtUtils for Psbt {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn fee_amount(&self) -> Option<u64> {
|
fn fee_amount(&self) -> Option<Amount> {
|
||||||
let tx = &self.unsigned_tx;
|
let tx = &self.unsigned_tx;
|
||||||
let utxos: Option<Vec<TxOut>> = (0..tx.input.len()).map(|i| self.get_utxo_for(i)).collect();
|
let utxos: Option<Vec<TxOut>> = (0..tx.input.len()).map(|i| self.get_utxo_for(i)).collect();
|
||||||
|
|
||||||
utxos.map(|inputs| {
|
utxos.map(|inputs| {
|
||||||
let input_amount: u64 = inputs.iter().map(|i| i.value.to_sat()).sum();
|
let input_amount: Amount = inputs.iter().map(|i| i.value).sum();
|
||||||
let output_amount: u64 = self
|
let output_amount: Amount = self.unsigned_tx.output.iter().map(|o| o.value).sum();
|
||||||
.unsigned_tx
|
|
||||||
.output
|
|
||||||
.iter()
|
|
||||||
.map(|o| o.value.to_sat())
|
|
||||||
.sum();
|
|
||||||
input_amount
|
input_amount
|
||||||
.checked_sub(output_amount)
|
.checked_sub(output_amount)
|
||||||
.expect("input amount must be greater than output amount")
|
.expect("input amount must be greater than output amount")
|
||||||
@@ -70,6 +65,6 @@ impl PsbtUtils for Psbt {
|
|||||||
fn fee_rate(&self) -> Option<FeeRate> {
|
fn fee_rate(&self) -> Option<FeeRate> {
|
||||||
let fee_amount = self.fee_amount();
|
let fee_amount = self.fee_amount();
|
||||||
let weight = self.clone().extract_tx().ok()?.weight();
|
let weight = self.clone().extract_tx().ok()?.weight();
|
||||||
fee_amount.map(|fee| Amount::from_sat(fee) / weight)
|
fee_amount.map(|fee| fee / weight)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -13,8 +13,8 @@ use alloc::boxed::Box;
|
|||||||
use core::convert::AsRef;
|
use core::convert::AsRef;
|
||||||
|
|
||||||
use bdk_chain::ConfirmationTime;
|
use bdk_chain::ConfirmationTime;
|
||||||
use bitcoin::blockdata::transaction::{OutPoint, Sequence, TxOut};
|
use bitcoin::transaction::{OutPoint, Sequence, TxOut};
|
||||||
use bitcoin::psbt;
|
use bitcoin::{psbt, Weight};
|
||||||
|
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
@@ -72,7 +72,7 @@ pub struct WeightedUtxo {
|
|||||||
/// properly maintain the feerate when adding this input to a transaction during coin selection.
|
/// properly maintain the feerate when adding this input to a transaction during coin selection.
|
||||||
///
|
///
|
||||||
/// [weight units]: https://en.bitcoin.it/wiki/Weight_units
|
/// [weight units]: https://en.bitcoin.it/wiki/Weight_units
|
||||||
pub satisfaction_weight: usize,
|
pub satisfaction_weight: Weight,
|
||||||
/// The UTXO
|
/// The UTXO
|
||||||
pub utxo: Utxo,
|
pub utxo: Utxo,
|
||||||
}
|
}
|
||||||
209
crates/wallet/src/wallet/changeset.rs
Normal file
209
crates/wallet/src/wallet/changeset.rs
Normal file
@@ -0,0 +1,209 @@
|
|||||||
|
use bdk_chain::{
|
||||||
|
indexed_tx_graph, keychain_txout, local_chain, tx_graph, ConfirmationBlockTime, Merge,
|
||||||
|
};
|
||||||
|
use miniscript::{Descriptor, DescriptorPublicKey};
|
||||||
|
|
||||||
|
type IndexedTxGraphChangeSet =
|
||||||
|
indexed_tx_graph::ChangeSet<ConfirmationBlockTime, keychain_txout::ChangeSet>;
|
||||||
|
|
||||||
|
/// A changeset for [`Wallet`](crate::Wallet).
|
||||||
|
#[derive(Default, Debug, Clone, PartialEq, serde::Deserialize, serde::Serialize)]
|
||||||
|
#[non_exhaustive]
|
||||||
|
pub struct ChangeSet {
|
||||||
|
/// Descriptor for recipient addresses.
|
||||||
|
pub descriptor: Option<Descriptor<DescriptorPublicKey>>,
|
||||||
|
/// Descriptor for change addresses.
|
||||||
|
pub change_descriptor: Option<Descriptor<DescriptorPublicKey>>,
|
||||||
|
/// Stores the network type of the transaction data.
|
||||||
|
pub network: Option<bitcoin::Network>,
|
||||||
|
/// Changes to the [`LocalChain`](local_chain::LocalChain).
|
||||||
|
pub local_chain: local_chain::ChangeSet,
|
||||||
|
/// Changes to [`TxGraph`](tx_graph::TxGraph).
|
||||||
|
pub tx_graph: tx_graph::ChangeSet<ConfirmationBlockTime>,
|
||||||
|
/// Changes to [`KeychainTxOutIndex`](keychain_txout::KeychainTxOutIndex).
|
||||||
|
pub indexer: keychain_txout::ChangeSet,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Merge for ChangeSet {
|
||||||
|
/// Merge another [`ChangeSet`] into itself.
|
||||||
|
fn merge(&mut self, other: Self) {
|
||||||
|
if other.descriptor.is_some() {
|
||||||
|
debug_assert!(
|
||||||
|
self.descriptor.is_none() || self.descriptor == other.descriptor,
|
||||||
|
"descriptor must never change"
|
||||||
|
);
|
||||||
|
self.descriptor = other.descriptor;
|
||||||
|
}
|
||||||
|
if other.change_descriptor.is_some() {
|
||||||
|
debug_assert!(
|
||||||
|
self.change_descriptor.is_none()
|
||||||
|
|| self.change_descriptor == other.change_descriptor,
|
||||||
|
"change descriptor must never change"
|
||||||
|
);
|
||||||
|
self.change_descriptor = other.change_descriptor;
|
||||||
|
}
|
||||||
|
if other.network.is_some() {
|
||||||
|
debug_assert!(
|
||||||
|
self.network.is_none() || self.network == other.network,
|
||||||
|
"network must never change"
|
||||||
|
);
|
||||||
|
self.network = other.network;
|
||||||
|
}
|
||||||
|
|
||||||
|
Merge::merge(&mut self.local_chain, other.local_chain);
|
||||||
|
Merge::merge(&mut self.tx_graph, other.tx_graph);
|
||||||
|
Merge::merge(&mut self.indexer, other.indexer);
|
||||||
|
}
|
||||||
|
|
||||||
|
fn is_empty(&self) -> bool {
|
||||||
|
self.descriptor.is_none()
|
||||||
|
&& self.change_descriptor.is_none()
|
||||||
|
&& self.network.is_none()
|
||||||
|
&& self.local_chain.is_empty()
|
||||||
|
&& self.tx_graph.is_empty()
|
||||||
|
&& self.indexer.is_empty()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(feature = "rusqlite")]
|
||||||
|
impl ChangeSet {
|
||||||
|
/// Schema name for wallet.
|
||||||
|
pub const WALLET_SCHEMA_NAME: &'static str = "bdk_wallet";
|
||||||
|
/// Name of table to store wallet descriptors and network.
|
||||||
|
pub const WALLET_TABLE_NAME: &'static str = "bdk_wallet";
|
||||||
|
|
||||||
|
/// Initialize sqlite tables for wallet schema & table.
|
||||||
|
fn init_wallet_sqlite_tables(
|
||||||
|
db_tx: &chain::rusqlite::Transaction,
|
||||||
|
) -> chain::rusqlite::Result<()> {
|
||||||
|
let schema_v0: &[&str] = &[&format!(
|
||||||
|
"CREATE TABLE {} ( \
|
||||||
|
id INTEGER PRIMARY KEY NOT NULL CHECK (id = 0), \
|
||||||
|
descriptor TEXT, \
|
||||||
|
change_descriptor TEXT, \
|
||||||
|
network TEXT \
|
||||||
|
) STRICT;",
|
||||||
|
Self::WALLET_TABLE_NAME,
|
||||||
|
)];
|
||||||
|
crate::rusqlite_impl::migrate_schema(db_tx, Self::WALLET_SCHEMA_NAME, &[schema_v0])
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Recover a [`ChangeSet`] from sqlite database.
|
||||||
|
pub fn from_sqlite(db_tx: &chain::rusqlite::Transaction) -> chain::rusqlite::Result<Self> {
|
||||||
|
Self::init_wallet_sqlite_tables(db_tx)?;
|
||||||
|
use chain::rusqlite::OptionalExtension;
|
||||||
|
use chain::Impl;
|
||||||
|
|
||||||
|
let mut changeset = Self::default();
|
||||||
|
|
||||||
|
let mut wallet_statement = db_tx.prepare(&format!(
|
||||||
|
"SELECT descriptor, change_descriptor, network FROM {}",
|
||||||
|
Self::WALLET_TABLE_NAME,
|
||||||
|
))?;
|
||||||
|
let row = wallet_statement
|
||||||
|
.query_row([], |row| {
|
||||||
|
Ok((
|
||||||
|
row.get::<_, Impl<Descriptor<DescriptorPublicKey>>>("descriptor")?,
|
||||||
|
row.get::<_, Impl<Descriptor<DescriptorPublicKey>>>("change_descriptor")?,
|
||||||
|
row.get::<_, Impl<bitcoin::Network>>("network")?,
|
||||||
|
))
|
||||||
|
})
|
||||||
|
.optional()?;
|
||||||
|
if let Some((Impl(desc), Impl(change_desc), Impl(network))) = row {
|
||||||
|
changeset.descriptor = Some(desc);
|
||||||
|
changeset.change_descriptor = Some(change_desc);
|
||||||
|
changeset.network = Some(network);
|
||||||
|
}
|
||||||
|
|
||||||
|
changeset.local_chain = local_chain::ChangeSet::from_sqlite(db_tx)?;
|
||||||
|
changeset.tx_graph = tx_graph::ChangeSet::<_>::from_sqlite(db_tx)?;
|
||||||
|
changeset.indexer = keychain_txout::ChangeSet::from_sqlite(db_tx)?;
|
||||||
|
|
||||||
|
Ok(changeset)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Persist [`ChangeSet`] to sqlite database.
|
||||||
|
pub fn persist_to_sqlite(
|
||||||
|
&self,
|
||||||
|
db_tx: &chain::rusqlite::Transaction,
|
||||||
|
) -> chain::rusqlite::Result<()> {
|
||||||
|
Self::init_wallet_sqlite_tables(db_tx)?;
|
||||||
|
use chain::rusqlite::named_params;
|
||||||
|
use chain::Impl;
|
||||||
|
|
||||||
|
let mut descriptor_statement = db_tx.prepare_cached(&format!(
|
||||||
|
"INSERT INTO {}(id, descriptor) VALUES(:id, :descriptor) ON CONFLICT(id) DO UPDATE SET descriptor=:descriptor",
|
||||||
|
Self::WALLET_TABLE_NAME,
|
||||||
|
))?;
|
||||||
|
if let Some(descriptor) = &self.descriptor {
|
||||||
|
descriptor_statement.execute(named_params! {
|
||||||
|
":id": 0,
|
||||||
|
":descriptor": Impl(descriptor.clone()),
|
||||||
|
})?;
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut change_descriptor_statement = db_tx.prepare_cached(&format!(
|
||||||
|
"INSERT INTO {}(id, change_descriptor) VALUES(:id, :change_descriptor) ON CONFLICT(id) DO UPDATE SET change_descriptor=:change_descriptor",
|
||||||
|
Self::WALLET_TABLE_NAME,
|
||||||
|
))?;
|
||||||
|
if let Some(change_descriptor) = &self.change_descriptor {
|
||||||
|
change_descriptor_statement.execute(named_params! {
|
||||||
|
":id": 0,
|
||||||
|
":change_descriptor": Impl(change_descriptor.clone()),
|
||||||
|
})?;
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut network_statement = db_tx.prepare_cached(&format!(
|
||||||
|
"INSERT INTO {}(id, network) VALUES(:id, :network) ON CONFLICT(id) DO UPDATE SET network=:network",
|
||||||
|
Self::WALLET_TABLE_NAME,
|
||||||
|
))?;
|
||||||
|
if let Some(network) = self.network {
|
||||||
|
network_statement.execute(named_params! {
|
||||||
|
":id": 0,
|
||||||
|
":network": Impl(network),
|
||||||
|
})?;
|
||||||
|
}
|
||||||
|
|
||||||
|
self.local_chain.persist_to_sqlite(db_tx)?;
|
||||||
|
self.tx_graph.persist_to_sqlite(db_tx)?;
|
||||||
|
self.indexer.persist_to_sqlite(db_tx)?;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<local_chain::ChangeSet> for ChangeSet {
|
||||||
|
fn from(chain: local_chain::ChangeSet) -> Self {
|
||||||
|
Self {
|
||||||
|
local_chain: chain,
|
||||||
|
..Default::default()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<IndexedTxGraphChangeSet> for ChangeSet {
|
||||||
|
fn from(indexed_tx_graph: IndexedTxGraphChangeSet) -> Self {
|
||||||
|
Self {
|
||||||
|
tx_graph: indexed_tx_graph.tx_graph,
|
||||||
|
indexer: indexed_tx_graph.indexer,
|
||||||
|
..Default::default()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<tx_graph::ChangeSet<ConfirmationBlockTime>> for ChangeSet {
|
||||||
|
fn from(tx_graph: tx_graph::ChangeSet<ConfirmationBlockTime>) -> Self {
|
||||||
|
Self {
|
||||||
|
tx_graph,
|
||||||
|
..Default::default()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<keychain_txout::ChangeSet> for ChangeSet {
|
||||||
|
fn from(indexer: keychain_txout::ChangeSet) -> Self {
|
||||||
|
Self {
|
||||||
|
indexer,
|
||||||
|
..Default::default()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -26,11 +26,10 @@
|
|||||||
//! ```
|
//! ```
|
||||||
//! # use std::str::FromStr;
|
//! # use std::str::FromStr;
|
||||||
//! # use bitcoin::*;
|
//! # use bitcoin::*;
|
||||||
//! # use bdk::wallet::{self, ChangeSet, coin_selection::*, coin_selection};
|
//! # use bdk_wallet::{self, ChangeSet, coin_selection::*, coin_selection};
|
||||||
//! # use bdk::wallet::error::CreateTxError;
|
//! # use bdk_wallet::error::CreateTxError;
|
||||||
//! # use bdk_persist::PersistBackend;
|
//! # use bdk_wallet::*;
|
||||||
//! # use bdk::*;
|
//! # use bdk_wallet::coin_selection::decide_change;
|
||||||
//! # use bdk::wallet::coin_selection::decide_change;
|
|
||||||
//! # use anyhow::Error;
|
//! # use anyhow::Error;
|
||||||
//! #[derive(Debug)]
|
//! #[derive(Debug)]
|
||||||
//! struct AlwaysSpendEverything;
|
//! struct AlwaysSpendEverything;
|
||||||
@@ -53,11 +52,10 @@
|
|||||||
//! (&mut selected_amount, &mut additional_weight),
|
//! (&mut selected_amount, &mut additional_weight),
|
||||||
//! |(selected_amount, additional_weight), weighted_utxo| {
|
//! |(selected_amount, additional_weight), weighted_utxo| {
|
||||||
//! **selected_amount += weighted_utxo.utxo.txout().value.to_sat();
|
//! **selected_amount += weighted_utxo.utxo.txout().value.to_sat();
|
||||||
//! **additional_weight += Weight::from_wu(
|
//! **additional_weight += TxIn::default()
|
||||||
//! (TxIn::default().segwit_weight().to_wu()
|
//! .segwit_weight()
|
||||||
//! + weighted_utxo.satisfaction_weight as u64)
|
//! .checked_add(weighted_utxo.satisfaction_weight)
|
||||||
//! as u64,
|
//! .expect("`Weight` addition should not cause an integer overflow");
|
||||||
//! );
|
|
||||||
//! Some(weighted_utxo.utxo)
|
//! Some(weighted_utxo.utxo)
|
||||||
//! },
|
//! },
|
||||||
//! )
|
//! )
|
||||||
@@ -115,8 +113,9 @@ use bitcoin::{Script, Weight};
|
|||||||
|
|
||||||
use core::convert::TryInto;
|
use core::convert::TryInto;
|
||||||
use core::fmt::{self, Formatter};
|
use core::fmt::{self, Formatter};
|
||||||
use rand::seq::SliceRandom;
|
use rand_core::RngCore;
|
||||||
|
|
||||||
|
use super::utils::shuffle_slice;
|
||||||
/// Default coin selection algorithm used by [`TxBuilder`](super::tx_builder::TxBuilder) if not
|
/// Default coin selection algorithm used by [`TxBuilder`](super::tx_builder::TxBuilder) if not
|
||||||
/// overridden
|
/// overridden
|
||||||
pub type DefaultCoinSelectionAlgorithm = BranchAndBoundCoinSelection;
|
pub type DefaultCoinSelectionAlgorithm = BranchAndBoundCoinSelection;
|
||||||
@@ -316,7 +315,7 @@ pub fn decide_change(remaining_amount: u64, fee_rate: FeeRate, drain_script: &Sc
|
|||||||
let drain_val = remaining_amount.saturating_sub(change_fee);
|
let drain_val = remaining_amount.saturating_sub(change_fee);
|
||||||
|
|
||||||
if drain_val.is_dust(drain_script) {
|
if drain_val.is_dust(drain_script) {
|
||||||
let dust_threshold = drain_script.dust_value().to_sat();
|
let dust_threshold = drain_script.minimal_non_dust().to_sat();
|
||||||
Excess::NoChange {
|
Excess::NoChange {
|
||||||
dust_threshold,
|
dust_threshold,
|
||||||
change_fee,
|
change_fee,
|
||||||
@@ -344,10 +343,10 @@ fn select_sorted_utxos(
|
|||||||
|(selected_amount, fee_amount), (must_use, weighted_utxo)| {
|
|(selected_amount, fee_amount), (must_use, weighted_utxo)| {
|
||||||
if must_use || **selected_amount < target_amount + **fee_amount {
|
if must_use || **selected_amount < target_amount + **fee_amount {
|
||||||
**fee_amount += (fee_rate
|
**fee_amount += (fee_rate
|
||||||
* Weight::from_wu(
|
* (TxIn::default()
|
||||||
TxIn::default().segwit_weight().to_wu()
|
.segwit_weight()
|
||||||
+ weighted_utxo.satisfaction_weight as u64,
|
.checked_add(weighted_utxo.satisfaction_weight)
|
||||||
))
|
.expect("`Weight` addition should not cause an integer overflow")))
|
||||||
.to_sat();
|
.to_sat();
|
||||||
**selected_amount += weighted_utxo.utxo.txout().value.to_sat();
|
**selected_amount += weighted_utxo.utxo.txout().value.to_sat();
|
||||||
Some(weighted_utxo.utxo)
|
Some(weighted_utxo.utxo)
|
||||||
@@ -390,9 +389,10 @@ struct OutputGroup {
|
|||||||
impl OutputGroup {
|
impl OutputGroup {
|
||||||
fn new(weighted_utxo: WeightedUtxo, fee_rate: FeeRate) -> Self {
|
fn new(weighted_utxo: WeightedUtxo, fee_rate: FeeRate) -> Self {
|
||||||
let fee = (fee_rate
|
let fee = (fee_rate
|
||||||
* Weight::from_wu(
|
* (TxIn::default()
|
||||||
TxIn::default().segwit_weight().to_wu() + weighted_utxo.satisfaction_weight as u64,
|
.segwit_weight()
|
||||||
))
|
.checked_add(weighted_utxo.satisfaction_weight)
|
||||||
|
.expect("`Weight` addition should not cause an integer overflow")))
|
||||||
.to_sat();
|
.to_sat();
|
||||||
let effective_value = weighted_utxo.utxo.txout().value.to_sat() as i64 - fee as i64;
|
let effective_value = weighted_utxo.utxo.txout().value.to_sat() as i64 - fee as i64;
|
||||||
OutputGroup {
|
OutputGroup {
|
||||||
@@ -517,27 +517,16 @@ impl CoinSelectionAlgorithm for BranchAndBoundCoinSelection {
|
|||||||
));
|
));
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(self
|
self.bnb(
|
||||||
.bnb(
|
required_utxos.clone(),
|
||||||
required_utxos.clone(),
|
optional_utxos.clone(),
|
||||||
optional_utxos.clone(),
|
curr_value,
|
||||||
curr_value,
|
curr_available_value,
|
||||||
curr_available_value,
|
target_amount,
|
||||||
target_amount,
|
cost_of_change,
|
||||||
cost_of_change,
|
drain_script,
|
||||||
drain_script,
|
fee_rate,
|
||||||
fee_rate,
|
)
|
||||||
)
|
|
||||||
.unwrap_or_else(|_| {
|
|
||||||
self.single_random_draw(
|
|
||||||
required_utxos,
|
|
||||||
optional_utxos,
|
|
||||||
curr_value,
|
|
||||||
target_amount,
|
|
||||||
drain_script,
|
|
||||||
fee_rate,
|
|
||||||
)
|
|
||||||
}))
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -664,40 +653,6 @@ impl BranchAndBoundCoinSelection {
|
|||||||
))
|
))
|
||||||
}
|
}
|
||||||
|
|
||||||
#[allow(clippy::too_many_arguments)]
|
|
||||||
fn single_random_draw(
|
|
||||||
&self,
|
|
||||||
required_utxos: Vec<OutputGroup>,
|
|
||||||
mut optional_utxos: Vec<OutputGroup>,
|
|
||||||
curr_value: i64,
|
|
||||||
target_amount: i64,
|
|
||||||
drain_script: &Script,
|
|
||||||
fee_rate: FeeRate,
|
|
||||||
) -> CoinSelectionResult {
|
|
||||||
optional_utxos.shuffle(&mut rand::thread_rng());
|
|
||||||
let selected_utxos = optional_utxos.into_iter().fold(
|
|
||||||
(curr_value, vec![]),
|
|
||||||
|(mut amount, mut utxos), utxo| {
|
|
||||||
if amount >= target_amount {
|
|
||||||
(amount, utxos)
|
|
||||||
} else {
|
|
||||||
amount += utxo.effective_value;
|
|
||||||
utxos.push(utxo);
|
|
||||||
(amount, utxos)
|
|
||||||
}
|
|
||||||
},
|
|
||||||
);
|
|
||||||
|
|
||||||
// remaining_amount can't be negative as that would mean the
|
|
||||||
// selection wasn't successful
|
|
||||||
// target_amount = amount_needed + (fee_amount - vin_fees)
|
|
||||||
let remaining_amount = (selected_utxos.0 - target_amount) as u64;
|
|
||||||
|
|
||||||
let excess = decide_change(remaining_amount, fee_rate, drain_script);
|
|
||||||
|
|
||||||
BranchAndBoundCoinSelection::calculate_cs_result(selected_utxos.1, required_utxos, excess)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn calculate_cs_result(
|
fn calculate_cs_result(
|
||||||
mut selected_utxos: Vec<OutputGroup>,
|
mut selected_utxos: Vec<OutputGroup>,
|
||||||
mut required_utxos: Vec<OutputGroup>,
|
mut required_utxos: Vec<OutputGroup>,
|
||||||
@@ -718,6 +673,58 @@ impl BranchAndBoundCoinSelection {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Pull UTXOs at random until we have enough to meet the target
|
||||||
|
pub(crate) fn single_random_draw(
|
||||||
|
required_utxos: Vec<WeightedUtxo>,
|
||||||
|
optional_utxos: Vec<WeightedUtxo>,
|
||||||
|
target_amount: u64,
|
||||||
|
drain_script: &Script,
|
||||||
|
fee_rate: FeeRate,
|
||||||
|
rng: &mut impl RngCore,
|
||||||
|
) -> CoinSelectionResult {
|
||||||
|
let target_amount = target_amount
|
||||||
|
.try_into()
|
||||||
|
.expect("Bitcoin amount to fit into i64");
|
||||||
|
|
||||||
|
let required_utxos: Vec<OutputGroup> = required_utxos
|
||||||
|
.into_iter()
|
||||||
|
.map(|u| OutputGroup::new(u, fee_rate))
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
let mut optional_utxos: Vec<OutputGroup> = optional_utxos
|
||||||
|
.into_iter()
|
||||||
|
.map(|u| OutputGroup::new(u, fee_rate))
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
let curr_value = required_utxos
|
||||||
|
.iter()
|
||||||
|
.fold(0, |acc, x| acc + x.effective_value);
|
||||||
|
|
||||||
|
shuffle_slice(&mut optional_utxos, rng);
|
||||||
|
|
||||||
|
let selected_utxos =
|
||||||
|
optional_utxos
|
||||||
|
.into_iter()
|
||||||
|
.fold((curr_value, vec![]), |(mut amount, mut utxos), utxo| {
|
||||||
|
if amount >= target_amount {
|
||||||
|
(amount, utxos)
|
||||||
|
} else {
|
||||||
|
amount += utxo.effective_value;
|
||||||
|
utxos.push(utxo);
|
||||||
|
(amount, utxos)
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// remaining_amount can't be negative as that would mean the
|
||||||
|
// selection wasn't successful
|
||||||
|
// target_amount = amount_needed + (fee_amount - vin_fees)
|
||||||
|
let remaining_amount = (selected_utxos.0 - target_amount) as u64;
|
||||||
|
|
||||||
|
let excess = decide_change(remaining_amount, fee_rate, drain_script);
|
||||||
|
|
||||||
|
BranchAndBoundCoinSelection::calculate_cs_result(selected_utxos.1, required_utxos, excess)
|
||||||
|
}
|
||||||
|
|
||||||
/// Remove duplicate UTXOs.
|
/// Remove duplicate UTXOs.
|
||||||
///
|
///
|
||||||
/// If a UTXO appears in both `required` and `optional`, the appearance in `required` is kept.
|
/// If a UTXO appears in both `required` and `optional`, the appearance in `required` is kept.
|
||||||
@@ -741,6 +748,7 @@ where
|
|||||||
mod test {
|
mod test {
|
||||||
use assert_matches::assert_matches;
|
use assert_matches::assert_matches;
|
||||||
use core::str::FromStr;
|
use core::str::FromStr;
|
||||||
|
use rand::rngs::StdRng;
|
||||||
|
|
||||||
use bdk_chain::ConfirmationTime;
|
use bdk_chain::ConfirmationTime;
|
||||||
use bitcoin::{Amount, ScriptBuf, TxIn, TxOut};
|
use bitcoin::{Amount, ScriptBuf, TxIn, TxOut};
|
||||||
@@ -749,8 +757,7 @@ mod test {
|
|||||||
use crate::types::*;
|
use crate::types::*;
|
||||||
use crate::wallet::coin_selection::filter_duplicates;
|
use crate::wallet::coin_selection::filter_duplicates;
|
||||||
|
|
||||||
use rand::rngs::StdRng;
|
use rand::prelude::SliceRandom;
|
||||||
use rand::seq::SliceRandom;
|
|
||||||
use rand::{Rng, RngCore, SeedableRng};
|
use rand::{Rng, RngCore, SeedableRng};
|
||||||
|
|
||||||
// signature len (1WU) + signature and sighash (72WU)
|
// signature len (1WU) + signature and sighash (72WU)
|
||||||
@@ -767,7 +774,7 @@ mod test {
|
|||||||
))
|
))
|
||||||
.unwrap();
|
.unwrap();
|
||||||
WeightedUtxo {
|
WeightedUtxo {
|
||||||
satisfaction_weight: P2WPKH_SATISFACTION_SIZE,
|
satisfaction_weight: Weight::from_wu_usize(P2WPKH_SATISFACTION_SIZE),
|
||||||
utxo: Utxo::Local(LocalOutput {
|
utxo: Utxo::Local(LocalOutput {
|
||||||
outpoint,
|
outpoint,
|
||||||
txout: TxOut {
|
txout: TxOut {
|
||||||
@@ -827,7 +834,7 @@ mod test {
|
|||||||
let mut res = Vec::new();
|
let mut res = Vec::new();
|
||||||
for i in 0..utxos_number {
|
for i in 0..utxos_number {
|
||||||
res.push(WeightedUtxo {
|
res.push(WeightedUtxo {
|
||||||
satisfaction_weight: P2WPKH_SATISFACTION_SIZE,
|
satisfaction_weight: Weight::from_wu_usize(P2WPKH_SATISFACTION_SIZE),
|
||||||
utxo: Utxo::Local(LocalOutput {
|
utxo: Utxo::Local(LocalOutput {
|
||||||
outpoint: OutPoint::from_str(&format!(
|
outpoint: OutPoint::from_str(&format!(
|
||||||
"ebd9813ecebc57ff8f30797de7c205e3c7498ca950ea4341ee51a685ff2fa30a:{}",
|
"ebd9813ecebc57ff8f30797de7c205e3c7498ca950ea4341ee51a685ff2fa30a:{}",
|
||||||
@@ -858,7 +865,7 @@ mod test {
|
|||||||
fn generate_same_value_utxos(utxos_value: u64, utxos_number: usize) -> Vec<WeightedUtxo> {
|
fn generate_same_value_utxos(utxos_value: u64, utxos_number: usize) -> Vec<WeightedUtxo> {
|
||||||
(0..utxos_number)
|
(0..utxos_number)
|
||||||
.map(|i| WeightedUtxo {
|
.map(|i| WeightedUtxo {
|
||||||
satisfaction_weight: P2WPKH_SATISFACTION_SIZE,
|
satisfaction_weight: Weight::from_wu_usize(P2WPKH_SATISFACTION_SIZE),
|
||||||
utxo: Utxo::Local(LocalOutput {
|
utxo: Utxo::Local(LocalOutput {
|
||||||
outpoint: OutPoint::from_str(&format!(
|
outpoint: OutPoint::from_str(&format!(
|
||||||
"ebd9813ecebc57ff8f30797de7c205e3c7498ca950ea4341ee51a685ff2fa30a:{}",
|
"ebd9813ecebc57ff8f30797de7c205e3c7498ca950ea4341ee51a685ff2fa30a:{}",
|
||||||
@@ -1091,13 +1098,12 @@ mod test {
|
|||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
|
#[ignore = "SRD fn was moved out of BnB"]
|
||||||
fn test_bnb_coin_selection_success() {
|
fn test_bnb_coin_selection_success() {
|
||||||
// In this case bnb won't find a suitable match and single random draw will
|
// In this case bnb won't find a suitable match and single random draw will
|
||||||
// select three outputs
|
// select three outputs
|
||||||
let utxos = generate_same_value_utxos(100_000, 20);
|
let utxos = generate_same_value_utxos(100_000, 20);
|
||||||
|
|
||||||
let drain_script = ScriptBuf::default();
|
let drain_script = ScriptBuf::default();
|
||||||
|
|
||||||
let target_amount = 250_000 + FEE_AMOUNT;
|
let target_amount = 250_000 + FEE_AMOUNT;
|
||||||
|
|
||||||
let result = BranchAndBoundCoinSelection::default()
|
let result = BranchAndBoundCoinSelection::default()
|
||||||
@@ -1137,6 +1143,7 @@ mod test {
|
|||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
|
#[ignore = "no exact match for bnb, previously fell back to SRD"]
|
||||||
fn test_bnb_coin_selection_optional_are_enough() {
|
fn test_bnb_coin_selection_optional_are_enough() {
|
||||||
let utxos = get_test_utxos();
|
let utxos = get_test_utxos();
|
||||||
let drain_script = ScriptBuf::default();
|
let drain_script = ScriptBuf::default();
|
||||||
@@ -1157,6 +1164,26 @@ mod test {
|
|||||||
assert_eq!(result.fee_amount, 136);
|
assert_eq!(result.fee_amount, 136);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_single_random_draw_function_success() {
|
||||||
|
let seed = [0; 32];
|
||||||
|
let mut rng: StdRng = SeedableRng::from_seed(seed);
|
||||||
|
let mut utxos = generate_random_utxos(&mut rng, 300);
|
||||||
|
let target_amount = sum_random_utxos(&mut rng, &mut utxos) + FEE_AMOUNT;
|
||||||
|
let fee_rate = FeeRate::from_sat_per_vb_unchecked(1);
|
||||||
|
let drain_script = ScriptBuf::default();
|
||||||
|
let result = single_random_draw(
|
||||||
|
vec![],
|
||||||
|
utxos,
|
||||||
|
target_amount,
|
||||||
|
&drain_script,
|
||||||
|
fee_rate,
|
||||||
|
&mut rng,
|
||||||
|
);
|
||||||
|
assert!(result.selected_amount() > target_amount);
|
||||||
|
assert_eq!(result.fee_amount, (result.selected.len() * 68) as u64);
|
||||||
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
#[ignore]
|
#[ignore]
|
||||||
fn test_bnb_coin_selection_required_not_enough() {
|
fn test_bnb_coin_selection_required_not_enough() {
|
||||||
@@ -1411,34 +1438,6 @@ mod test {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_single_random_draw_function_success() {
|
|
||||||
let seed = [0; 32];
|
|
||||||
let mut rng: StdRng = SeedableRng::from_seed(seed);
|
|
||||||
let mut utxos = generate_random_utxos(&mut rng, 300);
|
|
||||||
let target_amount = sum_random_utxos(&mut rng, &mut utxos) + FEE_AMOUNT;
|
|
||||||
|
|
||||||
let fee_rate = FeeRate::from_sat_per_vb_unchecked(1);
|
|
||||||
let utxos: Vec<OutputGroup> = utxos
|
|
||||||
.into_iter()
|
|
||||||
.map(|u| OutputGroup::new(u, fee_rate))
|
|
||||||
.collect();
|
|
||||||
|
|
||||||
let drain_script = ScriptBuf::default();
|
|
||||||
|
|
||||||
let result = BranchAndBoundCoinSelection::default().single_random_draw(
|
|
||||||
vec![],
|
|
||||||
utxos,
|
|
||||||
0,
|
|
||||||
target_amount as i64,
|
|
||||||
&drain_script,
|
|
||||||
fee_rate,
|
|
||||||
);
|
|
||||||
|
|
||||||
assert!(result.selected_amount() > target_amount);
|
|
||||||
assert_eq!(result.fee_amount, (result.selected.len() * 68) as u64);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_bnb_exclude_negative_effective_value() {
|
fn test_bnb_exclude_negative_effective_value() {
|
||||||
let utxos = get_test_utxos();
|
let utxos = get_test_utxos();
|
||||||
@@ -1513,7 +1512,7 @@ mod test {
|
|||||||
fn test_filter_duplicates() {
|
fn test_filter_duplicates() {
|
||||||
fn utxo(txid: &str, value: u64) -> WeightedUtxo {
|
fn utxo(txid: &str, value: u64) -> WeightedUtxo {
|
||||||
WeightedUtxo {
|
WeightedUtxo {
|
||||||
satisfaction_weight: 0,
|
satisfaction_weight: Weight::ZERO,
|
||||||
utxo: Utxo::Local(LocalOutput {
|
utxo: Utxo::Local(LocalOutput {
|
||||||
outpoint: OutPoint::new(bitcoin::hashes::Hash::hash(txid.as_bytes()), 0),
|
outpoint: OutPoint::new(bitcoin::hashes::Hash::hash(txid.as_bytes()), 0),
|
||||||
txout: TxOut {
|
txout: TxOut {
|
||||||
@@ -16,7 +16,7 @@ use crate::descriptor::DescriptorError;
|
|||||||
use crate::wallet::coin_selection;
|
use crate::wallet::coin_selection;
|
||||||
use crate::{descriptor, KeychainKind};
|
use crate::{descriptor, KeychainKind};
|
||||||
use alloc::string::String;
|
use alloc::string::String;
|
||||||
use bitcoin::{absolute, psbt, OutPoint, Sequence, Txid};
|
use bitcoin::{absolute, psbt, Amount, OutPoint, Sequence, Txid};
|
||||||
use core::fmt;
|
use core::fmt;
|
||||||
|
|
||||||
/// Errors returned by miniscript when updating inconsistent PSBTs
|
/// Errors returned by miniscript when updating inconsistent PSBTs
|
||||||
@@ -50,8 +50,6 @@ impl std::error::Error for MiniscriptPsbtError {}
|
|||||||
pub enum CreateTxError {
|
pub enum CreateTxError {
|
||||||
/// There was a problem with the descriptors passed in
|
/// There was a problem with the descriptors passed in
|
||||||
Descriptor(DescriptorError),
|
Descriptor(DescriptorError),
|
||||||
/// We were unable to load wallet data from or write wallet data to the persistence backend
|
|
||||||
Persist(anyhow::Error),
|
|
||||||
/// There was a problem while extracting and manipulating policies
|
/// There was a problem while extracting and manipulating policies
|
||||||
Policy(PolicyError),
|
Policy(PolicyError),
|
||||||
/// Spending policy is not compatible with this [`KeychainKind`]
|
/// Spending policy is not compatible with this [`KeychainKind`]
|
||||||
@@ -78,8 +76,8 @@ pub enum CreateTxError {
|
|||||||
},
|
},
|
||||||
/// When bumping a tx the absolute fee requested is lower than replaced tx absolute fee
|
/// When bumping a tx the absolute fee requested is lower than replaced tx absolute fee
|
||||||
FeeTooLow {
|
FeeTooLow {
|
||||||
/// Required fee absolute value (satoshi)
|
/// Required fee absolute value [`Amount`]
|
||||||
required: u64,
|
required: Amount,
|
||||||
},
|
},
|
||||||
/// When bumping a tx the fee rate requested is lower than required
|
/// When bumping a tx the fee rate requested is lower than required
|
||||||
FeeRateTooLow {
|
FeeRateTooLow {
|
||||||
@@ -90,17 +88,8 @@ pub enum CreateTxError {
|
|||||||
NoUtxosSelected,
|
NoUtxosSelected,
|
||||||
/// Output created is under the dust limit, 546 satoshis
|
/// Output created is under the dust limit, 546 satoshis
|
||||||
OutputBelowDustLimit(usize),
|
OutputBelowDustLimit(usize),
|
||||||
/// The `change_policy` was set but the wallet does not have a change_descriptor
|
|
||||||
ChangePolicyDescriptor,
|
|
||||||
/// There was an error with coin selection
|
/// There was an error with coin selection
|
||||||
CoinSelection(coin_selection::Error),
|
CoinSelection(coin_selection::Error),
|
||||||
/// Wallet's UTXO set is not enough to cover recipient's requested plus fee
|
|
||||||
InsufficientFunds {
|
|
||||||
/// Sats needed for some transaction
|
|
||||||
needed: u64,
|
|
||||||
/// Sats available for spending
|
|
||||||
available: u64,
|
|
||||||
},
|
|
||||||
/// Cannot build a tx without recipients
|
/// Cannot build a tx without recipients
|
||||||
NoRecipients,
|
NoRecipients,
|
||||||
/// Partially signed bitcoin transaction error
|
/// Partially signed bitcoin transaction error
|
||||||
@@ -123,13 +112,6 @@ impl fmt::Display for CreateTxError {
|
|||||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
match self {
|
match self {
|
||||||
Self::Descriptor(e) => e.fmt(f),
|
Self::Descriptor(e) => e.fmt(f),
|
||||||
Self::Persist(e) => {
|
|
||||||
write!(
|
|
||||||
f,
|
|
||||||
"failed to load wallet data from or write wallet data to persistence backend: {}",
|
|
||||||
e
|
|
||||||
)
|
|
||||||
}
|
|
||||||
Self::Policy(e) => e.fmt(f),
|
Self::Policy(e) => e.fmt(f),
|
||||||
CreateTxError::SpendingPolicyRequired(keychain_kind) => {
|
CreateTxError::SpendingPolicyRequired(keychain_kind) => {
|
||||||
write!(f, "Spending policy required: {:?}", keychain_kind)
|
write!(f, "Spending policy required: {:?}", keychain_kind)
|
||||||
@@ -160,7 +142,7 @@ impl fmt::Display for CreateTxError {
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
CreateTxError::FeeTooLow { required } => {
|
CreateTxError::FeeTooLow { required } => {
|
||||||
write!(f, "Fee to low: required {} sat", required)
|
write!(f, "Fee to low: required {}", required.display_dynamic())
|
||||||
}
|
}
|
||||||
CreateTxError::FeeRateTooLow { required } => {
|
CreateTxError::FeeRateTooLow { required } => {
|
||||||
write!(
|
write!(
|
||||||
@@ -177,20 +159,7 @@ impl fmt::Display for CreateTxError {
|
|||||||
CreateTxError::OutputBelowDustLimit(limit) => {
|
CreateTxError::OutputBelowDustLimit(limit) => {
|
||||||
write!(f, "Output below the dust limit: {}", limit)
|
write!(f, "Output below the dust limit: {}", limit)
|
||||||
}
|
}
|
||||||
CreateTxError::ChangePolicyDescriptor => {
|
|
||||||
write!(
|
|
||||||
f,
|
|
||||||
"The `change_policy` can be set only if the wallet has a change_descriptor"
|
|
||||||
)
|
|
||||||
}
|
|
||||||
CreateTxError::CoinSelection(e) => e.fmt(f),
|
CreateTxError::CoinSelection(e) => e.fmt(f),
|
||||||
CreateTxError::InsufficientFunds { needed, available } => {
|
|
||||||
write!(
|
|
||||||
f,
|
|
||||||
"Insufficient funds: {} sat available of {} sat needed",
|
|
||||||
available, needed
|
|
||||||
)
|
|
||||||
}
|
|
||||||
CreateTxError::NoRecipients => {
|
CreateTxError::NoRecipients => {
|
||||||
write!(f, "Cannot build tx without recipients")
|
write!(f, "Cannot build tx without recipients")
|
||||||
}
|
}
|
||||||
@@ -20,8 +20,8 @@
|
|||||||
//! ```
|
//! ```
|
||||||
//! # use std::str::FromStr;
|
//! # use std::str::FromStr;
|
||||||
//! # use bitcoin::*;
|
//! # use bitcoin::*;
|
||||||
//! # use bdk::wallet::export::*;
|
//! # use bdk_wallet::export::*;
|
||||||
//! # use bdk::*;
|
//! # use bdk_wallet::*;
|
||||||
//! let import = r#"{
|
//! let import = r#"{
|
||||||
//! "descriptor": "wpkh([c258d2e4\/84h\/1h\/0h]tpubDD3ynpHgJQW8VvWRzQ5WFDCrs4jqVFGHB3vLC3r49XHJSqP8bHKdK4AriuUKLccK68zfzowx7YhmDN8SiSkgCDENUFx9qVw65YyqM78vyVe\/0\/*)",
|
//! "descriptor": "wpkh([c258d2e4\/84h\/1h\/0h]tpubDD3ynpHgJQW8VvWRzQ5WFDCrs4jqVFGHB3vLC3r49XHJSqP8bHKdK4AriuUKLccK68zfzowx7YhmDN8SiSkgCDENUFx9qVw65YyqM78vyVe\/0\/*)",
|
||||||
//! "blockheight":1782088,
|
//! "blockheight":1782088,
|
||||||
@@ -29,24 +29,26 @@
|
|||||||
//! }"#;
|
//! }"#;
|
||||||
//!
|
//!
|
||||||
//! let import = FullyNodedExport::from_str(import)?;
|
//! let import = FullyNodedExport::from_str(import)?;
|
||||||
//! let wallet = Wallet::new_no_persist(
|
//! let wallet = Wallet::create(
|
||||||
//! &import.descriptor(),
|
//! import.descriptor(),
|
||||||
//! import.change_descriptor().as_ref(),
|
//! import.change_descriptor().expect("change descriptor"),
|
||||||
//! Network::Testnet,
|
//! )
|
||||||
//! )?;
|
//! .network(Network::Testnet)
|
||||||
|
//! .create_wallet_no_persist()?;
|
||||||
//! # Ok::<_, Box<dyn std::error::Error>>(())
|
//! # Ok::<_, Box<dyn std::error::Error>>(())
|
||||||
//! ```
|
//! ```
|
||||||
//!
|
//!
|
||||||
//! ### Export a `Wallet`
|
//! ### Export a `Wallet`
|
||||||
//! ```
|
//! ```
|
||||||
//! # use bitcoin::*;
|
//! # use bitcoin::*;
|
||||||
//! # use bdk::wallet::export::*;
|
//! # use bdk_wallet::export::*;
|
||||||
//! # use bdk::*;
|
//! # use bdk_wallet::*;
|
||||||
//! let wallet = Wallet::new_no_persist(
|
//! let wallet = Wallet::create(
|
||||||
//! "wpkh([c258d2e4/84h/1h/0h]tpubDD3ynpHgJQW8VvWRzQ5WFDCrs4jqVFGHB3vLC3r49XHJSqP8bHKdK4AriuUKLccK68zfzowx7YhmDN8SiSkgCDENUFx9qVw65YyqM78vyVe/0/*)",
|
//! "wpkh([c258d2e4/84h/1h/0h]tpubDD3ynpHgJQW8VvWRzQ5WFDCrs4jqVFGHB3vLC3r49XHJSqP8bHKdK4AriuUKLccK68zfzowx7YhmDN8SiSkgCDENUFx9qVw65YyqM78vyVe/0/*)",
|
||||||
//! Some("wpkh([c258d2e4/84h/1h/0h]tpubDD3ynpHgJQW8VvWRzQ5WFDCrs4jqVFGHB3vLC3r49XHJSqP8bHKdK4AriuUKLccK68zfzowx7YhmDN8SiSkgCDENUFx9qVw65YyqM78vyVe/1/*)"),
|
//! "wpkh([c258d2e4/84h/1h/0h]tpubDD3ynpHgJQW8VvWRzQ5WFDCrs4jqVFGHB3vLC3r49XHJSqP8bHKdK4AriuUKLccK68zfzowx7YhmDN8SiSkgCDENUFx9qVw65YyqM78vyVe/1/*)",
|
||||||
//! Network::Testnet,
|
//! )
|
||||||
//! )?;
|
//! .network(Network::Testnet)
|
||||||
|
//! .create_wallet_no_persist()?;
|
||||||
//! let export = FullyNodedExport::export_wallet(&wallet, "exported wallet", true).unwrap();
|
//! let export = FullyNodedExport::export_wallet(&wallet, "exported wallet", true).unwrap();
|
||||||
//!
|
//!
|
||||||
//! println!("Exported: {}", export.to_string());
|
//! println!("Exported: {}", export.to_string());
|
||||||
@@ -116,7 +118,7 @@ impl FullyNodedExport {
|
|||||||
include_blockheight: bool,
|
include_blockheight: bool,
|
||||||
) -> Result<Self, &'static str> {
|
) -> Result<Self, &'static str> {
|
||||||
let descriptor = wallet
|
let descriptor = wallet
|
||||||
.get_descriptor_for_keychain(KeychainKind::External)
|
.public_descriptor(KeychainKind::External)
|
||||||
.to_string_with_secret(
|
.to_string_with_secret(
|
||||||
&wallet
|
&wallet
|
||||||
.get_signers(KeychainKind::External)
|
.get_signers(KeychainKind::External)
|
||||||
@@ -128,7 +130,7 @@ impl FullyNodedExport {
|
|||||||
let blockheight = if include_blockheight {
|
let blockheight = if include_blockheight {
|
||||||
wallet.transactions().next().map_or(0, |canonical_tx| {
|
wallet.transactions().next().map_or(0, |canonical_tx| {
|
||||||
match canonical_tx.chain_position {
|
match canonical_tx.chain_position {
|
||||||
bdk_chain::ChainPosition::Confirmed(a) => a.confirmation_height,
|
bdk_chain::ChainPosition::Confirmed(a) => a.block_id.height,
|
||||||
bdk_chain::ChainPosition::Unconfirmed(_) => 0,
|
bdk_chain::ChainPosition::Unconfirmed(_) => 0,
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
@@ -142,19 +144,17 @@ impl FullyNodedExport {
|
|||||||
blockheight,
|
blockheight,
|
||||||
};
|
};
|
||||||
|
|
||||||
let change_descriptor = match wallet.public_descriptor(KeychainKind::Internal).is_some() {
|
let change_descriptor = {
|
||||||
false => None,
|
let descriptor = wallet
|
||||||
true => {
|
.public_descriptor(KeychainKind::Internal)
|
||||||
let descriptor = wallet
|
.to_string_with_secret(
|
||||||
.get_descriptor_for_keychain(KeychainKind::Internal)
|
&wallet
|
||||||
.to_string_with_secret(
|
.get_signers(KeychainKind::Internal)
|
||||||
&wallet
|
.as_key_map(wallet.secp_ctx()),
|
||||||
.get_signers(KeychainKind::Internal)
|
);
|
||||||
.as_key_map(wallet.secp_ctx()),
|
Some(remove_checksum(descriptor))
|
||||||
);
|
|
||||||
Some(remove_checksum(descriptor))
|
|
||||||
}
|
|
||||||
};
|
};
|
||||||
|
|
||||||
if export.change_descriptor() != change_descriptor {
|
if export.change_descriptor() != change_descriptor {
|
||||||
return Err("Incompatible change descriptor");
|
return Err("Incompatible change descriptor");
|
||||||
}
|
}
|
||||||
@@ -166,7 +166,7 @@ impl FullyNodedExport {
|
|||||||
fn check_ms<Ctx: ScriptContext>(
|
fn check_ms<Ctx: ScriptContext>(
|
||||||
terminal: &Terminal<String, Ctx>,
|
terminal: &Terminal<String, Ctx>,
|
||||||
) -> Result<(), &'static str> {
|
) -> Result<(), &'static str> {
|
||||||
if let Terminal::Multi(_, _) = terminal {
|
if let Terminal::Multi(_) = terminal {
|
||||||
Ok(())
|
Ok(())
|
||||||
} else {
|
} else {
|
||||||
Err("The descriptor contains operators not supported by Bitcoin Core")
|
Err("The descriptor contains operators not supported by Bitcoin Core")
|
||||||
@@ -189,6 +189,7 @@ impl FullyNodedExport {
|
|||||||
WshInner::SortedMulti(_) => Ok(()),
|
WshInner::SortedMulti(_) => Ok(()),
|
||||||
WshInner::Ms(ms) => check_ms(&ms.node),
|
WshInner::Ms(ms) => check_ms(&ms.node),
|
||||||
},
|
},
|
||||||
|
Descriptor::Tr(_) => Ok(()),
|
||||||
_ => Err("The descriptor is not compatible with Bitcoin Core"),
|
_ => Err("The descriptor is not compatible with Bitcoin Core"),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -215,39 +216,50 @@ mod test {
|
|||||||
use core::str::FromStr;
|
use core::str::FromStr;
|
||||||
|
|
||||||
use crate::std::string::ToString;
|
use crate::std::string::ToString;
|
||||||
use bdk_chain::{BlockId, ConfirmationTime};
|
use bdk_chain::{BlockId, ConfirmationBlockTime};
|
||||||
use bitcoin::hashes::Hash;
|
use bitcoin::hashes::Hash;
|
||||||
use bitcoin::{transaction, BlockHash, Network, Transaction};
|
use bitcoin::{transaction, BlockHash, Network, Transaction};
|
||||||
|
|
||||||
use super::*;
|
use super::*;
|
||||||
use crate::wallet::Wallet;
|
use crate::Wallet;
|
||||||
|
|
||||||
fn get_test_wallet(
|
fn get_test_wallet(descriptor: &str, change_descriptor: &str, network: Network) -> Wallet {
|
||||||
descriptor: &str,
|
use crate::wallet::Update;
|
||||||
change_descriptor: Option<&str>,
|
use bdk_chain::TxGraph;
|
||||||
network: Network,
|
let mut wallet = Wallet::create(descriptor.to_string(), change_descriptor.to_string())
|
||||||
) -> Wallet {
|
.network(network)
|
||||||
let mut wallet = Wallet::new_no_persist(descriptor, change_descriptor, network).unwrap();
|
.create_wallet_no_persist()
|
||||||
|
.expect("must create wallet");
|
||||||
let transaction = Transaction {
|
let transaction = Transaction {
|
||||||
input: vec![],
|
input: vec![],
|
||||||
output: vec![],
|
output: vec![],
|
||||||
version: transaction::Version::non_standard(0),
|
version: transaction::Version::non_standard(0),
|
||||||
lock_time: bitcoin::absolute::LockTime::ZERO,
|
lock_time: bitcoin::absolute::LockTime::ZERO,
|
||||||
};
|
};
|
||||||
|
let txid = transaction.compute_txid();
|
||||||
|
let block_id = BlockId {
|
||||||
|
height: 5000,
|
||||||
|
hash: BlockHash::all_zeros(),
|
||||||
|
};
|
||||||
|
wallet.insert_checkpoint(block_id).unwrap();
|
||||||
wallet
|
wallet
|
||||||
.insert_checkpoint(BlockId {
|
.insert_checkpoint(BlockId {
|
||||||
height: 5001,
|
height: 5001,
|
||||||
hash: BlockHash::all_zeros(),
|
hash: BlockHash::all_zeros(),
|
||||||
})
|
})
|
||||||
.unwrap();
|
.unwrap();
|
||||||
|
wallet.insert_tx(transaction);
|
||||||
|
let anchor = ConfirmationBlockTime {
|
||||||
|
confirmation_time: 0,
|
||||||
|
block_id,
|
||||||
|
};
|
||||||
|
let mut graph = TxGraph::default();
|
||||||
|
let _ = graph.insert_anchor(txid, anchor);
|
||||||
wallet
|
wallet
|
||||||
.insert_tx(
|
.apply_update(Update {
|
||||||
transaction,
|
graph,
|
||||||
ConfirmationTime::Confirmed {
|
..Default::default()
|
||||||
height: 5000,
|
})
|
||||||
time: 0,
|
|
||||||
},
|
|
||||||
)
|
|
||||||
.unwrap();
|
.unwrap();
|
||||||
wallet
|
wallet
|
||||||
}
|
}
|
||||||
@@ -257,7 +269,7 @@ mod test {
|
|||||||
let descriptor = "wpkh(xprv9s21ZrQH143K4CTb63EaMxja1YiTnSEWKMbn23uoEnAzxjdUJRQkazCAtzxGm4LSoTSVTptoV9RbchnKPW9HxKtZumdyxyikZFDLhogJ5Uj/44'/0'/0'/0/*)";
|
let descriptor = "wpkh(xprv9s21ZrQH143K4CTb63EaMxja1YiTnSEWKMbn23uoEnAzxjdUJRQkazCAtzxGm4LSoTSVTptoV9RbchnKPW9HxKtZumdyxyikZFDLhogJ5Uj/44'/0'/0'/0/*)";
|
||||||
let change_descriptor = "wpkh(xprv9s21ZrQH143K4CTb63EaMxja1YiTnSEWKMbn23uoEnAzxjdUJRQkazCAtzxGm4LSoTSVTptoV9RbchnKPW9HxKtZumdyxyikZFDLhogJ5Uj/44'/0'/0'/1/*)";
|
let change_descriptor = "wpkh(xprv9s21ZrQH143K4CTb63EaMxja1YiTnSEWKMbn23uoEnAzxjdUJRQkazCAtzxGm4LSoTSVTptoV9RbchnKPW9HxKtZumdyxyikZFDLhogJ5Uj/44'/0'/0'/1/*)";
|
||||||
|
|
||||||
let wallet = get_test_wallet(descriptor, Some(change_descriptor), Network::Bitcoin);
|
let wallet = get_test_wallet(descriptor, change_descriptor, Network::Bitcoin);
|
||||||
let export = FullyNodedExport::export_wallet(&wallet, "Test Label", true).unwrap();
|
let export = FullyNodedExport::export_wallet(&wallet, "Test Label", true).unwrap();
|
||||||
|
|
||||||
assert_eq!(export.descriptor(), descriptor);
|
assert_eq!(export.descriptor(), descriptor);
|
||||||
@@ -269,13 +281,14 @@ mod test {
|
|||||||
#[test]
|
#[test]
|
||||||
#[should_panic(expected = "Incompatible change descriptor")]
|
#[should_panic(expected = "Incompatible change descriptor")]
|
||||||
fn test_export_no_change() {
|
fn test_export_no_change() {
|
||||||
// This wallet explicitly doesn't have a change descriptor. It should be impossible to
|
// The wallet's change descriptor has no wildcard. It should be impossible to
|
||||||
// export, because exporting this kind of external descriptor normally implies the
|
// export, because exporting this kind of external descriptor normally implies the
|
||||||
// existence of an internal descriptor
|
// existence of a compatible internal descriptor
|
||||||
|
|
||||||
let descriptor = "wpkh(xprv9s21ZrQH143K4CTb63EaMxja1YiTnSEWKMbn23uoEnAzxjdUJRQkazCAtzxGm4LSoTSVTptoV9RbchnKPW9HxKtZumdyxyikZFDLhogJ5Uj/44'/0'/0'/0/*)";
|
let descriptor = "wpkh(xprv9s21ZrQH143K4CTb63EaMxja1YiTnSEWKMbn23uoEnAzxjdUJRQkazCAtzxGm4LSoTSVTptoV9RbchnKPW9HxKtZumdyxyikZFDLhogJ5Uj/44'/0'/0'/0/*)";
|
||||||
|
let change_descriptor = "wpkh(xprv9s21ZrQH143K4CTb63EaMxja1YiTnSEWKMbn23uoEnAzxjdUJRQkazCAtzxGm4LSoTSVTptoV9RbchnKPW9HxKtZumdyxyikZFDLhogJ5Uj/44'/0'/0'/1/0)";
|
||||||
|
|
||||||
let wallet = get_test_wallet(descriptor, None, Network::Bitcoin);
|
let wallet = get_test_wallet(descriptor, change_descriptor, Network::Bitcoin);
|
||||||
FullyNodedExport::export_wallet(&wallet, "Test Label", true).unwrap();
|
FullyNodedExport::export_wallet(&wallet, "Test Label", true).unwrap();
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -288,7 +301,7 @@ mod test {
|
|||||||
let descriptor = "wpkh(xprv9s21ZrQH143K4CTb63EaMxja1YiTnSEWKMbn23uoEnAzxjdUJRQkazCAtzxGm4LSoTSVTptoV9RbchnKPW9HxKtZumdyxyikZFDLhogJ5Uj/44'/0'/0'/0/*)";
|
let descriptor = "wpkh(xprv9s21ZrQH143K4CTb63EaMxja1YiTnSEWKMbn23uoEnAzxjdUJRQkazCAtzxGm4LSoTSVTptoV9RbchnKPW9HxKtZumdyxyikZFDLhogJ5Uj/44'/0'/0'/0/*)";
|
||||||
let change_descriptor = "wpkh(xprv9s21ZrQH143K4CTb63EaMxja1YiTnSEWKMbn23uoEnAzxjdUJRQkazCAtzxGm4LSoTSVTptoV9RbchnKPW9HxKtZumdyxyikZFDLhogJ5Uj/50'/0'/1/*)";
|
let change_descriptor = "wpkh(xprv9s21ZrQH143K4CTb63EaMxja1YiTnSEWKMbn23uoEnAzxjdUJRQkazCAtzxGm4LSoTSVTptoV9RbchnKPW9HxKtZumdyxyikZFDLhogJ5Uj/50'/0'/1/*)";
|
||||||
|
|
||||||
let wallet = get_test_wallet(descriptor, Some(change_descriptor), Network::Bitcoin);
|
let wallet = get_test_wallet(descriptor, change_descriptor, Network::Bitcoin);
|
||||||
FullyNodedExport::export_wallet(&wallet, "Test Label", true).unwrap();
|
FullyNodedExport::export_wallet(&wallet, "Test Label", true).unwrap();
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -305,7 +318,7 @@ mod test {
|
|||||||
[c98b1535/48'/0'/0'/2']tpubDCDi5W4sP6zSnzJeowy8rQDVhBdRARaPhK1axABi8V1661wEPeanpEXj4ZLAUEoikVtoWcyK26TKKJSecSfeKxwHCcRrge9k1ybuiL71z4a/1/*\
|
[c98b1535/48'/0'/0'/2']tpubDCDi5W4sP6zSnzJeowy8rQDVhBdRARaPhK1axABi8V1661wEPeanpEXj4ZLAUEoikVtoWcyK26TKKJSecSfeKxwHCcRrge9k1ybuiL71z4a/1/*\
|
||||||
))";
|
))";
|
||||||
|
|
||||||
let wallet = get_test_wallet(descriptor, Some(change_descriptor), Network::Testnet);
|
let wallet = get_test_wallet(descriptor, change_descriptor, Network::Testnet);
|
||||||
let export = FullyNodedExport::export_wallet(&wallet, "Test Label", true).unwrap();
|
let export = FullyNodedExport::export_wallet(&wallet, "Test Label", true).unwrap();
|
||||||
|
|
||||||
assert_eq!(export.descriptor(), descriptor);
|
assert_eq!(export.descriptor(), descriptor);
|
||||||
@@ -314,12 +327,24 @@ mod test {
|
|||||||
assert_eq!(export.label, "Test Label");
|
assert_eq!(export.label, "Test Label");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_export_tr() {
|
||||||
|
let descriptor = "tr([73c5da0a/86'/0'/0']tprv8fMn4hSKPRC1oaCPqxDb1JWtgkpeiQvZhsr8W2xuy3GEMkzoArcAWTfJxYb6Wj8XNNDWEjfYKK4wGQXh3ZUXhDF2NcnsALpWTeSwarJt7Vc/0/*)";
|
||||||
|
let change_descriptor = "tr([73c5da0a/86'/0'/0']tprv8fMn4hSKPRC1oaCPqxDb1JWtgkpeiQvZhsr8W2xuy3GEMkzoArcAWTfJxYb6Wj8XNNDWEjfYKK4wGQXh3ZUXhDF2NcnsALpWTeSwarJt7Vc/1/*)";
|
||||||
|
let wallet = get_test_wallet(descriptor, change_descriptor, Network::Testnet);
|
||||||
|
let export = FullyNodedExport::export_wallet(&wallet, "Test Label", true).unwrap();
|
||||||
|
assert_eq!(export.descriptor(), descriptor);
|
||||||
|
assert_eq!(export.change_descriptor(), Some(change_descriptor.into()));
|
||||||
|
assert_eq!(export.blockheight, 5000);
|
||||||
|
assert_eq!(export.label, "Test Label");
|
||||||
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_export_to_json() {
|
fn test_export_to_json() {
|
||||||
let descriptor = "wpkh(xprv9s21ZrQH143K4CTb63EaMxja1YiTnSEWKMbn23uoEnAzxjdUJRQkazCAtzxGm4LSoTSVTptoV9RbchnKPW9HxKtZumdyxyikZFDLhogJ5Uj/44'/0'/0'/0/*)";
|
let descriptor = "wpkh(xprv9s21ZrQH143K4CTb63EaMxja1YiTnSEWKMbn23uoEnAzxjdUJRQkazCAtzxGm4LSoTSVTptoV9RbchnKPW9HxKtZumdyxyikZFDLhogJ5Uj/44'/0'/0'/0/*)";
|
||||||
let change_descriptor = "wpkh(xprv9s21ZrQH143K4CTb63EaMxja1YiTnSEWKMbn23uoEnAzxjdUJRQkazCAtzxGm4LSoTSVTptoV9RbchnKPW9HxKtZumdyxyikZFDLhogJ5Uj/44'/0'/0'/1/*)";
|
let change_descriptor = "wpkh(xprv9s21ZrQH143K4CTb63EaMxja1YiTnSEWKMbn23uoEnAzxjdUJRQkazCAtzxGm4LSoTSVTptoV9RbchnKPW9HxKtZumdyxyikZFDLhogJ5Uj/44'/0'/0'/1/*)";
|
||||||
|
|
||||||
let wallet = get_test_wallet(descriptor, Some(change_descriptor), Network::Bitcoin);
|
let wallet = get_test_wallet(descriptor, change_descriptor, Network::Bitcoin);
|
||||||
let export = FullyNodedExport::export_wallet(&wallet, "Test Label", true).unwrap();
|
let export = FullyNodedExport::export_wallet(&wallet, "Test Label", true).unwrap();
|
||||||
|
|
||||||
assert_eq!(export.to_string(), "{\"descriptor\":\"wpkh(xprv9s21ZrQH143K4CTb63EaMxja1YiTnSEWKMbn23uoEnAzxjdUJRQkazCAtzxGm4LSoTSVTptoV9RbchnKPW9HxKtZumdyxyikZFDLhogJ5Uj/44\'/0\'/0\'/0/*)\",\"blockheight\":5000,\"label\":\"Test Label\"}");
|
assert_eq!(export.to_string(), "{\"descriptor\":\"wpkh(xprv9s21ZrQH143K4CTb63EaMxja1YiTnSEWKMbn23uoEnAzxjdUJRQkazCAtzxGm4LSoTSVTptoV9RbchnKPW9HxKtZumdyxyikZFDLhogJ5Uj/44\'/0\'/0\'/0/*)\",\"blockheight\":5000,\"label\":\"Test Label\"}");
|
||||||
@@ -14,11 +14,11 @@
|
|||||||
//! This module contains HWISigner, an implementation of a [TransactionSigner] to be
|
//! This module contains HWISigner, an implementation of a [TransactionSigner] to be
|
||||||
//! used with hardware wallets.
|
//! used with hardware wallets.
|
||||||
//! ```no_run
|
//! ```no_run
|
||||||
//! # use bdk::bitcoin::Network;
|
//! # use bdk_wallet::bitcoin::Network;
|
||||||
//! # use bdk::signer::SignerOrdering;
|
//! # use bdk_wallet::signer::SignerOrdering;
|
||||||
//! # use bdk::wallet::hardwaresigner::HWISigner;
|
//! # use bdk_wallet::hardwaresigner::HWISigner;
|
||||||
//! # use bdk::wallet::AddressIndex::New;
|
//! # use bdk_wallet::AddressIndex::New;
|
||||||
//! # use bdk::{KeychainKind, SignOptions, Wallet};
|
//! # use bdk_wallet::{CreateParams, KeychainKind, SignOptions};
|
||||||
//! # use hwi::HWIClient;
|
//! # use hwi::HWIClient;
|
||||||
//! # use std::sync::Arc;
|
//! # use std::sync::Arc;
|
||||||
//! #
|
//! #
|
||||||
@@ -30,11 +30,7 @@
|
|||||||
//! let first_device = devices.remove(0)?;
|
//! let first_device = devices.remove(0)?;
|
||||||
//! let custom_signer = HWISigner::from_device(&first_device, Network::Testnet.into())?;
|
//! let custom_signer = HWISigner::from_device(&first_device, Network::Testnet.into())?;
|
||||||
//!
|
//!
|
||||||
//! # let mut wallet = Wallet::new_no_persist(
|
//! # let mut wallet = CreateParams::new("", "", Network::Testnet)?.create_wallet_no_persist()?;
|
||||||
//! # "",
|
|
||||||
//! # None,
|
|
||||||
//! # Network::Testnet,
|
|
||||||
//! # )?;
|
|
||||||
//! #
|
//! #
|
||||||
//! // Adding the hardware signer to the BDK wallet
|
//! // Adding the hardware signer to the BDK wallet
|
||||||
//! wallet.add_signer(
|
//! wallet.add_signer(
|
||||||
File diff suppressed because it is too large
Load Diff
213
crates/wallet/src/wallet/params.rs
Normal file
213
crates/wallet/src/wallet/params.rs
Normal file
@@ -0,0 +1,213 @@
|
|||||||
|
use alloc::boxed::Box;
|
||||||
|
use bdk_chain::{keychain_txout::DEFAULT_LOOKAHEAD, PersistAsyncWith, PersistWith};
|
||||||
|
use bitcoin::{BlockHash, Network};
|
||||||
|
use miniscript::descriptor::KeyMap;
|
||||||
|
|
||||||
|
use crate::{
|
||||||
|
descriptor::{DescriptorError, ExtendedDescriptor, IntoWalletDescriptor},
|
||||||
|
utils::SecpCtx,
|
||||||
|
KeychainKind, Wallet,
|
||||||
|
};
|
||||||
|
|
||||||
|
use super::{ChangeSet, LoadError, PersistedWallet};
|
||||||
|
|
||||||
|
/// This atrocity is to avoid having type parameters on [`CreateParams`] and [`LoadParams`].
|
||||||
|
///
|
||||||
|
/// The better option would be to do `Box<dyn IntoWalletDescriptor>`, but we cannot due to Rust's
|
||||||
|
/// [object safety rules](https://doc.rust-lang.org/reference/items/traits.html#object-safety).
|
||||||
|
type DescriptorToExtract = Box<
|
||||||
|
dyn FnOnce(&SecpCtx, Network) -> Result<(ExtendedDescriptor, KeyMap), DescriptorError>
|
||||||
|
+ 'static,
|
||||||
|
>;
|
||||||
|
|
||||||
|
fn make_descriptor_to_extract<D>(descriptor: D) -> DescriptorToExtract
|
||||||
|
where
|
||||||
|
D: IntoWalletDescriptor + 'static,
|
||||||
|
{
|
||||||
|
Box::new(|secp, network| descriptor.into_wallet_descriptor(secp, network))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Parameters for [`Wallet::create`] or [`PersistedWallet::create`].
|
||||||
|
#[must_use]
|
||||||
|
pub struct CreateParams {
|
||||||
|
pub(crate) descriptor: DescriptorToExtract,
|
||||||
|
pub(crate) descriptor_keymap: KeyMap,
|
||||||
|
pub(crate) change_descriptor: DescriptorToExtract,
|
||||||
|
pub(crate) change_descriptor_keymap: KeyMap,
|
||||||
|
pub(crate) network: Network,
|
||||||
|
pub(crate) genesis_hash: Option<BlockHash>,
|
||||||
|
pub(crate) lookahead: u32,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl CreateParams {
|
||||||
|
/// Construct parameters with provided `descriptor`, `change_descriptor` and `network`.
|
||||||
|
///
|
||||||
|
/// Default values: `genesis_hash` = `None`, `lookahead` = [`DEFAULT_LOOKAHEAD`]
|
||||||
|
pub fn new<D: IntoWalletDescriptor + 'static>(descriptor: D, change_descriptor: D) -> Self {
|
||||||
|
Self {
|
||||||
|
descriptor: make_descriptor_to_extract(descriptor),
|
||||||
|
descriptor_keymap: KeyMap::default(),
|
||||||
|
change_descriptor: make_descriptor_to_extract(change_descriptor),
|
||||||
|
change_descriptor_keymap: KeyMap::default(),
|
||||||
|
network: Network::Bitcoin,
|
||||||
|
genesis_hash: None,
|
||||||
|
lookahead: DEFAULT_LOOKAHEAD,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Extend the given `keychain`'s `keymap`.
|
||||||
|
pub fn keymap(mut self, keychain: KeychainKind, keymap: KeyMap) -> Self {
|
||||||
|
match keychain {
|
||||||
|
KeychainKind::External => &mut self.descriptor_keymap,
|
||||||
|
KeychainKind::Internal => &mut self.change_descriptor_keymap,
|
||||||
|
}
|
||||||
|
.extend(keymap);
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Set `network`.
|
||||||
|
pub fn network(mut self, network: Network) -> Self {
|
||||||
|
self.network = network;
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Use a custom `genesis_hash`.
|
||||||
|
pub fn genesis_hash(mut self, genesis_hash: BlockHash) -> Self {
|
||||||
|
self.genesis_hash = Some(genesis_hash);
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Use custom lookahead value.
|
||||||
|
pub fn lookahead(mut self, lookahead: u32) -> Self {
|
||||||
|
self.lookahead = lookahead;
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Create [`PersistedWallet`] with the given `Db`.
|
||||||
|
pub fn create_wallet<Db>(
|
||||||
|
self,
|
||||||
|
db: &mut Db,
|
||||||
|
) -> Result<PersistedWallet, <Wallet as PersistWith<Db>>::CreateError>
|
||||||
|
where
|
||||||
|
Wallet: PersistWith<Db, CreateParams = Self>,
|
||||||
|
{
|
||||||
|
PersistedWallet::create(db, self)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Create [`PersistedWallet`] with the given async `Db`.
|
||||||
|
pub async fn create_wallet_async<Db>(
|
||||||
|
self,
|
||||||
|
db: &mut Db,
|
||||||
|
) -> Result<PersistedWallet, <Wallet as PersistAsyncWith<Db>>::CreateError>
|
||||||
|
where
|
||||||
|
Wallet: PersistAsyncWith<Db, CreateParams = Self>,
|
||||||
|
{
|
||||||
|
PersistedWallet::create_async(db, self).await
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Create [`Wallet`] without persistence.
|
||||||
|
pub fn create_wallet_no_persist(self) -> Result<Wallet, DescriptorError> {
|
||||||
|
Wallet::create_with_params(self)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Parameters for [`Wallet::load`] or [`PersistedWallet::load`].
#[must_use]
pub struct LoadParams {
    /// Extra private keys merged into the external descriptor's keymap
    /// (see [`LoadParams::keymap`]).
    pub(crate) descriptor_keymap: KeyMap,
    /// Extra private keys merged into the change descriptor's keymap.
    pub(crate) change_descriptor_keymap: KeyMap,
    /// Keychain lookahead value (defaults to [`DEFAULT_LOOKAHEAD`]).
    pub(crate) lookahead: u32,
    /// If `Some`, the loaded wallet's network is checked against this value.
    pub(crate) check_network: Option<Network>,
    /// If `Some`, the loaded wallet's genesis hash is checked against this value.
    pub(crate) check_genesis_hash: Option<BlockHash>,
    /// If `Some`, the loaded external descriptor is checked against this one
    /// (set via [`LoadParams::descriptors`]).
    pub(crate) check_descriptor: Option<DescriptorToExtract>,
    /// If `Some`, the loaded change descriptor is checked against this one.
    pub(crate) check_change_descriptor: Option<DescriptorToExtract>,
}
|
||||||
|
|
||||||
|
impl LoadParams {
|
||||||
|
/// Construct parameters with default values.
|
||||||
|
///
|
||||||
|
/// Default values: `lookahead` = [`DEFAULT_LOOKAHEAD`]
|
||||||
|
pub fn new() -> Self {
|
||||||
|
Self {
|
||||||
|
descriptor_keymap: KeyMap::default(),
|
||||||
|
change_descriptor_keymap: KeyMap::default(),
|
||||||
|
lookahead: DEFAULT_LOOKAHEAD,
|
||||||
|
check_network: None,
|
||||||
|
check_genesis_hash: None,
|
||||||
|
check_descriptor: None,
|
||||||
|
check_change_descriptor: None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Extend the given `keychain`'s `keymap`.
|
||||||
|
pub fn keymap(mut self, keychain: KeychainKind, keymap: KeyMap) -> Self {
|
||||||
|
match keychain {
|
||||||
|
KeychainKind::External => &mut self.descriptor_keymap,
|
||||||
|
KeychainKind::Internal => &mut self.change_descriptor_keymap,
|
||||||
|
}
|
||||||
|
.extend(keymap);
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Checks that `descriptor` of `keychain` matches this, and extracts private keys (if
|
||||||
|
/// available).
|
||||||
|
pub fn descriptors<D>(mut self, descriptor: D, change_descriptor: D) -> Self
|
||||||
|
where
|
||||||
|
D: IntoWalletDescriptor + 'static,
|
||||||
|
{
|
||||||
|
self.check_descriptor = Some(make_descriptor_to_extract(descriptor));
|
||||||
|
self.check_change_descriptor = Some(make_descriptor_to_extract(change_descriptor));
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Check for `network`.
|
||||||
|
pub fn network(mut self, network: Network) -> Self {
|
||||||
|
self.check_network = Some(network);
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Check for a `genesis_hash`.
|
||||||
|
pub fn genesis_hash(mut self, genesis_hash: BlockHash) -> Self {
|
||||||
|
self.check_genesis_hash = Some(genesis_hash);
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Use custom lookahead value.
|
||||||
|
pub fn lookahead(mut self, lookahead: u32) -> Self {
|
||||||
|
self.lookahead = lookahead;
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Load [`PersistedWallet`] with the given `Db`.
|
||||||
|
pub fn load_wallet<Db>(
|
||||||
|
self,
|
||||||
|
db: &mut Db,
|
||||||
|
) -> Result<Option<PersistedWallet>, <Wallet as PersistWith<Db>>::LoadError>
|
||||||
|
where
|
||||||
|
Wallet: PersistWith<Db, LoadParams = Self>,
|
||||||
|
{
|
||||||
|
PersistedWallet::load(db, self)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Load [`PersistedWallet`] with the given async `Db`.
|
||||||
|
pub async fn load_wallet_async<Db>(
|
||||||
|
self,
|
||||||
|
db: &mut Db,
|
||||||
|
) -> Result<Option<PersistedWallet>, <Wallet as PersistAsyncWith<Db>>::LoadError>
|
||||||
|
where
|
||||||
|
Wallet: PersistAsyncWith<Db, LoadParams = Self>,
|
||||||
|
{
|
||||||
|
PersistedWallet::load_async(db, self).await
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Load [`Wallet`] without persistence.
|
||||||
|
pub fn load_wallet_no_persist(self, changeset: ChangeSet) -> Result<Option<Wallet>, LoadError> {
|
||||||
|
Wallet::load_with_params(changeset, self)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Default for LoadParams {
|
||||||
|
fn default() -> Self {
|
||||||
|
Self::new()
|
||||||
|
}
|
||||||
|
}
|
||||||
171
crates/wallet/src/wallet/persisted.rs
Normal file
171
crates/wallet/src/wallet/persisted.rs
Normal file
@@ -0,0 +1,171 @@
|
|||||||
|
use core::fmt;
|
||||||
|
|
||||||
|
use crate::{descriptor::DescriptorError, Wallet};
|
||||||
|
|
||||||
|
/// Represents a persisted wallet.
///
/// Alias of [`bdk_chain::Persisted`] specialized to [`Wallet`].
pub type PersistedWallet = bdk_chain::Persisted<Wallet>;
|
||||||
|
|
||||||
|
#[cfg(feature = "rusqlite")]
impl<'c> chain::PersistWith<bdk_chain::rusqlite::Transaction<'c>> for Wallet {
    type CreateParams = crate::CreateParams;
    type LoadParams = crate::LoadParams;

    type CreateError = CreateWithPersistError<bdk_chain::rusqlite::Error>;
    type LoadError = LoadWithPersistError<bdk_chain::rusqlite::Error>;
    type PersistError = bdk_chain::rusqlite::Error;

    // Create the wallet, then flush any staged changeset within `db`.
    fn create(
        db: &mut bdk_chain::rusqlite::Transaction<'c>,
        params: Self::CreateParams,
    ) -> Result<Self, Self::CreateError> {
        let mut wallet =
            Self::create_with_params(params).map_err(CreateWithPersistError::Descriptor)?;
        if let Some(staged) = wallet.take_staged() {
            staged
                .persist_to_sqlite(db)
                .map_err(CreateWithPersistError::Persist)?;
        }
        Ok(wallet)
    }

    // Read the aggregate changeset from `conn` and construct the wallet from it.
    fn load(
        conn: &mut bdk_chain::rusqlite::Transaction<'c>,
        params: Self::LoadParams,
    ) -> Result<Option<Self>, Self::LoadError> {
        let changeset =
            crate::ChangeSet::from_sqlite(conn).map_err(LoadWithPersistError::Persist)?;
        if chain::Merge::is_empty(&changeset) {
            // Nothing was ever persisted: report "no wallet" rather than an error.
            Ok(None)
        } else {
            Self::load_with_params(changeset, params)
                .map_err(LoadWithPersistError::InvalidChangeSet)
        }
    }

    // Write `changeset` within the caller-managed transaction.
    fn persist(
        db: &mut bdk_chain::rusqlite::Transaction<'c>,
        changeset: &<Self as chain::Staged>::ChangeSet,
    ) -> Result<(), Self::PersistError> {
        changeset.persist_to_sqlite(db)
    }
}
|
||||||
|
|
||||||
|
#[cfg(feature = "rusqlite")]
impl chain::PersistWith<bdk_chain::rusqlite::Connection> for Wallet {
    type CreateParams = crate::CreateParams;
    type LoadParams = crate::LoadParams;

    type CreateError = CreateWithPersistError<bdk_chain::rusqlite::Error>;
    type LoadError = LoadWithPersistError<bdk_chain::rusqlite::Error>;
    type PersistError = bdk_chain::rusqlite::Error;

    // Delegate to the `Transaction` impl, wrapping the work in a committed transaction.
    fn create(
        db: &mut bdk_chain::rusqlite::Connection,
        params: Self::CreateParams,
    ) -> Result<Self, Self::CreateError> {
        let mut tx = db.transaction().map_err(CreateWithPersistError::Persist)?;
        let created = chain::PersistWith::create(&mut tx, params)?;
        tx.commit().map_err(CreateWithPersistError::Persist)?;
        Ok(created)
    }

    // Delegate to the `Transaction` impl, wrapping the work in a committed transaction.
    fn load(
        db: &mut bdk_chain::rusqlite::Connection,
        params: Self::LoadParams,
    ) -> Result<Option<Self>, Self::LoadError> {
        let mut tx = db.transaction().map_err(LoadWithPersistError::Persist)?;
        let loaded = chain::PersistWith::load(&mut tx, params)?;
        tx.commit().map_err(LoadWithPersistError::Persist)?;
        Ok(loaded)
    }

    // Persist `changeset` atomically: write inside a transaction, then commit.
    fn persist(
        db: &mut bdk_chain::rusqlite::Connection,
        changeset: &<Self as chain::Staged>::ChangeSet,
    ) -> Result<(), Self::PersistError> {
        let tx = db.transaction()?;
        changeset.persist_to_sqlite(&tx)?;
        tx.commit()
    }
}
|
||||||
|
|
||||||
|
#[cfg(feature = "file_store")]
impl chain::PersistWith<bdk_file_store::Store<crate::ChangeSet>> for Wallet {
    type CreateParams = crate::CreateParams;
    type LoadParams = crate::LoadParams;
    type CreateError = CreateWithPersistError<std::io::Error>;
    type LoadError =
        LoadWithPersistError<bdk_file_store::AggregateChangesetsError<crate::ChangeSet>>;
    type PersistError = std::io::Error;

    // Create the wallet, then append any staged changeset to the file store.
    fn create(
        db: &mut bdk_file_store::Store<crate::ChangeSet>,
        params: Self::CreateParams,
    ) -> Result<Self, Self::CreateError> {
        let mut wallet =
            Self::create_with_params(params).map_err(CreateWithPersistError::Descriptor)?;
        if let Some(staged) = wallet.take_staged() {
            db.append_changeset(&staged)
                .map_err(CreateWithPersistError::Persist)?;
        }
        Ok(wallet)
    }

    // Aggregate all stored changesets and construct the wallet from the result.
    fn load(
        db: &mut bdk_file_store::Store<crate::ChangeSet>,
        params: Self::LoadParams,
    ) -> Result<Option<Self>, Self::LoadError> {
        // An empty store aggregates to `None`; fall back to an empty changeset.
        let aggregated = db
            .aggregate_changesets()
            .map_err(LoadWithPersistError::Persist)?;
        let changeset = aggregated.unwrap_or_default();
        Self::load_with_params(changeset, params).map_err(LoadWithPersistError::InvalidChangeSet)
    }

    // Append `changeset` to the file store.
    fn persist(
        db: &mut bdk_file_store::Store<crate::ChangeSet>,
        changeset: &<Self as chain::Staged>::ChangeSet,
    ) -> Result<(), Self::PersistError> {
        db.append_changeset(changeset)
    }
}
|
||||||
|
|
||||||
|
/// Error type for [`PersistedWallet::load`].
|
||||||
|
#[derive(Debug, PartialEq)]
|
||||||
|
pub enum LoadWithPersistError<E> {
|
||||||
|
/// Error from persistence.
|
||||||
|
Persist(E),
|
||||||
|
/// Occurs when the loaded changeset cannot construct [`Wallet`].
|
||||||
|
InvalidChangeSet(crate::LoadError),
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<E: fmt::Display> fmt::Display for LoadWithPersistError<E> {
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
|
match self {
|
||||||
|
Self::Persist(err) => fmt::Display::fmt(err, f),
|
||||||
|
Self::InvalidChangeSet(err) => fmt::Display::fmt(&err, f),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(feature = "std")]
|
||||||
|
impl<E: fmt::Debug + fmt::Display> std::error::Error for LoadWithPersistError<E> {}
|
||||||
|
|
||||||
|
/// Error type for [`PersistedWallet::create`].
|
||||||
|
#[derive(Debug)]
|
||||||
|
pub enum CreateWithPersistError<E> {
|
||||||
|
/// Error from persistence.
|
||||||
|
Persist(E),
|
||||||
|
/// Occurs when the loaded changeset cannot construct [`Wallet`].
|
||||||
|
Descriptor(DescriptorError),
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<E: fmt::Display> fmt::Display for CreateWithPersistError<E> {
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
|
match self {
|
||||||
|
Self::Persist(err) => fmt::Display::fmt(err, f),
|
||||||
|
Self::Descriptor(err) => fmt::Display::fmt(&err, f),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(feature = "std")]
|
||||||
|
impl<E: fmt::Debug + fmt::Display> std::error::Error for CreateWithPersistError<E> {}
|
||||||
@@ -19,8 +19,8 @@
|
|||||||
//! # use core::str::FromStr;
|
//! # use core::str::FromStr;
|
||||||
//! # use bitcoin::secp256k1::{Secp256k1, All};
|
//! # use bitcoin::secp256k1::{Secp256k1, All};
|
||||||
//! # use bitcoin::*;
|
//! # use bitcoin::*;
|
||||||
//! # use bdk::signer::*;
|
//! # use bdk_wallet::signer::*;
|
||||||
//! # use bdk::*;
|
//! # use bdk_wallet::*;
|
||||||
//! # #[derive(Debug)]
|
//! # #[derive(Debug)]
|
||||||
//! # struct CustomHSM;
|
//! # struct CustomHSM;
|
||||||
//! # impl CustomHSM {
|
//! # impl CustomHSM {
|
||||||
@@ -67,8 +67,11 @@
|
|||||||
//!
|
//!
|
||||||
//! let custom_signer = CustomSigner::connect();
|
//! let custom_signer = CustomSigner::connect();
|
||||||
//!
|
//!
|
||||||
//! let descriptor = "wpkh(tpubD6NzVbkrYhZ4Xferm7Pz4VnjdcDPFyjVu5K4iZXQ4pVN8Cks4pHVowTBXBKRhX64pkRyJZJN5xAKj4UDNnLPb5p2sSKXhewoYx5GbTdUFWq/*)";
|
//! let descriptor = "wpkh(tpubD6NzVbkrYhZ4Xferm7Pz4VnjdcDPFyjVu5K4iZXQ4pVN8Cks4pHVowTBXBKRhX64pkRyJZJN5xAKj4UDNnLPb5p2sSKXhewoYx5GbTdUFWq/0/*)";
|
||||||
//! let mut wallet = Wallet::new_no_persist(descriptor, None, Network::Testnet)?;
|
//! let change_descriptor = "wpkh(tpubD6NzVbkrYhZ4Xferm7Pz4VnjdcDPFyjVu5K4iZXQ4pVN8Cks4pHVowTBXBKRhX64pkRyJZJN5xAKj4UDNnLPb5p2sSKXhewoYx5GbTdUFWq/1/*)";
|
||||||
|
//! let mut wallet = Wallet::create(descriptor, change_descriptor)
|
||||||
|
//! .network(Network::Testnet)
|
||||||
|
//! .create_wallet_no_persist()?;
|
||||||
//! wallet.add_signer(
|
//! wallet.add_signer(
|
||||||
//! KeychainKind::External,
|
//! KeychainKind::External,
|
||||||
//! SignerOrdering(200),
|
//! SignerOrdering(200),
|
||||||
@@ -98,7 +101,7 @@ use miniscript::descriptor::{
|
|||||||
Descriptor, DescriptorMultiXKey, DescriptorPublicKey, DescriptorSecretKey, DescriptorXKey,
|
Descriptor, DescriptorMultiXKey, DescriptorPublicKey, DescriptorSecretKey, DescriptorXKey,
|
||||||
InnerXKey, KeyMap, SinglePriv, SinglePubKey,
|
InnerXKey, KeyMap, SinglePriv, SinglePubKey,
|
||||||
};
|
};
|
||||||
use miniscript::{Legacy, Segwitv0, SigType, Tap, ToPublicKey};
|
use miniscript::{SigType, ToPublicKey};
|
||||||
|
|
||||||
use super::utils::SecpCtx;
|
use super::utils::SecpCtx;
|
||||||
use crate::descriptor::{DescriptorMeta, XKeyUtils};
|
use crate::descriptor::{DescriptorMeta, XKeyUtils};
|
||||||
@@ -158,8 +161,10 @@ pub enum SignerError {
|
|||||||
NonStandardSighash,
|
NonStandardSighash,
|
||||||
/// Invalid SIGHASH for the signing context in use
|
/// Invalid SIGHASH for the signing context in use
|
||||||
InvalidSighash,
|
InvalidSighash,
|
||||||
/// Error while computing the hash to sign
|
/// Error while computing the hash to sign a Taproot input.
|
||||||
SighashError(sighash::Error),
|
SighashTaproot(sighash::TaprootError),
|
||||||
|
/// PSBT sign error.
|
||||||
|
Psbt(psbt::SignError),
|
||||||
/// Miniscript PSBT error
|
/// Miniscript PSBT error
|
||||||
MiniscriptPsbt(MiniscriptPsbtError),
|
MiniscriptPsbt(MiniscriptPsbtError),
|
||||||
/// To be used only by external libraries implementing [`InputSigner`] or
|
/// To be used only by external libraries implementing [`InputSigner`] or
|
||||||
@@ -168,12 +173,6 @@ pub enum SignerError {
|
|||||||
External(String),
|
External(String),
|
||||||
}
|
}
|
||||||
|
|
||||||
impl From<sighash::Error> for SignerError {
|
|
||||||
fn from(e: sighash::Error) -> Self {
|
|
||||||
SignerError::SighashError(e)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl fmt::Display for SignerError {
|
impl fmt::Display for SignerError {
|
||||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
match self {
|
match self {
|
||||||
@@ -188,7 +187,8 @@ impl fmt::Display for SignerError {
|
|||||||
Self::MissingHdKeypath => write!(f, "Missing fingerprint and derivation path"),
|
Self::MissingHdKeypath => write!(f, "Missing fingerprint and derivation path"),
|
||||||
Self::NonStandardSighash => write!(f, "The psbt contains a non standard sighash"),
|
Self::NonStandardSighash => write!(f, "The psbt contains a non standard sighash"),
|
||||||
Self::InvalidSighash => write!(f, "Invalid SIGHASH for the signing context in use"),
|
Self::InvalidSighash => write!(f, "Invalid SIGHASH for the signing context in use"),
|
||||||
Self::SighashError(err) => write!(f, "Error while computing the hash to sign: {}", err),
|
Self::SighashTaproot(err) => write!(f, "Error while computing the hash to sign a Taproot input: {}", err),
|
||||||
|
Self::Psbt(err) => write!(f, "Error computing the sighash: {}", err),
|
||||||
Self::MiniscriptPsbt(err) => write!(f, "Miniscript PSBT error: {}", err),
|
Self::MiniscriptPsbt(err) => write!(f, "Miniscript PSBT error: {}", err),
|
||||||
Self::External(err) => write!(f, "{}", err),
|
Self::External(err) => write!(f, "{}", err),
|
||||||
}
|
}
|
||||||
@@ -453,93 +453,88 @@ impl InputSigner for SignerWrapper<PrivateKey> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
let pubkey = PublicKey::from_private_key(secp, self);
|
let pubkey = PublicKey::from_private_key(secp, self);
|
||||||
let x_only_pubkey = XOnlyPublicKey::from(pubkey.inner);
|
|
||||||
|
|
||||||
if let SignerContext::Tap { is_internal_key } = self.ctx {
|
match self.ctx {
|
||||||
if let Some(psbt_internal_key) = psbt.inputs[input_index].tap_internal_key {
|
SignerContext::Tap { is_internal_key } => {
|
||||||
if is_internal_key
|
let x_only_pubkey = XOnlyPublicKey::from(pubkey.inner);
|
||||||
&& psbt.inputs[input_index].tap_key_sig.is_none()
|
|
||||||
&& sign_options.sign_with_tap_internal_key
|
if let Some(psbt_internal_key) = psbt.inputs[input_index].tap_internal_key {
|
||||||
&& x_only_pubkey == psbt_internal_key
|
if is_internal_key
|
||||||
|
&& psbt.inputs[input_index].tap_key_sig.is_none()
|
||||||
|
&& sign_options.sign_with_tap_internal_key
|
||||||
|
&& x_only_pubkey == psbt_internal_key
|
||||||
|
{
|
||||||
|
let (sighash, sighash_type) = compute_tap_sighash(psbt, input_index, None)?;
|
||||||
|
sign_psbt_schnorr(
|
||||||
|
&self.inner,
|
||||||
|
x_only_pubkey,
|
||||||
|
None,
|
||||||
|
&mut psbt.inputs[input_index],
|
||||||
|
sighash,
|
||||||
|
sighash_type,
|
||||||
|
secp,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some((leaf_hashes, _)) =
|
||||||
|
psbt.inputs[input_index].tap_key_origins.get(&x_only_pubkey)
|
||||||
{
|
{
|
||||||
let (hash, hash_ty) = Tap::sighash(psbt, input_index, None)?;
|
let leaf_hashes = leaf_hashes
|
||||||
sign_psbt_schnorr(
|
.iter()
|
||||||
&self.inner,
|
.filter(|lh| {
|
||||||
x_only_pubkey,
|
// Removing the leaves we shouldn't sign for
|
||||||
None,
|
let should_sign = match &sign_options.tap_leaves_options {
|
||||||
&mut psbt.inputs[input_index],
|
TapLeavesOptions::All => true,
|
||||||
hash,
|
TapLeavesOptions::Include(v) => v.contains(lh),
|
||||||
hash_ty,
|
TapLeavesOptions::Exclude(v) => !v.contains(lh),
|
||||||
secp,
|
TapLeavesOptions::None => false,
|
||||||
);
|
};
|
||||||
|
// Filtering out the leaves without our key
|
||||||
|
should_sign
|
||||||
|
&& !psbt.inputs[input_index]
|
||||||
|
.tap_script_sigs
|
||||||
|
.contains_key(&(x_only_pubkey, **lh))
|
||||||
|
})
|
||||||
|
.cloned()
|
||||||
|
.collect::<Vec<_>>();
|
||||||
|
for lh in leaf_hashes {
|
||||||
|
let (sighash, sighash_type) =
|
||||||
|
compute_tap_sighash(psbt, input_index, Some(lh))?;
|
||||||
|
sign_psbt_schnorr(
|
||||||
|
&self.inner,
|
||||||
|
x_only_pubkey,
|
||||||
|
Some(lh),
|
||||||
|
&mut psbt.inputs[input_index],
|
||||||
|
sighash,
|
||||||
|
sighash_type,
|
||||||
|
secp,
|
||||||
|
);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
SignerContext::Segwitv0 | SignerContext::Legacy => {
|
||||||
if let Some((leaf_hashes, _)) =
|
if psbt.inputs[input_index].partial_sigs.contains_key(&pubkey) {
|
||||||
psbt.inputs[input_index].tap_key_origins.get(&x_only_pubkey)
|
return Ok(());
|
||||||
{
|
|
||||||
let leaf_hashes = leaf_hashes
|
|
||||||
.iter()
|
|
||||||
.filter(|lh| {
|
|
||||||
// Removing the leaves we shouldn't sign for
|
|
||||||
let should_sign = match &sign_options.tap_leaves_options {
|
|
||||||
TapLeavesOptions::All => true,
|
|
||||||
TapLeavesOptions::Include(v) => v.contains(lh),
|
|
||||||
TapLeavesOptions::Exclude(v) => !v.contains(lh),
|
|
||||||
TapLeavesOptions::None => false,
|
|
||||||
};
|
|
||||||
// Filtering out the leaves without our key
|
|
||||||
should_sign
|
|
||||||
&& !psbt.inputs[input_index]
|
|
||||||
.tap_script_sigs
|
|
||||||
.contains_key(&(x_only_pubkey, **lh))
|
|
||||||
})
|
|
||||||
.cloned()
|
|
||||||
.collect::<Vec<_>>();
|
|
||||||
for lh in leaf_hashes {
|
|
||||||
let (hash, hash_ty) = Tap::sighash(psbt, input_index, Some(lh))?;
|
|
||||||
sign_psbt_schnorr(
|
|
||||||
&self.inner,
|
|
||||||
x_only_pubkey,
|
|
||||||
Some(lh),
|
|
||||||
&mut psbt.inputs[input_index],
|
|
||||||
hash,
|
|
||||||
hash_ty,
|
|
||||||
secp,
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
return Ok(());
|
let mut sighasher = sighash::SighashCache::new(psbt.unsigned_tx.clone());
|
||||||
|
let (msg, sighash_type) = psbt
|
||||||
|
.sighash_ecdsa(input_index, &mut sighasher)
|
||||||
|
.map_err(SignerError::Psbt)?;
|
||||||
|
|
||||||
|
sign_psbt_ecdsa(
|
||||||
|
&self.inner,
|
||||||
|
pubkey,
|
||||||
|
&mut psbt.inputs[input_index],
|
||||||
|
&msg,
|
||||||
|
sighash_type,
|
||||||
|
secp,
|
||||||
|
sign_options.allow_grinding,
|
||||||
|
);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if psbt.inputs[input_index].partial_sigs.contains_key(&pubkey) {
|
|
||||||
return Ok(());
|
|
||||||
}
|
|
||||||
|
|
||||||
let (hash, hash_ty) = match self.ctx {
|
|
||||||
SignerContext::Segwitv0 => {
|
|
||||||
let (h, t) = Segwitv0::sighash(psbt, input_index, ())?;
|
|
||||||
let h = h.to_raw_hash();
|
|
||||||
(h, t)
|
|
||||||
}
|
|
||||||
SignerContext::Legacy => {
|
|
||||||
let (h, t) = Legacy::sighash(psbt, input_index, ())?;
|
|
||||||
let h = h.to_raw_hash();
|
|
||||||
(h, t)
|
|
||||||
}
|
|
||||||
_ => return Ok(()), // handled above
|
|
||||||
};
|
|
||||||
sign_psbt_ecdsa(
|
|
||||||
&self.inner,
|
|
||||||
pubkey,
|
|
||||||
&mut psbt.inputs[input_index],
|
|
||||||
hash,
|
|
||||||
hash_ty,
|
|
||||||
secp,
|
|
||||||
sign_options.allow_grinding,
|
|
||||||
);
|
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -548,21 +543,23 @@ fn sign_psbt_ecdsa(
|
|||||||
secret_key: &secp256k1::SecretKey,
|
secret_key: &secp256k1::SecretKey,
|
||||||
pubkey: PublicKey,
|
pubkey: PublicKey,
|
||||||
psbt_input: &mut psbt::Input,
|
psbt_input: &mut psbt::Input,
|
||||||
hash: impl bitcoin::hashes::Hash + bitcoin::secp256k1::ThirtyTwoByteHash,
|
msg: &Message,
|
||||||
hash_ty: EcdsaSighashType,
|
sighash_type: EcdsaSighashType,
|
||||||
secp: &SecpCtx,
|
secp: &SecpCtx,
|
||||||
allow_grinding: bool,
|
allow_grinding: bool,
|
||||||
) {
|
) {
|
||||||
let msg = &Message::from(hash);
|
let signature = if allow_grinding {
|
||||||
let sig = if allow_grinding {
|
|
||||||
secp.sign_ecdsa_low_r(msg, secret_key)
|
secp.sign_ecdsa_low_r(msg, secret_key)
|
||||||
} else {
|
} else {
|
||||||
secp.sign_ecdsa(msg, secret_key)
|
secp.sign_ecdsa(msg, secret_key)
|
||||||
};
|
};
|
||||||
secp.verify_ecdsa(msg, &sig, &pubkey.inner)
|
secp.verify_ecdsa(msg, &signature, &pubkey.inner)
|
||||||
.expect("invalid or corrupted ecdsa signature");
|
.expect("invalid or corrupted ecdsa signature");
|
||||||
|
|
||||||
let final_signature = ecdsa::Signature { sig, hash_ty };
|
let final_signature = ecdsa::Signature {
|
||||||
|
signature,
|
||||||
|
sighash_type,
|
||||||
|
};
|
||||||
psbt_input.partial_sigs.insert(pubkey, final_signature);
|
psbt_input.partial_sigs.insert(pubkey, final_signature);
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -572,8 +569,8 @@ fn sign_psbt_schnorr(
|
|||||||
pubkey: XOnlyPublicKey,
|
pubkey: XOnlyPublicKey,
|
||||||
leaf_hash: Option<taproot::TapLeafHash>,
|
leaf_hash: Option<taproot::TapLeafHash>,
|
||||||
psbt_input: &mut psbt::Input,
|
psbt_input: &mut psbt::Input,
|
||||||
hash: TapSighash,
|
sighash: TapSighash,
|
||||||
hash_ty: TapSighashType,
|
sighash_type: TapSighashType,
|
||||||
secp: &SecpCtx,
|
secp: &SecpCtx,
|
||||||
) {
|
) {
|
||||||
let keypair = secp256k1::Keypair::from_seckey_slice(secp, secret_key.as_ref()).unwrap();
|
let keypair = secp256k1::Keypair::from_seckey_slice(secp, secret_key.as_ref()).unwrap();
|
||||||
@@ -584,12 +581,15 @@ fn sign_psbt_schnorr(
|
|||||||
Some(_) => keypair, // no tweak for script spend
|
Some(_) => keypair, // no tweak for script spend
|
||||||
};
|
};
|
||||||
|
|
||||||
let msg = &Message::from(hash);
|
let msg = &Message::from(sighash);
|
||||||
let sig = secp.sign_schnorr(msg, &keypair);
|
let signature = secp.sign_schnorr_no_aux_rand(msg, &keypair);
|
||||||
secp.verify_schnorr(&sig, msg, &XOnlyPublicKey::from_keypair(&keypair).0)
|
secp.verify_schnorr(&signature, msg, &XOnlyPublicKey::from_keypair(&keypair).0)
|
||||||
.expect("invalid or corrupted schnorr signature");
|
.expect("invalid or corrupted schnorr signature");
|
||||||
|
|
||||||
let final_signature = taproot::Signature { sig, hash_ty };
|
let final_signature = taproot::Signature {
|
||||||
|
signature,
|
||||||
|
sighash_type,
|
||||||
|
};
|
||||||
|
|
||||||
if let Some(lh) = leaf_hash {
|
if let Some(lh) = leaf_hash {
|
||||||
psbt_input
|
psbt_input
|
||||||
@@ -776,21 +776,6 @@ pub struct SignOptions {
|
|||||||
/// Defaults to `false` which will only allow signing using `SIGHASH_ALL`.
|
/// Defaults to `false` which will only allow signing using `SIGHASH_ALL`.
|
||||||
pub allow_all_sighashes: bool,
|
pub allow_all_sighashes: bool,
|
||||||
|
|
||||||
/// Whether to remove partial signatures from the PSBT inputs while finalizing PSBT.
|
|
||||||
///
|
|
||||||
/// Defaults to `true` which will remove partial signatures during finalization.
|
|
||||||
pub remove_partial_sigs: bool,
|
|
||||||
|
|
||||||
/// Whether to remove taproot specific fields from the PSBT on finalization.
|
|
||||||
///
|
|
||||||
/// For inputs this includes the taproot internal key, merkle root, and individual
|
|
||||||
/// scripts and signatures. For both inputs and outputs it includes key origin info.
|
|
||||||
///
|
|
||||||
/// Defaults to `true` which will remove all of the above mentioned fields when finalizing.
|
|
||||||
///
|
|
||||||
/// See [`BIP371`](https://github.com/bitcoin/bips/blob/master/bip-0371.mediawiki) for details.
|
|
||||||
pub remove_taproot_extras: bool,
|
|
||||||
|
|
||||||
/// Whether to try finalizing the PSBT after the inputs are signed.
|
/// Whether to try finalizing the PSBT after the inputs are signed.
|
||||||
///
|
///
|
||||||
/// Defaults to `true` which will try finalizing PSBT after inputs are signed.
|
/// Defaults to `true` which will try finalizing PSBT after inputs are signed.
|
||||||
@@ -835,8 +820,6 @@ impl Default for SignOptions {
|
|||||||
trust_witness_utxo: false,
|
trust_witness_utxo: false,
|
||||||
assume_height: None,
|
assume_height: None,
|
||||||
allow_all_sighashes: false,
|
allow_all_sighashes: false,
|
||||||
remove_partial_sigs: true,
|
|
||||||
remove_taproot_extras: true,
|
|
||||||
try_finalize: true,
|
try_finalize: true,
|
||||||
tap_leaves_options: TapLeavesOptions::default(),
|
tap_leaves_options: TapLeavesOptions::default(),
|
||||||
sign_with_tap_internal_key: true,
|
sign_with_tap_internal_key: true,
|
||||||
@@ -845,198 +828,53 @@ impl Default for SignOptions {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) trait ComputeSighash {
|
/// Computes the taproot sighash.
|
||||||
type Extra;
|
fn compute_tap_sighash(
|
||||||
type Sighash;
|
psbt: &Psbt,
|
||||||
type SighashType;
|
input_index: usize,
|
||||||
|
extra: Option<taproot::TapLeafHash>,
|
||||||
fn sighash(
|
) -> Result<(sighash::TapSighash, TapSighashType), SignerError> {
|
||||||
psbt: &Psbt,
|
if input_index >= psbt.inputs.len() || input_index >= psbt.unsigned_tx.input.len() {
|
||||||
input_index: usize,
|
return Err(SignerError::InputIndexOutOfRange);
|
||||||
extra: Self::Extra,
|
|
||||||
) -> Result<(Self::Sighash, Self::SighashType), SignerError>;
|
|
||||||
}
|
|
||||||
|
|
||||||
impl ComputeSighash for Legacy {
|
|
||||||
type Extra = ();
|
|
||||||
type Sighash = sighash::LegacySighash;
|
|
||||||
type SighashType = EcdsaSighashType;
|
|
||||||
|
|
||||||
fn sighash(
|
|
||||||
psbt: &Psbt,
|
|
||||||
input_index: usize,
|
|
||||||
_extra: (),
|
|
||||||
) -> Result<(Self::Sighash, Self::SighashType), SignerError> {
|
|
||||||
if input_index >= psbt.inputs.len() || input_index >= psbt.unsigned_tx.input.len() {
|
|
||||||
return Err(SignerError::InputIndexOutOfRange);
|
|
||||||
}
|
|
||||||
|
|
||||||
let psbt_input = &psbt.inputs[input_index];
|
|
||||||
let tx_input = &psbt.unsigned_tx.input[input_index];
|
|
||||||
|
|
||||||
let sighash = psbt_input
|
|
||||||
.sighash_type
|
|
||||||
.unwrap_or_else(|| EcdsaSighashType::All.into())
|
|
||||||
.ecdsa_hash_ty()
|
|
||||||
.map_err(|_| SignerError::InvalidSighash)?;
|
|
||||||
let script = match psbt_input.redeem_script {
|
|
||||||
Some(ref redeem_script) => redeem_script.clone(),
|
|
||||||
None => {
|
|
||||||
let non_witness_utxo = psbt_input
|
|
||||||
.non_witness_utxo
|
|
||||||
.as_ref()
|
|
||||||
.ok_or(SignerError::MissingNonWitnessUtxo)?;
|
|
||||||
let prev_out = non_witness_utxo
|
|
||||||
.output
|
|
||||||
.get(tx_input.previous_output.vout as usize)
|
|
||||||
.ok_or(SignerError::InvalidNonWitnessUtxo)?;
|
|
||||||
|
|
||||||
prev_out.script_pubkey.clone()
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
Ok((
|
|
||||||
sighash::SighashCache::new(&psbt.unsigned_tx).legacy_signature_hash(
|
|
||||||
input_index,
|
|
||||||
&script,
|
|
||||||
sighash.to_u32(),
|
|
||||||
)?,
|
|
||||||
sighash,
|
|
||||||
))
|
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
impl ComputeSighash for Segwitv0 {
|
let psbt_input = &psbt.inputs[input_index];
|
||||||
type Extra = ();
|
|
||||||
type Sighash = sighash::SegwitV0Sighash;
|
|
||||||
type SighashType = EcdsaSighashType;
|
|
||||||
|
|
||||||
fn sighash(
|
let sighash_type = psbt_input
|
||||||
psbt: &Psbt,
|
.sighash_type
|
||||||
input_index: usize,
|
.unwrap_or_else(|| TapSighashType::Default.into())
|
||||||
_extra: (),
|
.taproot_hash_ty()
|
||||||
) -> Result<(Self::Sighash, Self::SighashType), SignerError> {
|
.map_err(|_| SignerError::InvalidSighash)?;
|
||||||
if input_index >= psbt.inputs.len() || input_index >= psbt.unsigned_tx.input.len() {
|
let witness_utxos = (0..psbt.inputs.len())
|
||||||
return Err(SignerError::InputIndexOutOfRange);
|
.map(|i| psbt.get_utxo_for(i))
|
||||||
}
|
.collect::<Vec<_>>();
|
||||||
|
let mut all_witness_utxos = vec![];
|
||||||
|
|
||||||
let psbt_input = &psbt.inputs[input_index];
|
let mut cache = sighash::SighashCache::new(&psbt.unsigned_tx);
|
||||||
let tx_input = &psbt.unsigned_tx.input[input_index];
|
let is_anyone_can_pay = psbt::PsbtSighashType::from(sighash_type).to_u32() & 0x80 != 0;
|
||||||
|
let prevouts = if is_anyone_can_pay {
|
||||||
|
sighash::Prevouts::One(
|
||||||
|
input_index,
|
||||||
|
witness_utxos[input_index]
|
||||||
|
.as_ref()
|
||||||
|
.ok_or(SignerError::MissingWitnessUtxo)?,
|
||||||
|
)
|
||||||
|
} else if witness_utxos.iter().all(Option::is_some) {
|
||||||
|
all_witness_utxos.extend(witness_utxos.iter().filter_map(|x| x.as_ref()));
|
||||||
|
sighash::Prevouts::All(&all_witness_utxos)
|
||||||
|
} else {
|
||||||
|
return Err(SignerError::MissingWitnessUtxo);
|
||||||
|
};
|
||||||
|
|
||||||
let sighash_type = psbt_input
|
// Assume no OP_CODESEPARATOR
|
||||||
.sighash_type
|
let extra = extra.map(|leaf_hash| (leaf_hash, 0xFFFFFFFF));
|
||||||
.unwrap_or_else(|| EcdsaSighashType::All.into())
|
|
||||||
.ecdsa_hash_ty()
|
|
||||||
.map_err(|_| SignerError::InvalidSighash)?;
|
|
||||||
|
|
||||||
// Always try first with the non-witness utxo
|
Ok((
|
||||||
let utxo = if let Some(prev_tx) = &psbt_input.non_witness_utxo {
|
cache
|
||||||
// Check the provided prev-tx
|
.taproot_signature_hash(input_index, &prevouts, None, extra, sighash_type)
|
||||||
if prev_tx.txid() != tx_input.previous_output.txid {
|
.map_err(SignerError::SighashTaproot)?,
|
||||||
return Err(SignerError::InvalidNonWitnessUtxo);
|
sighash_type,
|
||||||
}
|
))
|
||||||
|
|
||||||
// The output should be present, if it's missing the `non_witness_utxo` is invalid
|
|
||||||
prev_tx
|
|
||||||
.output
|
|
||||||
.get(tx_input.previous_output.vout as usize)
|
|
||||||
.ok_or(SignerError::InvalidNonWitnessUtxo)?
|
|
||||||
} else if let Some(witness_utxo) = &psbt_input.witness_utxo {
|
|
||||||
// Fallback to the witness_utxo. If we aren't allowed to use it, signing should fail
|
|
||||||
// before we get to this point
|
|
||||||
witness_utxo
|
|
||||||
} else {
|
|
||||||
// Nothing has been provided
|
|
||||||
return Err(SignerError::MissingNonWitnessUtxo);
|
|
||||||
};
|
|
||||||
let value = utxo.value;
|
|
||||||
|
|
||||||
let mut sighasher = sighash::SighashCache::new(&psbt.unsigned_tx);
|
|
||||||
|
|
||||||
let sighash = match psbt_input.witness_script {
|
|
||||||
Some(ref witness_script) => {
|
|
||||||
sighasher.p2wsh_signature_hash(input_index, witness_script, value, sighash_type)?
|
|
||||||
}
|
|
||||||
None => {
|
|
||||||
if utxo.script_pubkey.is_p2wpkh() {
|
|
||||||
sighasher.p2wpkh_signature_hash(
|
|
||||||
input_index,
|
|
||||||
&utxo.script_pubkey,
|
|
||||||
value,
|
|
||||||
sighash_type,
|
|
||||||
)?
|
|
||||||
} else if psbt_input
|
|
||||||
.redeem_script
|
|
||||||
.as_ref()
|
|
||||||
.map(|s| s.is_p2wpkh())
|
|
||||||
.unwrap_or(false)
|
|
||||||
{
|
|
||||||
let script_pubkey = psbt_input.redeem_script.as_ref().unwrap();
|
|
||||||
sighasher.p2wpkh_signature_hash(
|
|
||||||
input_index,
|
|
||||||
script_pubkey,
|
|
||||||
value,
|
|
||||||
sighash_type,
|
|
||||||
)?
|
|
||||||
} else {
|
|
||||||
return Err(SignerError::MissingWitnessScript);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
};
|
|
||||||
Ok((sighash, sighash_type))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl ComputeSighash for Tap {
|
|
||||||
type Extra = Option<taproot::TapLeafHash>;
|
|
||||||
type Sighash = TapSighash;
|
|
||||||
type SighashType = TapSighashType;
|
|
||||||
|
|
||||||
fn sighash(
|
|
||||||
psbt: &Psbt,
|
|
||||||
input_index: usize,
|
|
||||||
extra: Self::Extra,
|
|
||||||
) -> Result<(Self::Sighash, TapSighashType), SignerError> {
|
|
||||||
if input_index >= psbt.inputs.len() || input_index >= psbt.unsigned_tx.input.len() {
|
|
||||||
return Err(SignerError::InputIndexOutOfRange);
|
|
||||||
}
|
|
||||||
|
|
||||||
let psbt_input = &psbt.inputs[input_index];
|
|
||||||
|
|
||||||
let sighash_type = psbt_input
|
|
||||||
.sighash_type
|
|
||||||
.unwrap_or_else(|| TapSighashType::Default.into())
|
|
||||||
.taproot_hash_ty()
|
|
||||||
.map_err(|_| SignerError::InvalidSighash)?;
|
|
||||||
let witness_utxos = (0..psbt.inputs.len())
|
|
||||||
.map(|i| psbt.get_utxo_for(i))
|
|
||||||
.collect::<Vec<_>>();
|
|
||||||
let mut all_witness_utxos = vec![];
|
|
||||||
|
|
||||||
let mut cache = sighash::SighashCache::new(&psbt.unsigned_tx);
|
|
||||||
let is_anyone_can_pay = psbt::PsbtSighashType::from(sighash_type).to_u32() & 0x80 != 0;
|
|
||||||
let prevouts = if is_anyone_can_pay {
|
|
||||||
sighash::Prevouts::One(
|
|
||||||
input_index,
|
|
||||||
witness_utxos[input_index]
|
|
||||||
.as_ref()
|
|
||||||
.ok_or(SignerError::MissingWitnessUtxo)?,
|
|
||||||
)
|
|
||||||
} else if witness_utxos.iter().all(Option::is_some) {
|
|
||||||
all_witness_utxos.extend(witness_utxos.iter().filter_map(|x| x.as_ref()));
|
|
||||||
sighash::Prevouts::All(&all_witness_utxos)
|
|
||||||
} else {
|
|
||||||
return Err(SignerError::MissingWitnessUtxo);
|
|
||||||
};
|
|
||||||
|
|
||||||
// Assume no OP_CODESEPARATOR
|
|
||||||
let extra = extra.map(|leaf_hash| (leaf_hash, 0xFFFFFFFF));
|
|
||||||
|
|
||||||
Ok((
|
|
||||||
cache.taproot_signature_hash(input_index, &prevouts, None, extra, sighash_type)?,
|
|
||||||
sighash_type,
|
|
||||||
))
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl PartialOrd for SignersContainerKey {
|
impl PartialOrd for SignersContainerKey {
|
||||||
@@ -16,11 +16,9 @@
|
|||||||
//! ```
|
//! ```
|
||||||
//! # use std::str::FromStr;
|
//! # use std::str::FromStr;
|
||||||
//! # use bitcoin::*;
|
//! # use bitcoin::*;
|
||||||
//! # use bdk::*;
|
//! # use bdk_wallet::*;
|
||||||
//! # use bdk::wallet::ChangeSet;
|
//! # use bdk_wallet::ChangeSet;
|
||||||
//! # use bdk::wallet::error::CreateTxError;
|
//! # use bdk_wallet::error::CreateTxError;
|
||||||
//! # use bdk::wallet::tx_builder::CreateTx;
|
|
||||||
//! # use bdk_persist::PersistBackend;
|
|
||||||
//! # use anyhow::Error;
|
//! # use anyhow::Error;
|
||||||
//! # let to_address = Address::from_str("2N4eQYCbKUHCCTUjBJeHcJp9ok6J2GZsTDt").unwrap().assume_checked();
|
//! # let to_address = Address::from_str("2N4eQYCbKUHCCTUjBJeHcJp9ok6J2GZsTDt").unwrap().assume_checked();
|
||||||
//! # let mut wallet = doctest_wallet!();
|
//! # let mut wallet = doctest_wallet!();
|
||||||
@@ -43,31 +41,23 @@
|
|||||||
use alloc::{boxed::Box, rc::Rc, string::String, vec::Vec};
|
use alloc::{boxed::Box, rc::Rc, string::String, vec::Vec};
|
||||||
use core::cell::RefCell;
|
use core::cell::RefCell;
|
||||||
use core::fmt;
|
use core::fmt;
|
||||||
use core::marker::PhantomData;
|
|
||||||
|
use alloc::sync::Arc;
|
||||||
|
|
||||||
use bitcoin::psbt::{self, Psbt};
|
use bitcoin::psbt::{self, Psbt};
|
||||||
use bitcoin::script::PushBytes;
|
use bitcoin::script::PushBytes;
|
||||||
use bitcoin::{absolute, Amount, FeeRate, OutPoint, ScriptBuf, Sequence, Transaction, Txid};
|
use bitcoin::{
|
||||||
|
absolute, Amount, FeeRate, OutPoint, ScriptBuf, Sequence, Transaction, TxIn, TxOut, Txid,
|
||||||
|
Weight,
|
||||||
|
};
|
||||||
|
use rand_core::RngCore;
|
||||||
|
|
||||||
use super::coin_selection::{CoinSelectionAlgorithm, DefaultCoinSelectionAlgorithm};
|
use super::coin_selection::CoinSelectionAlgorithm;
|
||||||
|
use super::utils::shuffle_slice;
|
||||||
use super::{CreateTxError, Wallet};
|
use super::{CreateTxError, Wallet};
|
||||||
use crate::collections::{BTreeMap, HashSet};
|
use crate::collections::{BTreeMap, HashSet};
|
||||||
use crate::{KeychainKind, LocalOutput, Utxo, WeightedUtxo};
|
use crate::{KeychainKind, LocalOutput, Utxo, WeightedUtxo};
|
||||||
|
|
||||||
/// Context in which the [`TxBuilder`] is valid
|
|
||||||
pub trait TxBuilderContext: core::fmt::Debug + Default + Clone {}
|
|
||||||
|
|
||||||
/// Marker type to indicate the [`TxBuilder`] is being used to create a new transaction (as opposed
|
|
||||||
/// to bumping the fee of an existing one).
|
|
||||||
#[derive(Debug, Default, Clone)]
|
|
||||||
pub struct CreateTx;
|
|
||||||
impl TxBuilderContext for CreateTx {}
|
|
||||||
|
|
||||||
/// Marker type to indicate the [`TxBuilder`] is being used to bump the fee of an existing transaction.
|
|
||||||
#[derive(Debug, Default, Clone)]
|
|
||||||
pub struct BumpFee;
|
|
||||||
impl TxBuilderContext for BumpFee {}
|
|
||||||
|
|
||||||
/// A transaction builder
|
/// A transaction builder
|
||||||
///
|
///
|
||||||
/// A `TxBuilder` is created by calling [`build_tx`] or [`build_fee_bump`] on a wallet. After
|
/// A `TxBuilder` is created by calling [`build_tx`] or [`build_fee_bump`] on a wallet. After
|
||||||
@@ -78,13 +68,12 @@ impl TxBuilderContext for BumpFee {}
|
|||||||
/// as in the following example:
|
/// as in the following example:
|
||||||
///
|
///
|
||||||
/// ```
|
/// ```
|
||||||
/// # use bdk::*;
|
/// # use bdk_wallet::*;
|
||||||
/// # use bdk::wallet::tx_builder::*;
|
/// # use bdk_wallet::tx_builder::*;
|
||||||
/// # use bitcoin::*;
|
/// # use bitcoin::*;
|
||||||
/// # use core::str::FromStr;
|
/// # use core::str::FromStr;
|
||||||
/// # use bdk::wallet::ChangeSet;
|
/// # use bdk_wallet::ChangeSet;
|
||||||
/// # use bdk::wallet::error::CreateTxError;
|
/// # use bdk_wallet::error::CreateTxError;
|
||||||
/// # use bdk_persist::PersistBackend;
|
|
||||||
/// # use anyhow::Error;
|
/// # use anyhow::Error;
|
||||||
/// # let mut wallet = doctest_wallet!();
|
/// # let mut wallet = doctest_wallet!();
|
||||||
/// # let addr1 = Address::from_str("2N4eQYCbKUHCCTUjBJeHcJp9ok6J2GZsTDt").unwrap().assume_checked();
|
/// # let addr1 = Address::from_str("2N4eQYCbKUHCCTUjBJeHcJp9ok6J2GZsTDt").unwrap().assume_checked();
|
||||||
@@ -123,11 +112,10 @@ impl TxBuilderContext for BumpFee {}
|
|||||||
/// [`finish`]: Self::finish
|
/// [`finish`]: Self::finish
|
||||||
/// [`coin_selection`]: Self::coin_selection
|
/// [`coin_selection`]: Self::coin_selection
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
pub struct TxBuilder<'a, Cs, Ctx> {
|
pub struct TxBuilder<'a, Cs> {
|
||||||
pub(crate) wallet: Rc<RefCell<&'a mut Wallet>>,
|
pub(crate) wallet: Rc<RefCell<&'a mut Wallet>>,
|
||||||
pub(crate) params: TxParams,
|
pub(crate) params: TxParams,
|
||||||
pub(crate) coin_selection: Cs,
|
pub(crate) coin_selection: Cs,
|
||||||
pub(crate) phantom: PhantomData<Ctx>,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// The parameters for transaction creation sans coin selection algorithm.
|
/// The parameters for transaction creation sans coin selection algorithm.
|
||||||
@@ -175,19 +163,18 @@ impl Default for FeePolicy {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a, Cs: Clone, Ctx> Clone for TxBuilder<'a, Cs, Ctx> {
|
impl<'a, Cs: Clone> Clone for TxBuilder<'a, Cs> {
|
||||||
fn clone(&self) -> Self {
|
fn clone(&self) -> Self {
|
||||||
TxBuilder {
|
TxBuilder {
|
||||||
wallet: self.wallet.clone(),
|
wallet: self.wallet.clone(),
|
||||||
params: self.params.clone(),
|
params: self.params.clone(),
|
||||||
coin_selection: self.coin_selection.clone(),
|
coin_selection: self.coin_selection.clone(),
|
||||||
phantom: PhantomData,
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// methods supported by both contexts, for any CoinSelectionAlgorithm
|
// Methods supported for any CoinSelectionAlgorithm.
|
||||||
impl<'a, Cs, Ctx> TxBuilder<'a, Cs, Ctx> {
|
impl<'a, Cs> TxBuilder<'a, Cs> {
|
||||||
/// Set a custom fee rate.
|
/// Set a custom fee rate.
|
||||||
///
|
///
|
||||||
/// This method sets the mining fee paid by the transaction as a rate on its size.
|
/// This method sets the mining fee paid by the transaction as a rate on its size.
|
||||||
@@ -204,16 +191,16 @@ impl<'a, Cs, Ctx> TxBuilder<'a, Cs, Ctx> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// Set an absolute fee
|
/// Set an absolute fee
|
||||||
/// The fee_absolute method refers to the absolute transaction fee in satoshis (sats).
|
/// The fee_absolute method refers to the absolute transaction fee in [`Amount`].
|
||||||
/// If anyone sets both the fee_absolute method and the fee_rate method,
|
/// If anyone sets both the `fee_absolute` method and the `fee_rate` method,
|
||||||
/// the FeePolicy enum will be set by whichever method was called last,
|
/// the `FeePolicy` enum will be set by whichever method was called last,
|
||||||
/// as the FeeRate and FeeAmount are mutually exclusive.
|
/// as the [`FeeRate`] and `FeeAmount` are mutually exclusive.
|
||||||
///
|
///
|
||||||
/// Note that this is really a minimum absolute fee -- it's possible to
|
/// Note that this is really a minimum absolute fee -- it's possible to
|
||||||
/// overshoot it slightly since adding a change output to drain the remaining
|
/// overshoot it slightly since adding a change output to drain the remaining
|
||||||
/// excess might not be viable.
|
/// excess might not be viable.
|
||||||
pub fn fee_absolute(&mut self, fee_amount: u64) -> &mut Self {
|
pub fn fee_absolute(&mut self, fee_amount: Amount) -> &mut Self {
|
||||||
self.params.fee_policy = Some(FeePolicy::FeeAmount(fee_amount));
|
self.params.fee_policy = Some(FeePolicy::FeeAmount(fee_amount.to_sat()));
|
||||||
self
|
self
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -263,7 +250,7 @@ impl<'a, Cs, Ctx> TxBuilder<'a, Cs, Ctx> {
|
|||||||
/// # use std::str::FromStr;
|
/// # use std::str::FromStr;
|
||||||
/// # use std::collections::BTreeMap;
|
/// # use std::collections::BTreeMap;
|
||||||
/// # use bitcoin::*;
|
/// # use bitcoin::*;
|
||||||
/// # use bdk::*;
|
/// # use bdk_wallet::*;
|
||||||
/// # let to_address =
|
/// # let to_address =
|
||||||
/// Address::from_str("2N4eQYCbKUHCCTUjBJeHcJp9ok6J2GZsTDt")
|
/// Address::from_str("2N4eQYCbKUHCCTUjBJeHcJp9ok6J2GZsTDt")
|
||||||
/// .unwrap()
|
/// .unwrap()
|
||||||
@@ -312,7 +299,7 @@ impl<'a, Cs, Ctx> TxBuilder<'a, Cs, Ctx> {
|
|||||||
.collect::<Result<Vec<_>, _>>()?;
|
.collect::<Result<Vec<_>, _>>()?;
|
||||||
|
|
||||||
for utxo in utxos {
|
for utxo in utxos {
|
||||||
let descriptor = wallet.get_descriptor_for_keychain(utxo.keychain);
|
let descriptor = wallet.public_descriptor(utxo.keychain);
|
||||||
let satisfaction_weight = descriptor.max_weight_to_satisfy().unwrap();
|
let satisfaction_weight = descriptor.max_weight_to_satisfy().unwrap();
|
||||||
self.params.utxos.push(WeightedUtxo {
|
self.params.utxos.push(WeightedUtxo {
|
||||||
satisfaction_weight,
|
satisfaction_weight,
|
||||||
@@ -382,7 +369,7 @@ impl<'a, Cs, Ctx> TxBuilder<'a, Cs, Ctx> {
|
|||||||
&mut self,
|
&mut self,
|
||||||
outpoint: OutPoint,
|
outpoint: OutPoint,
|
||||||
psbt_input: psbt::Input,
|
psbt_input: psbt::Input,
|
||||||
satisfaction_weight: usize,
|
satisfaction_weight: Weight,
|
||||||
) -> Result<&mut Self, AddForeignUtxoError> {
|
) -> Result<&mut Self, AddForeignUtxoError> {
|
||||||
self.add_foreign_utxo_with_sequence(
|
self.add_foreign_utxo_with_sequence(
|
||||||
outpoint,
|
outpoint,
|
||||||
@@ -397,15 +384,15 @@ impl<'a, Cs, Ctx> TxBuilder<'a, Cs, Ctx> {
|
|||||||
&mut self,
|
&mut self,
|
||||||
outpoint: OutPoint,
|
outpoint: OutPoint,
|
||||||
psbt_input: psbt::Input,
|
psbt_input: psbt::Input,
|
||||||
satisfaction_weight: usize,
|
satisfaction_weight: Weight,
|
||||||
sequence: Sequence,
|
sequence: Sequence,
|
||||||
) -> Result<&mut Self, AddForeignUtxoError> {
|
) -> Result<&mut Self, AddForeignUtxoError> {
|
||||||
if psbt_input.witness_utxo.is_none() {
|
if psbt_input.witness_utxo.is_none() {
|
||||||
match psbt_input.non_witness_utxo.as_ref() {
|
match psbt_input.non_witness_utxo.as_ref() {
|
||||||
Some(tx) => {
|
Some(tx) => {
|
||||||
if tx.txid() != outpoint.txid {
|
if tx.compute_txid() != outpoint.txid {
|
||||||
return Err(AddForeignUtxoError::InvalidTxid {
|
return Err(AddForeignUtxoError::InvalidTxid {
|
||||||
input_txid: tx.txid(),
|
input_txid: tx.compute_txid(),
|
||||||
foreign_utxo: outpoint,
|
foreign_utxo: outpoint,
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
@@ -553,18 +540,14 @@ impl<'a, Cs, Ctx> TxBuilder<'a, Cs, Ctx> {
|
|||||||
|
|
||||||
/// Choose the coin selection algorithm
|
/// Choose the coin selection algorithm
|
||||||
///
|
///
|
||||||
/// Overrides the [`DefaultCoinSelectionAlgorithm`].
|
/// Overrides the [`CoinSelectionAlgorithm`].
|
||||||
///
|
///
|
||||||
/// Note that this function consumes the builder and returns it so it is usually best to put this as the first call on the builder.
|
/// Note that this function consumes the builder and returns it so it is usually best to put this as the first call on the builder.
|
||||||
pub fn coin_selection<P: CoinSelectionAlgorithm>(
|
pub fn coin_selection<P: CoinSelectionAlgorithm>(self, coin_selection: P) -> TxBuilder<'a, P> {
|
||||||
self,
|
|
||||||
coin_selection: P,
|
|
||||||
) -> TxBuilder<'a, P, Ctx> {
|
|
||||||
TxBuilder {
|
TxBuilder {
|
||||||
wallet: self.wallet,
|
wallet: self.wallet,
|
||||||
params: self.params,
|
params: self.params,
|
||||||
coin_selection,
|
coin_selection,
|
||||||
phantom: PhantomData,
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -592,7 +575,7 @@ impl<'a, Cs, Ctx> TxBuilder<'a, Cs, Ctx> {
|
|||||||
///
|
///
|
||||||
/// This will be used to:
|
/// This will be used to:
|
||||||
/// 1. Set the nLockTime for preventing fee sniping.
|
/// 1. Set the nLockTime for preventing fee sniping.
|
||||||
/// **Note**: This will be ignored if you manually specify a nlocktime using [`TxBuilder::nlocktime`].
|
/// **Note**: This will be ignored if you manually specify a nlocktime using [`TxBuilder::nlocktime`].
|
||||||
/// 2. Decide whether coinbase outputs are mature or not. If the coinbase outputs are not
|
/// 2. Decide whether coinbase outputs are mature or not. If the coinbase outputs are not
|
||||||
/// mature at `current_height`, we ignore them in the coin selection.
|
/// mature at `current_height`, we ignore them in the coin selection.
|
||||||
/// If you want to create a transaction that spends immature coinbase inputs, manually
|
/// If you want to create a transaction that spends immature coinbase inputs, manually
|
||||||
@@ -612,18 +595,112 @@ impl<'a, Cs, Ctx> TxBuilder<'a, Cs, Ctx> {
|
|||||||
self.params.allow_dust = allow_dust;
|
self.params.allow_dust = allow_dust;
|
||||||
self
|
self
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Replace the recipients already added with a new list
|
||||||
|
pub fn set_recipients(&mut self, recipients: Vec<(ScriptBuf, Amount)>) -> &mut Self {
|
||||||
|
self.params.recipients = recipients
|
||||||
|
.into_iter()
|
||||||
|
.map(|(script, amount)| (script, amount.to_sat()))
|
||||||
|
.collect();
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Add a recipient to the internal list
|
||||||
|
pub fn add_recipient(&mut self, script_pubkey: ScriptBuf, amount: Amount) -> &mut Self {
|
||||||
|
self.params
|
||||||
|
.recipients
|
||||||
|
.push((script_pubkey, amount.to_sat()));
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Add data as an output, using OP_RETURN
|
||||||
|
pub fn add_data<T: AsRef<PushBytes>>(&mut self, data: &T) -> &mut Self {
|
||||||
|
let script = ScriptBuf::new_op_return(data);
|
||||||
|
self.add_recipient(script, Amount::ZERO);
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Sets the address to *drain* excess coins to.
|
||||||
|
///
|
||||||
|
/// Usually, when there are excess coins they are sent to a change address generated by the
|
||||||
|
/// wallet. This option replaces the usual change address with an arbitrary `script_pubkey` of
|
||||||
|
/// your choosing. Just as with a change output, if the drain output is not needed (the excess
|
||||||
|
/// coins are too small) it will not be included in the resulting transaction. The only
|
||||||
|
/// difference is that it is valid to use `drain_to` without setting any ordinary recipients
|
||||||
|
/// with [`add_recipient`] (but it is perfectly fine to add recipients as well).
|
||||||
|
///
|
||||||
|
/// If you choose not to set any recipients, you should provide the utxos that the
|
||||||
|
/// transaction should spend via [`add_utxos`].
|
||||||
|
///
|
||||||
|
/// # Example
|
||||||
|
///
|
||||||
|
/// `drain_to` is very useful for draining all the coins in a wallet with [`drain_wallet`] to a
|
||||||
|
/// single address.
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// # use std::str::FromStr;
|
||||||
|
/// # use bitcoin::*;
|
||||||
|
/// # use bdk_wallet::*;
|
||||||
|
/// # use bdk_wallet::ChangeSet;
|
||||||
|
/// # use bdk_wallet::error::CreateTxError;
|
||||||
|
/// # use anyhow::Error;
|
||||||
|
/// # let to_address =
|
||||||
|
/// Address::from_str("2N4eQYCbKUHCCTUjBJeHcJp9ok6J2GZsTDt")
|
||||||
|
/// .unwrap()
|
||||||
|
/// .assume_checked();
|
||||||
|
/// # let mut wallet = doctest_wallet!();
|
||||||
|
/// let mut tx_builder = wallet.build_tx();
|
||||||
|
///
|
||||||
|
/// tx_builder
|
||||||
|
/// // Spend all outputs in this wallet.
|
||||||
|
/// .drain_wallet()
|
||||||
|
/// // Send the excess (which is all the coins minus the fee) to this address.
|
||||||
|
/// .drain_to(to_address.script_pubkey())
|
||||||
|
/// .fee_rate(FeeRate::from_sat_per_vb(5).expect("valid feerate"))
|
||||||
|
/// .enable_rbf();
|
||||||
|
/// let psbt = tx_builder.finish()?;
|
||||||
|
/// # Ok::<(), anyhow::Error>(())
|
||||||
|
/// ```
|
||||||
|
///
|
||||||
|
/// [`add_recipient`]: Self::add_recipient
|
||||||
|
/// [`add_utxos`]: Self::add_utxos
|
||||||
|
/// [`drain_wallet`]: Self::drain_wallet
|
||||||
|
pub fn drain_to(&mut self, script_pubkey: ScriptBuf) -> &mut Self {
|
||||||
|
self.params.drain_to = Some(script_pubkey);
|
||||||
|
self
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a, Cs: CoinSelectionAlgorithm, Ctx> TxBuilder<'a, Cs, Ctx> {
|
impl<'a, Cs: CoinSelectionAlgorithm> TxBuilder<'a, Cs> {
|
||||||
/// Finish building the transaction.
|
/// Finish building the transaction.
|
||||||
///
|
///
|
||||||
|
/// Uses the thread-local random number generator (rng).
|
||||||
|
///
|
||||||
/// Returns a new [`Psbt`] per [`BIP174`].
|
/// Returns a new [`Psbt`] per [`BIP174`].
|
||||||
///
|
///
|
||||||
/// [`BIP174`]: https://github.com/bitcoin/bips/blob/master/bip-0174.mediawiki
|
/// [`BIP174`]: https://github.com/bitcoin/bips/blob/master/bip-0174.mediawiki
|
||||||
|
///
|
||||||
|
/// **WARNING**: To avoid change address reuse you must persist the changes resulting from one
|
||||||
|
/// or more calls to this method before closing the wallet. See [`Wallet::reveal_next_address`].
|
||||||
|
#[cfg(feature = "std")]
|
||||||
pub fn finish(self) -> Result<Psbt, CreateTxError> {
|
pub fn finish(self) -> Result<Psbt, CreateTxError> {
|
||||||
|
self.finish_with_aux_rand(&mut bitcoin::key::rand::thread_rng())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Finish building the transaction.
|
||||||
|
///
|
||||||
|
/// Uses a provided random number generator (rng).
|
||||||
|
///
|
||||||
|
/// Returns a new [`Psbt`] per [`BIP174`].
|
||||||
|
///
|
||||||
|
/// [`BIP174`]: https://github.com/bitcoin/bips/blob/master/bip-0174.mediawiki
|
||||||
|
///
|
||||||
|
/// **WARNING**: To avoid change address reuse you must persist the changes resulting from one
|
||||||
|
/// or more calls to this method before closing the wallet. See [`Wallet::reveal_next_address`].
|
||||||
|
pub fn finish_with_aux_rand(self, rng: &mut impl RngCore) -> Result<Psbt, CreateTxError> {
|
||||||
self.wallet
|
self.wallet
|
||||||
.borrow_mut()
|
.borrow_mut()
|
||||||
.create_tx(self.coin_selection, self.params)
|
.create_tx(self.coin_selection, self.params, rng)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -689,171 +766,60 @@ impl fmt::Display for AddForeignUtxoError {
|
|||||||
#[cfg(feature = "std")]
|
#[cfg(feature = "std")]
|
||||||
impl std::error::Error for AddForeignUtxoError {}
|
impl std::error::Error for AddForeignUtxoError {}
|
||||||
|
|
||||||
#[derive(Debug)]
|
type TxSort<T> = dyn Fn(&T, &T) -> core::cmp::Ordering;
|
||||||
/// Error returned from [`TxBuilder::allow_shrinking`]
|
|
||||||
pub enum AllowShrinkingError {
|
|
||||||
/// Script/PubKey was not in the original transaction
|
|
||||||
MissingScriptPubKey(ScriptBuf),
|
|
||||||
}
|
|
||||||
|
|
||||||
impl fmt::Display for AllowShrinkingError {
|
|
||||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
|
||||||
match self {
|
|
||||||
Self::MissingScriptPubKey(script_buf) => write!(
|
|
||||||
f,
|
|
||||||
"Script/PubKey was not in the original transaction: {}",
|
|
||||||
script_buf,
|
|
||||||
),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(feature = "std")]
|
|
||||||
impl std::error::Error for AllowShrinkingError {}
|
|
||||||
|
|
||||||
impl<'a, Cs: CoinSelectionAlgorithm> TxBuilder<'a, Cs, CreateTx> {
|
|
||||||
/// Replace the recipients already added with a new list
|
|
||||||
pub fn set_recipients(&mut self, recipients: Vec<(ScriptBuf, Amount)>) -> &mut Self {
|
|
||||||
self.params.recipients = recipients
|
|
||||||
.into_iter()
|
|
||||||
.map(|(script, amount)| (script, amount.to_sat()))
|
|
||||||
.collect();
|
|
||||||
self
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Add a recipient to the internal list
|
|
||||||
pub fn add_recipient(&mut self, script_pubkey: ScriptBuf, amount: Amount) -> &mut Self {
|
|
||||||
self.params
|
|
||||||
.recipients
|
|
||||||
.push((script_pubkey, amount.to_sat()));
|
|
||||||
self
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Add data as an output, using OP_RETURN
|
|
||||||
pub fn add_data<T: AsRef<PushBytes>>(&mut self, data: &T) -> &mut Self {
|
|
||||||
let script = ScriptBuf::new_op_return(data);
|
|
||||||
self.add_recipient(script, Amount::ZERO);
|
|
||||||
self
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Sets the address to *drain* excess coins to.
|
|
||||||
///
|
|
||||||
/// Usually, when there are excess coins they are sent to a change address generated by the
|
|
||||||
/// wallet. This option replaces the usual change address with an arbitrary `script_pubkey` of
|
|
||||||
/// your choosing. Just as with a change output, if the drain output is not needed (the excess
|
|
||||||
/// coins are too small) it will not be included in the resulting transaction. The only
|
|
||||||
/// difference is that it is valid to use `drain_to` without setting any ordinary recipients
|
|
||||||
/// with [`add_recipient`] (but it is perfectly fine to add recipients as well).
|
|
||||||
///
|
|
||||||
/// If you choose not to set any recipients, you should either provide the utxos that the
|
|
||||||
/// transaction should spend via [`add_utxos`], or set [`drain_wallet`] to spend all of them.
|
|
||||||
///
|
|
||||||
/// When bumping the fees of a transaction made with this option, you probably want to
|
|
||||||
/// use [`allow_shrinking`] to allow this output to be reduced to pay for the extra fees.
|
|
||||||
///
|
|
||||||
/// # Example
|
|
||||||
///
|
|
||||||
/// `drain_to` is very useful for draining all the coins in a wallet with [`drain_wallet`] to a
|
|
||||||
/// single address.
|
|
||||||
///
|
|
||||||
/// ```
|
|
||||||
/// # use std::str::FromStr;
|
|
||||||
/// # use bitcoin::*;
|
|
||||||
/// # use bdk::*;
|
|
||||||
/// # use bdk::wallet::ChangeSet;
|
|
||||||
/// # use bdk::wallet::error::CreateTxError;
|
|
||||||
/// # use bdk::wallet::tx_builder::CreateTx;
|
|
||||||
/// # use bdk_persist::PersistBackend;
|
|
||||||
/// # use anyhow::Error;
|
|
||||||
/// # let to_address =
|
|
||||||
/// Address::from_str("2N4eQYCbKUHCCTUjBJeHcJp9ok6J2GZsTDt")
|
|
||||||
/// .unwrap()
|
|
||||||
/// .assume_checked();
|
|
||||||
/// # let mut wallet = doctest_wallet!();
|
|
||||||
/// let mut tx_builder = wallet.build_tx();
|
|
||||||
///
|
|
||||||
/// tx_builder
|
|
||||||
/// // Spend all outputs in this wallet.
|
|
||||||
/// .drain_wallet()
|
|
||||||
/// // Send the excess (which is all the coins minus the fee) to this address.
|
|
||||||
/// .drain_to(to_address.script_pubkey())
|
|
||||||
/// .fee_rate(FeeRate::from_sat_per_vb(5).expect("valid feerate"))
|
|
||||||
/// .enable_rbf();
|
|
||||||
/// let psbt = tx_builder.finish()?;
|
|
||||||
/// # Ok::<(), anyhow::Error>(())
|
|
||||||
/// ```
|
|
||||||
///
|
|
||||||
/// [`allow_shrinking`]: Self::allow_shrinking
|
|
||||||
/// [`add_recipient`]: Self::add_recipient
|
|
||||||
/// [`add_utxos`]: Self::add_utxos
|
|
||||||
/// [`drain_wallet`]: Self::drain_wallet
|
|
||||||
pub fn drain_to(&mut self, script_pubkey: ScriptBuf) -> &mut Self {
|
|
||||||
self.params.drain_to = Some(script_pubkey);
|
|
||||||
self
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// methods supported only by bump_fee
|
|
||||||
impl<'a> TxBuilder<'a, DefaultCoinSelectionAlgorithm, BumpFee> {
|
|
||||||
/// Explicitly tells the wallet that it is allowed to reduce the amount of the output matching this
|
|
||||||
/// `script_pubkey` in order to bump the transaction fee. Without specifying this the wallet
|
|
||||||
/// will attempt to find a change output to shrink instead.
|
|
||||||
///
|
|
||||||
/// **Note** that the output may shrink to below the dust limit and therefore be removed. If it is
|
|
||||||
/// preserved then it is currently not guaranteed to be in the same position as it was
|
|
||||||
/// originally.
|
|
||||||
///
|
|
||||||
/// Returns an `Err` if `script_pubkey` can't be found among the recipients of the
|
|
||||||
/// transaction we are bumping.
|
|
||||||
pub fn allow_shrinking(
|
|
||||||
&mut self,
|
|
||||||
script_pubkey: ScriptBuf,
|
|
||||||
) -> Result<&mut Self, AllowShrinkingError> {
|
|
||||||
match self
|
|
||||||
.params
|
|
||||||
.recipients
|
|
||||||
.iter()
|
|
||||||
.position(|(recipient_script, _)| *recipient_script == script_pubkey)
|
|
||||||
{
|
|
||||||
Some(position) => {
|
|
||||||
self.params.recipients.remove(position);
|
|
||||||
self.params.drain_to = Some(script_pubkey);
|
|
||||||
Ok(self)
|
|
||||||
}
|
|
||||||
None => Err(AllowShrinkingError::MissingScriptPubKey(script_pubkey)),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Ordering of the transaction's inputs and outputs
|
/// Ordering of the transaction's inputs and outputs
|
||||||
#[derive(Default, Debug, Ord, PartialOrd, Eq, PartialEq, Hash, Clone, Copy)]
|
#[derive(Clone, Default)]
|
||||||
pub enum TxOrdering {
|
pub enum TxOrdering {
|
||||||
/// Randomized (default)
|
/// Randomized (default)
|
||||||
#[default]
|
#[default]
|
||||||
Shuffle,
|
Shuffle,
|
||||||
/// Unchanged
|
/// Unchanged
|
||||||
Untouched,
|
Untouched,
|
||||||
/// BIP69 / Lexicographic
|
/// Provide custom comparison functions for sorting
|
||||||
Bip69Lexicographic,
|
Custom {
|
||||||
|
/// Transaction inputs sort function
|
||||||
|
input_sort: Arc<TxSort<TxIn>>,
|
||||||
|
/// Transaction outputs sort function
|
||||||
|
output_sort: Arc<TxSort<TxOut>>,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
impl core::fmt::Debug for TxOrdering {
|
||||||
|
fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
|
||||||
|
match self {
|
||||||
|
TxOrdering::Shuffle => write!(f, "Shuffle"),
|
||||||
|
TxOrdering::Untouched => write!(f, "Untouched"),
|
||||||
|
TxOrdering::Custom { .. } => write!(f, "Custom"),
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl TxOrdering {
|
impl TxOrdering {
|
||||||
/// Sort transaction inputs and outputs by [`TxOrdering`] variant
|
/// Sort transaction inputs and outputs by [`TxOrdering`] variant.
|
||||||
|
///
|
||||||
|
/// Uses the thread-local random number generator (rng).
|
||||||
|
#[cfg(feature = "std")]
|
||||||
pub fn sort_tx(&self, tx: &mut Transaction) {
|
pub fn sort_tx(&self, tx: &mut Transaction) {
|
||||||
|
self.sort_tx_with_aux_rand(tx, &mut bitcoin::key::rand::thread_rng())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Sort transaction inputs and outputs by [`TxOrdering`] variant.
|
||||||
|
///
|
||||||
|
/// Uses a provided random number generator (rng).
|
||||||
|
pub fn sort_tx_with_aux_rand(&self, tx: &mut Transaction, rng: &mut impl RngCore) {
|
||||||
match self {
|
match self {
|
||||||
TxOrdering::Untouched => {}
|
TxOrdering::Untouched => {}
|
||||||
TxOrdering::Shuffle => {
|
TxOrdering::Shuffle => {
|
||||||
use rand::seq::SliceRandom;
|
shuffle_slice(&mut tx.input, rng);
|
||||||
let mut rng = rand::thread_rng();
|
shuffle_slice(&mut tx.output, rng);
|
||||||
tx.input.shuffle(&mut rng);
|
|
||||||
tx.output.shuffle(&mut rng);
|
|
||||||
}
|
}
|
||||||
TxOrdering::Bip69Lexicographic => {
|
TxOrdering::Custom {
|
||||||
tx.input.sort_unstable_by_key(|txin| {
|
input_sort,
|
||||||
(txin.previous_output.txid, txin.previous_output.vout)
|
output_sort,
|
||||||
});
|
} => {
|
||||||
tx.output
|
tx.input.sort_unstable_by(|a, b| input_sort(a, b));
|
||||||
.sort_unstable_by_key(|txout| (txout.value, txout.script_pubkey.clone()));
|
tx.output.sort_unstable_by(|a, b| output_sort(a, b));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -932,12 +898,6 @@ mod test {
|
|||||||
use bitcoin::TxOut;
|
use bitcoin::TxOut;
|
||||||
|
|
||||||
use super::*;
|
use super::*;
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_output_ordering_default_shuffle() {
|
|
||||||
assert_eq!(TxOrdering::default(), TxOrdering::Shuffle);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_output_ordering_untouched() {
|
fn test_output_ordering_untouched() {
|
||||||
let original_tx = ordering_test_tx!();
|
let original_tx = ordering_test_tx!();
|
||||||
@@ -970,13 +930,28 @@ mod test {
|
|||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_output_ordering_bip69() {
|
fn test_output_ordering_custom_but_bip69() {
|
||||||
use core::str::FromStr;
|
use core::str::FromStr;
|
||||||
|
|
||||||
let original_tx = ordering_test_tx!();
|
let original_tx = ordering_test_tx!();
|
||||||
let mut tx = original_tx;
|
let mut tx = original_tx;
|
||||||
|
|
||||||
TxOrdering::Bip69Lexicographic.sort_tx(&mut tx);
|
let bip69_txin_cmp = |tx_a: &TxIn, tx_b: &TxIn| {
|
||||||
|
let project_outpoint = |t: &TxIn| (t.previous_output.txid, t.previous_output.vout);
|
||||||
|
project_outpoint(tx_a).cmp(&project_outpoint(tx_b))
|
||||||
|
};
|
||||||
|
|
||||||
|
let bip69_txout_cmp = |tx_a: &TxOut, tx_b: &TxOut| {
|
||||||
|
let project_utxo = |t: &TxOut| (t.value, t.script_pubkey.clone());
|
||||||
|
project_utxo(tx_a).cmp(&project_utxo(tx_b))
|
||||||
|
};
|
||||||
|
|
||||||
|
let custom_bip69_ordering = TxOrdering::Custom {
|
||||||
|
input_sort: Arc::new(bip69_txin_cmp),
|
||||||
|
output_sort: Arc::new(bip69_txout_cmp),
|
||||||
|
};
|
||||||
|
|
||||||
|
custom_bip69_ordering.sort_tx(&mut tx);
|
||||||
|
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
tx.input[0].previous_output,
|
tx.input[0].previous_output,
|
||||||
@@ -1008,6 +983,63 @@ mod test {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_output_ordering_custom_with_sha256() {
|
||||||
|
use bitcoin::hashes::{sha256, Hash};
|
||||||
|
|
||||||
|
let original_tx = ordering_test_tx!();
|
||||||
|
let mut tx_1 = original_tx.clone();
|
||||||
|
let mut tx_2 = original_tx.clone();
|
||||||
|
let shared_secret = "secret_tweak";
|
||||||
|
|
||||||
|
let hash_txin_with_shared_secret_seed = Arc::new(|tx_a: &TxIn, tx_b: &TxIn| {
|
||||||
|
let secret_digest_from_txin = |txin: &TxIn| {
|
||||||
|
sha256::Hash::hash(
|
||||||
|
&[
|
||||||
|
&txin.previous_output.txid.to_raw_hash()[..],
|
||||||
|
&txin.previous_output.vout.to_be_bytes(),
|
||||||
|
shared_secret.as_bytes(),
|
||||||
|
]
|
||||||
|
.concat(),
|
||||||
|
)
|
||||||
|
};
|
||||||
|
secret_digest_from_txin(tx_a).cmp(&secret_digest_from_txin(tx_b))
|
||||||
|
});
|
||||||
|
|
||||||
|
let hash_txout_with_shared_secret_seed = Arc::new(|tx_a: &TxOut, tx_b: &TxOut| {
|
||||||
|
let secret_digest_from_txout = |txin: &TxOut| {
|
||||||
|
sha256::Hash::hash(
|
||||||
|
&[
|
||||||
|
&txin.value.to_sat().to_be_bytes(),
|
||||||
|
&txin.script_pubkey.clone().into_bytes()[..],
|
||||||
|
shared_secret.as_bytes(),
|
||||||
|
]
|
||||||
|
.concat(),
|
||||||
|
)
|
||||||
|
};
|
||||||
|
secret_digest_from_txout(tx_a).cmp(&secret_digest_from_txout(tx_b))
|
||||||
|
});
|
||||||
|
|
||||||
|
let custom_ordering_from_salted_sha256_1 = TxOrdering::Custom {
|
||||||
|
input_sort: hash_txin_with_shared_secret_seed.clone(),
|
||||||
|
output_sort: hash_txout_with_shared_secret_seed.clone(),
|
||||||
|
};
|
||||||
|
|
||||||
|
let custom_ordering_from_salted_sha256_2 = TxOrdering::Custom {
|
||||||
|
input_sort: hash_txin_with_shared_secret_seed,
|
||||||
|
output_sort: hash_txout_with_shared_secret_seed,
|
||||||
|
};
|
||||||
|
|
||||||
|
custom_ordering_from_salted_sha256_1.sort_tx(&mut tx_1);
|
||||||
|
custom_ordering_from_salted_sha256_2.sort_tx(&mut tx_2);
|
||||||
|
|
||||||
|
// Check the ordering is consistent between calls
|
||||||
|
assert_eq!(tx_1, tx_2);
|
||||||
|
// Check transaction order has changed
|
||||||
|
assert_ne!(tx_1, original_tx);
|
||||||
|
assert_ne!(tx_2, original_tx);
|
||||||
|
}
|
||||||
|
|
||||||
fn get_test_utxos() -> Vec<LocalOutput> {
|
fn get_test_utxos() -> Vec<LocalOutput> {
|
||||||
use bitcoin::hashes::Hash;
|
use bitcoin::hashes::Hash;
|
||||||
|
|
||||||
@@ -10,10 +10,12 @@
|
|||||||
// licenses.
|
// licenses.
|
||||||
|
|
||||||
use bitcoin::secp256k1::{All, Secp256k1};
|
use bitcoin::secp256k1::{All, Secp256k1};
|
||||||
use bitcoin::{absolute, Script, Sequence};
|
use bitcoin::{absolute, relative, Script, Sequence};
|
||||||
|
|
||||||
use miniscript::{MiniscriptKey, Satisfier, ToPublicKey};
|
use miniscript::{MiniscriptKey, Satisfier, ToPublicKey};
|
||||||
|
|
||||||
|
use rand_core::RngCore;
|
||||||
|
|
||||||
/// Trait to check if a value is below the dust limit.
|
/// Trait to check if a value is below the dust limit.
|
||||||
/// We are performing dust value calculation for a given script public key using rust-bitcoin to
|
/// We are performing dust value calculation for a given script public key using rust-bitcoin to
|
||||||
/// keep it compatible with network dust rate
|
/// keep it compatible with network dust rate
|
||||||
@@ -26,7 +28,7 @@ pub trait IsDust {
|
|||||||
|
|
||||||
impl IsDust for u64 {
|
impl IsDust for u64 {
|
||||||
fn is_dust(&self, script: &Script) -> bool {
|
fn is_dust(&self, script: &Script) -> bool {
|
||||||
*self < script.dust_value().to_sat()
|
*self < script.minimal_non_dust().to_sat()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -95,7 +97,7 @@ impl Older {
|
|||||||
}
|
}
|
||||||
|
|
||||||
impl<Pk: MiniscriptKey + ToPublicKey> Satisfier<Pk> for Older {
|
impl<Pk: MiniscriptKey + ToPublicKey> Satisfier<Pk> for Older {
|
||||||
fn check_older(&self, n: Sequence) -> bool {
|
fn check_older(&self, n: relative::LockTime) -> bool {
|
||||||
if let Some(current_height) = self.current_height {
|
if let Some(current_height) = self.current_height {
|
||||||
// TODO: test >= / >
|
// TODO: test >= / >
|
||||||
current_height
|
current_height
|
||||||
@@ -110,6 +112,19 @@ impl<Pk: MiniscriptKey + ToPublicKey> Satisfier<Pk> for Older {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// The Knuth shuffling algorithm based on the original [Fisher-Yates method](https://en.wikipedia.org/wiki/Fisher%E2%80%93Yates_shuffle)
|
||||||
|
pub(crate) fn shuffle_slice<T>(list: &mut [T], rng: &mut impl RngCore) {
|
||||||
|
if list.is_empty() {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
let mut current_index = list.len() - 1;
|
||||||
|
while current_index > 0 {
|
||||||
|
let random_index = rng.next_u32() as usize % (current_index + 1);
|
||||||
|
list.swap(current_index, random_index);
|
||||||
|
current_index -= 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
pub(crate) type SecpCtx = Secp256k1<All>;
|
pub(crate) type SecpCtx = Secp256k1<All>;
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
@@ -118,9 +133,11 @@ mod test {
|
|||||||
// otherwise it's time-based
|
// otherwise it's time-based
|
||||||
pub(crate) const SEQUENCE_LOCKTIME_TYPE_FLAG: u32 = 1 << 22;
|
pub(crate) const SEQUENCE_LOCKTIME_TYPE_FLAG: u32 = 1 << 22;
|
||||||
|
|
||||||
use super::{check_nsequence_rbf, IsDust};
|
use super::{check_nsequence_rbf, shuffle_slice, IsDust};
|
||||||
use crate::bitcoin::{Address, Network, Sequence};
|
use crate::bitcoin::{Address, Network, Sequence};
|
||||||
|
use alloc::vec::Vec;
|
||||||
use core::str::FromStr;
|
use core::str::FromStr;
|
||||||
|
use rand::{rngs::StdRng, thread_rng, SeedableRng};
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_is_dust() {
|
fn test_is_dust() {
|
||||||
@@ -182,4 +199,46 @@ mod test {
|
|||||||
);
|
);
|
||||||
assert!(result);
|
assert!(result);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
#[cfg(feature = "std")]
|
||||||
|
fn test_shuffle_slice_empty_vec() {
|
||||||
|
let mut test: Vec<u8> = vec![];
|
||||||
|
shuffle_slice(&mut test, &mut thread_rng());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
#[cfg(feature = "std")]
|
||||||
|
fn test_shuffle_slice_single_vec() {
|
||||||
|
let mut test: Vec<u8> = vec![0];
|
||||||
|
shuffle_slice(&mut test, &mut thread_rng());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_shuffle_slice_duple_vec() {
|
||||||
|
let seed = [0; 32];
|
||||||
|
let mut rng: StdRng = SeedableRng::from_seed(seed);
|
||||||
|
let mut test: Vec<u8> = vec![0, 1];
|
||||||
|
shuffle_slice(&mut test, &mut rng);
|
||||||
|
assert_eq!(test, &[0, 1]);
|
||||||
|
let seed = [6; 32];
|
||||||
|
let mut rng: StdRng = SeedableRng::from_seed(seed);
|
||||||
|
let mut test: Vec<u8> = vec![0, 1];
|
||||||
|
shuffle_slice(&mut test, &mut rng);
|
||||||
|
assert_eq!(test, &[1, 0]);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_shuffle_slice_multi_vec() {
|
||||||
|
let seed = [0; 32];
|
||||||
|
let mut rng: StdRng = SeedableRng::from_seed(seed);
|
||||||
|
let mut test: Vec<u8> = vec![0, 1, 2, 4, 5];
|
||||||
|
shuffle_slice(&mut test, &mut rng);
|
||||||
|
assert_eq!(test, &[2, 1, 0, 4, 5]);
|
||||||
|
let seed = [25; 32];
|
||||||
|
let mut rng: StdRng = SeedableRng::from_seed(seed);
|
||||||
|
let mut test: Vec<u8> = vec![0, 1, 2, 4, 5];
|
||||||
|
shuffle_slice(&mut test, &mut rng);
|
||||||
|
assert_eq!(test, &[0, 4, 1, 2, 5]);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
@@ -1,26 +1,24 @@
|
|||||||
#![allow(unused)]
|
#![allow(unused)]
|
||||||
|
use bdk_chain::{BlockId, ConfirmationBlockTime, ConfirmationTime, TxGraph};
|
||||||
use bdk::{KeychainKind, LocalOutput, Wallet};
|
use bdk_wallet::{CreateParams, KeychainKind, LocalOutput, Update, Wallet};
|
||||||
use bdk_chain::indexed_tx_graph::Indexer;
|
|
||||||
use bdk_chain::{BlockId, ConfirmationTime};
|
|
||||||
use bitcoin::hashes::Hash;
|
|
||||||
use bitcoin::{
|
use bitcoin::{
|
||||||
transaction, Address, Amount, BlockHash, FeeRate, Network, OutPoint, Transaction, TxIn, TxOut,
|
hashes::Hash, transaction, Address, Amount, BlockHash, FeeRate, Network, OutPoint, Transaction,
|
||||||
Txid,
|
TxIn, TxOut, Txid,
|
||||||
};
|
};
|
||||||
use std::str::FromStr;
|
use std::str::FromStr;
|
||||||
|
|
||||||
// Return a fake wallet that appears to be funded for testing.
|
/// Return a fake wallet that appears to be funded for testing.
|
||||||
//
|
///
|
||||||
// The funded wallet containing a tx with a 76_000 sats input and two outputs, one spending 25_000
|
/// The funded wallet contains a tx with a 76_000 sats input and two outputs, one spending 25_000
|
||||||
// to a foreign address and one returning 50_000 back to the wallet as change. The remaining 1000
|
/// to a foreign address and one returning 50_000 back to the wallet. The remaining 1000
|
||||||
// sats are the transaction fee.
|
/// sats are the transaction fee.
|
||||||
pub fn get_funded_wallet_with_change(
|
pub fn get_funded_wallet_with_change(descriptor: &str, change: &str) -> (Wallet, bitcoin::Txid) {
|
||||||
descriptor: &str,
|
let mut wallet = Wallet::create(descriptor.to_string(), change.to_string())
|
||||||
change: Option<&str>,
|
.network(Network::Regtest)
|
||||||
) -> (Wallet, bitcoin::Txid) {
|
.create_wallet_no_persist()
|
||||||
let mut wallet = Wallet::new_no_persist(descriptor, change, Network::Regtest).unwrap();
|
.expect("descriptors must be valid");
|
||||||
let change_address = wallet.peek_address(KeychainKind::External, 0).address;
|
|
||||||
|
let receive_address = wallet.peek_address(KeychainKind::External, 0).address;
|
||||||
let sendto_address = Address::from_str("bcrt1q3qtze4ys45tgdvguj66zrk4fu6hq3a3v9pfly5")
|
let sendto_address = Address::from_str("bcrt1q3qtze4ys45tgdvguj66zrk4fu6hq3a3v9pfly5")
|
||||||
.expect("address")
|
.expect("address")
|
||||||
.require_network(Network::Regtest)
|
.require_network(Network::Regtest)
|
||||||
@@ -40,7 +38,7 @@ pub fn get_funded_wallet_with_change(
|
|||||||
}],
|
}],
|
||||||
output: vec![TxOut {
|
output: vec![TxOut {
|
||||||
value: Amount::from_sat(76_000),
|
value: Amount::from_sat(76_000),
|
||||||
script_pubkey: change_address.script_pubkey(),
|
script_pubkey: receive_address.script_pubkey(),
|
||||||
}],
|
}],
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -49,7 +47,7 @@ pub fn get_funded_wallet_with_change(
|
|||||||
lock_time: bitcoin::absolute::LockTime::ZERO,
|
lock_time: bitcoin::absolute::LockTime::ZERO,
|
||||||
input: vec![TxIn {
|
input: vec![TxIn {
|
||||||
previous_output: OutPoint {
|
previous_output: OutPoint {
|
||||||
txid: tx0.txid(),
|
txid: tx0.compute_txid(),
|
||||||
vout: 0,
|
vout: 0,
|
||||||
},
|
},
|
||||||
script_sig: Default::default(),
|
script_sig: Default::default(),
|
||||||
@@ -59,7 +57,7 @@ pub fn get_funded_wallet_with_change(
|
|||||||
output: vec![
|
output: vec![
|
||||||
TxOut {
|
TxOut {
|
||||||
value: Amount::from_sat(50_000),
|
value: Amount::from_sat(50_000),
|
||||||
script_pubkey: change_address.script_pubkey(),
|
script_pubkey: receive_address.script_pubkey(),
|
||||||
},
|
},
|
||||||
TxOut {
|
TxOut {
|
||||||
value: Amount::from_sat(25_000),
|
value: Amount::from_sat(25_000),
|
||||||
@@ -68,6 +66,12 @@ pub fn get_funded_wallet_with_change(
|
|||||||
],
|
],
|
||||||
};
|
};
|
||||||
|
|
||||||
|
wallet
|
||||||
|
.insert_checkpoint(BlockId {
|
||||||
|
height: 42,
|
||||||
|
hash: BlockHash::all_zeros(),
|
||||||
|
})
|
||||||
|
.unwrap();
|
||||||
wallet
|
wallet
|
||||||
.insert_checkpoint(BlockId {
|
.insert_checkpoint(BlockId {
|
||||||
height: 1_000,
|
height: 1_000,
|
||||||
@@ -80,41 +84,63 @@ pub fn get_funded_wallet_with_change(
|
|||||||
hash: BlockHash::all_zeros(),
|
hash: BlockHash::all_zeros(),
|
||||||
})
|
})
|
||||||
.unwrap();
|
.unwrap();
|
||||||
wallet
|
|
||||||
.insert_tx(
|
|
||||||
tx0,
|
|
||||||
ConfirmationTime::Confirmed {
|
|
||||||
height: 1_000,
|
|
||||||
time: 100,
|
|
||||||
},
|
|
||||||
)
|
|
||||||
.unwrap();
|
|
||||||
wallet
|
|
||||||
.insert_tx(
|
|
||||||
tx1.clone(),
|
|
||||||
ConfirmationTime::Confirmed {
|
|
||||||
height: 2_000,
|
|
||||||
time: 200,
|
|
||||||
},
|
|
||||||
)
|
|
||||||
.unwrap();
|
|
||||||
|
|
||||||
(wallet, tx1.txid())
|
wallet.insert_tx(tx0.clone());
|
||||||
|
insert_anchor_from_conf(
|
||||||
|
&mut wallet,
|
||||||
|
tx0.compute_txid(),
|
||||||
|
ConfirmationTime::Confirmed {
|
||||||
|
height: 1_000,
|
||||||
|
time: 100,
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
wallet.insert_tx(tx1.clone());
|
||||||
|
insert_anchor_from_conf(
|
||||||
|
&mut wallet,
|
||||||
|
tx1.compute_txid(),
|
||||||
|
ConfirmationTime::Confirmed {
|
||||||
|
height: 2_000,
|
||||||
|
time: 200,
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
(wallet, tx1.compute_txid())
|
||||||
}
|
}
|
||||||
|
|
||||||
// Return a fake wallet that appears to be funded for testing.
|
/// Return a fake wallet that appears to be funded for testing.
|
||||||
//
|
///
|
||||||
// The funded wallet containing a tx with a 76_000 sats input and two outputs, one spending 25_000
|
/// The funded wallet contains a tx with a 76_000 sats input and two outputs, one spending 25_000
|
||||||
// to a foreign address and one returning 50_000 back to the wallet as change. The remaining 1000
|
/// to a foreign address and one returning 50_000 back to the wallet. The remaining 1000
|
||||||
// sats are the transaction fee.
|
/// sats are the transaction fee.
|
||||||
|
///
|
||||||
|
/// Note: the change descriptor will have script type `p2wpkh`. If passing some other script type
|
||||||
|
/// as argument, make sure you're ok with getting a wallet where the keychains have potentially
|
||||||
|
/// different script types. Otherwise, use `get_funded_wallet_with_change`.
|
||||||
pub fn get_funded_wallet(descriptor: &str) -> (Wallet, bitcoin::Txid) {
|
pub fn get_funded_wallet(descriptor: &str) -> (Wallet, bitcoin::Txid) {
|
||||||
get_funded_wallet_with_change(descriptor, None)
|
let change = get_test_wpkh_change();
|
||||||
|
get_funded_wallet_with_change(descriptor, change)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_funded_wallet_wpkh() -> (Wallet, bitcoin::Txid) {
|
||||||
|
get_funded_wallet_with_change(get_test_wpkh(), get_test_wpkh_change())
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn get_test_wpkh() -> &'static str {
|
pub fn get_test_wpkh() -> &'static str {
|
||||||
"wpkh(cVpPVruEDdmutPzisEsYvtST1usBR3ntr8pXSyt6D2YYqXRyPcFW)"
|
"wpkh(cVpPVruEDdmutPzisEsYvtST1usBR3ntr8pXSyt6D2YYqXRyPcFW)"
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn get_test_wpkh_with_change_desc() -> (&'static str, &'static str) {
|
||||||
|
(
|
||||||
|
"wpkh(cVpPVruEDdmutPzisEsYvtST1usBR3ntr8pXSyt6D2YYqXRyPcFW)",
|
||||||
|
get_test_wpkh_change(),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_test_wpkh_change() -> &'static str {
|
||||||
|
"wpkh(tprv8ZgxMBicQKsPdy6LMhUtFHAgpocR8GC6QmwMSFpZs7h6Eziw3SpThFfczTDh5rW2krkqffa11UpX3XkeTTB2FvzZKWXqPY54Y6Rq4AQ5R8L/84'/1'/0'/1/0)"
|
||||||
|
}
|
||||||
|
|
||||||
pub fn get_test_single_sig_csv() -> &'static str {
|
pub fn get_test_single_sig_csv() -> &'static str {
|
||||||
// and(pk(Alice),older(6))
|
// and(pk(Alice),older(6))
|
||||||
"wsh(and_v(v:pk(cVpPVruEDdmutPzisEsYvtST1usBR3ntr8pXSyt6D2YYqXRyPcFW),older(6)))"
|
"wsh(and_v(v:pk(cVpPVruEDdmutPzisEsYvtST1usBR3ntr8pXSyt6D2YYqXRyPcFW),older(6)))"
|
||||||
@@ -150,6 +176,11 @@ pub fn get_test_tr_single_sig_xprv() -> &'static str {
|
|||||||
"tr(tprv8ZgxMBicQKsPdDArR4xSAECuVxeX1jwwSXR4ApKbkYgZiziDc4LdBy2WvJeGDfUSE4UT4hHhbgEwbdq8ajjUHiKDegkwrNU6V55CxcxonVN/*)"
|
"tr(tprv8ZgxMBicQKsPdDArR4xSAECuVxeX1jwwSXR4ApKbkYgZiziDc4LdBy2WvJeGDfUSE4UT4hHhbgEwbdq8ajjUHiKDegkwrNU6V55CxcxonVN/*)"
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn get_test_tr_single_sig_xprv_with_change_desc() -> (&'static str, &'static str) {
|
||||||
|
("tr(tprv8ZgxMBicQKsPdDArR4xSAECuVxeX1jwwSXR4ApKbkYgZiziDc4LdBy2WvJeGDfUSE4UT4hHhbgEwbdq8ajjUHiKDegkwrNU6V55CxcxonVN/0/*)",
|
||||||
|
"tr(tprv8ZgxMBicQKsPdDArR4xSAECuVxeX1jwwSXR4ApKbkYgZiziDc4LdBy2WvJeGDfUSE4UT4hHhbgEwbdq8ajjUHiKDegkwrNU6V55CxcxonVN/1/*)")
|
||||||
|
}
|
||||||
|
|
||||||
pub fn get_test_tr_with_taptree_xprv() -> &'static str {
|
pub fn get_test_tr_with_taptree_xprv() -> &'static str {
|
||||||
"tr(cNJmN3fH9DDbDt131fQNkVakkpzawJBSeybCUNmP1BovpmGQ45xG,{pk(tprv8ZgxMBicQKsPdDArR4xSAECuVxeX1jwwSXR4ApKbkYgZiziDc4LdBy2WvJeGDfUSE4UT4hHhbgEwbdq8ajjUHiKDegkwrNU6V55CxcxonVN/*),pk(8aee2b8120a5f157f1223f72b5e62b825831a27a9fdf427db7cc697494d4a642)})"
|
"tr(cNJmN3fH9DDbDt131fQNkVakkpzawJBSeybCUNmP1BovpmGQ45xG,{pk(tprv8ZgxMBicQKsPdDArR4xSAECuVxeX1jwwSXR4ApKbkYgZiziDc4LdBy2WvJeGDfUSE4UT4hHhbgEwbdq8ajjUHiKDegkwrNU6V55CxcxonVN/*),pk(8aee2b8120a5f157f1223f72b5e62b825831a27a9fdf427db7cc697494d4a642)})"
|
||||||
}
|
}
|
||||||
@@ -170,3 +201,30 @@ pub fn feerate_unchecked(sat_vb: f64) -> FeeRate {
|
|||||||
let sat_kwu = (sat_vb * 250.0).ceil() as u64;
|
let sat_kwu = (sat_vb * 250.0).ceil() as u64;
|
||||||
FeeRate::from_sat_per_kwu(sat_kwu)
|
FeeRate::from_sat_per_kwu(sat_kwu)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Simulates confirming a tx with `txid` at the specified `position` by inserting an anchor
|
||||||
|
/// at the lowest height in local chain that is greater or equal to `position`'s height,
|
||||||
|
/// assuming the confirmation time matches `ConfirmationTime::Confirmed`.
|
||||||
|
pub fn insert_anchor_from_conf(wallet: &mut Wallet, txid: Txid, position: ConfirmationTime) {
|
||||||
|
if let ConfirmationTime::Confirmed { height, time } = position {
|
||||||
|
// anchor tx to checkpoint with lowest height that is >= position's height
|
||||||
|
let anchor = wallet
|
||||||
|
.local_chain()
|
||||||
|
.range(height..)
|
||||||
|
.last()
|
||||||
|
.map(|anchor_cp| ConfirmationBlockTime {
|
||||||
|
block_id: anchor_cp.block_id(),
|
||||||
|
confirmation_time: time,
|
||||||
|
})
|
||||||
|
.expect("confirmation height cannot be greater than tip");
|
||||||
|
|
||||||
|
let mut graph = TxGraph::default();
|
||||||
|
let _ = graph.insert_anchor(txid, anchor);
|
||||||
|
wallet
|
||||||
|
.apply_update(Update {
|
||||||
|
graph,
|
||||||
|
..Default::default()
|
||||||
|
})
|
||||||
|
.unwrap();
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,5 +1,5 @@
|
|||||||
use bdk::bitcoin::{Amount, FeeRate, Psbt, TxIn};
|
use bdk_wallet::bitcoin::{Amount, FeeRate, Psbt, TxIn};
|
||||||
use bdk::{psbt, KeychainKind, SignOptions};
|
use bdk_wallet::{psbt, KeychainKind, SignOptions};
|
||||||
use core::str::FromStr;
|
use core::str::FromStr;
|
||||||
mod common;
|
mod common;
|
||||||
use common::*;
|
use common::*;
|
||||||
@@ -142,7 +142,9 @@ fn test_psbt_fee_rate_with_missing_txout() {
|
|||||||
assert!(wpkh_psbt.fee_amount().is_none());
|
assert!(wpkh_psbt.fee_amount().is_none());
|
||||||
assert!(wpkh_psbt.fee_rate().is_none());
|
assert!(wpkh_psbt.fee_rate().is_none());
|
||||||
|
|
||||||
let (mut pkh_wallet, _) = get_funded_wallet("pkh(tprv8ZgxMBicQKsPd3EupYiPRhaMooHKUHJxNsTfYuScep13go8QFfHdtkG9nRkFGb7busX4isf6X9dURGCoKgitaApQ6MupRhZMcELAxTBRJgS/*)");
|
let desc = "pkh(tprv8ZgxMBicQKsPd3EupYiPRhaMooHKUHJxNsTfYuScep13go8QFfHdtkG9nRkFGb7busX4isf6X9dURGCoKgitaApQ6MupRhZMcELAxTBRJgS/0)";
|
||||||
|
let change_desc = "pkh(tprv8ZgxMBicQKsPd3EupYiPRhaMooHKUHJxNsTfYuScep13go8QFfHdtkG9nRkFGb7busX4isf6X9dURGCoKgitaApQ6MupRhZMcELAxTBRJgS/1)";
|
||||||
|
let (mut pkh_wallet, _) = get_funded_wallet_with_change(desc, change_desc);
|
||||||
let addr = pkh_wallet.peek_address(KeychainKind::External, 0);
|
let addr = pkh_wallet.peek_address(KeychainKind::External, 0);
|
||||||
let mut builder = pkh_wallet.build_tx();
|
let mut builder = pkh_wallet.build_tx();
|
||||||
builder.drain_to(addr.script_pubkey()).drain_wallet();
|
builder.drain_to(addr.script_pubkey()).drain_wallet();
|
||||||
@@ -156,8 +158,8 @@ fn test_psbt_fee_rate_with_missing_txout() {
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_psbt_multiple_internalkey_signers() {
|
fn test_psbt_multiple_internalkey_signers() {
|
||||||
use bdk::signer::{SignerContext, SignerOrdering, SignerWrapper};
|
use bdk_wallet::signer::{SignerContext, SignerOrdering, SignerWrapper};
|
||||||
use bdk::KeychainKind;
|
use bdk_wallet::KeychainKind;
|
||||||
use bitcoin::key::TapTweak;
|
use bitcoin::key::TapTweak;
|
||||||
use bitcoin::secp256k1::{schnorr, Keypair, Message, Secp256k1, XOnlyPublicKey};
|
use bitcoin::secp256k1::{schnorr, Keypair, Message, Secp256k1, XOnlyPublicKey};
|
||||||
use bitcoin::sighash::{Prevouts, SighashCache, TapSighashType};
|
use bitcoin::sighash::{Prevouts, SighashCache, TapSighashType};
|
||||||
@@ -170,8 +172,9 @@ fn test_psbt_multiple_internalkey_signers() {
|
|||||||
let prv = PrivateKey::from_wif(wif).unwrap();
|
let prv = PrivateKey::from_wif(wif).unwrap();
|
||||||
let keypair = Keypair::from_secret_key(&secp, &prv.inner);
|
let keypair = Keypair::from_secret_key(&secp, &prv.inner);
|
||||||
|
|
||||||
let (mut wallet, _) = get_funded_wallet(&desc);
|
let change_desc = "tr(cVpPVruEDdmutPzisEsYvtST1usBR3ntr8pXSyt6D2YYqXRyPcFW)";
|
||||||
let to_spend = wallet.get_balance().total();
|
let (mut wallet, _) = get_funded_wallet_with_change(&desc, change_desc);
|
||||||
|
let to_spend = wallet.balance().total();
|
||||||
let send_to = wallet.peek_address(KeychainKind::External, 0);
|
let send_to = wallet.peek_address(KeychainKind::External, 0);
|
||||||
let mut builder = wallet.build_tx();
|
let mut builder = wallet.build_tx();
|
||||||
builder.drain_to(send_to.script_pubkey()).drain_wallet();
|
builder.drain_to(send_to.script_pubkey()).drain_wallet();
|
||||||
File diff suppressed because it is too large
Load Diff
@@ -13,9 +13,10 @@ use bdk_bitcoind_rpc::{
|
|||||||
};
|
};
|
||||||
use bdk_chain::{
|
use bdk_chain::{
|
||||||
bitcoin::{constants::genesis_block, Block, Transaction},
|
bitcoin::{constants::genesis_block, Block, Transaction},
|
||||||
indexed_tx_graph, keychain,
|
indexed_tx_graph,
|
||||||
|
indexer::keychain_txout,
|
||||||
local_chain::{self, LocalChain},
|
local_chain::{self, LocalChain},
|
||||||
ConfirmationTimeHeightAnchor, IndexedTxGraph,
|
ConfirmationBlockTime, IndexedTxGraph, Merge,
|
||||||
};
|
};
|
||||||
use example_cli::{
|
use example_cli::{
|
||||||
anyhow,
|
anyhow,
|
||||||
@@ -37,7 +38,7 @@ const DB_COMMIT_DELAY: Duration = Duration::from_secs(60);
|
|||||||
|
|
||||||
type ChangeSet = (
|
type ChangeSet = (
|
||||||
local_chain::ChangeSet,
|
local_chain::ChangeSet,
|
||||||
indexed_tx_graph::ChangeSet<ConfirmationTimeHeightAnchor, keychain::ChangeSet<Keychain>>,
|
indexed_tx_graph::ChangeSet<ConfirmationBlockTime, keychain_txout::ChangeSet>,
|
||||||
);
|
);
|
||||||
|
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
@@ -137,8 +138,7 @@ fn main() -> anyhow::Result<()> {
|
|||||||
let genesis_hash = genesis_block(args.network).block_hash();
|
let genesis_hash = genesis_block(args.network).block_hash();
|
||||||
let (chain, chain_changeset) = LocalChain::from_genesis_hash(genesis_hash);
|
let (chain, chain_changeset) = LocalChain::from_genesis_hash(genesis_hash);
|
||||||
let mut db = db.lock().unwrap();
|
let mut db = db.lock().unwrap();
|
||||||
db.stage((chain_changeset, Default::default()));
|
db.append_changeset(&(chain_changeset, Default::default()))?;
|
||||||
db.commit()?;
|
|
||||||
chain
|
chain
|
||||||
} else {
|
} else {
|
||||||
LocalChain::from_changeset(init_chain_changeset)?
|
LocalChain::from_changeset(init_chain_changeset)?
|
||||||
@@ -176,6 +176,7 @@ fn main() -> anyhow::Result<()> {
|
|||||||
let chain_tip = chain.lock().unwrap().tip();
|
let chain_tip = chain.lock().unwrap().tip();
|
||||||
let rpc_client = rpc_args.new_client()?;
|
let rpc_client = rpc_args.new_client()?;
|
||||||
let mut emitter = Emitter::new(&rpc_client, chain_tip, fallback_height);
|
let mut emitter = Emitter::new(&rpc_client, chain_tip, fallback_height);
|
||||||
|
let mut db_stage = ChangeSet::default();
|
||||||
|
|
||||||
let mut last_db_commit = Instant::now();
|
let mut last_db_commit = Instant::now();
|
||||||
let mut last_print = Instant::now();
|
let mut last_print = Instant::now();
|
||||||
@@ -185,18 +186,20 @@ fn main() -> anyhow::Result<()> {
|
|||||||
|
|
||||||
let mut chain = chain.lock().unwrap();
|
let mut chain = chain.lock().unwrap();
|
||||||
let mut graph = graph.lock().unwrap();
|
let mut graph = graph.lock().unwrap();
|
||||||
let mut db = db.lock().unwrap();
|
|
||||||
|
|
||||||
let chain_changeset = chain
|
let chain_changeset = chain
|
||||||
.apply_update(emission.checkpoint)
|
.apply_update(emission.checkpoint)
|
||||||
.expect("must always apply as we receive blocks in order from emitter");
|
.expect("must always apply as we receive blocks in order from emitter");
|
||||||
let graph_changeset = graph.apply_block_relevant(&emission.block, height);
|
let graph_changeset = graph.apply_block_relevant(&emission.block, height);
|
||||||
db.stage((chain_changeset, graph_changeset));
|
db_stage.merge((chain_changeset, graph_changeset));
|
||||||
|
|
||||||
// commit staged db changes in intervals
|
// commit staged db changes in intervals
|
||||||
if last_db_commit.elapsed() >= DB_COMMIT_DELAY {
|
if last_db_commit.elapsed() >= DB_COMMIT_DELAY {
|
||||||
|
let db = &mut *db.lock().unwrap();
|
||||||
last_db_commit = Instant::now();
|
last_db_commit = Instant::now();
|
||||||
db.commit()?;
|
if let Some(changeset) = db_stage.take() {
|
||||||
|
db.append_changeset(&changeset)?;
|
||||||
|
}
|
||||||
println!(
|
println!(
|
||||||
"[{:>10}s] committed to db (took {}s)",
|
"[{:>10}s] committed to db (took {}s)",
|
||||||
start.elapsed().as_secs_f32(),
|
start.elapsed().as_secs_f32(),
|
||||||
@@ -212,7 +215,7 @@ fn main() -> anyhow::Result<()> {
|
|||||||
graph.graph().balance(
|
graph.graph().balance(
|
||||||
&*chain,
|
&*chain,
|
||||||
synced_to.block_id(),
|
synced_to.block_id(),
|
||||||
graph.index.outpoints(),
|
graph.index.outpoints().iter().cloned(),
|
||||||
|(k, _), _| k == &Keychain::Internal,
|
|(k, _), _| k == &Keychain::Internal,
|
||||||
)
|
)
|
||||||
};
|
};
|
||||||
@@ -231,9 +234,11 @@ fn main() -> anyhow::Result<()> {
|
|||||||
mempool_txs.iter().map(|(tx, time)| (tx, *time)),
|
mempool_txs.iter().map(|(tx, time)| (tx, *time)),
|
||||||
);
|
);
|
||||||
{
|
{
|
||||||
let mut db = db.lock().unwrap();
|
let db = &mut *db.lock().unwrap();
|
||||||
db.stage((local_chain::ChangeSet::default(), graph_changeset));
|
db_stage.merge((local_chain::ChangeSet::default(), graph_changeset));
|
||||||
db.commit()?; // commit one last time
|
if let Some(changeset) = db_stage.take() {
|
||||||
|
db.append_changeset(&changeset)?;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
RpcCommands::Live { rpc_args } => {
|
RpcCommands::Live { rpc_args } => {
|
||||||
@@ -289,9 +294,9 @@ fn main() -> anyhow::Result<()> {
|
|||||||
let mut tip_height = 0_u32;
|
let mut tip_height = 0_u32;
|
||||||
let mut last_db_commit = Instant::now();
|
let mut last_db_commit = Instant::now();
|
||||||
let mut last_print = Option::<Instant>::None;
|
let mut last_print = Option::<Instant>::None;
|
||||||
|
let mut db_stage = ChangeSet::default();
|
||||||
|
|
||||||
for emission in rx {
|
for emission in rx {
|
||||||
let mut db = db.lock().unwrap();
|
|
||||||
let mut graph = graph.lock().unwrap();
|
let mut graph = graph.lock().unwrap();
|
||||||
let mut chain = chain.lock().unwrap();
|
let mut chain = chain.lock().unwrap();
|
||||||
|
|
||||||
@@ -316,12 +321,14 @@ fn main() -> anyhow::Result<()> {
|
|||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
db_stage.merge(changeset);
|
||||||
db.stage(changeset);
|
|
||||||
|
|
||||||
if last_db_commit.elapsed() >= DB_COMMIT_DELAY {
|
if last_db_commit.elapsed() >= DB_COMMIT_DELAY {
|
||||||
|
let db = &mut *db.lock().unwrap();
|
||||||
last_db_commit = Instant::now();
|
last_db_commit = Instant::now();
|
||||||
db.commit()?;
|
if let Some(changeset) = db_stage.take() {
|
||||||
|
db.append_changeset(&changeset)?;
|
||||||
|
}
|
||||||
println!(
|
println!(
|
||||||
"[{:>10}s] committed to db (took {}s)",
|
"[{:>10}s] committed to db (took {}s)",
|
||||||
start.elapsed().as_secs_f32(),
|
start.elapsed().as_secs_f32(),
|
||||||
@@ -336,7 +343,7 @@ fn main() -> anyhow::Result<()> {
|
|||||||
graph.graph().balance(
|
graph.graph().balance(
|
||||||
&*chain,
|
&*chain,
|
||||||
synced_to.block_id(),
|
synced_to.block_id(),
|
||||||
graph.index.outpoints(),
|
graph.index.outpoints().iter().cloned(),
|
||||||
|(k, _), _| k == &Keychain::Internal,
|
|(k, _), _| k == &Keychain::Internal,
|
||||||
)
|
)
|
||||||
};
|
};
|
||||||
|
|||||||
@@ -7,7 +7,6 @@ edition = "2021"
|
|||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
bdk_chain = { path = "../../crates/chain", features = ["serde", "miniscript"]}
|
bdk_chain = { path = "../../crates/chain", features = ["serde", "miniscript"]}
|
||||||
bdk_persist = { path = "../../crates/persist" }
|
|
||||||
bdk_file_store = { path = "../../crates/file_store" }
|
bdk_file_store = { path = "../../crates/file_store" }
|
||||||
bdk_tmp_plan = { path = "../../nursery/tmp_plan" }
|
bdk_tmp_plan = { path = "../../nursery/tmp_plan" }
|
||||||
bdk_coin_select = { path = "../../nursery/coin_select" }
|
bdk_coin_select = { path = "../../nursery/coin_select" }
|
||||||
|
|||||||
@@ -3,6 +3,7 @@ use anyhow::Context;
|
|||||||
use bdk_coin_select::{coin_select_bnb, CoinSelector, CoinSelectorOpt, WeightedValue};
|
use bdk_coin_select::{coin_select_bnb, CoinSelector, CoinSelectorOpt, WeightedValue};
|
||||||
use bdk_file_store::Store;
|
use bdk_file_store::Store;
|
||||||
use serde::{de::DeserializeOwned, Serialize};
|
use serde::{de::DeserializeOwned, Serialize};
|
||||||
|
use std::fmt::Debug;
|
||||||
use std::{cmp::Reverse, collections::BTreeMap, path::PathBuf, sync::Mutex, time::Duration};
|
use std::{cmp::Reverse, collections::BTreeMap, path::PathBuf, sync::Mutex, time::Duration};
|
||||||
|
|
||||||
use bdk_chain::{
|
use bdk_chain::{
|
||||||
@@ -13,16 +14,15 @@ use bdk_chain::{
|
|||||||
transaction, Address, Amount, Network, Sequence, Transaction, TxIn, TxOut,
|
transaction, Address, Amount, Network, Sequence, Transaction, TxIn, TxOut,
|
||||||
},
|
},
|
||||||
indexed_tx_graph::{self, IndexedTxGraph},
|
indexed_tx_graph::{self, IndexedTxGraph},
|
||||||
keychain::{self, KeychainTxOutIndex},
|
indexer::keychain_txout::{self, KeychainTxOutIndex},
|
||||||
local_chain,
|
local_chain,
|
||||||
miniscript::{
|
miniscript::{
|
||||||
descriptor::{DescriptorSecretKey, KeyMap},
|
descriptor::{DescriptorSecretKey, KeyMap},
|
||||||
Descriptor, DescriptorPublicKey,
|
Descriptor, DescriptorPublicKey,
|
||||||
},
|
},
|
||||||
Anchor, Append, ChainOracle, DescriptorExt, FullTxOut,
|
Anchor, ChainOracle, DescriptorExt, FullTxOut, Merge,
|
||||||
};
|
};
|
||||||
pub use bdk_file_store;
|
pub use bdk_file_store;
|
||||||
use bdk_persist::{Persist, PersistBackend};
|
|
||||||
pub use clap;
|
pub use clap;
|
||||||
|
|
||||||
use clap::{Parser, Subcommand};
|
use clap::{Parser, Subcommand};
|
||||||
@@ -30,7 +30,7 @@ use clap::{Parser, Subcommand};
|
|||||||
pub type KeychainTxGraph<A> = IndexedTxGraph<A, KeychainTxOutIndex<Keychain>>;
|
pub type KeychainTxGraph<A> = IndexedTxGraph<A, KeychainTxOutIndex<Keychain>>;
|
||||||
pub type KeychainChangeSet<A> = (
|
pub type KeychainChangeSet<A> = (
|
||||||
local_chain::ChangeSet,
|
local_chain::ChangeSet,
|
||||||
indexed_tx_graph::ChangeSet<A, keychain::ChangeSet<Keychain>>,
|
indexed_tx_graph::ChangeSet<A, keychain_txout::ChangeSet>,
|
||||||
);
|
);
|
||||||
|
|
||||||
#[derive(Parser)]
|
#[derive(Parser)]
|
||||||
@@ -191,7 +191,7 @@ impl core::fmt::Display for Keychain {
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub struct CreateTxChange {
|
pub struct CreateTxChange {
|
||||||
pub index_changeset: keychain::ChangeSet<Keychain>,
|
pub index_changeset: keychain_txout::ChangeSet,
|
||||||
pub change_keychain: Keychain,
|
pub change_keychain: Keychain,
|
||||||
pub index: u32,
|
pub index: u32,
|
||||||
}
|
}
|
||||||
@@ -207,7 +207,7 @@ pub fn create_tx<A: Anchor, O: ChainOracle>(
|
|||||||
where
|
where
|
||||||
O::Error: std::error::Error + Send + Sync + 'static,
|
O::Error: std::error::Error + Send + Sync + 'static,
|
||||||
{
|
{
|
||||||
let mut changeset = keychain::ChangeSet::default();
|
let mut changeset = keychain_txout::ChangeSet::default();
|
||||||
|
|
||||||
let assets = bdk_tmp_plan::Assets {
|
let assets = bdk_tmp_plan::Assets {
|
||||||
keys: keymap.iter().map(|(pk, _)| pk.clone()).collect(),
|
keys: keymap.iter().map(|(pk, _)| pk.clone()).collect(),
|
||||||
@@ -252,7 +252,7 @@ where
|
|||||||
let internal_keychain = if graph
|
let internal_keychain = if graph
|
||||||
.index
|
.index
|
||||||
.keychains()
|
.keychains()
|
||||||
.any(|(k, _)| *k == Keychain::Internal)
|
.any(|(k, _)| k == Keychain::Internal)
|
||||||
{
|
{
|
||||||
Keychain::Internal
|
Keychain::Internal
|
||||||
} else {
|
} else {
|
||||||
@@ -261,18 +261,15 @@ where
|
|||||||
|
|
||||||
let ((change_index, change_script), change_changeset) = graph
|
let ((change_index, change_script), change_changeset) = graph
|
||||||
.index
|
.index
|
||||||
.next_unused_spk(&internal_keychain)
|
.next_unused_spk(internal_keychain)
|
||||||
.expect("Must exist");
|
.expect("Must exist");
|
||||||
changeset.append(change_changeset);
|
changeset.merge(change_changeset);
|
||||||
|
|
||||||
// Clone to drop the immutable reference.
|
|
||||||
let change_script = change_script.into();
|
|
||||||
|
|
||||||
let change_plan = bdk_tmp_plan::plan_satisfaction(
|
let change_plan = bdk_tmp_plan::plan_satisfaction(
|
||||||
&graph
|
&graph
|
||||||
.index
|
.index
|
||||||
.keychains()
|
.keychains()
|
||||||
.find(|(k, _)| *k == &internal_keychain)
|
.find(|(k, _)| *k == internal_keychain)
|
||||||
.expect("must exist")
|
.expect("must exist")
|
||||||
.1
|
.1
|
||||||
.at_derivation_index(change_index)
|
.at_derivation_index(change_index)
|
||||||
@@ -291,7 +288,7 @@ where
|
|||||||
min_drain_value: graph
|
min_drain_value: graph
|
||||||
.index
|
.index
|
||||||
.keychains()
|
.keychains()
|
||||||
.find(|(k, _)| *k == &internal_keychain)
|
.find(|(k, _)| *k == internal_keychain)
|
||||||
.expect("must exist")
|
.expect("must exist")
|
||||||
.1
|
.1
|
||||||
.dust_value(),
|
.dust_value(),
|
||||||
@@ -427,7 +424,7 @@ pub fn planned_utxos<A: Anchor, O: ChainOracle, K: Clone + bdk_tmp_plan::CanDeri
|
|||||||
let outpoints = graph.index.outpoints();
|
let outpoints = graph.index.outpoints();
|
||||||
graph
|
graph
|
||||||
.graph()
|
.graph()
|
||||||
.try_filter_chain_unspents(chain, chain_tip, outpoints)
|
.try_filter_chain_unspents(chain, chain_tip, outpoints.iter().cloned())
|
||||||
.filter_map(|r| -> Option<Result<PlannedUtxo<K, A>, _>> {
|
.filter_map(|r| -> Option<Result<PlannedUtxo<K, A>, _>> {
|
||||||
let (k, i, full_txo) = match r {
|
let (k, i, full_txo) = match r {
|
||||||
Err(err) => return Some(Err(err)),
|
Err(err) => return Some(Err(err)),
|
||||||
@@ -436,7 +433,7 @@ pub fn planned_utxos<A: Anchor, O: ChainOracle, K: Clone + bdk_tmp_plan::CanDeri
|
|||||||
let desc = graph
|
let desc = graph
|
||||||
.index
|
.index
|
||||||
.keychains()
|
.keychains()
|
||||||
.find(|(keychain, _)| *keychain == &k)
|
.find(|(keychain, _)| *keychain == k)
|
||||||
.expect("keychain must exist")
|
.expect("keychain must exist")
|
||||||
.1
|
.1
|
||||||
.at_derivation_index(i)
|
.at_derivation_index(i)
|
||||||
@@ -449,7 +446,7 @@ pub fn planned_utxos<A: Anchor, O: ChainOracle, K: Clone + bdk_tmp_plan::CanDeri
|
|||||||
|
|
||||||
pub fn handle_commands<CS: clap::Subcommand, S: clap::Args, A: Anchor, O: ChainOracle, C>(
|
pub fn handle_commands<CS: clap::Subcommand, S: clap::Args, A: Anchor, O: ChainOracle, C>(
|
||||||
graph: &Mutex<KeychainTxGraph<A>>,
|
graph: &Mutex<KeychainTxGraph<A>>,
|
||||||
db: &Mutex<Persist<C>>,
|
db: &Mutex<Store<C>>,
|
||||||
chain: &Mutex<O>,
|
chain: &Mutex<O>,
|
||||||
keymap: &BTreeMap<DescriptorPublicKey, DescriptorSecretKey>,
|
keymap: &BTreeMap<DescriptorPublicKey, DescriptorSecretKey>,
|
||||||
network: Network,
|
network: Network,
|
||||||
@@ -458,7 +455,14 @@ pub fn handle_commands<CS: clap::Subcommand, S: clap::Args, A: Anchor, O: ChainO
|
|||||||
) -> anyhow::Result<()>
|
) -> anyhow::Result<()>
|
||||||
where
|
where
|
||||||
O::Error: std::error::Error + Send + Sync + 'static,
|
O::Error: std::error::Error + Send + Sync + 'static,
|
||||||
C: Default + Append + DeserializeOwned + Serialize + From<KeychainChangeSet<A>>,
|
C: Default
|
||||||
|
+ Merge
|
||||||
|
+ DeserializeOwned
|
||||||
|
+ Serialize
|
||||||
|
+ From<KeychainChangeSet<A>>
|
||||||
|
+ Send
|
||||||
|
+ Sync
|
||||||
|
+ Debug,
|
||||||
{
|
{
|
||||||
match cmd {
|
match cmd {
|
||||||
Commands::ChainSpecific(_) => unreachable!("example code should handle this!"),
|
Commands::ChainSpecific(_) => unreachable!("example code should handle this!"),
|
||||||
@@ -475,14 +479,14 @@ where
|
|||||||
};
|
};
|
||||||
|
|
||||||
let ((spk_i, spk), index_changeset) =
|
let ((spk_i, spk), index_changeset) =
|
||||||
spk_chooser(index, &Keychain::External).expect("Must exist");
|
spk_chooser(index, Keychain::External).expect("Must exist");
|
||||||
let db = &mut *db.lock().unwrap();
|
let db = &mut *db.lock().unwrap();
|
||||||
db.stage_and_commit(C::from((
|
db.append_changeset(&C::from((
|
||||||
local_chain::ChangeSet::default(),
|
local_chain::ChangeSet::default(),
|
||||||
indexed_tx_graph::ChangeSet::from(index_changeset),
|
indexed_tx_graph::ChangeSet::from(index_changeset),
|
||||||
)))?;
|
)))?;
|
||||||
let addr =
|
let addr = Address::from_script(spk.as_script(), network)
|
||||||
Address::from_script(spk, network).context("failed to derive address")?;
|
.context("failed to derive address")?;
|
||||||
println!("[address @ {}] {}", spk_i, addr);
|
println!("[address @ {}] {}", spk_i, addr);
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
@@ -497,8 +501,8 @@ where
|
|||||||
true => Keychain::Internal,
|
true => Keychain::Internal,
|
||||||
false => Keychain::External,
|
false => Keychain::External,
|
||||||
};
|
};
|
||||||
for (spk_i, spk) in index.revealed_keychain_spks(&target_keychain) {
|
for (spk_i, spk) in index.revealed_keychain_spks(target_keychain) {
|
||||||
let address = Address::from_script(spk, network)
|
let address = Address::from_script(spk.as_script(), network)
|
||||||
.expect("should always be able to derive address");
|
.expect("should always be able to derive address");
|
||||||
println!(
|
println!(
|
||||||
"{:?} {} used:{}",
|
"{:?} {} used:{}",
|
||||||
@@ -527,7 +531,7 @@ where
|
|||||||
let balance = graph.graph().try_balance(
|
let balance = graph.graph().try_balance(
|
||||||
chain,
|
chain,
|
||||||
chain.get_chain_tip()?,
|
chain.get_chain_tip()?,
|
||||||
graph.index.outpoints(),
|
graph.index.outpoints().iter().cloned(),
|
||||||
|(k, _), _| k == &Keychain::Internal,
|
|(k, _), _| k == &Keychain::Internal,
|
||||||
)?;
|
)?;
|
||||||
|
|
||||||
@@ -568,7 +572,7 @@ where
|
|||||||
} => {
|
} => {
|
||||||
let txouts = graph
|
let txouts = graph
|
||||||
.graph()
|
.graph()
|
||||||
.try_filter_chain_txouts(chain, chain_tip, outpoints)
|
.try_filter_chain_txouts(chain, chain_tip, outpoints.iter().cloned())
|
||||||
.filter(|r| match r {
|
.filter(|r| match r {
|
||||||
Ok((_, full_txo)) => match (spent, unspent) {
|
Ok((_, full_txo)) => match (spent, unspent) {
|
||||||
(true, false) => full_txo.spent_by.is_some(),
|
(true, false) => full_txo.spent_by.is_some(),
|
||||||
@@ -625,7 +629,7 @@ where
|
|||||||
// If we're unable to persist this, then we don't want to broadcast.
|
// If we're unable to persist this, then we don't want to broadcast.
|
||||||
{
|
{
|
||||||
let db = &mut *db.lock().unwrap();
|
let db = &mut *db.lock().unwrap();
|
||||||
db.stage_and_commit(C::from((
|
db.append_changeset(&C::from((
|
||||||
local_chain::ChangeSet::default(),
|
local_chain::ChangeSet::default(),
|
||||||
indexed_tx_graph::ChangeSet::from(index_changeset),
|
indexed_tx_graph::ChangeSet::from(index_changeset),
|
||||||
)))?;
|
)))?;
|
||||||
@@ -643,14 +647,14 @@ where
|
|||||||
|
|
||||||
match (broadcast)(chain_specific, &transaction) {
|
match (broadcast)(chain_specific, &transaction) {
|
||||||
Ok(_) => {
|
Ok(_) => {
|
||||||
println!("Broadcasted Tx : {}", transaction.txid());
|
println!("Broadcasted Tx : {}", transaction.compute_txid());
|
||||||
|
|
||||||
let keychain_changeset = graph.lock().unwrap().insert_tx(transaction);
|
let keychain_changeset = graph.lock().unwrap().insert_tx(transaction);
|
||||||
|
|
||||||
// We know the tx is at least unconfirmed now. Note if persisting here fails,
|
// We know the tx is at least unconfirmed now. Note if persisting here fails,
|
||||||
// it's not a big deal since we can always find it again form
|
// it's not a big deal since we can always find it again form
|
||||||
// blockchain.
|
// blockchain.
|
||||||
db.lock().unwrap().stage_and_commit(C::from((
|
db.lock().unwrap().append_changeset(&C::from((
|
||||||
local_chain::ChangeSet::default(),
|
local_chain::ChangeSet::default(),
|
||||||
keychain_changeset,
|
keychain_changeset,
|
||||||
)))?;
|
)))?;
|
||||||
@@ -669,7 +673,10 @@ where
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// The initial state returned by [`init`].
|
/// The initial state returned by [`init`].
|
||||||
pub struct Init<CS: clap::Subcommand, S: clap::Args, C> {
|
pub struct Init<CS: clap::Subcommand, S: clap::Args, C>
|
||||||
|
where
|
||||||
|
C: Default + Merge + Serialize + DeserializeOwned + Debug + Send + Sync + 'static,
|
||||||
|
{
|
||||||
/// Arguments parsed by the cli.
|
/// Arguments parsed by the cli.
|
||||||
pub args: Args<CS, S>,
|
pub args: Args<CS, S>,
|
||||||
/// Descriptor keymap.
|
/// Descriptor keymap.
|
||||||
@@ -677,7 +684,7 @@ pub struct Init<CS: clap::Subcommand, S: clap::Args, C> {
|
|||||||
/// Keychain-txout index.
|
/// Keychain-txout index.
|
||||||
pub index: KeychainTxOutIndex<Keychain>,
|
pub index: KeychainTxOutIndex<Keychain>,
|
||||||
/// Persistence backend.
|
/// Persistence backend.
|
||||||
pub db: Mutex<Persist<C>>,
|
pub db: Mutex<Store<C>>,
|
||||||
/// Initial changeset.
|
/// Initial changeset.
|
||||||
pub init_changeset: C,
|
pub init_changeset: C,
|
||||||
}
|
}
|
||||||
@@ -690,9 +697,10 @@ pub fn init<CS: clap::Subcommand, S: clap::Args, C>(
|
|||||||
) -> anyhow::Result<Init<CS, S, C>>
|
) -> anyhow::Result<Init<CS, S, C>>
|
||||||
where
|
where
|
||||||
C: Default
|
C: Default
|
||||||
+ Append
|
+ Merge
|
||||||
+ Serialize
|
+ Serialize
|
||||||
+ DeserializeOwned
|
+ DeserializeOwned
|
||||||
|
+ Debug
|
||||||
+ core::marker::Send
|
+ core::marker::Send
|
||||||
+ core::marker::Sync
|
+ core::marker::Sync
|
||||||
+ 'static,
|
+ 'static,
|
||||||
@@ -709,7 +717,7 @@ where
|
|||||||
// them in the index here. However, the keymap is not stored in the database.
|
// them in the index here. However, the keymap is not stored in the database.
|
||||||
let (descriptor, mut keymap) =
|
let (descriptor, mut keymap) =
|
||||||
Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, &args.descriptor)?;
|
Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, &args.descriptor)?;
|
||||||
let _ = index.insert_descriptor(Keychain::External, descriptor);
|
let _ = index.insert_descriptor(Keychain::External, descriptor)?;
|
||||||
|
|
||||||
if let Some((internal_descriptor, internal_keymap)) = args
|
if let Some((internal_descriptor, internal_keymap)) = args
|
||||||
.change_descriptor
|
.change_descriptor
|
||||||
@@ -718,7 +726,7 @@ where
|
|||||||
.transpose()?
|
.transpose()?
|
||||||
{
|
{
|
||||||
keymap.extend(internal_keymap);
|
keymap.extend(internal_keymap);
|
||||||
let _ = index.insert_descriptor(Keychain::Internal, internal_descriptor);
|
let _ = index.insert_descriptor(Keychain::Internal, internal_descriptor)?;
|
||||||
}
|
}
|
||||||
|
|
||||||
let mut db_backend = match Store::<C>::open_or_create_new(db_magic, &args.db_path) {
|
let mut db_backend = match Store::<C>::open_or_create_new(db_magic, &args.db_path) {
|
||||||
@@ -727,13 +735,13 @@ where
|
|||||||
Err(err) => return Err(anyhow::anyhow!("failed to init db backend: {:?}", err)),
|
Err(err) => return Err(anyhow::anyhow!("failed to init db backend: {:?}", err)),
|
||||||
};
|
};
|
||||||
|
|
||||||
let init_changeset = db_backend.load_from_persistence()?.unwrap_or_default();
|
let init_changeset = db_backend.aggregate_changesets()?.unwrap_or_default();
|
||||||
|
|
||||||
Ok(Init {
|
Ok(Init {
|
||||||
args,
|
args,
|
||||||
keymap,
|
keymap,
|
||||||
index,
|
index,
|
||||||
db: Mutex::new(Persist::new(db_backend)),
|
db: Mutex::new(db_backend),
|
||||||
init_changeset,
|
init_changeset,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -7,14 +7,14 @@ use bdk_chain::{
|
|||||||
bitcoin::{constants::genesis_block, Address, Network, Txid},
|
bitcoin::{constants::genesis_block, Address, Network, Txid},
|
||||||
collections::BTreeSet,
|
collections::BTreeSet,
|
||||||
indexed_tx_graph::{self, IndexedTxGraph},
|
indexed_tx_graph::{self, IndexedTxGraph},
|
||||||
keychain,
|
indexer::keychain_txout,
|
||||||
local_chain::{self, LocalChain},
|
local_chain::{self, LocalChain},
|
||||||
spk_client::{FullScanRequest, SyncRequest},
|
spk_client::{FullScanRequest, SyncRequest},
|
||||||
Append, ConfirmationHeightAnchor,
|
ConfirmationBlockTime, Merge,
|
||||||
};
|
};
|
||||||
use bdk_electrum::{
|
use bdk_electrum::{
|
||||||
electrum_client::{self, Client, ElectrumApi},
|
electrum_client::{self, Client, ElectrumApi},
|
||||||
ElectrumExt,
|
BdkElectrumClient,
|
||||||
};
|
};
|
||||||
use example_cli::{
|
use example_cli::{
|
||||||
anyhow::{self, Context},
|
anyhow::{self, Context},
|
||||||
@@ -100,7 +100,7 @@ pub struct ScanOptions {
|
|||||||
|
|
||||||
type ChangeSet = (
|
type ChangeSet = (
|
||||||
local_chain::ChangeSet,
|
local_chain::ChangeSet,
|
||||||
indexed_tx_graph::ChangeSet<ConfirmationHeightAnchor, keychain::ChangeSet<Keychain>>,
|
indexed_tx_graph::ChangeSet<ConfirmationBlockTime, keychain_txout::ChangeSet>,
|
||||||
);
|
);
|
||||||
|
|
||||||
fn main() -> anyhow::Result<()> {
|
fn main() -> anyhow::Result<()> {
|
||||||
@@ -146,7 +146,10 @@ fn main() -> anyhow::Result<()> {
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
let client = electrum_cmd.electrum_args().client(args.network)?;
|
let client = BdkElectrumClient::new(electrum_cmd.electrum_args().client(args.network)?);
|
||||||
|
|
||||||
|
// Tell the electrum client about the txs we've already got locally so it doesn't re-download them
|
||||||
|
client.populate_tx_cache(&*graph.lock().unwrap());
|
||||||
|
|
||||||
let (chain_update, mut graph_update, keychain_update) = match electrum_cmd.clone() {
|
let (chain_update, mut graph_update, keychain_update) = match electrum_cmd.clone() {
|
||||||
ElectrumCommands::Scan {
|
ElectrumCommands::Scan {
|
||||||
@@ -159,12 +162,11 @@ fn main() -> anyhow::Result<()> {
|
|||||||
let chain = &*chain.lock().unwrap();
|
let chain = &*chain.lock().unwrap();
|
||||||
|
|
||||||
FullScanRequest::from_chain_tip(chain.tip())
|
FullScanRequest::from_chain_tip(chain.tip())
|
||||||
.cache_graph_txs(graph.graph())
|
|
||||||
.set_spks_for_keychain(
|
.set_spks_for_keychain(
|
||||||
Keychain::External,
|
Keychain::External,
|
||||||
graph
|
graph
|
||||||
.index
|
.index
|
||||||
.unbounded_spk_iter(&Keychain::External)
|
.unbounded_spk_iter(Keychain::External)
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.flatten(),
|
.flatten(),
|
||||||
)
|
)
|
||||||
@@ -172,7 +174,7 @@ fn main() -> anyhow::Result<()> {
|
|||||||
Keychain::Internal,
|
Keychain::Internal,
|
||||||
graph
|
graph
|
||||||
.index
|
.index
|
||||||
.unbounded_spk_iter(&Keychain::Internal)
|
.unbounded_spk_iter(Keychain::Internal)
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.flatten(),
|
.flatten(),
|
||||||
)
|
)
|
||||||
@@ -191,8 +193,7 @@ fn main() -> anyhow::Result<()> {
|
|||||||
|
|
||||||
let res = client
|
let res = client
|
||||||
.full_scan::<_>(request, stop_gap, scan_options.batch_size, false)
|
.full_scan::<_>(request, stop_gap, scan_options.batch_size, false)
|
||||||
.context("scanning the blockchain")?
|
.context("scanning the blockchain")?;
|
||||||
.with_confirmation_height_anchor();
|
|
||||||
(
|
(
|
||||||
res.chain_update,
|
res.chain_update,
|
||||||
res.graph_update,
|
res.graph_update,
|
||||||
@@ -220,16 +221,15 @@ fn main() -> anyhow::Result<()> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
let chain_tip = chain.tip();
|
let chain_tip = chain.tip();
|
||||||
let mut request =
|
let mut request = SyncRequest::from_chain_tip(chain_tip.clone());
|
||||||
SyncRequest::from_chain_tip(chain_tip.clone()).cache_graph_txs(graph.graph());
|
|
||||||
|
|
||||||
if all_spks {
|
if all_spks {
|
||||||
let all_spks = graph
|
let all_spks = graph
|
||||||
.index
|
.index
|
||||||
.revealed_spks(..)
|
.revealed_spks(..)
|
||||||
.map(|(k, i, spk)| (k.to_owned(), i, spk.to_owned()))
|
.map(|(index, spk)| (index, spk.to_owned()))
|
||||||
.collect::<Vec<_>>();
|
.collect::<Vec<_>>();
|
||||||
request = request.chain_spks(all_spks.into_iter().map(|(k, spk_i, spk)| {
|
request = request.chain_spks(all_spks.into_iter().map(|((k, spk_i), spk)| {
|
||||||
eprint!("Scanning {}: {}", k, spk_i);
|
eprint!("Scanning {}: {}", k, spk_i);
|
||||||
spk
|
spk
|
||||||
}));
|
}));
|
||||||
@@ -238,10 +238,10 @@ fn main() -> anyhow::Result<()> {
|
|||||||
let unused_spks = graph
|
let unused_spks = graph
|
||||||
.index
|
.index
|
||||||
.unused_spks()
|
.unused_spks()
|
||||||
.map(|(k, i, spk)| (k, i, spk.to_owned()))
|
.map(|(index, spk)| (index, spk.to_owned()))
|
||||||
.collect::<Vec<_>>();
|
.collect::<Vec<_>>();
|
||||||
request =
|
request =
|
||||||
request.chain_spks(unused_spks.into_iter().map(move |(k, spk_i, spk)| {
|
request.chain_spks(unused_spks.into_iter().map(move |((k, spk_i), spk)| {
|
||||||
eprint!(
|
eprint!(
|
||||||
"Checking if address {} {}:{} has been used",
|
"Checking if address {} {}:{} has been used",
|
||||||
Address::from_script(&spk, args.network).unwrap(),
|
Address::from_script(&spk, args.network).unwrap(),
|
||||||
@@ -257,7 +257,11 @@ fn main() -> anyhow::Result<()> {
|
|||||||
|
|
||||||
let utxos = graph
|
let utxos = graph
|
||||||
.graph()
|
.graph()
|
||||||
.filter_chain_unspents(&*chain, chain_tip.block_id(), init_outpoints)
|
.filter_chain_unspents(
|
||||||
|
&*chain,
|
||||||
|
chain_tip.block_id(),
|
||||||
|
init_outpoints.iter().cloned(),
|
||||||
|
)
|
||||||
.map(|(_, utxo)| utxo)
|
.map(|(_, utxo)| utxo)
|
||||||
.collect::<Vec<_>>();
|
.collect::<Vec<_>>();
|
||||||
request = request.chain_outpoints(utxos.into_iter().map(|utxo| {
|
request = request.chain_outpoints(utxos.into_iter().map(|utxo| {
|
||||||
@@ -272,7 +276,7 @@ fn main() -> anyhow::Result<()> {
|
|||||||
if unconfirmed {
|
if unconfirmed {
|
||||||
let unconfirmed_txids = graph
|
let unconfirmed_txids = graph
|
||||||
.graph()
|
.graph()
|
||||||
.list_chain_txs(&*chain, chain_tip.block_id())
|
.list_canonical_txs(&*chain, chain_tip.block_id())
|
||||||
.filter(|canonical_tx| !canonical_tx.chain_position.is_confirmed())
|
.filter(|canonical_tx| !canonical_tx.chain_position.is_confirmed())
|
||||||
.map(|canonical_tx| canonical_tx.tx_node.txid)
|
.map(|canonical_tx| canonical_tx.tx_node.txid)
|
||||||
.collect::<Vec<Txid>>();
|
.collect::<Vec<Txid>>();
|
||||||
@@ -312,8 +316,7 @@ fn main() -> anyhow::Result<()> {
|
|||||||
|
|
||||||
let res = client
|
let res = client
|
||||||
.sync(request, scan_options.batch_size, false)
|
.sync(request, scan_options.batch_size, false)
|
||||||
.context("scanning the blockchain")?
|
.context("scanning the blockchain")?;
|
||||||
.with_confirmation_height_anchor();
|
|
||||||
|
|
||||||
// drop lock on graph and chain
|
// drop lock on graph and chain
|
||||||
drop((graph, chain));
|
drop((graph, chain));
|
||||||
@@ -335,18 +338,17 @@ fn main() -> anyhow::Result<()> {
|
|||||||
let chain_changeset = chain.apply_update(chain_update)?;
|
let chain_changeset = chain.apply_update(chain_update)?;
|
||||||
|
|
||||||
let mut indexed_tx_graph_changeset =
|
let mut indexed_tx_graph_changeset =
|
||||||
indexed_tx_graph::ChangeSet::<ConfirmationHeightAnchor, _>::default();
|
indexed_tx_graph::ChangeSet::<ConfirmationBlockTime, _>::default();
|
||||||
if let Some(keychain_update) = keychain_update {
|
if let Some(keychain_update) = keychain_update {
|
||||||
let (_, keychain_changeset) = graph.index.reveal_to_target_multi(&keychain_update);
|
let keychain_changeset = graph.index.reveal_to_target_multi(&keychain_update);
|
||||||
indexed_tx_graph_changeset.append(keychain_changeset.into());
|
indexed_tx_graph_changeset.merge(keychain_changeset.into());
|
||||||
}
|
}
|
||||||
indexed_tx_graph_changeset.append(graph.apply_update(graph_update));
|
indexed_tx_graph_changeset.merge(graph.apply_update(graph_update));
|
||||||
|
|
||||||
(chain_changeset, indexed_tx_graph_changeset)
|
(chain_changeset, indexed_tx_graph_changeset)
|
||||||
};
|
};
|
||||||
|
|
||||||
let mut db = db.lock().unwrap();
|
let mut db = db.lock().unwrap();
|
||||||
db.stage(db_changeset);
|
db.append_changeset(&db_changeset)?;
|
||||||
db.commit()?;
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -7,10 +7,10 @@ use std::{
|
|||||||
use bdk_chain::{
|
use bdk_chain::{
|
||||||
bitcoin::{constants::genesis_block, Address, Network, Txid},
|
bitcoin::{constants::genesis_block, Address, Network, Txid},
|
||||||
indexed_tx_graph::{self, IndexedTxGraph},
|
indexed_tx_graph::{self, IndexedTxGraph},
|
||||||
keychain,
|
indexer::keychain_txout,
|
||||||
local_chain::{self, LocalChain},
|
local_chain::{self, LocalChain},
|
||||||
spk_client::{FullScanRequest, SyncRequest},
|
spk_client::{FullScanRequest, SyncRequest},
|
||||||
Append, ConfirmationTimeHeightAnchor,
|
ConfirmationBlockTime, Merge,
|
||||||
};
|
};
|
||||||
|
|
||||||
use bdk_esplora::{esplora_client, EsploraExt};
|
use bdk_esplora::{esplora_client, EsploraExt};
|
||||||
@@ -22,11 +22,11 @@ use example_cli::{
|
|||||||
};
|
};
|
||||||
|
|
||||||
const DB_MAGIC: &[u8] = b"bdk_example_esplora";
|
const DB_MAGIC: &[u8] = b"bdk_example_esplora";
|
||||||
const DB_PATH: &str = ".bdk_esplora_example.db";
|
const DB_PATH: &str = "bdk_example_esplora.db";
|
||||||
|
|
||||||
type ChangeSet = (
|
type ChangeSet = (
|
||||||
local_chain::ChangeSet,
|
local_chain::ChangeSet,
|
||||||
indexed_tx_graph::ChangeSet<ConfirmationTimeHeightAnchor, keychain::ChangeSet<Keychain>>,
|
indexed_tx_graph::ChangeSet<ConfirmationBlockTime, keychain_txout::ChangeSet>,
|
||||||
);
|
);
|
||||||
|
|
||||||
#[derive(Subcommand, Debug, Clone)]
|
#[derive(Subcommand, Debug, Clone)]
|
||||||
@@ -84,7 +84,7 @@ impl EsploraArgs {
|
|||||||
Network::Bitcoin => "https://blockstream.info/api",
|
Network::Bitcoin => "https://blockstream.info/api",
|
||||||
Network::Testnet => "https://blockstream.info/testnet/api",
|
Network::Testnet => "https://blockstream.info/testnet/api",
|
||||||
Network::Regtest => "http://localhost:3002",
|
Network::Regtest => "http://localhost:3002",
|
||||||
Network::Signet => "https://mempool.space/signet/api",
|
Network::Signet => "http://signet.bitcoindevkit.net",
|
||||||
_ => panic!("unsupported network"),
|
_ => panic!("unsupported network"),
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -96,7 +96,7 @@ impl EsploraArgs {
|
|||||||
#[derive(Parser, Debug, Clone, PartialEq)]
|
#[derive(Parser, Debug, Clone, PartialEq)]
|
||||||
pub struct ScanOptions {
|
pub struct ScanOptions {
|
||||||
/// Max number of concurrent esplora server requests.
|
/// Max number of concurrent esplora server requests.
|
||||||
#[clap(long, default_value = "1")]
|
#[clap(long, default_value = "5")]
|
||||||
pub parallel_requests: usize,
|
pub parallel_requests: usize,
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -204,11 +204,11 @@ fn main() -> anyhow::Result<()> {
|
|||||||
// addresses derived so we need to derive up to last active addresses the scan found
|
// addresses derived so we need to derive up to last active addresses the scan found
|
||||||
// before adding the transactions.
|
// before adding the transactions.
|
||||||
(chain.apply_update(update.chain_update)?, {
|
(chain.apply_update(update.chain_update)?, {
|
||||||
let (_, index_changeset) = graph
|
let index_changeset = graph
|
||||||
.index
|
.index
|
||||||
.reveal_to_target_multi(&update.last_active_indices);
|
.reveal_to_target_multi(&update.last_active_indices);
|
||||||
let mut indexed_tx_graph_changeset = graph.apply_update(update.graph_update);
|
let mut indexed_tx_graph_changeset = graph.apply_update(update.graph_update);
|
||||||
indexed_tx_graph_changeset.append(index_changeset.into());
|
indexed_tx_graph_changeset.merge(index_changeset.into());
|
||||||
indexed_tx_graph_changeset
|
indexed_tx_graph_changeset
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
@@ -245,7 +245,7 @@ fn main() -> anyhow::Result<()> {
|
|||||||
let all_spks = graph
|
let all_spks = graph
|
||||||
.index
|
.index
|
||||||
.revealed_spks(..)
|
.revealed_spks(..)
|
||||||
.map(|(k, i, spk)| (k.to_owned(), i, spk.to_owned()))
|
.map(|((k, i), spk)| (k, i, spk.to_owned()))
|
||||||
.collect::<Vec<_>>();
|
.collect::<Vec<_>>();
|
||||||
request = request.chain_spks(all_spks.into_iter().map(|(k, i, spk)| {
|
request = request.chain_spks(all_spks.into_iter().map(|(k, i, spk)| {
|
||||||
eprint!("scanning {}:{}", k, i);
|
eprint!("scanning {}:{}", k, i);
|
||||||
@@ -258,10 +258,10 @@ fn main() -> anyhow::Result<()> {
|
|||||||
let unused_spks = graph
|
let unused_spks = graph
|
||||||
.index
|
.index
|
||||||
.unused_spks()
|
.unused_spks()
|
||||||
.map(|(k, i, spk)| (k, i, spk.to_owned()))
|
.map(|(index, spk)| (index, spk.to_owned()))
|
||||||
.collect::<Vec<_>>();
|
.collect::<Vec<_>>();
|
||||||
request =
|
request =
|
||||||
request.chain_spks(unused_spks.into_iter().map(move |(k, i, spk)| {
|
request.chain_spks(unused_spks.into_iter().map(move |((k, i), spk)| {
|
||||||
eprint!(
|
eprint!(
|
||||||
"Checking if address {} {}:{} has been used",
|
"Checking if address {} {}:{} has been used",
|
||||||
Address::from_script(&spk, args.network).unwrap(),
|
Address::from_script(&spk, args.network).unwrap(),
|
||||||
@@ -280,7 +280,11 @@ fn main() -> anyhow::Result<()> {
|
|||||||
let init_outpoints = graph.index.outpoints();
|
let init_outpoints = graph.index.outpoints();
|
||||||
let utxos = graph
|
let utxos = graph
|
||||||
.graph()
|
.graph()
|
||||||
.filter_chain_unspents(&*chain, local_tip.block_id(), init_outpoints)
|
.filter_chain_unspents(
|
||||||
|
&*chain,
|
||||||
|
local_tip.block_id(),
|
||||||
|
init_outpoints.iter().cloned(),
|
||||||
|
)
|
||||||
.map(|(_, utxo)| utxo)
|
.map(|(_, utxo)| utxo)
|
||||||
.collect::<Vec<_>>();
|
.collect::<Vec<_>>();
|
||||||
request = request.chain_outpoints(
|
request = request.chain_outpoints(
|
||||||
@@ -303,7 +307,7 @@ fn main() -> anyhow::Result<()> {
|
|||||||
// `EsploraExt::update_tx_graph_without_keychain`.
|
// `EsploraExt::update_tx_graph_without_keychain`.
|
||||||
let unconfirmed_txids = graph
|
let unconfirmed_txids = graph
|
||||||
.graph()
|
.graph()
|
||||||
.list_chain_txs(&*chain, local_tip.block_id())
|
.list_canonical_txs(&*chain, local_tip.block_id())
|
||||||
.filter(|canonical_tx| !canonical_tx.chain_position.is_confirmed())
|
.filter(|canonical_tx| !canonical_tx.chain_position.is_confirmed())
|
||||||
.map(|canonical_tx| canonical_tx.tx_node.txid)
|
.map(|canonical_tx| canonical_tx.tx_node.txid)
|
||||||
.collect::<Vec<Txid>>();
|
.collect::<Vec<Txid>>();
|
||||||
@@ -357,7 +361,6 @@ fn main() -> anyhow::Result<()> {
|
|||||||
|
|
||||||
// We persist the changes
|
// We persist the changes
|
||||||
let mut db = db.lock().unwrap();
|
let mut db = db.lock().unwrap();
|
||||||
db.stage((local_chain_changeset, indexed_tx_graph_changeset));
|
db.append_changeset(&(local_chain_changeset, indexed_tx_graph_changeset))?;
|
||||||
db.commit()?;
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -4,7 +4,6 @@ version = "0.2.0"
|
|||||||
edition = "2021"
|
edition = "2021"
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
bdk = { path = "../../crates/bdk" }
|
bdk_wallet = { path = "../../crates/wallet", features = ["file_store"] }
|
||||||
bdk_electrum = { path = "../../crates/electrum" }
|
bdk_electrum = { path = "../../crates/electrum" }
|
||||||
bdk_file_store = { path = "../../crates/file_store" }
|
|
||||||
anyhow = "1"
|
anyhow = "1"
|
||||||
|
|||||||
@@ -1,42 +1,54 @@
|
|||||||
|
use bdk_wallet::file_store::Store;
|
||||||
|
use bdk_wallet::Wallet;
|
||||||
|
use std::io::Write;
|
||||||
|
use std::str::FromStr;
|
||||||
|
|
||||||
|
use bdk_electrum::electrum_client;
|
||||||
|
use bdk_electrum::BdkElectrumClient;
|
||||||
|
use bdk_wallet::bitcoin::Network;
|
||||||
|
use bdk_wallet::bitcoin::{Address, Amount};
|
||||||
|
use bdk_wallet::chain::collections::HashSet;
|
||||||
|
use bdk_wallet::{KeychainKind, SignOptions};
|
||||||
|
|
||||||
const DB_MAGIC: &str = "bdk_wallet_electrum_example";
|
const DB_MAGIC: &str = "bdk_wallet_electrum_example";
|
||||||
const SEND_AMOUNT: Amount = Amount::from_sat(5000);
|
const SEND_AMOUNT: Amount = Amount::from_sat(5000);
|
||||||
const STOP_GAP: usize = 50;
|
const STOP_GAP: usize = 50;
|
||||||
const BATCH_SIZE: usize = 5;
|
const BATCH_SIZE: usize = 5;
|
||||||
|
|
||||||
use std::io::Write;
|
const NETWORK: Network = Network::Testnet;
|
||||||
use std::str::FromStr;
|
const EXTERNAL_DESC: &str = "wpkh(tprv8ZgxMBicQKsPdy6LMhUtFHAgpocR8GC6QmwMSFpZs7h6Eziw3SpThFfczTDh5rW2krkqffa11UpX3XkeTTB2FvzZKWXqPY54Y6Rq4AQ5R8L/84'/1'/0'/0/*)";
|
||||||
|
const INTERNAL_DESC: &str = "wpkh(tprv8ZgxMBicQKsPdy6LMhUtFHAgpocR8GC6QmwMSFpZs7h6Eziw3SpThFfczTDh5rW2krkqffa11UpX3XkeTTB2FvzZKWXqPY54Y6Rq4AQ5R8L/84'/1'/0'/1/*)";
|
||||||
use bdk::bitcoin::{Address, Amount};
|
const ELECTRUM_URL: &str = "ssl://electrum.blockstream.info:60002";
|
||||||
use bdk::chain::collections::HashSet;
|
|
||||||
use bdk::{bitcoin::Network, Wallet};
|
|
||||||
use bdk::{KeychainKind, SignOptions};
|
|
||||||
use bdk_electrum::{
|
|
||||||
electrum_client::{self, ElectrumApi},
|
|
||||||
ElectrumExt,
|
|
||||||
};
|
|
||||||
use bdk_file_store::Store;
|
|
||||||
|
|
||||||
fn main() -> Result<(), anyhow::Error> {
|
fn main() -> Result<(), anyhow::Error> {
|
||||||
let db_path = std::env::temp_dir().join("bdk-electrum-example");
|
let db_path = "bdk-electrum-example.db";
|
||||||
let db = Store::<bdk::wallet::ChangeSet>::open_or_create_new(DB_MAGIC.as_bytes(), db_path)?;
|
|
||||||
let external_descriptor = "wpkh(tprv8ZgxMBicQKsPdy6LMhUtFHAgpocR8GC6QmwMSFpZs7h6Eziw3SpThFfczTDh5rW2krkqffa11UpX3XkeTTB2FvzZKWXqPY54Y6Rq4AQ5R8L/84'/1'/0'/0/*)";
|
|
||||||
let internal_descriptor = "wpkh(tprv8ZgxMBicQKsPdy6LMhUtFHAgpocR8GC6QmwMSFpZs7h6Eziw3SpThFfczTDh5rW2krkqffa11UpX3XkeTTB2FvzZKWXqPY54Y6Rq4AQ5R8L/84'/1'/0'/1/*)";
|
|
||||||
|
|
||||||
let mut wallet = Wallet::new_or_load(
|
let mut db = Store::<bdk_wallet::ChangeSet>::open_or_create_new(DB_MAGIC.as_bytes(), db_path)?;
|
||||||
external_descriptor,
|
|
||||||
Some(internal_descriptor),
|
|
||||||
db,
|
|
||||||
Network::Testnet,
|
|
||||||
)?;
|
|
||||||
|
|
||||||
let address = wallet.next_unused_address(KeychainKind::External)?;
|
let wallet_opt = Wallet::load()
|
||||||
|
.descriptors(EXTERNAL_DESC, INTERNAL_DESC)
|
||||||
|
.network(NETWORK)
|
||||||
|
.load_wallet(&mut db)?;
|
||||||
|
let mut wallet = match wallet_opt {
|
||||||
|
Some(wallet) => wallet,
|
||||||
|
None => Wallet::create(EXTERNAL_DESC, INTERNAL_DESC)
|
||||||
|
.network(NETWORK)
|
||||||
|
.create_wallet(&mut db)?,
|
||||||
|
};
|
||||||
|
|
||||||
|
let address = wallet.next_unused_address(KeychainKind::External);
|
||||||
|
wallet.persist(&mut db)?;
|
||||||
println!("Generated Address: {}", address);
|
println!("Generated Address: {}", address);
|
||||||
|
|
||||||
let balance = wallet.get_balance();
|
let balance = wallet.balance();
|
||||||
println!("Wallet balance before syncing: {} sats", balance.total());
|
println!("Wallet balance before syncing: {} sats", balance.total());
|
||||||
|
|
||||||
print!("Syncing...");
|
print!("Syncing...");
|
||||||
let client = electrum_client::Client::new("ssl://electrum.blockstream.info:60002")?;
|
let client = BdkElectrumClient::new(electrum_client::Client::new(ELECTRUM_URL)?);
|
||||||
|
|
||||||
|
// Populate the electrum client's transaction cache so it doesn't redownload transaction we
|
||||||
|
// already have.
|
||||||
|
client.populate_tx_cache(wallet.tx_graph());
|
||||||
|
|
||||||
let request = wallet
|
let request = wallet
|
||||||
.start_full_scan()
|
.start_full_scan()
|
||||||
@@ -52,9 +64,7 @@ fn main() -> Result<(), anyhow::Error> {
|
|||||||
})
|
})
|
||||||
.inspect_spks_for_all_keychains(|_, _, _| std::io::stdout().flush().expect("must flush"));
|
.inspect_spks_for_all_keychains(|_, _, _| std::io::stdout().flush().expect("must flush"));
|
||||||
|
|
||||||
let mut update = client
|
let mut update = client.full_scan(request, STOP_GAP, BATCH_SIZE, false)?;
|
||||||
.full_scan(request, STOP_GAP, BATCH_SIZE, false)?
|
|
||||||
.with_confirmation_time_height_anchor(&client)?;
|
|
||||||
|
|
||||||
let now = std::time::UNIX_EPOCH.elapsed().unwrap().as_secs();
|
let now = std::time::UNIX_EPOCH.elapsed().unwrap().as_secs();
|
||||||
let _ = update.graph_update.update_last_seen_unconfirmed(now);
|
let _ = update.graph_update.update_last_seen_unconfirmed(now);
|
||||||
@@ -62,9 +72,9 @@ fn main() -> Result<(), anyhow::Error> {
|
|||||||
println!();
|
println!();
|
||||||
|
|
||||||
wallet.apply_update(update)?;
|
wallet.apply_update(update)?;
|
||||||
wallet.commit()?;
|
wallet.persist(&mut db)?;
|
||||||
|
|
||||||
let balance = wallet.get_balance();
|
let balance = wallet.balance();
|
||||||
println!("Wallet balance after syncing: {} sats", balance.total());
|
println!("Wallet balance after syncing: {} sats", balance.total());
|
||||||
|
|
||||||
if balance.total() < SEND_AMOUNT {
|
if balance.total() < SEND_AMOUNT {
|
||||||
@@ -89,7 +99,7 @@ fn main() -> Result<(), anyhow::Error> {
|
|||||||
|
|
||||||
let tx = psbt.extract_tx()?;
|
let tx = psbt.extract_tx()?;
|
||||||
client.transaction_broadcast(&tx)?;
|
client.transaction_broadcast(&tx)?;
|
||||||
println!("Tx broadcasted! Txid: {}", tx.txid());
|
println!("Tx broadcasted! Txid: {}", tx.compute_txid());
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -6,8 +6,7 @@ edition = "2021"
|
|||||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
bdk = { path = "../../crates/bdk" }
|
bdk_wallet = { path = "../../crates/wallet", features = ["rusqlite"] }
|
||||||
bdk_esplora = { path = "../../crates/esplora", features = ["async-https"] }
|
bdk_esplora = { path = "../../crates/esplora", features = ["async-https"] }
|
||||||
bdk_file_store = { path = "../../crates/file_store" }
|
|
||||||
tokio = { version = "1", features = ["rt", "rt-multi-thread", "macros"] }
|
tokio = { version = "1", features = ["rt", "rt-multi-thread", "macros"] }
|
||||||
anyhow = "1"
|
anyhow = "1"
|
||||||
|
|||||||
@@ -1,72 +1,58 @@
|
|||||||
use std::{collections::BTreeSet, io::Write, str::FromStr};
|
use std::{collections::BTreeSet, io::Write};
|
||||||
|
|
||||||
use bdk::{
|
use anyhow::Ok;
|
||||||
bitcoin::{Address, Amount, Network, Script},
|
use bdk_esplora::{esplora_client, EsploraAsyncExt};
|
||||||
|
use bdk_wallet::{
|
||||||
|
bitcoin::{Amount, Network},
|
||||||
|
rusqlite::Connection,
|
||||||
KeychainKind, SignOptions, Wallet,
|
KeychainKind, SignOptions, Wallet,
|
||||||
};
|
};
|
||||||
use bdk_esplora::{esplora_client, EsploraAsyncExt};
|
|
||||||
use bdk_file_store::Store;
|
|
||||||
|
|
||||||
const DB_MAGIC: &str = "bdk_wallet_esplora_async_example";
|
|
||||||
const SEND_AMOUNT: Amount = Amount::from_sat(5000);
|
const SEND_AMOUNT: Amount = Amount::from_sat(5000);
|
||||||
const STOP_GAP: usize = 50;
|
const STOP_GAP: usize = 5;
|
||||||
const PARALLEL_REQUESTS: usize = 5;
|
const PARALLEL_REQUESTS: usize = 5;
|
||||||
|
|
||||||
|
const DB_PATH: &str = "bdk-example-esplora-async.sqlite";
|
||||||
|
const NETWORK: Network = Network::Signet;
|
||||||
|
const EXTERNAL_DESC: &str = "wpkh(tprv8ZgxMBicQKsPdy6LMhUtFHAgpocR8GC6QmwMSFpZs7h6Eziw3SpThFfczTDh5rW2krkqffa11UpX3XkeTTB2FvzZKWXqPY54Y6Rq4AQ5R8L/84'/1'/0'/0/*)";
|
||||||
|
const INTERNAL_DESC: &str = "wpkh(tprv8ZgxMBicQKsPdy6LMhUtFHAgpocR8GC6QmwMSFpZs7h6Eziw3SpThFfczTDh5rW2krkqffa11UpX3XkeTTB2FvzZKWXqPY54Y6Rq4AQ5R8L/84'/1'/0'/1/*)";
|
||||||
|
const ESPLORA_URL: &str = "http://signet.bitcoindevkit.net";
|
||||||
|
|
||||||
#[tokio::main]
|
#[tokio::main]
|
||||||
async fn main() -> Result<(), anyhow::Error> {
|
async fn main() -> Result<(), anyhow::Error> {
|
||||||
let db_path = std::env::temp_dir().join("bdk-esplora-async-example");
|
let mut conn = Connection::open(DB_PATH)?;
|
||||||
let db = Store::<bdk::wallet::ChangeSet>::open_or_create_new(DB_MAGIC.as_bytes(), db_path)?;
|
|
||||||
let external_descriptor = "wpkh(tprv8ZgxMBicQKsPdy6LMhUtFHAgpocR8GC6QmwMSFpZs7h6Eziw3SpThFfczTDh5rW2krkqffa11UpX3XkeTTB2FvzZKWXqPY54Y6Rq4AQ5R8L/84'/1'/0'/0/*)";
|
|
||||||
let internal_descriptor = "wpkh(tprv8ZgxMBicQKsPdy6LMhUtFHAgpocR8GC6QmwMSFpZs7h6Eziw3SpThFfczTDh5rW2krkqffa11UpX3XkeTTB2FvzZKWXqPY54Y6Rq4AQ5R8L/84'/1'/0'/1/*)";
|
|
||||||
|
|
||||||
let mut wallet = Wallet::new_or_load(
|
let wallet_opt = Wallet::load()
|
||||||
external_descriptor,
|
.descriptors(EXTERNAL_DESC, INTERNAL_DESC)
|
||||||
Some(internal_descriptor),
|
.network(NETWORK)
|
||||||
db,
|
.load_wallet(&mut conn)?;
|
||||||
Network::Testnet,
|
let mut wallet = match wallet_opt {
|
||||||
)?;
|
Some(wallet) => wallet,
|
||||||
|
None => Wallet::create(EXTERNAL_DESC, INTERNAL_DESC)
|
||||||
|
.network(NETWORK)
|
||||||
|
.create_wallet(&mut conn)?,
|
||||||
|
};
|
||||||
|
|
||||||
let address = wallet.next_unused_address(KeychainKind::External)?;
|
let address = wallet.next_unused_address(KeychainKind::External);
|
||||||
println!("Generated Address: {}", address);
|
wallet.persist(&mut conn)?;
|
||||||
|
println!("Next unused address: ({}) {}", address.index, address);
|
||||||
|
|
||||||
let balance = wallet.get_balance();
|
let balance = wallet.balance();
|
||||||
println!("Wallet balance before syncing: {} sats", balance.total());
|
println!("Wallet balance before syncing: {} sats", balance.total());
|
||||||
|
|
||||||
print!("Syncing...");
|
print!("Syncing...");
|
||||||
let client =
|
let client = esplora_client::Builder::new(ESPLORA_URL).build_async()?;
|
||||||
esplora_client::Builder::new("https://blockstream.info/testnet/api").build_async()?;
|
|
||||||
|
|
||||||
fn generate_inspect(kind: KeychainKind) -> impl FnMut(u32, &Script) + Send + Sync + 'static {
|
let request = wallet.start_full_scan().inspect_spks_for_all_keychains({
|
||||||
let mut once = Some(());
|
let mut once = BTreeSet::<KeychainKind>::new();
|
||||||
let mut stdout = std::io::stdout();
|
move |keychain, spk_i, _| {
|
||||||
move |spk_i, _| {
|
if once.insert(keychain) {
|
||||||
match once.take() {
|
print!("\nScanning keychain [{:?}] ", keychain);
|
||||||
Some(_) => print!("\nScanning keychain [{:?}]", kind),
|
|
||||||
None => print!(" {:<3}", spk_i),
|
|
||||||
};
|
|
||||||
stdout.flush().expect("must flush");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
let request = wallet
|
|
||||||
.start_full_scan()
|
|
||||||
.inspect_spks_for_all_keychains({
|
|
||||||
let mut once = BTreeSet::<KeychainKind>::new();
|
|
||||||
move |keychain, spk_i, _| {
|
|
||||||
match once.insert(keychain) {
|
|
||||||
true => print!("\nScanning keychain [{:?}]", keychain),
|
|
||||||
false => print!(" {:<3}", spk_i),
|
|
||||||
}
|
|
||||||
std::io::stdout().flush().expect("must flush")
|
|
||||||
}
|
}
|
||||||
})
|
print!(" {:<3}", spk_i);
|
||||||
.inspect_spks_for_keychain(
|
std::io::stdout().flush().expect("must flush")
|
||||||
KeychainKind::External,
|
}
|
||||||
generate_inspect(KeychainKind::External),
|
});
|
||||||
)
|
|
||||||
.inspect_spks_for_keychain(
|
|
||||||
KeychainKind::Internal,
|
|
||||||
generate_inspect(KeychainKind::Internal),
|
|
||||||
);
|
|
||||||
|
|
||||||
let mut update = client
|
let mut update = client
|
||||||
.full_scan(request, STOP_GAP, PARALLEL_REQUESTS)
|
.full_scan(request, STOP_GAP, PARALLEL_REQUESTS)
|
||||||
@@ -75,10 +61,10 @@ async fn main() -> Result<(), anyhow::Error> {
|
|||||||
let _ = update.graph_update.update_last_seen_unconfirmed(now);
|
let _ = update.graph_update.update_last_seen_unconfirmed(now);
|
||||||
|
|
||||||
wallet.apply_update(update)?;
|
wallet.apply_update(update)?;
|
||||||
wallet.commit()?;
|
wallet.persist(&mut conn)?;
|
||||||
println!();
|
println!();
|
||||||
|
|
||||||
let balance = wallet.get_balance();
|
let balance = wallet.balance();
|
||||||
println!("Wallet balance after syncing: {} sats", balance.total());
|
println!("Wallet balance after syncing: {} sats", balance.total());
|
||||||
|
|
||||||
if balance.total() < SEND_AMOUNT {
|
if balance.total() < SEND_AMOUNT {
|
||||||
@@ -89,12 +75,9 @@ async fn main() -> Result<(), anyhow::Error> {
|
|||||||
std::process::exit(0);
|
std::process::exit(0);
|
||||||
}
|
}
|
||||||
|
|
||||||
let faucet_address = Address::from_str("mkHS9ne12qx9pS9VojpwU5xtRd4T7X7ZUt")?
|
|
||||||
.require_network(Network::Testnet)?;
|
|
||||||
|
|
||||||
let mut tx_builder = wallet.build_tx();
|
let mut tx_builder = wallet.build_tx();
|
||||||
tx_builder
|
tx_builder
|
||||||
.add_recipient(faucet_address.script_pubkey(), SEND_AMOUNT)
|
.add_recipient(address.script_pubkey(), SEND_AMOUNT)
|
||||||
.enable_rbf();
|
.enable_rbf();
|
||||||
|
|
||||||
let mut psbt = tx_builder.finish()?;
|
let mut psbt = tx_builder.finish()?;
|
||||||
@@ -103,7 +86,7 @@ async fn main() -> Result<(), anyhow::Error> {
|
|||||||
|
|
||||||
let tx = psbt.extract_tx()?;
|
let tx = psbt.extract_tx()?;
|
||||||
client.broadcast(&tx).await?;
|
client.broadcast(&tx).await?;
|
||||||
println!("Tx broadcasted! Txid: {}", tx.txid());
|
println!("Tx broadcasted! Txid: {}", tx.compute_txid());
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -7,7 +7,6 @@ publish = false
|
|||||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
bdk = { path = "../../crates/bdk" }
|
bdk_wallet = { path = "../../crates/wallet", features = ["file_store"] }
|
||||||
bdk_esplora = { path = "../../crates/esplora", features = ["blocking"] }
|
bdk_esplora = { path = "../../crates/esplora", features = ["blocking"] }
|
||||||
bdk_file_store = { path = "../../crates/file_store" }
|
|
||||||
anyhow = "1"
|
anyhow = "1"
|
||||||
|
|||||||
@@ -1,47 +1,57 @@
|
|||||||
const DB_MAGIC: &str = "bdk_wallet_esplora_example";
|
use std::{collections::BTreeSet, io::Write};
|
||||||
const SEND_AMOUNT: Amount = Amount::from_sat(1000);
|
|
||||||
const STOP_GAP: usize = 5;
|
|
||||||
const PARALLEL_REQUESTS: usize = 1;
|
|
||||||
|
|
||||||
use std::{collections::BTreeSet, io::Write, str::FromStr};
|
use bdk_esplora::{esplora_client, EsploraExt};
|
||||||
|
use bdk_wallet::{
|
||||||
use bdk::{
|
bitcoin::{Amount, Network},
|
||||||
bitcoin::{Address, Amount, Network},
|
file_store::Store,
|
||||||
KeychainKind, SignOptions, Wallet,
|
KeychainKind, SignOptions, Wallet,
|
||||||
};
|
};
|
||||||
use bdk_esplora::{esplora_client, EsploraExt};
|
|
||||||
use bdk_file_store::Store;
|
const DB_MAGIC: &str = "bdk_wallet_esplora_example";
|
||||||
|
const DB_PATH: &str = "bdk-example-esplora-blocking.db";
|
||||||
|
const SEND_AMOUNT: Amount = Amount::from_sat(5000);
|
||||||
|
const STOP_GAP: usize = 5;
|
||||||
|
const PARALLEL_REQUESTS: usize = 5;
|
||||||
|
|
||||||
|
const NETWORK: Network = Network::Signet;
|
||||||
|
const EXTERNAL_DESC: &str = "wpkh(tprv8ZgxMBicQKsPdy6LMhUtFHAgpocR8GC6QmwMSFpZs7h6Eziw3SpThFfczTDh5rW2krkqffa11UpX3XkeTTB2FvzZKWXqPY54Y6Rq4AQ5R8L/84'/1'/0'/0/*)";
|
||||||
|
const INTERNAL_DESC: &str = "wpkh(tprv8ZgxMBicQKsPdy6LMhUtFHAgpocR8GC6QmwMSFpZs7h6Eziw3SpThFfczTDh5rW2krkqffa11UpX3XkeTTB2FvzZKWXqPY54Y6Rq4AQ5R8L/84'/1'/0'/1/*)";
|
||||||
|
const ESPLORA_URL: &str = "http://signet.bitcoindevkit.net";
|
||||||
|
|
||||||
fn main() -> Result<(), anyhow::Error> {
|
fn main() -> Result<(), anyhow::Error> {
|
||||||
let db_path = std::env::temp_dir().join("bdk-esplora-example");
|
let mut db = Store::<bdk_wallet::ChangeSet>::open_or_create_new(DB_MAGIC.as_bytes(), DB_PATH)?;
|
||||||
let db = Store::<bdk::wallet::ChangeSet>::open_or_create_new(DB_MAGIC.as_bytes(), db_path)?;
|
|
||||||
let external_descriptor = "wpkh(tprv8ZgxMBicQKsPdy6LMhUtFHAgpocR8GC6QmwMSFpZs7h6Eziw3SpThFfczTDh5rW2krkqffa11UpX3XkeTTB2FvzZKWXqPY54Y6Rq4AQ5R8L/84'/1'/0'/0/*)";
|
|
||||||
let internal_descriptor = "wpkh(tprv8ZgxMBicQKsPdy6LMhUtFHAgpocR8GC6QmwMSFpZs7h6Eziw3SpThFfczTDh5rW2krkqffa11UpX3XkeTTB2FvzZKWXqPY54Y6Rq4AQ5R8L/84'/1'/0'/1/*)";
|
|
||||||
|
|
||||||
let mut wallet = Wallet::new_or_load(
|
let wallet_opt = Wallet::load()
|
||||||
external_descriptor,
|
.descriptors(EXTERNAL_DESC, INTERNAL_DESC)
|
||||||
Some(internal_descriptor),
|
.network(NETWORK)
|
||||||
db,
|
.load_wallet(&mut db)?;
|
||||||
Network::Testnet,
|
let mut wallet = match wallet_opt {
|
||||||
)?;
|
Some(wallet) => wallet,
|
||||||
|
None => Wallet::create(EXTERNAL_DESC, INTERNAL_DESC)
|
||||||
|
.network(NETWORK)
|
||||||
|
.create_wallet(&mut db)?,
|
||||||
|
};
|
||||||
|
|
||||||
let address = wallet.next_unused_address(KeychainKind::External)?;
|
let address = wallet.next_unused_address(KeychainKind::External);
|
||||||
println!("Generated Address: {}", address);
|
wallet.persist(&mut db)?;
|
||||||
|
println!(
|
||||||
|
"Next unused address: ({}) {}",
|
||||||
|
address.index, address.address
|
||||||
|
);
|
||||||
|
|
||||||
let balance = wallet.get_balance();
|
let balance = wallet.balance();
|
||||||
println!("Wallet balance before syncing: {} sats", balance.total());
|
println!("Wallet balance before syncing: {} sats", balance.total());
|
||||||
|
|
||||||
print!("Syncing...");
|
print!("Syncing...");
|
||||||
let client =
|
let client = esplora_client::Builder::new(ESPLORA_URL).build_blocking();
|
||||||
esplora_client::Builder::new("https://blockstream.info/testnet/api").build_blocking();
|
|
||||||
|
|
||||||
let request = wallet.start_full_scan().inspect_spks_for_all_keychains({
|
let request = wallet.start_full_scan().inspect_spks_for_all_keychains({
|
||||||
let mut once = BTreeSet::<KeychainKind>::new();
|
let mut once = BTreeSet::<KeychainKind>::new();
|
||||||
move |keychain, spk_i, _| {
|
move |keychain, spk_i, _| {
|
||||||
match once.insert(keychain) {
|
if once.insert(keychain) {
|
||||||
true => print!("\nScanning keychain [{:?}]", keychain),
|
print!("\nScanning keychain [{:?}] ", keychain);
|
||||||
false => print!(" {:<3}", spk_i),
|
}
|
||||||
};
|
print!(" {:<3}", spk_i);
|
||||||
std::io::stdout().flush().expect("must flush")
|
std::io::stdout().flush().expect("must flush")
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
@@ -51,10 +61,12 @@ fn main() -> Result<(), anyhow::Error> {
|
|||||||
let _ = update.graph_update.update_last_seen_unconfirmed(now);
|
let _ = update.graph_update.update_last_seen_unconfirmed(now);
|
||||||
|
|
||||||
wallet.apply_update(update)?;
|
wallet.apply_update(update)?;
|
||||||
wallet.commit()?;
|
if let Some(changeset) = wallet.take_staged() {
|
||||||
|
db.append_changeset(&changeset)?;
|
||||||
|
}
|
||||||
println!();
|
println!();
|
||||||
|
|
||||||
let balance = wallet.get_balance();
|
let balance = wallet.balance();
|
||||||
println!("Wallet balance after syncing: {} sats", balance.total());
|
println!("Wallet balance after syncing: {} sats", balance.total());
|
||||||
|
|
||||||
if balance.total() < SEND_AMOUNT {
|
if balance.total() < SEND_AMOUNT {
|
||||||
@@ -65,12 +77,9 @@ fn main() -> Result<(), anyhow::Error> {
|
|||||||
std::process::exit(0);
|
std::process::exit(0);
|
||||||
}
|
}
|
||||||
|
|
||||||
let faucet_address = Address::from_str("mkHS9ne12qx9pS9VojpwU5xtRd4T7X7ZUt")?
|
|
||||||
.require_network(Network::Testnet)?;
|
|
||||||
|
|
||||||
let mut tx_builder = wallet.build_tx();
|
let mut tx_builder = wallet.build_tx();
|
||||||
tx_builder
|
tx_builder
|
||||||
.add_recipient(faucet_address.script_pubkey(), SEND_AMOUNT)
|
.add_recipient(address.script_pubkey(), SEND_AMOUNT)
|
||||||
.enable_rbf();
|
.enable_rbf();
|
||||||
|
|
||||||
let mut psbt = tx_builder.finish()?;
|
let mut psbt = tx_builder.finish()?;
|
||||||
@@ -79,7 +88,7 @@ fn main() -> Result<(), anyhow::Error> {
|
|||||||
|
|
||||||
let tx = psbt.extract_tx()?;
|
let tx = psbt.extract_tx()?;
|
||||||
client.broadcast(&tx)?;
|
client.broadcast(&tx)?;
|
||||||
println!("Tx broadcasted! Txid: {}", tx.txid());
|
println!("Tx broadcasted! Txid: {}", tx.compute_txid());
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -6,8 +6,7 @@ edition = "2021"
|
|||||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
bdk = { path = "../../crates/bdk" }
|
bdk_wallet = { path = "../../crates/wallet", features = ["file_store"] }
|
||||||
bdk_file_store = { path = "../../crates/file_store" }
|
|
||||||
bdk_bitcoind_rpc = { path = "../../crates/bitcoind_rpc" }
|
bdk_bitcoind_rpc = { path = "../../crates/bitcoind_rpc" }
|
||||||
|
|
||||||
anyhow = "1"
|
anyhow = "1"
|
||||||
|
|||||||
@@ -4,7 +4,7 @@
|
|||||||
$ cargo run --bin wallet_rpc -- --help
|
$ cargo run --bin wallet_rpc -- --help
|
||||||
|
|
||||||
wallet_rpc 0.1.0
|
wallet_rpc 0.1.0
|
||||||
Bitcoind RPC example using `bdk::Wallet`
|
Bitcoind RPC example using `bdk_wallet::Wallet`
|
||||||
|
|
||||||
USAGE:
|
USAGE:
|
||||||
wallet_rpc [OPTIONS] <DESCRIPTOR> [CHANGE_DESCRIPTOR]
|
wallet_rpc [OPTIONS] <DESCRIPTOR> [CHANGE_DESCRIPTOR]
|
||||||
|
|||||||
@@ -1,18 +1,18 @@
|
|||||||
use bdk::{
|
|
||||||
bitcoin::{Block, Network, Transaction},
|
|
||||||
wallet::Wallet,
|
|
||||||
};
|
|
||||||
use bdk_bitcoind_rpc::{
|
use bdk_bitcoind_rpc::{
|
||||||
bitcoincore_rpc::{Auth, Client, RpcApi},
|
bitcoincore_rpc::{Auth, Client, RpcApi},
|
||||||
Emitter,
|
Emitter,
|
||||||
};
|
};
|
||||||
use bdk_file_store::Store;
|
use bdk_wallet::{
|
||||||
|
bitcoin::{Block, Network, Transaction},
|
||||||
|
file_store::Store,
|
||||||
|
Wallet,
|
||||||
|
};
|
||||||
use clap::{self, Parser};
|
use clap::{self, Parser};
|
||||||
use std::{path::PathBuf, sync::mpsc::sync_channel, thread::spawn, time::Instant};
|
use std::{path::PathBuf, sync::mpsc::sync_channel, thread::spawn, time::Instant};
|
||||||
|
|
||||||
const DB_MAGIC: &str = "bdk-rpc-wallet-example";
|
const DB_MAGIC: &str = "bdk-rpc-wallet-example";
|
||||||
|
|
||||||
/// Bitcoind RPC example using `bdk::Wallet`.
|
/// Bitcoind RPC example using `bdk_wallet::Wallet`.
|
||||||
///
|
///
|
||||||
/// This syncs the chain block-by-block and prints the current balance, transaction count and UTXO
|
/// This syncs the chain block-by-block and prints the current balance, transaction count and UTXO
|
||||||
/// count.
|
/// count.
|
||||||
@@ -25,7 +25,7 @@ pub struct Args {
|
|||||||
pub descriptor: String,
|
pub descriptor: String,
|
||||||
/// Wallet change descriptor
|
/// Wallet change descriptor
|
||||||
#[clap(env = "CHANGE_DESCRIPTOR")]
|
#[clap(env = "CHANGE_DESCRIPTOR")]
|
||||||
pub change_descriptor: Option<String>,
|
pub change_descriptor: String,
|
||||||
/// Earliest block height to start sync from
|
/// Earliest block height to start sync from
|
||||||
#[clap(env = "START_HEIGHT", long, default_value = "481824")]
|
#[clap(env = "START_HEIGHT", long, default_value = "481824")]
|
||||||
pub start_height: u32,
|
pub start_height: u32,
|
||||||
@@ -86,18 +86,24 @@ fn main() -> anyhow::Result<()> {
|
|||||||
);
|
);
|
||||||
|
|
||||||
let start_load_wallet = Instant::now();
|
let start_load_wallet = Instant::now();
|
||||||
let mut wallet = Wallet::new_or_load(
|
let mut db =
|
||||||
&args.descriptor,
|
Store::<bdk_wallet::ChangeSet>::open_or_create_new(DB_MAGIC.as_bytes(), args.db_path)?;
|
||||||
args.change_descriptor.as_ref(),
|
let wallet_opt = Wallet::load()
|
||||||
Store::<bdk::wallet::ChangeSet>::open_or_create_new(DB_MAGIC.as_bytes(), args.db_path)?,
|
.descriptors(args.descriptor.clone(), args.change_descriptor.clone())
|
||||||
args.network,
|
.network(args.network)
|
||||||
)?;
|
.load_wallet(&mut db)?;
|
||||||
|
let mut wallet = match wallet_opt {
|
||||||
|
Some(wallet) => wallet,
|
||||||
|
None => Wallet::create(args.descriptor, args.change_descriptor)
|
||||||
|
.network(args.network)
|
||||||
|
.create_wallet(&mut db)?,
|
||||||
|
};
|
||||||
println!(
|
println!(
|
||||||
"Loaded wallet in {}s",
|
"Loaded wallet in {}s",
|
||||||
start_load_wallet.elapsed().as_secs_f32()
|
start_load_wallet.elapsed().as_secs_f32()
|
||||||
);
|
);
|
||||||
|
|
||||||
let balance = wallet.get_balance();
|
let balance = wallet.balance();
|
||||||
println!("Wallet balance before syncing: {} sats", balance.total());
|
println!("Wallet balance before syncing: {} sats", balance.total());
|
||||||
|
|
||||||
let wallet_tip = wallet.latest_checkpoint();
|
let wallet_tip = wallet.latest_checkpoint();
|
||||||
@@ -140,7 +146,7 @@ fn main() -> anyhow::Result<()> {
|
|||||||
let connected_to = block_emission.connected_to();
|
let connected_to = block_emission.connected_to();
|
||||||
let start_apply_block = Instant::now();
|
let start_apply_block = Instant::now();
|
||||||
wallet.apply_block_connected_to(&block_emission.block, height, connected_to)?;
|
wallet.apply_block_connected_to(&block_emission.block, height, connected_to)?;
|
||||||
wallet.commit()?;
|
wallet.persist(&mut db)?;
|
||||||
let elapsed = start_apply_block.elapsed().as_secs_f32();
|
let elapsed = start_apply_block.elapsed().as_secs_f32();
|
||||||
println!(
|
println!(
|
||||||
"Applied block {} at height {} in {}s",
|
"Applied block {} at height {} in {}s",
|
||||||
@@ -150,7 +156,7 @@ fn main() -> anyhow::Result<()> {
|
|||||||
Emission::Mempool(mempool_emission) => {
|
Emission::Mempool(mempool_emission) => {
|
||||||
let start_apply_mempool = Instant::now();
|
let start_apply_mempool = Instant::now();
|
||||||
wallet.apply_unconfirmed_txs(mempool_emission.iter().map(|(tx, time)| (tx, *time)));
|
wallet.apply_unconfirmed_txs(mempool_emission.iter().map(|(tx, time)| (tx, *time)));
|
||||||
wallet.commit()?;
|
wallet.persist(&mut db)?;
|
||||||
println!(
|
println!(
|
||||||
"Applied unconfirmed transactions in {}s",
|
"Applied unconfirmed transactions in {}s",
|
||||||
start_apply_mempool.elapsed().as_secs_f32()
|
start_apply_mempool.elapsed().as_secs_f32()
|
||||||
@@ -160,7 +166,7 @@ fn main() -> anyhow::Result<()> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
let wallet_tip_end = wallet.latest_checkpoint();
|
let wallet_tip_end = wallet.latest_checkpoint();
|
||||||
let balance = wallet.get_balance();
|
let balance = wallet.balance();
|
||||||
println!(
|
println!(
|
||||||
"Synced {} blocks in {}s",
|
"Synced {} blocks in {}s",
|
||||||
blocks_received,
|
blocks_received,
|
||||||
|
|||||||
@@ -305,341 +305,341 @@ where
|
|||||||
}?
|
}?
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(all(test, feature = "miniscript"))]
|
// #[cfg(all(test, feature = "miniscript"))]
|
||||||
mod test {
|
// mod test {
|
||||||
use bitcoin::secp256k1::Secp256k1;
|
// use bitcoin::secp256k1::Secp256k1;
|
||||||
|
//
|
||||||
use crate::coin_select::{evaluate_cs::evaluate, ExcessStrategyKind};
|
// use crate::coin_select::{evaluate_cs::evaluate, ExcessStrategyKind};
|
||||||
|
//
|
||||||
use super::{
|
// use super::{
|
||||||
coin_select_bnb,
|
// coin_select_bnb,
|
||||||
evaluate_cs::{Evaluation, EvaluationError},
|
// evaluate_cs::{Evaluation, EvaluationError},
|
||||||
tester::Tester,
|
// tester::Tester,
|
||||||
CoinSelector, CoinSelectorOpt, Vec, WeightedValue,
|
// CoinSelector, CoinSelectorOpt, Vec, WeightedValue,
|
||||||
};
|
// };
|
||||||
|
//
|
||||||
fn tester() -> Tester {
|
// fn tester() -> Tester {
|
||||||
const DESC_STR: &str = "tr(xprv9uBuvtdjghkz8D1qzsSXS9Vs64mqrUnXqzNccj2xcvnCHPpXKYE1U2Gbh9CDHk8UPyF2VuXpVkDA7fk5ZP4Hd9KnhUmTscKmhee9Dp5sBMK)";
|
// const DESC_STR: &str = "tr(xprv9uBuvtdjghkz8D1qzsSXS9Vs64mqrUnXqzNccj2xcvnCHPpXKYE1U2Gbh9CDHk8UPyF2VuXpVkDA7fk5ZP4Hd9KnhUmTscKmhee9Dp5sBMK)";
|
||||||
Tester::new(&Secp256k1::default(), DESC_STR)
|
// Tester::new(&Secp256k1::default(), DESC_STR)
|
||||||
}
|
// }
|
||||||
|
//
|
||||||
fn evaluate_bnb(
|
// fn evaluate_bnb(
|
||||||
initial_selector: CoinSelector,
|
// initial_selector: CoinSelector,
|
||||||
max_tries: usize,
|
// max_tries: usize,
|
||||||
) -> Result<Evaluation, EvaluationError> {
|
// ) -> Result<Evaluation, EvaluationError> {
|
||||||
evaluate(initial_selector, |cs| {
|
// evaluate(initial_selector, |cs| {
|
||||||
coin_select_bnb(max_tries, cs.clone()).map_or(false, |new_cs| {
|
// coin_select_bnb(max_tries, cs.clone()).map_or(false, |new_cs| {
|
||||||
*cs = new_cs;
|
// *cs = new_cs;
|
||||||
true
|
// true
|
||||||
})
|
// })
|
||||||
})
|
// })
|
||||||
}
|
// }
|
||||||
|
//
|
||||||
#[test]
|
// #[test]
|
||||||
fn not_enough_coins() {
|
// fn not_enough_coins() {
|
||||||
let t = tester();
|
// let t = tester();
|
||||||
let candidates: Vec<WeightedValue> = vec![
|
// let candidates: Vec<WeightedValue> = vec![
|
||||||
t.gen_candidate(0, 100_000).into(),
|
// t.gen_candidate(0, 100_000).into(),
|
||||||
t.gen_candidate(1, 100_000).into(),
|
// t.gen_candidate(1, 100_000).into(),
|
||||||
];
|
// ];
|
||||||
let opts = t.gen_opts(200_000);
|
// let opts = t.gen_opts(200_000);
|
||||||
let selector = CoinSelector::new(&candidates, &opts);
|
// let selector = CoinSelector::new(&candidates, &opts);
|
||||||
assert!(!coin_select_bnb(10_000, selector).is_some());
|
// assert!(!coin_select_bnb(10_000, selector).is_some());
|
||||||
}
|
// }
|
||||||
|
//
|
||||||
#[test]
|
// #[test]
|
||||||
fn exactly_enough_coins_preselected() {
|
// fn exactly_enough_coins_preselected() {
|
||||||
let t = tester();
|
// let t = tester();
|
||||||
let candidates: Vec<WeightedValue> = vec![
|
// let candidates: Vec<WeightedValue> = vec![
|
||||||
t.gen_candidate(0, 100_000).into(), // to preselect
|
// t.gen_candidate(0, 100_000).into(), // to preselect
|
||||||
t.gen_candidate(1, 100_000).into(), // to preselect
|
// t.gen_candidate(1, 100_000).into(), // to preselect
|
||||||
t.gen_candidate(2, 100_000).into(),
|
// t.gen_candidate(2, 100_000).into(),
|
||||||
];
|
// ];
|
||||||
let opts = CoinSelectorOpt {
|
// let opts = CoinSelectorOpt {
|
||||||
target_feerate: 0.0,
|
// target_feerate: 0.0,
|
||||||
..t.gen_opts(200_000)
|
// ..t.gen_opts(200_000)
|
||||||
};
|
// };
|
||||||
let selector = {
|
// let selector = {
|
||||||
let mut selector = CoinSelector::new(&candidates, &opts);
|
// let mut selector = CoinSelector::new(&candidates, &opts);
|
||||||
selector.select(0); // preselect
|
// selector.select(0); // preselect
|
||||||
selector.select(1); // preselect
|
// selector.select(1); // preselect
|
||||||
selector
|
// selector
|
||||||
};
|
// };
|
||||||
|
//
|
||||||
let evaluation = evaluate_bnb(selector, 10_000).expect("eval failed");
|
// let evaluation = evaluate_bnb(selector, 10_000).expect("eval failed");
|
||||||
println!("{}", evaluation);
|
// println!("{}", evaluation);
|
||||||
assert_eq!(evaluation.solution.selected, (0..=1).collect());
|
// assert_eq!(evaluation.solution.selected, (0..=1).collect());
|
||||||
assert_eq!(evaluation.solution.excess_strategies.len(), 1);
|
// assert_eq!(evaluation.solution.excess_strategies.len(), 1);
|
||||||
assert_eq!(
|
// assert_eq!(
|
||||||
evaluation.feerate_offset(ExcessStrategyKind::ToFee).floor(),
|
// evaluation.feerate_offset(ExcessStrategyKind::ToFee).floor(),
|
||||||
0.0
|
// 0.0
|
||||||
);
|
// );
|
||||||
}
|
// }
|
||||||
|
//
|
||||||
/// `cost_of_change` acts as the upper-bound in Bnb; we check whether these boundaries are
|
// /// `cost_of_change` acts as the upper-bound in Bnb; we check whether these boundaries are
|
||||||
/// enforced in code
|
// /// enforced in code
|
||||||
#[test]
|
// #[test]
|
||||||
fn cost_of_change() {
|
// fn cost_of_change() {
|
||||||
let t = tester();
|
// let t = tester();
|
||||||
let candidates: Vec<WeightedValue> = vec![
|
// let candidates: Vec<WeightedValue> = vec![
|
||||||
t.gen_candidate(0, 200_000).into(),
|
// t.gen_candidate(0, 200_000).into(),
|
||||||
t.gen_candidate(1, 200_000).into(),
|
// t.gen_candidate(1, 200_000).into(),
|
||||||
t.gen_candidate(2, 200_000).into(),
|
// t.gen_candidate(2, 200_000).into(),
|
||||||
];
|
// ];
|
||||||
|
//
|
||||||
// lowest and highest possible `recipient_value` opts for derived `drain_waste`, assuming
|
// // lowest and highest possible `recipient_value` opts for derived `drain_waste`, assuming
|
||||||
// that we want 2 candidates selected
|
// // that we want 2 candidates selected
|
||||||
let (lowest_opts, highest_opts) = {
|
// let (lowest_opts, highest_opts) = {
|
||||||
let opts = t.gen_opts(0);
|
// let opts = t.gen_opts(0);
|
||||||
|
//
|
||||||
let fee_from_inputs =
|
// let fee_from_inputs =
|
||||||
(candidates[0].weight as f32 * opts.target_feerate).ceil() as u64 * 2;
|
// (candidates[0].weight as f32 * opts.target_feerate).ceil() as u64 * 2;
|
||||||
let fee_from_template =
|
// let fee_from_template =
|
||||||
((opts.base_weight + 2) as f32 * opts.target_feerate).ceil() as u64;
|
// ((opts.base_weight + 2) as f32 * opts.target_feerate).ceil() as u64;
|
||||||
|
//
|
||||||
let lowest_opts = CoinSelectorOpt {
|
// let lowest_opts = CoinSelectorOpt {
|
||||||
target_value: Some(
|
// target_value: Some(
|
||||||
400_000 - fee_from_inputs - fee_from_template - opts.drain_waste() as u64,
|
// 400_000 - fee_from_inputs - fee_from_template - opts.drain_waste() as u64,
|
||||||
),
|
// ),
|
||||||
..opts
|
// ..opts
|
||||||
};
|
// };
|
||||||
|
//
|
||||||
let highest_opts = CoinSelectorOpt {
|
// let highest_opts = CoinSelectorOpt {
|
||||||
target_value: Some(400_000 - fee_from_inputs - fee_from_template),
|
// target_value: Some(400_000 - fee_from_inputs - fee_from_template),
|
||||||
..opts
|
// ..opts
|
||||||
};
|
// };
|
||||||
|
//
|
||||||
(lowest_opts, highest_opts)
|
// (lowest_opts, highest_opts)
|
||||||
};
|
// };
|
||||||
|
//
|
||||||
// test lowest possible target we can select
|
// // test lowest possible target we can select
|
||||||
let lowest_eval = evaluate_bnb(CoinSelector::new(&candidates, &lowest_opts), 10_000);
|
// let lowest_eval = evaluate_bnb(CoinSelector::new(&candidates, &lowest_opts), 10_000);
|
||||||
assert!(lowest_eval.is_ok());
|
// assert!(lowest_eval.is_ok());
|
||||||
let lowest_eval = lowest_eval.unwrap();
|
// let lowest_eval = lowest_eval.unwrap();
|
||||||
println!("LB {}", lowest_eval);
|
// println!("LB {}", lowest_eval);
|
||||||
assert_eq!(lowest_eval.solution.selected.len(), 2);
|
// assert_eq!(lowest_eval.solution.selected.len(), 2);
|
||||||
assert_eq!(lowest_eval.solution.excess_strategies.len(), 1);
|
// assert_eq!(lowest_eval.solution.excess_strategies.len(), 1);
|
||||||
assert_eq!(
|
// assert_eq!(
|
||||||
lowest_eval
|
// lowest_eval
|
||||||
.feerate_offset(ExcessStrategyKind::ToFee)
|
// .feerate_offset(ExcessStrategyKind::ToFee)
|
||||||
.floor(),
|
// .floor(),
|
||||||
0.0
|
// 0.0
|
||||||
);
|
// );
|
||||||
|
//
|
||||||
// test the highest possible target we can select
|
// // test the highest possible target we can select
|
||||||
let highest_eval = evaluate_bnb(CoinSelector::new(&candidates, &highest_opts), 10_000);
|
// let highest_eval = evaluate_bnb(CoinSelector::new(&candidates, &highest_opts), 10_000);
|
||||||
assert!(highest_eval.is_ok());
|
// assert!(highest_eval.is_ok());
|
||||||
let highest_eval = highest_eval.unwrap();
|
// let highest_eval = highest_eval.unwrap();
|
||||||
println!("UB {}", highest_eval);
|
// println!("UB {}", highest_eval);
|
||||||
assert_eq!(highest_eval.solution.selected.len(), 2);
|
// assert_eq!(highest_eval.solution.selected.len(), 2);
|
||||||
assert_eq!(highest_eval.solution.excess_strategies.len(), 1);
|
// assert_eq!(highest_eval.solution.excess_strategies.len(), 1);
|
||||||
assert_eq!(
|
// assert_eq!(
|
||||||
highest_eval
|
// highest_eval
|
||||||
.feerate_offset(ExcessStrategyKind::ToFee)
|
// .feerate_offset(ExcessStrategyKind::ToFee)
|
||||||
.floor(),
|
// .floor(),
|
||||||
0.0
|
// 0.0
|
||||||
);
|
// );
|
||||||
|
//
|
||||||
// test lower out of bounds
|
// // test lower out of bounds
|
||||||
let loob_opts = CoinSelectorOpt {
|
// let loob_opts = CoinSelectorOpt {
|
||||||
target_value: lowest_opts.target_value.map(|v| v - 1),
|
// target_value: lowest_opts.target_value.map(|v| v - 1),
|
||||||
..lowest_opts
|
// ..lowest_opts
|
||||||
};
|
// };
|
||||||
let loob_eval = evaluate_bnb(CoinSelector::new(&candidates, &loob_opts), 10_000);
|
// let loob_eval = evaluate_bnb(CoinSelector::new(&candidates, &loob_opts), 10_000);
|
||||||
assert!(loob_eval.is_err());
|
// assert!(loob_eval.is_err());
|
||||||
println!("Lower OOB: {}", loob_eval.unwrap_err());
|
// println!("Lower OOB: {}", loob_eval.unwrap_err());
|
||||||
|
//
|
||||||
// test upper out of bounds
|
// // test upper out of bounds
|
||||||
let uoob_opts = CoinSelectorOpt {
|
// let uoob_opts = CoinSelectorOpt {
|
||||||
target_value: highest_opts.target_value.map(|v| v + 1),
|
// target_value: highest_opts.target_value.map(|v| v + 1),
|
||||||
..highest_opts
|
// ..highest_opts
|
||||||
};
|
// };
|
||||||
let uoob_eval = evaluate_bnb(CoinSelector::new(&candidates, &uoob_opts), 10_000);
|
// let uoob_eval = evaluate_bnb(CoinSelector::new(&candidates, &uoob_opts), 10_000);
|
||||||
assert!(uoob_eval.is_err());
|
// assert!(uoob_eval.is_err());
|
||||||
println!("Upper OOB: {}", uoob_eval.unwrap_err());
|
// println!("Upper OOB: {}", uoob_eval.unwrap_err());
|
||||||
}
|
// }
|
||||||
|
//
|
||||||
#[test]
|
// #[test]
|
||||||
fn try_select() {
|
// fn try_select() {
|
||||||
let t = tester();
|
// let t = tester();
|
||||||
let candidates: Vec<WeightedValue> = vec![
|
// let candidates: Vec<WeightedValue> = vec![
|
||||||
t.gen_candidate(0, 300_000).into(),
|
// t.gen_candidate(0, 300_000).into(),
|
||||||
t.gen_candidate(1, 300_000).into(),
|
// t.gen_candidate(1, 300_000).into(),
|
||||||
t.gen_candidate(2, 300_000).into(),
|
// t.gen_candidate(2, 300_000).into(),
|
||||||
t.gen_candidate(3, 200_000).into(),
|
// t.gen_candidate(3, 200_000).into(),
|
||||||
t.gen_candidate(4, 200_000).into(),
|
// t.gen_candidate(4, 200_000).into(),
|
||||||
];
|
// ];
|
||||||
let make_opts = |v: u64| -> CoinSelectorOpt {
|
// let make_opts = |v: u64| -> CoinSelectorOpt {
|
||||||
CoinSelectorOpt {
|
// CoinSelectorOpt {
|
||||||
target_feerate: 0.0,
|
// target_feerate: 0.0,
|
||||||
..t.gen_opts(v)
|
// ..t.gen_opts(v)
|
||||||
}
|
// }
|
||||||
};
|
// };
|
||||||
|
//
|
||||||
let test_cases = vec![
|
// let test_cases = vec![
|
||||||
(make_opts(100_000), false, 0),
|
// (make_opts(100_000), false, 0),
|
||||||
(make_opts(200_000), true, 1),
|
// (make_opts(200_000), true, 1),
|
||||||
(make_opts(300_000), true, 1),
|
// (make_opts(300_000), true, 1),
|
||||||
(make_opts(500_000), true, 2),
|
// (make_opts(500_000), true, 2),
|
||||||
(make_opts(1_000_000), true, 4),
|
// (make_opts(1_000_000), true, 4),
|
||||||
(make_opts(1_200_000), false, 0),
|
// (make_opts(1_200_000), false, 0),
|
||||||
(make_opts(1_300_000), true, 5),
|
// (make_opts(1_300_000), true, 5),
|
||||||
(make_opts(1_400_000), false, 0),
|
// (make_opts(1_400_000), false, 0),
|
||||||
];
|
// ];
|
||||||
|
//
|
||||||
for (opts, expect_solution, expect_selected) in test_cases {
|
// for (opts, expect_solution, expect_selected) in test_cases {
|
||||||
let res = evaluate_bnb(CoinSelector::new(&candidates, &opts), 10_000);
|
// let res = evaluate_bnb(CoinSelector::new(&candidates, &opts), 10_000);
|
||||||
assert_eq!(res.is_ok(), expect_solution);
|
// assert_eq!(res.is_ok(), expect_solution);
|
||||||
|
//
|
||||||
match res {
|
// match res {
|
||||||
Ok(eval) => {
|
// Ok(eval) => {
|
||||||
println!("{}", eval);
|
// println!("{}", eval);
|
||||||
assert_eq!(eval.feerate_offset(ExcessStrategyKind::ToFee), 0.0);
|
// assert_eq!(eval.feerate_offset(ExcessStrategyKind::ToFee), 0.0);
|
||||||
assert_eq!(eval.solution.selected.len(), expect_selected as _);
|
// assert_eq!(eval.solution.selected.len(), expect_selected as _);
|
||||||
}
|
// }
|
||||||
Err(err) => println!("expected failure: {}", err),
|
// Err(err) => println!("expected failure: {}", err),
|
||||||
}
|
// }
|
||||||
}
|
// }
|
||||||
}
|
// }
|
||||||
|
//
|
||||||
#[test]
|
// #[test]
|
||||||
fn early_bailout_optimization() {
|
// fn early_bailout_optimization() {
|
||||||
let t = tester();
|
// let t = tester();
|
||||||
|
//
|
||||||
// target: 300_000
|
// // target: 300_000
|
||||||
// candidates: 2x of 125_000, 1000x of 100_000, 1x of 50_000
|
// // candidates: 2x of 125_000, 1000x of 100_000, 1x of 50_000
|
||||||
// expected solution: 2x 125_000, 1x 50_000
|
// // expected solution: 2x 125_000, 1x 50_000
|
||||||
// set bnb max tries: 1100, should succeed
|
// // set bnb max tries: 1100, should succeed
|
||||||
let candidates = {
|
// let candidates = {
|
||||||
let mut candidates: Vec<WeightedValue> = vec![
|
// let mut candidates: Vec<WeightedValue> = vec![
|
||||||
t.gen_candidate(0, 125_000).into(),
|
// t.gen_candidate(0, 125_000).into(),
|
||||||
t.gen_candidate(1, 125_000).into(),
|
// t.gen_candidate(1, 125_000).into(),
|
||||||
t.gen_candidate(2, 50_000).into(),
|
// t.gen_candidate(2, 50_000).into(),
|
||||||
];
|
// ];
|
||||||
(3..3 + 1000_u32)
|
// (3..3 + 1000_u32)
|
||||||
.for_each(|index| candidates.push(t.gen_candidate(index, 100_000).into()));
|
// .for_each(|index| candidates.push(t.gen_candidate(index, 100_000).into()));
|
||||||
candidates
|
// candidates
|
||||||
};
|
// };
|
||||||
let opts = CoinSelectorOpt {
|
// let opts = CoinSelectorOpt {
|
||||||
target_feerate: 0.0,
|
// target_feerate: 0.0,
|
||||||
..t.gen_opts(300_000)
|
// ..t.gen_opts(300_000)
|
||||||
};
|
// };
|
||||||
|
//
|
||||||
let result = evaluate_bnb(CoinSelector::new(&candidates, &opts), 1100);
|
// let result = evaluate_bnb(CoinSelector::new(&candidates, &opts), 1100);
|
||||||
assert!(result.is_ok());
|
// assert!(result.is_ok());
|
||||||
|
//
|
||||||
let eval = result.unwrap();
|
// let eval = result.unwrap();
|
||||||
println!("{}", eval);
|
// println!("{}", eval);
|
||||||
assert_eq!(eval.solution.selected, (0..=2).collect());
|
// assert_eq!(eval.solution.selected, (0..=2).collect());
|
||||||
}
|
// }
|
||||||
|
//
|
||||||
#[test]
|
// #[test]
|
||||||
fn should_exhaust_iteration() {
|
// fn should_exhaust_iteration() {
|
||||||
static MAX_TRIES: usize = 1000;
|
// static MAX_TRIES: usize = 1000;
|
||||||
let t = tester();
|
// let t = tester();
|
||||||
let candidates = (0..MAX_TRIES + 1)
|
// let candidates = (0..MAX_TRIES + 1)
|
||||||
.map(|index| t.gen_candidate(index as _, 10_000).into())
|
// .map(|index| t.gen_candidate(index as _, 10_000).into())
|
||||||
.collect::<Vec<WeightedValue>>();
|
// .collect::<Vec<WeightedValue>>();
|
||||||
let opts = t.gen_opts(10_001 * MAX_TRIES as u64);
|
// let opts = t.gen_opts(10_001 * MAX_TRIES as u64);
|
||||||
let result = evaluate_bnb(CoinSelector::new(&candidates, &opts), MAX_TRIES);
|
// let result = evaluate_bnb(CoinSelector::new(&candidates, &opts), MAX_TRIES);
|
||||||
assert!(result.is_err());
|
// assert!(result.is_err());
|
||||||
println!("error as expected: {}", result.unwrap_err());
|
// println!("error as expected: {}", result.unwrap_err());
|
||||||
}
|
// }
|
||||||
|
//
|
||||||
/// Solution should have fee >= min_absolute_fee (or no solution at all)
|
// /// Solution should have fee >= min_absolute_fee (or no solution at all)
|
||||||
#[test]
|
// #[test]
|
||||||
fn min_absolute_fee() {
|
// fn min_absolute_fee() {
|
||||||
let t = tester();
|
// let t = tester();
|
||||||
let candidates = {
|
// let candidates = {
|
||||||
let mut candidates = Vec::new();
|
// let mut candidates = Vec::new();
|
||||||
t.gen_weighted_values(&mut candidates, 5, 10_000);
|
// t.gen_weighted_values(&mut candidates, 5, 10_000);
|
||||||
t.gen_weighted_values(&mut candidates, 5, 20_000);
|
// t.gen_weighted_values(&mut candidates, 5, 20_000);
|
||||||
t.gen_weighted_values(&mut candidates, 5, 30_000);
|
// t.gen_weighted_values(&mut candidates, 5, 30_000);
|
||||||
t.gen_weighted_values(&mut candidates, 10, 10_300);
|
// t.gen_weighted_values(&mut candidates, 10, 10_300);
|
||||||
t.gen_weighted_values(&mut candidates, 10, 10_500);
|
// t.gen_weighted_values(&mut candidates, 10, 10_500);
|
||||||
t.gen_weighted_values(&mut candidates, 10, 10_700);
|
// t.gen_weighted_values(&mut candidates, 10, 10_700);
|
||||||
t.gen_weighted_values(&mut candidates, 10, 10_900);
|
// t.gen_weighted_values(&mut candidates, 10, 10_900);
|
||||||
t.gen_weighted_values(&mut candidates, 10, 11_000);
|
// t.gen_weighted_values(&mut candidates, 10, 11_000);
|
||||||
t.gen_weighted_values(&mut candidates, 10, 12_000);
|
// t.gen_weighted_values(&mut candidates, 10, 12_000);
|
||||||
t.gen_weighted_values(&mut candidates, 10, 13_000);
|
// t.gen_weighted_values(&mut candidates, 10, 13_000);
|
||||||
candidates
|
// candidates
|
||||||
};
|
// };
|
||||||
let mut opts = CoinSelectorOpt {
|
// let mut opts = CoinSelectorOpt {
|
||||||
min_absolute_fee: 1,
|
// min_absolute_fee: 1,
|
||||||
..t.gen_opts(100_000)
|
// ..t.gen_opts(100_000)
|
||||||
};
|
// };
|
||||||
|
//
|
||||||
(1..=120_u64).for_each(|fee_factor| {
|
// (1..=120_u64).for_each(|fee_factor| {
|
||||||
opts.min_absolute_fee = fee_factor * 31;
|
// opts.min_absolute_fee = fee_factor * 31;
|
||||||
|
//
|
||||||
let result = evaluate_bnb(CoinSelector::new(&candidates, &opts), 21_000);
|
// let result = evaluate_bnb(CoinSelector::new(&candidates, &opts), 21_000);
|
||||||
match result {
|
// match result {
|
||||||
Ok(result) => {
|
// Ok(result) => {
|
||||||
println!("Solution {}", result);
|
// println!("Solution {}", result);
|
||||||
let fee = result.solution.excess_strategies[&ExcessStrategyKind::ToFee].fee;
|
// let fee = result.solution.excess_strategies[&ExcessStrategyKind::ToFee].fee;
|
||||||
assert!(fee >= opts.min_absolute_fee);
|
// assert!(fee >= opts.min_absolute_fee);
|
||||||
assert_eq!(result.solution.excess_strategies.len(), 1);
|
// assert_eq!(result.solution.excess_strategies.len(), 1);
|
||||||
}
|
// }
|
||||||
Err(err) => {
|
// Err(err) => {
|
||||||
println!("No Solution: {}", err);
|
// println!("No Solution: {}", err);
|
||||||
}
|
// }
|
||||||
}
|
// }
|
||||||
});
|
// });
|
||||||
}
|
// }
|
||||||
|
//
|
||||||
/// For a decreasing feerate (long-term feerate is lower than effective feerate), we should
|
// /// For a decreasing feerate (long-term feerate is lower than effective feerate), we should
|
||||||
/// select less. For increasing feerate (long-term feerate is higher than effective feerate), we
|
// /// select less. For increasing feerate (long-term feerate is higher than effective feerate), we
|
||||||
/// should select more.
|
// /// should select more.
|
||||||
#[test]
|
// #[test]
|
||||||
fn feerate_difference() {
|
// fn feerate_difference() {
|
||||||
let t = tester();
|
// let t = tester();
|
||||||
let candidates = {
|
// let candidates = {
|
||||||
let mut candidates = Vec::new();
|
// let mut candidates = Vec::new();
|
||||||
t.gen_weighted_values(&mut candidates, 10, 2_000);
|
// t.gen_weighted_values(&mut candidates, 10, 2_000);
|
||||||
t.gen_weighted_values(&mut candidates, 10, 5_000);
|
// t.gen_weighted_values(&mut candidates, 10, 5_000);
|
||||||
t.gen_weighted_values(&mut candidates, 10, 20_000);
|
// t.gen_weighted_values(&mut candidates, 10, 20_000);
|
||||||
candidates
|
// candidates
|
||||||
};
|
// };
|
||||||
|
//
|
||||||
let decreasing_feerate_opts = CoinSelectorOpt {
|
// let decreasing_feerate_opts = CoinSelectorOpt {
|
||||||
target_feerate: 1.25,
|
// target_feerate: 1.25,
|
||||||
long_term_feerate: Some(0.25),
|
// long_term_feerate: Some(0.25),
|
||||||
..t.gen_opts(100_000)
|
// ..t.gen_opts(100_000)
|
||||||
};
|
// };
|
||||||
|
//
|
||||||
let increasing_feerate_opts = CoinSelectorOpt {
|
// let increasing_feerate_opts = CoinSelectorOpt {
|
||||||
target_feerate: 0.25,
|
// target_feerate: 0.25,
|
||||||
long_term_feerate: Some(1.25),
|
// long_term_feerate: Some(1.25),
|
||||||
..t.gen_opts(100_000)
|
// ..t.gen_opts(100_000)
|
||||||
};
|
// };
|
||||||
|
//
|
||||||
let decreasing_res = evaluate_bnb(
|
// let decreasing_res = evaluate_bnb(
|
||||||
CoinSelector::new(&candidates, &decreasing_feerate_opts),
|
// CoinSelector::new(&candidates, &decreasing_feerate_opts),
|
||||||
21_000,
|
// 21_000,
|
||||||
)
|
// )
|
||||||
.expect("no result");
|
// .expect("no result");
|
||||||
let decreasing_len = decreasing_res.solution.selected.len();
|
// let decreasing_len = decreasing_res.solution.selected.len();
|
||||||
|
//
|
||||||
let increasing_res = evaluate_bnb(
|
// let increasing_res = evaluate_bnb(
|
||||||
CoinSelector::new(&candidates, &increasing_feerate_opts),
|
// CoinSelector::new(&candidates, &increasing_feerate_opts),
|
||||||
21_000,
|
// 21_000,
|
||||||
)
|
// )
|
||||||
.expect("no result");
|
// .expect("no result");
|
||||||
let increasing_len = increasing_res.solution.selected.len();
|
// let increasing_len = increasing_res.solution.selected.len();
|
||||||
|
//
|
||||||
println!("decreasing_len: {}", decreasing_len);
|
// println!("decreasing_len: {}", decreasing_len);
|
||||||
println!("increasing_len: {}", increasing_len);
|
// println!("increasing_len: {}", increasing_len);
|
||||||
assert!(decreasing_len < increasing_len);
|
// assert!(decreasing_len < increasing_len);
|
||||||
}
|
// }
|
||||||
|
//
|
||||||
/// TODO: UNIMPLEMENTED TESTS:
|
// /// TODO: UNIMPLEMENTED TESTS:
|
||||||
/// * Excess strategies:
|
// /// * Excess strategies:
|
||||||
/// * We should always have `ExcessStrategy::ToFee`.
|
// /// * We should always have `ExcessStrategy::ToFee`.
|
||||||
/// * We should only have `ExcessStrategy::ToRecipient` when `max_extra_target > 0`.
|
// /// * We should only have `ExcessStrategy::ToRecipient` when `max_extra_target > 0`.
|
||||||
/// * We should only have `ExcessStrategy::ToDrain` when `drain_value >= min_drain_value`.
|
// /// * We should only have `ExcessStrategy::ToDrain` when `drain_value >= min_drain_value`.
|
||||||
/// * Fuzz
|
// /// * Fuzz
|
||||||
/// * Solution feerate should never be lower than target feerate
|
// /// * Solution feerate should never be lower than target feerate
|
||||||
/// * Solution fee should never be lower than `min_absolute_fee`.
|
// /// * Solution fee should never be lower than `min_absolute_fee`.
|
||||||
/// * Preselected should always remain selected
|
// /// * Preselected should always remain selected
|
||||||
fn _todo() {}
|
// fn _todo() {}
|
||||||
}
|
// }
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user