Compare commits

...

34 Commits

Author SHA1 Message Date
Steve Myers
9e4ca516a8 Bump version to 0.12.0 2021-09-30 11:42:21 -07:00
Steve Myers
b60465f31e Bump bdk-macros version to 0.6.0 2021-09-30 11:24:01 -07:00
Steve Myers
1469a3487a Downgrade tiny-bip39 to version < 0.8
This is required until BDK MSRV is changed to 1.51 or we replace
tiny-bip39 dependency.
2021-09-27 12:47:58 -07:00
Steve Myers
c9ed8bdf6c Bump version to 0.12.0-rc.1 2021-09-24 10:25:12 -07:00
Steve Myers
919522a456 Fix clippy warning 2021-09-23 18:57:55 -07:00
Steve Myers
678607e673 Move new CHANGELOG entries to Unreleased 2021-09-23 18:28:27 -07:00
John Cantrell
c06d9f1d33 implement sqlite database 2021-09-23 20:54:08 -04:00
Steve Myers
5a6a2cefdd Merge commit 'refs/pull/442/head' of github.com:bitcoindevkit/bdk 2021-09-23 15:28:57 -07:00
Alekos Filini
3fe2380d6c [esplora] Support proxies in EsploraBlockchain 2021-09-23 21:38:19 +02:00
Lucas Soriano del Pino
eea8b135a4 Activate miniscript/use-serde feature 2021-09-23 19:49:06 +10:00
Steve Myers
a685b22aa6 [ci] Change check-wasm job to use ubuntu-20.04 runner 2021-09-22 10:08:10 -07:00
LLFourn
c601ae3271 [fix-build] Fix version of zeroize_derive to 1.1.0 2021-09-22 11:01:37 +10:00
Riccardo Casatta
c23692824d [rpc] rescan in chunks of 10_000 blocks 2021-09-17 15:19:52 +02:00
Steve Myers
46f7b440f5 Merge commit 'refs/pull/438/head' of github.com:bitcoindevkit/bdk 2021-09-16 11:03:52 -07:00
Steve Myers
562fde7953 Merge commit 'refs/pull/434/head' of github.com:bitcoindevkit/bdk 2021-09-16 08:45:53 -07:00
rajarshimaitra
9e508748a3 Update CI blockchain tests
(cherry picked from commit 10b53a56d7)
2021-09-15 13:44:11 -07:00
rajarshimaitra
84b8579df5 Test refactor
- Fix esplora module level feature flag
- Move esplora blockchain tests to module, to cover for both variants

(cherry picked from commit 8d1d92e71e)
2021-09-15 13:44:09 -07:00
rajarshimaitra
7cb0116c44 Fix reqwest blockchain test
- add back await_or_block! to bdk-macros
- use await_or_block! in reqwest tests

(cherry picked from commit a41a0030dc)
2021-09-15 13:44:06 -07:00
rajarshimaitra
6e12468b12 Update Cargo.toml
- Changed to local bdk-macro
- Added back tokio
- Update esplora-reqwest and test-esplora feature guards

(cherry picked from commit 2459740f72)
2021-09-15 13:44:04 -07:00
Alekos Filini
326b64de3a [descriptor] Add a test for extract_policy() on pk_h() operands 2021-09-15 10:38:36 +02:00
Alekos Filini
5edf663f3d [descriptor] Add an alias for and_or()
The descriptor syntax encodes it with `andor()`, without the underscore
2021-09-15 10:37:35 +02:00
Alekos Filini
e3dd755396 [descriptor] Fix pk_h() in the descriptor!() macro
Instead of accepting just a `DescriptorPublicKey` it now accepts
anything that implements `IntoDescriptorKey` like `pk_k()` does.
2021-09-15 10:37:33 +02:00
Alekos Filini
b500cfe4e5 [descriptor] Fix extract_policy() for descriptors with pk_h() 2021-09-15 10:37:30 +02:00
rajarshimaitra
10b53a56d7 Update CI blockchain tests 2021-09-14 11:29:29 +05:30
rajarshimaitra
8d1d92e71e Test refactor
- Fix esplora module level feature flag
- Move esplora blockchain tests to module, to cover for both variants
2021-09-14 11:29:28 +05:30
rajarshimaitra
a41a0030dc Fix reqwest blockchain test
- add back await_or_block! to bdk-macros
- use await_or_block! in reqwest tests
2021-09-14 11:29:28 +05:30
rajarshimaitra
2459740f72 Update Cargo.toml
- Changed to local bdk-macro
- Added back tokio
- Update esplora-reqwest and test-esplora feature guards
2021-09-14 11:29:28 +05:30
Steve Myers
5694b98304 Bump version to 0.11.1-dev 2021-09-04 11:43:24 -07:00
Steve Myers
dd4bd96f79 Merge commit 'refs/pull/428/head' of github.com:bitcoindevkit/bdk 2021-08-31 08:33:07 -07:00
rajarshimaitra
2caa590438 Use ureq with default features 2021-08-31 14:37:50 +05:30
Steve Myers
2a53cfc23f Merge commit 'refs/pull/426/head' of github.com:bitcoindevkit/bdk 2021-08-30 12:41:25 -07:00
Lucas Soriano del Pino
acf157a99a Fix use statements in populate_test_db macro
- Use re-exported `bitcoin` so that users of the macro don't need to
depend on `bitcoin` directly.
- Add missing `use std::str::FromStr`.
2021-08-30 14:08:17 +10:00
Lucas Soriano del Pino
fb813427eb Use re-exported bitcoin and miniscript in testutils macro
Otherwise users of the macro must depend on `bitcoin` and `miniscript`
directly, which defeats the point of re-exporting these crates in the
first place.
2021-08-30 13:48:34 +10:00
Roman Zeyde
470d02c81c Fix a small typo in log_progress() description 2021-08-24 23:56:57 +03:00
22 changed files with 1256 additions and 146 deletions

View File

@@ -26,6 +26,7 @@ jobs:
- verify
- async-interface
- use-esplora-reqwest
- sqlite
steps:
- name: checkout
uses: actions/checkout@v2
@@ -78,15 +79,20 @@ jobs:
run: cargo test --features test-md-docs --no-default-features -- doctest::ReadmeDoctests
test-blockchains:
name: Test ${{ matrix.blockchain.name }}
name: Blockchain ${{ matrix.blockchain.features }}
runs-on: ubuntu-20.04
strategy:
fail-fast: false
matrix:
blockchain:
- name: electrum
features: test-electrum
- name: rpc
features: test-rpc
- name: esplora
features: test-esplora,use-esplora-reqwest
- name: esplora
features: test-esplora,use-esplora-ureq
steps:
- name: Checkout
uses: actions/checkout@v2
@@ -104,11 +110,11 @@ jobs:
toolchain: stable
override: true
- name: Test
run: cargo test --features test-${{ matrix.blockchain.name }} ${{ matrix.blockchain.name }}::bdk_blockchain_tests
run: cargo test --no-default-features --features ${{ matrix.blockchain.features }} ${{ matrix.blockchain.name }}::bdk_blockchain_tests
check-wasm:
name: Check WASM
runs-on: ubuntu-16.04
runs-on: ubuntu-20.04
env:
CC: clang-10
CFLAGS: -I/usr/include
@@ -125,7 +131,7 @@ jobs:
key: ${{ runner.os }}-cargo-${{ github.job }}-${{ hashFiles('**/Cargo.toml','**/Cargo.lock') }}
# Install a recent version of clang that supports wasm32
- run: wget -O - https://apt.llvm.org/llvm-snapshot.gpg.key | sudo apt-key add - || exit 1
- run: sudo apt-add-repository "deb http://apt.llvm.org/xenial/ llvm-toolchain-xenial-10 main" || exit 1
- run: sudo apt-add-repository "deb http://apt.llvm.org/focal/ llvm-toolchain-focal-10 main" || exit 1
- run: sudo apt-get update || exit 1
- run: sudo apt-get install -y libclang-common-10-dev clang-10 libc6-dev-i386 || exit 1
- name: Set default toolchain
@@ -139,7 +145,6 @@ jobs:
- name: Check
run: cargo check --target wasm32-unknown-unknown --features use-esplora-reqwest --no-default-features
fmt:
name: Rust fmt
runs-on: ubuntu-latest

View File

@@ -24,7 +24,7 @@ jobs:
- name: Update toolchain
run: rustup update
- name: Build docs
run: cargo rustdoc --verbose --features=compiler,electrum,esplora,ureq,compact_filters,key-value-db,all-keys -- --cfg docsrs -Dwarnings
run: cargo rustdoc --verbose --features=compiler,electrum,esplora,ureq,compact_filters,key-value-db,all-keys,sqlite -- --cfg docsrs -Dwarnings
- name: Upload artifact
uses: actions/upload-artifact@v2
with:

View File

@@ -6,6 +6,13 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
## [Unreleased]
## [v0.12.0] - [v0.11.0]
- Activate `miniscript/use-serde` feature to allow consumers of the library to access it via the re-exported `miniscript` crate.
- Add support for proxies in `EsploraBlockchain`
- Added `SqliteDatabase` that implements `Database` backed by a sqlite database using `rusqlite` crate.
## [v0.11.0] - [v0.10.0]
- Added `flush` method to the `Database` trait to explicitly flush to disk latest changes on the db.
@@ -376,4 +383,5 @@ final transaction is created by calling `finish` on the builder.
[v0.8.0]: https://github.com/bitcoindevkit/bdk/compare/v0.7.0...v0.8.0
[v0.9.0]: https://github.com/bitcoindevkit/bdk/compare/v0.8.0...v0.9.0
[v0.10.0]: https://github.com/bitcoindevkit/bdk/compare/v0.9.0...v0.10.0
[v0.11.0]: https://github.com/bitcoindevkit/bdk/compare/v0.10.0...v0.11.0
[v0.11.0]: https://github.com/bitcoindevkit/bdk/compare/v0.10.0...v0.11.0
[v0.12.0]: https://github.com/bitcoindevkit/bdk/compare/v0.11.0...v0.12.0

View File

@@ -1,6 +1,6 @@
[package]
name = "bdk"
version = "0.11.0"
version = "0.12.0"
edition = "2018"
authors = ["Alekos Filini <alekos.filini@gmail.com>", "Riccardo Casatta <riccardo@casatta.it>"]
homepage = "https://bitcoindevkit.org"
@@ -12,9 +12,9 @@ readme = "README.md"
license = "MIT OR Apache-2.0"
[dependencies]
bdk-macros = "0.5"
bdk-macros = "^0.6"
log = "^0.4"
miniscript = "^6.0"
miniscript = { version = "^6.0", features = ["use-serde"] }
bitcoin = { version = "^0.27", features = ["use-serde", "base64"] }
serde = { version = "^1.0", features = ["derive"] }
serde_json = { version = "^1.0" }
@@ -23,21 +23,30 @@ rand = "^0.7"
# Optional dependencies
sled = { version = "0.34", optional = true }
electrum-client = { version = "0.8", optional = true }
rusqlite = { version = "0.25.3", optional = true }
reqwest = { version = "0.11", optional = true, features = ["json"] }
ureq = { version = "2.1", default-features = false, features = ["json"], optional = true }
ureq = { version = "2.1", features = ["json"], optional = true }
futures = { version = "0.3", optional = true }
async-trait = { version = "0.1", optional = true }
rocksdb = { version = "0.14", default-features = false, features = ["snappy"], optional = true }
cc = { version = ">=1.0.64", optional = true }
socks = { version = "0.3", optional = true }
lazy_static = { version = "1.4", optional = true }
tiny-bip39 = { version = "^0.8", optional = true }
zeroize = { version = "<1.4.0", optional = true }
# the latest 0.8 version of tiny-bip39 depends on zeroize_derive 1.2 which has MSRV 1.51 and our
# MSRV is 1.46, to fix this until we update our MSRV or replace the tiny-bip39
# dependency https://github.com/bitcoindevkit/bdk/issues/399 we can only use an older version
tiny-bip39 = { version = "< 0.8", optional = true }
bitcoinconsensus = { version = "0.19.0-3", optional = true }
# Needed by bdk_blockchain_tests macro
core-rpc = { version = "0.14", optional = true }
# Platform-specific dependencies
[target.'cfg(not(target_arch = "wasm32"))'.dependencies]
tokio = { version = "1", features = ["rt"] }
[target.'cfg(target_arch = "wasm32")'.dependencies]
async-trait = "0.1"
js-sys = "0.3"
@@ -48,10 +57,11 @@ minimal = []
compiler = ["miniscript/compiler"]
verify = ["bitcoinconsensus"]
default = ["key-value-db", "electrum"]
sqlite = ["rusqlite"]
compact_filters = ["rocksdb", "socks", "lazy_static", "cc"]
key-value-db = ["sled"]
all-keys = ["keys-bip39"]
keys-bip39 = ["tiny-bip39", "zeroize"]
keys-bip39 = ["tiny-bip39"]
rpc = ["core-rpc"]
# We currently provide mulitple implementations of `Blockchain`, all are
@@ -70,8 +80,8 @@ rpc = ["core-rpc"]
async-interface = ["async-trait"]
electrum = ["electrum-client"]
# MUST ALSO USE `--no-default-features`.
use-esplora-reqwest = ["async-interface", "esplora", "reqwest", "futures"]
use-esplora-ureq = ["esplora", "ureq"]
use-esplora-reqwest = ["esplora", "reqwest", "reqwest/socks", "futures"]
use-esplora-ureq = ["esplora", "ureq", "ureq/socks"]
# Typical configurations will not need to use `esplora` feature directly.
esplora = []
@@ -80,7 +90,7 @@ esplora = []
test-blockchains = ["core-rpc", "electrum-client"]
test-electrum = ["electrum", "electrsd/electrs_0_8_10", "test-blockchains"]
test-rpc = ["rpc", "electrsd/electrs_0_8_10", "test-blockchains"]
test-esplora = ["esplora", "ureq", "electrsd/legacy", "electrsd/esplora_a33e97e1", "test-blockchains"]
test-esplora = ["electrsd/legacy", "electrsd/esplora_a33e97e1", "test-blockchains"]
test-md-docs = ["electrum"]
[dev-dependencies]

View File

@@ -32,7 +32,7 @@ Pre-`v1.0.0` our "major" releases only affect the "minor" semver value. Accordin
- If it's a minor issue you can just fix it in the release branch, since it will be merged back to `master` eventually
- For bigger issues you can fix them on `master` and then *cherry-pick* the commit to the release branch
6. Update the changelog with the new release version.
7. Update `src/lib.rs` with the new version (line ~59)
7. Update `src/lib.rs` with the new version (line ~43)
8. On release day, make a commit on the release branch to bump the version to `x.y.z`. The message should be "Bump version to x.y.z".
9. Add a tag to this commit. The tag name should be `vx.y.z` (for example `v0.5.0`), and the message "Release x.y.z". Make sure the tag is signed, for extra safety use the explicit `--sign` flag.
10. Push the new commits to the upstream release branch, wait for the CI to finish one last time.

View File

@@ -1,6 +1,6 @@
[package]
name = "bdk-macros"
version = "0.5.0"
version = "0.6.0"
authors = ["Alekos Filini <alekos.filini@gmail.com>"]
edition = "2018"
homepage = "https://bitcoindevkit.org"

View File

@@ -121,3 +121,26 @@ pub fn maybe_await(expr: TokenStream) -> TokenStream {
quoted.into()
}
/// Awaits if target_arch is "wasm32" or the `async-interface` feature is enabled,
/// uses `tokio::Runtime::block_on()` otherwise
///
/// Requires the `tokio` crate as a dependency with the `rt` feature enabled to
/// build on non-wasm32 platforms.
#[proc_macro]
pub fn await_or_block(expr: TokenStream) -> TokenStream {
    let expr: proc_macro2::TokenStream = expr.into();
    // Emit both cfg-gated branches; exactly one survives compilation at the
    // expansion site depending on target_arch and the `async-interface` feature.
    let quoted = quote! {
        {
            #[cfg(all(not(target_arch = "wasm32"), not(feature = "async-interface")))]
            {
                // Sync context: build a fresh single-threaded tokio runtime per
                // call and block on the future. NOTE(review): the `unwrap()`
                // panics if the runtime cannot be built.
                tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(#expr)
            }
            #[cfg(any(target_arch = "wasm32", feature = "async-interface"))]
            {
                // Async context: just await the expression in place.
                #expr.await
            }
        }
    };
    quoted.into()
}

View File

@@ -29,38 +29,16 @@ use bitcoin::{BlockHash, Txid};
use crate::error::Error;
use crate::FeeRate;
#[cfg(all(
feature = "esplora",
feature = "reqwest",
any(feature = "async-interface", target_arch = "wasm32"),
))]
#[cfg(feature = "reqwest")]
mod reqwest;
#[cfg(all(
feature = "esplora",
feature = "reqwest",
any(feature = "async-interface", target_arch = "wasm32"),
))]
#[cfg(feature = "reqwest")]
pub use self::reqwest::*;
#[cfg(all(
feature = "esplora",
not(any(
feature = "async-interface",
feature = "reqwest",
target_arch = "wasm32"
)),
))]
#[cfg(feature = "ureq")]
mod ureq;
#[cfg(all(
feature = "esplora",
not(any(
feature = "async-interface",
feature = "reqwest",
target_arch = "wasm32"
)),
))]
#[cfg(feature = "ureq")]
pub use self::ureq::*;
fn into_fee_rate(target: usize, estimates: HashMap<String, f64>) -> Result<FeeRate, Error> {
@@ -141,3 +119,11 @@ impl_error!(io::Error, Io, EsploraError);
impl_error!(std::num::ParseIntError, Parsing, EsploraError);
impl_error!(consensus::encode::Error, BitcoinEncoding, EsploraError);
impl_error!(bitcoin::hashes::hex::Error, Hex, EsploraError);
#[cfg(test)]
#[cfg(feature = "test-esplora")]
crate::bdk_blockchain_tests! {
fn test_instance(test_client: &TestClient) -> EsploraBlockchain {
EsploraBlockchain::new(&format!("http://{}",test_client.electrsd.esplora_url.as_ref().unwrap()), 20)
}
}

View File

@@ -106,19 +106,19 @@ impl Blockchain for EsploraBlockchain {
}
fn get_tx(&self, txid: &Txid) -> Result<Option<Transaction>, Error> {
Ok(self.url_client._get_tx(txid).await?)
Ok(await_or_block!(self.url_client._get_tx(txid))?)
}
fn broadcast(&self, tx: &Transaction) -> Result<(), Error> {
Ok(self.url_client._broadcast(tx).await?)
Ok(await_or_block!(self.url_client._broadcast(tx))?)
}
fn get_height(&self) -> Result<u32, Error> {
Ok(self.url_client._get_height().await?)
Ok(await_or_block!(self.url_client._get_height())?)
}
fn estimate_fee(&self, target: usize) -> Result<FeeRate, Error> {
let estimates = self.url_client._get_fee_estimates().await?;
let estimates = await_or_block!(self.url_client._get_fee_estimates())?;
super::into_fee_rate(target, estimates)
}
}
@@ -287,10 +287,10 @@ impl ElectrumLikeSync for UrlClient {
for script in chunk {
futs.push(self._script_get_history(script));
}
let partial_results: Vec<Vec<ElsGetHistoryRes>> = futs.try_collect().await?;
let partial_results: Vec<Vec<ElsGetHistoryRes>> = await_or_block!(futs.try_collect())?;
results.extend(partial_results);
}
Ok(stream::iter(results).collect().await)
Ok(await_or_block!(stream::iter(results).collect()))
}
fn els_batch_transaction_get<'s, I: IntoIterator<Item = &'s Txid>>(
@@ -303,10 +303,10 @@ impl ElectrumLikeSync for UrlClient {
for txid in chunk {
futs.push(self._get_tx_no_opt(txid));
}
let partial_results: Vec<Transaction> = futs.try_collect().await?;
let partial_results: Vec<Transaction> = await_or_block!(futs.try_collect())?;
results.extend(partial_results);
}
Ok(stream::iter(results).collect().await)
Ok(await_or_block!(stream::iter(results).collect()))
}
fn els_batch_block_header<I: IntoIterator<Item = u32>>(
@@ -319,10 +319,10 @@ impl ElectrumLikeSync for UrlClient {
for height in chunk {
futs.push(self._get_header(height));
}
let partial_results: Vec<BlockHeader> = futs.try_collect().await?;
let partial_results: Vec<BlockHeader> = await_or_block!(futs.try_collect())?;
results.extend(partial_results);
}
Ok(stream::iter(results).collect().await)
Ok(await_or_block!(stream::iter(results).collect()))
}
}
@@ -333,6 +333,17 @@ pub struct EsploraBlockchainConfig {
///
/// eg. `https://blockstream.info/api/`
pub base_url: String,
/// Optional URL of the proxy to use to make requests to the Esplora server
///
/// The string should be formatted as: `<protocol>://<user>:<password>@host:<port>`.
///
/// Note that the format of this value and the supported protocols change slightly between the
/// sync version of esplora (using `ureq`) and the async version (using `reqwest`). For more
/// details check with the documentation of the two crates. Both of them are compiled with
/// the `socks` feature enabled.
///
/// The proxy is ignored when targeting `wasm32`.
pub proxy: Option<String>,
/// Number of parallel requests sent to the esplora service (default: 4)
pub concurrency: Option<u8>,
/// Stop searching addresses for transactions after finding an unused gap of this length.
@@ -343,18 +354,19 @@ impl ConfigurableBlockchain for EsploraBlockchain {
type Config = EsploraBlockchainConfig;
fn from_config(config: &Self::Config) -> Result<Self, Error> {
let map_e = |e: reqwest::Error| Error::Esplora(Box::new(e.into()));
let mut blockchain = EsploraBlockchain::new(config.base_url.as_str(), config.stop_gap);
if let Some(concurrency) = config.concurrency {
blockchain.url_client.concurrency = concurrency;
};
}
#[cfg(not(target_arch = "wasm32"))]
if let Some(proxy) = &config.proxy {
blockchain.url_client.client = Client::builder()
.proxy(reqwest::Proxy::all(proxy).map_err(map_e)?)
.build()
.map_err(map_e)?;
}
Ok(blockchain)
}
}
#[cfg(test)]
#[cfg(feature = "test-esplora")]
crate::bdk_blockchain_tests! {
fn test_instance(test_client: &TestClient) -> EsploraBlockchain {
EsploraBlockchain::new(&format!("http://{}",test_client.electrsd.esplora_url.as_ref().unwrap()), None, 20)
}
}

View File

@@ -19,7 +19,7 @@ use std::time::Duration;
#[allow(unused_imports)]
use log::{debug, error, info, trace};
use ureq::{Agent, Response};
use ureq::{Agent, Proxy, Response};
use bitcoin::consensus::{deserialize, serialize};
use bitcoin::hashes::hex::{FromHex, ToHex};
@@ -59,7 +59,7 @@ impl std::convert::From<UrlClient> for EsploraBlockchain {
}
impl EsploraBlockchain {
/// Create a new instance of the client from a base URL and `stop_gap`.
/// Create a new instance of the client from a base URL and the `stop_gap`.
pub fn new(base_url: &str, stop_gap: usize) -> Self {
EsploraBlockchain {
url_client: UrlClient {
@@ -358,6 +358,17 @@ impl ElectrumLikeSync for UrlClient {
pub struct EsploraBlockchainConfig {
/// Base URL of the esplora service eg. `https://blockstream.info/api/`
pub base_url: String,
/// Optional URL of the proxy to use to make requests to the Esplora server
///
/// The string should be formatted as: `<protocol>://<user>:<password>@host:<port>`.
///
/// Note that the format of this value and the supported protocols change slightly between the
/// sync version of esplora (using `ureq`) and the async version (using `reqwest`). For more
/// details check with the documentation of the two crates. Both of them are compiled with
/// the `socks` feature enabled.
///
/// The proxy is ignored when targeting `wasm32`.
pub proxy: Option<String>,
/// Socket read timeout.
pub timeout_read: u64,
/// Socket write timeout.
@@ -370,10 +381,18 @@ impl ConfigurableBlockchain for EsploraBlockchain {
type Config = EsploraBlockchainConfig;
fn from_config(config: &Self::Config) -> Result<Self, Error> {
let agent: Agent = ureq::AgentBuilder::new()
let mut agent_builder = ureq::AgentBuilder::new()
.timeout_read(Duration::from_secs(config.timeout_read))
.timeout_write(Duration::from_secs(config.timeout_write))
.build();
Ok(EsploraBlockchain::new(config.base_url.as_str(), config.stop_gap).with_agent(agent))
.timeout_write(Duration::from_secs(config.timeout_write));
if let Some(proxy) = &config.proxy {
agent_builder = agent_builder
.proxy(Proxy::new(proxy).map_err(|e| Error::Esplora(Box::new(e.into())))?);
}
Ok(
EsploraBlockchain::new(config.base_url.as_str(), config.stop_gap)
.with_agent(agent_builder.build()),
)
}
}

View File

@@ -201,7 +201,7 @@ impl Progress for NoopProgress {
#[derive(Clone, Copy)]
pub struct LogProgress;
/// Create a nwe instance of [`LogProgress`]
/// Create a new instance of [`LogProgress`]
pub fn log_progress() -> LogProgress {
LogProgress
}

View File

@@ -169,22 +169,25 @@ impl Blockchain for RpcBlockchain {
//TODO maybe convenient using import_descriptor for compatible descriptor and import_multi as fallback
self.client.import_multi(&requests, Some(&options))?;
let current_height = self.get_height()?;
loop {
let current_height = self.get_height()?;
// min because block invalidate may cause height to go down
let node_synced = self.get_node_synced_height()?.min(current_height);
// min because block invalidate may cause height to go down
let node_synced = self.get_node_synced_height()?.min(current_height);
//TODO call rescan in chunks (updating node_synced_height) so that in case of
// interruption work can be partially recovered
debug!(
"rescan_blockchain from:{} to:{}",
node_synced, current_height
);
self.client
.rescan_blockchain(Some(node_synced as usize), Some(current_height as usize))?;
progress_update.update(1.0, None)?;
let sync_up_to = node_synced.saturating_add(10_000).min(current_height);
self.set_node_synced_height(current_height)?;
debug!("rescan_blockchain from:{} to:{}", node_synced, sync_up_to);
self.client
.rescan_blockchain(Some(node_synced as usize), Some(sync_up_to as usize))?;
progress_update.update((sync_up_to as f32) / (current_height as f32), None)?;
self.set_node_synced_height(sync_up_to)?;
if sync_up_to == current_height {
break;
}
}
self.sync(database, progress_update)
}

View File

@@ -65,6 +65,8 @@ macro_rules! impl_inner_method {
$enum_name::Memory(inner) => inner.$name( $($args, )* ),
#[cfg(feature = "key-value-db")]
$enum_name::Sled(inner) => inner.$name( $($args, )* ),
#[cfg(feature = "sqlite")]
$enum_name::Sqlite(inner) => inner.$name( $($args, )* ),
}
}
}
@@ -82,10 +84,15 @@ pub enum AnyDatabase {
#[cfg_attr(docsrs, doc(cfg(feature = "key-value-db")))]
/// Simple key-value embedded database based on [`sled`]
Sled(sled::Tree),
#[cfg(feature = "sqlite")]
#[cfg_attr(docsrs, doc(cfg(feature = "sqlite")))]
/// Sqlite embedded database using [`rusqlite`]
Sqlite(sqlite::SqliteDatabase),
}
impl_from!(memory::MemoryDatabase, AnyDatabase, Memory,);
impl_from!(sled::Tree, AnyDatabase, Sled, #[cfg(feature = "key-value-db")]);
impl_from!(sqlite::SqliteDatabase, AnyDatabase, Sqlite, #[cfg(feature = "sqlite")]);
/// Type that contains any of the [`BatchDatabase::Batch`] types defined by the library
pub enum AnyBatch {
@@ -95,6 +102,10 @@ pub enum AnyBatch {
#[cfg_attr(docsrs, doc(cfg(feature = "key-value-db")))]
/// Simple key-value embedded database based on [`sled`]
Sled(<sled::Tree as BatchDatabase>::Batch),
#[cfg(feature = "sqlite")]
#[cfg_attr(docsrs, doc(cfg(feature = "sqlite")))]
/// Sqlite embedded database using [`rusqlite`]
Sqlite(<sqlite::SqliteDatabase as BatchDatabase>::Batch),
}
impl_from!(
@@ -103,6 +114,7 @@ impl_from!(
Memory,
);
impl_from!(<sled::Tree as BatchDatabase>::Batch, AnyBatch, Sled, #[cfg(feature = "key-value-db")]);
impl_from!(<sqlite::SqliteDatabase as BatchDatabase>::Batch, AnyBatch, Sqlite, #[cfg(feature = "sqlite")]);
impl BatchOperations for AnyDatabase {
fn set_script_pubkey(
@@ -300,19 +312,26 @@ impl BatchDatabase for AnyDatabase {
AnyDatabase::Memory(inner) => inner.begin_batch().into(),
#[cfg(feature = "key-value-db")]
AnyDatabase::Sled(inner) => inner.begin_batch().into(),
#[cfg(feature = "sqlite")]
AnyDatabase::Sqlite(inner) => inner.begin_batch().into(),
}
}
fn commit_batch(&mut self, batch: Self::Batch) -> Result<(), Error> {
match self {
AnyDatabase::Memory(db) => match batch {
AnyBatch::Memory(batch) => db.commit_batch(batch),
#[cfg(feature = "key-value-db")]
_ => unimplemented!("Sled batch shouldn't be used with Memory db."),
#[cfg(any(feature = "key-value-db", feature = "sqlite"))]
_ => unimplemented!("Other batch shouldn't be used with Memory db."),
},
#[cfg(feature = "key-value-db")]
AnyDatabase::Sled(db) => match batch {
AnyBatch::Sled(batch) => db.commit_batch(batch),
_ => unimplemented!("Memory batch shouldn't be used with Sled db."),
_ => unimplemented!("Other batch shouldn't be used with Sled db."),
},
#[cfg(feature = "sqlite")]
AnyDatabase::Sqlite(db) => match batch {
AnyBatch::Sqlite(batch) => db.commit_batch(batch),
_ => unimplemented!("Other batch shouldn't be used with Sqlite db."),
},
}
}
@@ -337,6 +356,23 @@ impl ConfigurableDatabase for sled::Tree {
}
}
/// Configuration type for a [`sqlite::SqliteDatabase`] database
#[cfg(feature = "sqlite")]
#[derive(Debug, serde::Serialize, serde::Deserialize)]
pub struct SqliteDbConfiguration {
    /// Main directory of the db
    // NOTE(review): this value is passed straight to `SqliteDatabase::new`,
    // whose own field doc describes it as the path of the sqlite file itself
    // — confirm whether a directory or a file path is expected.
    pub path: String,
}
#[cfg(feature = "sqlite")]
impl ConfigurableDatabase for sqlite::SqliteDatabase {
    type Config = SqliteDbConfiguration;
    // Returns Ok unconditionally here, but `SqliteDatabase::new` itself
    // unwraps the underlying connection and may panic on I/O errors, so
    // this is not actually infallible in practice.
    fn from_config(config: &Self::Config) -> Result<Self, Error> {
        Ok(sqlite::SqliteDatabase::new(config.path.clone()))
    }
}
/// Type that can contain any of the database configurations defined by the library
///
/// This allows storing a single configuration that can be loaded into an [`AnyDatabase`]
@@ -350,6 +386,10 @@ pub enum AnyDatabaseConfig {
#[cfg_attr(docsrs, doc(cfg(feature = "key-value-db")))]
/// Simple key-value embedded database based on [`sled`]
Sled(SledDbConfiguration),
#[cfg(feature = "sqlite")]
#[cfg_attr(docsrs, doc(cfg(feature = "sqlite")))]
/// Sqlite embedded database using [`rusqlite`]
Sqlite(SqliteDbConfiguration),
}
impl ConfigurableDatabase for AnyDatabase {
@@ -362,9 +402,14 @@ impl ConfigurableDatabase for AnyDatabase {
}
#[cfg(feature = "key-value-db")]
AnyDatabaseConfig::Sled(inner) => AnyDatabase::Sled(sled::Tree::from_config(inner)?),
#[cfg(feature = "sqlite")]
AnyDatabaseConfig::Sqlite(inner) => {
AnyDatabase::Sqlite(sqlite::SqliteDatabase::from_config(inner)?)
}
})
}
}
impl_from!((), AnyDatabaseConfig, Memory,);
impl_from!(SledDbConfiguration, AnyDatabaseConfig, Sled, #[cfg(feature = "key-value-db")]);
impl_from!(SqliteDbConfiguration, AnyDatabaseConfig, Sqlite, #[cfg(feature = "sqlite")]);

View File

@@ -456,20 +456,21 @@ impl ConfigurableDatabase for MemoryDatabase {
/// don't have `test` set.
macro_rules! populate_test_db {
($db:expr, $tx_meta:expr, $current_height:expr$(,)?) => {{
use std::str::FromStr;
use $crate::database::BatchOperations;
let mut db = $db;
let tx_meta = $tx_meta;
let current_height: Option<u32> = $current_height;
let tx = Transaction {
let tx = $crate::bitcoin::Transaction {
version: 1,
lock_time: 0,
input: vec![],
output: tx_meta
.output
.iter()
.map(|out_meta| bitcoin::TxOut {
.map(|out_meta| $crate::bitcoin::TxOut {
value: out_meta.value,
script_pubkey: bitcoin::Address::from_str(&out_meta.to_address)
script_pubkey: $crate::bitcoin::Address::from_str(&out_meta.to_address)
.unwrap()
.script_pubkey(),
})
@@ -477,12 +478,14 @@ macro_rules! populate_test_db {
};
let txid = tx.txid();
let confirmation_time = tx_meta.min_confirmations.map(|conf| ConfirmationTime {
height: current_height.unwrap().checked_sub(conf as u32).unwrap(),
timestamp: 0,
});
let confirmation_time = tx_meta
.min_confirmations
.map(|conf| $crate::ConfirmationTime {
height: current_height.unwrap().checked_sub(conf as u32).unwrap(),
timestamp: 0,
});
let tx_details = TransactionDetails {
let tx_details = $crate::TransactionDetails {
transaction: Some(tx.clone()),
txid,
fee: Some(0),
@@ -494,13 +497,13 @@ macro_rules! populate_test_db {
db.set_tx(&tx_details).unwrap();
for (vout, out) in tx.output.iter().enumerate() {
db.set_utxo(&LocalUtxo {
db.set_utxo(&$crate::LocalUtxo {
txout: out.clone(),
outpoint: OutPoint {
outpoint: $crate::bitcoin::OutPoint {
txid,
vout: vout as u32,
},
keychain: KeychainKind::External,
keychain: $crate::KeychainKind::External,
})
.unwrap();
}

View File

@@ -36,6 +36,11 @@ pub use any::{AnyDatabase, AnyDatabaseConfig};
#[cfg(feature = "key-value-db")]
pub(crate) mod keyvalue;
#[cfg(feature = "sqlite")]
pub(crate) mod sqlite;
#[cfg(feature = "sqlite")]
pub use sqlite::SqliteDatabase;
pub mod memory;
pub use memory::MemoryDatabase;

968
src/database/sqlite.rs Normal file
View File

@@ -0,0 +1,968 @@
// Bitcoin Dev Kit
// Written in 2020 by Alekos Filini <alekos.filini@gmail.com>
//
// Copyright (c) 2020-2021 Bitcoin Dev Kit Developers
//
// This file is licensed under the Apache License, Version 2.0 <LICENSE-APACHE
// or http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your option.
// You may not use this file except in accordance with one or both of these
// licenses.
use bitcoin::consensus::encode::{deserialize, serialize};
use bitcoin::hash_types::Txid;
use bitcoin::{OutPoint, Script, Transaction, TxOut};
use crate::database::{BatchDatabase, BatchOperations, Database};
use crate::error::Error;
use crate::types::*;
use rusqlite::{named_params, Connection};
// Ordered schema-migration statements, one SQL statement per entry. The
// `version` table created/populated by the first two entries records the
// schema version; presumably `get_connection` (defined elsewhere in this
// file) runs the entries that have not been applied yet — TODO confirm
// against that helper.
static MIGRATIONS: &[&str] = &[
    "CREATE TABLE version (version INTEGER)",
    "INSERT INTO version VALUES (1)",
    "CREATE TABLE script_pubkeys (keychain TEXT, child INTEGER, script BLOB);",
    "CREATE INDEX idx_keychain_child ON script_pubkeys(keychain, child);",
    "CREATE INDEX idx_script ON script_pubkeys(script);",
    "CREATE TABLE utxos (value INTEGER, keychain TEXT, vout INTEGER, txid BLOB, script BLOB);",
    "CREATE INDEX idx_txid_vout ON utxos(txid, vout);",
    "CREATE TABLE transactions (txid BLOB, raw_tx BLOB);",
    "CREATE INDEX idx_txid ON transactions(txid);",
    "CREATE TABLE transaction_details (txid BLOB, timestamp INTEGER, received INTEGER, sent INTEGER, fee INTEGER, height INTEGER, verified INTEGER DEFAULT 0);",
    "CREATE INDEX idx_txdetails_txid ON transaction_details(txid);",
    "CREATE TABLE last_derivation_indices (keychain TEXT, value INTEGER);",
    "CREATE UNIQUE INDEX idx_indices_keychain ON last_derivation_indices(keychain);",
    "CREATE TABLE checksums (keychain TEXT, checksum BLOB);",
    "CREATE INDEX idx_checksums_keychain ON checksums(keychain);",
];
/// Sqlite database stored on filesystem
///
/// This is a permanent storage solution for devices and platforms that provide a filesystem.
///
/// See [`crate::database`] for the database traits this type implements.
#[derive(Debug)]
pub struct SqliteDatabase {
    /// Path on the local filesystem to store the sqlite file
    pub path: String,
    /// A rusqlite connection object to the sqlite database
    pub connection: Connection,
}
impl SqliteDatabase {
/// Instantiate a new SqliteDatabase instance by creating a connection
/// to the database stored at path
///
/// # Panics
///
/// Panics if the connection cannot be established — the result of
/// `get_connection` is `unwrap()`ed. NOTE(review): a fallible constructor
/// returning `Result<Self, Error>` would be preferable, but would change
/// the public interface.
pub fn new(path: String) -> Self {
    // `get_connection` is defined elsewhere in this file; presumably it
    // opens the sqlite file and applies MIGRATIONS — confirm there.
    let connection = get_connection(&path).unwrap();
    SqliteDatabase { path, connection }
}
/// Insert one `(keychain, child, script)` row into the `script_pubkeys` table.
///
/// Returns the rowid of the inserted row. Note that
/// `Connection::last_insert_rowid()` reports the most recent successful
/// INSERT on this connection, so interleaving inserts on a shared
/// connection would return the wrong id — NOTE(review): assumes the
/// connection is not shared concurrently; confirm with callers.
fn insert_script_pubkey(
    &self,
    keychain: String,
    child: u32,
    script: &[u8],
) -> Result<i64, Error> {
    // prepare_cached reuses the compiled statement across calls.
    let mut statement = self.connection.prepare_cached("INSERT INTO script_pubkeys (keychain, child, script) VALUES (:keychain, :child, :script)")?;
    statement.execute(named_params! {
        ":keychain": keychain,
        ":child": child,
        ":script": script
    })?;
    Ok(self.connection.last_insert_rowid())
}
/// Insert one UTXO row into the `utxos` table.
///
/// `txid` and `script` are raw byte serializations; `value` is the output
/// amount (presumably in satoshis — confirm with callers). Returns the
/// rowid of the inserted row via `last_insert_rowid()`, which assumes no
/// concurrent inserts on this connection.
fn insert_utxo(
    &self,
    value: u64,
    keychain: String,
    vout: u32,
    txid: &[u8],
    script: &[u8],
) -> Result<i64, Error> {
    // prepare_cached reuses the compiled statement across calls.
    let mut statement = self.connection.prepare_cached("INSERT INTO utxos (value, keychain, vout, txid, script) VALUES (:value, :keychain, :vout, :txid, :script)")?;
    statement.execute(named_params! {
        ":value": value,
        ":keychain": keychain,
        ":vout": vout,
        ":txid": txid,
        ":script": script
    })?;
    Ok(self.connection.last_insert_rowid())
}
fn insert_transaction(&self, txid: &[u8], raw_tx: &[u8]) -> Result<i64, Error> {
let mut statement = self
.connection
.prepare_cached("INSERT INTO transactions (txid, raw_tx) VALUES (:txid, :raw_tx)")?;
statement.execute(named_params! {
":txid": txid,
":raw_tx": raw_tx,
})?;
Ok(self.connection.last_insert_rowid())
}
fn update_transaction(&self, txid: &[u8], raw_tx: &[u8]) -> Result<(), Error> {
let mut statement = self
.connection
.prepare_cached("UPDATE transactions SET raw_tx=:raw_tx WHERE txid=:txid")?;
statement.execute(named_params! {
":txid": txid,
":raw_tx": raw_tx,
})?;
Ok(())
}
fn insert_transaction_details(&self, transaction: &TransactionDetails) -> Result<i64, Error> {
let (timestamp, height) = match &transaction.confirmation_time {
Some(confirmation_time) => (
Some(confirmation_time.timestamp),
Some(confirmation_time.height),
),
None => (None, None),
};
let txid: &[u8] = &transaction.txid;
let mut statement = self.connection.prepare_cached("INSERT INTO transaction_details (txid, timestamp, received, sent, fee, height, verified) VALUES (:txid, :timestamp, :received, :sent, :fee, :height, :verified)")?;
statement.execute(named_params! {
":txid": txid,
":timestamp": timestamp,
":received": transaction.received,
":sent": transaction.sent,
":fee": transaction.fee,
":height": height,
":verified": transaction.verified
})?;
Ok(self.connection.last_insert_rowid())
}
fn update_transaction_details(&self, transaction: &TransactionDetails) -> Result<(), Error> {
let (timestamp, height) = match &transaction.confirmation_time {
Some(confirmation_time) => (
Some(confirmation_time.timestamp),
Some(confirmation_time.height),
),
None => (None, None),
};
let txid: &[u8] = &transaction.txid;
let mut statement = self.connection.prepare_cached("UPDATE transaction_details SET timestamp=:timestamp, received=:received, sent=:sent, fee=:fee, height=:height, verified=:verified WHERE txid=:txid")?;
statement.execute(named_params! {
":txid": txid,
":timestamp": timestamp,
":received": transaction.received,
":sent": transaction.sent,
":fee": transaction.fee,
":height": height,
":verified": transaction.verified,
})?;
Ok(())
}
fn insert_last_derivation_index(&self, keychain: String, value: u32) -> Result<i64, Error> {
let mut statement = self.connection.prepare_cached(
"INSERT INTO last_derivation_indices (keychain, value) VALUES (:keychain, :value)",
)?;
statement.execute(named_params! {
":keychain": keychain,
":value": value,
})?;
Ok(self.connection.last_insert_rowid())
}
fn insert_checksum(&self, keychain: String, checksum: &[u8]) -> Result<i64, Error> {
let mut statement = self.connection.prepare_cached(
"INSERT INTO checksums (keychain, checksum) VALUES (:keychain, :checksum)",
)?;
statement.execute(named_params! {
":keychain": keychain,
":checksum": checksum,
})?;
Ok(self.connection.last_insert_rowid())
}
fn update_last_derivation_index(&self, keychain: String, value: u32) -> Result<(), Error> {
let mut statement = self.connection.prepare_cached(
"INSERT INTO last_derivation_indices (keychain, value) VALUES (:keychain, :value) ON CONFLICT(keychain) DO UPDATE SET value=:value WHERE keychain=:keychain",
)?;
statement.execute(named_params! {
":keychain": keychain,
":value": value,
})?;
Ok(())
}
fn select_script_pubkeys(&self) -> Result<Vec<Script>, Error> {
let mut statement = self
.connection
.prepare_cached("SELECT script FROM script_pubkeys")?;
let mut scripts: Vec<Script> = vec![];
let mut rows = statement.query([])?;
while let Some(row) = rows.next()? {
let raw_script: Vec<u8> = row.get(0)?;
scripts.push(raw_script.into());
}
Ok(scripts)
}
fn select_script_pubkeys_by_keychain(&self, keychain: String) -> Result<Vec<Script>, Error> {
let mut statement = self
.connection
.prepare_cached("SELECT script FROM script_pubkeys WHERE keychain=:keychain")?;
let mut scripts: Vec<Script> = vec![];
let mut rows = statement.query(named_params! {":keychain": keychain})?;
while let Some(row) = rows.next()? {
let raw_script: Vec<u8> = row.get(0)?;
scripts.push(raw_script.into());
}
Ok(scripts)
}
fn select_script_pubkey_by_path(
&self,
keychain: String,
child: u32,
) -> Result<Option<Script>, Error> {
let mut statement = self.connection.prepare_cached(
"SELECT script FROM script_pubkeys WHERE keychain=:keychain AND child=:child",
)?;
let mut rows = statement.query(named_params! {":keychain": keychain,":child": child})?;
match rows.next()? {
Some(row) => {
let script: Vec<u8> = row.get(0)?;
let script: Script = script.into();
Ok(Some(script))
}
None => Ok(None),
}
}
fn select_script_pubkey_by_script(
&self,
script: &[u8],
) -> Result<Option<(KeychainKind, u32)>, Error> {
let mut statement = self
.connection
.prepare_cached("SELECT keychain, child FROM script_pubkeys WHERE script=:script")?;
let mut rows = statement.query(named_params! {":script": script})?;
match rows.next()? {
Some(row) => {
let keychain: String = row.get(0)?;
let keychain: KeychainKind = serde_json::from_str(&keychain)?;
let child: u32 = row.get(1)?;
Ok(Some((keychain, child)))
}
None => Ok(None),
}
}
fn select_utxos(&self) -> Result<Vec<LocalUtxo>, Error> {
let mut statement = self
.connection
.prepare_cached("SELECT value, keychain, vout, txid, script FROM utxos")?;
let mut utxos: Vec<LocalUtxo> = vec![];
let mut rows = statement.query([])?;
while let Some(row) = rows.next()? {
let value = row.get(0)?;
let keychain: String = row.get(1)?;
let vout = row.get(2)?;
let txid: Vec<u8> = row.get(3)?;
let script: Vec<u8> = row.get(4)?;
let keychain: KeychainKind = serde_json::from_str(&keychain)?;
utxos.push(LocalUtxo {
outpoint: OutPoint::new(deserialize(&txid)?, vout),
txout: TxOut {
value,
script_pubkey: script.into(),
},
keychain,
})
}
Ok(utxos)
}
fn select_utxo_by_outpoint(
&self,
txid: &[u8],
vout: u32,
) -> Result<Option<(u64, KeychainKind, Script)>, Error> {
let mut statement = self.connection.prepare_cached(
"SELECT value, keychain, script FROM utxos WHERE txid=:txid AND vout=:vout",
)?;
let mut rows = statement.query(named_params! {":txid": txid,":vout": vout})?;
match rows.next()? {
Some(row) => {
let value: u64 = row.get(0)?;
let keychain: String = row.get(1)?;
let keychain: KeychainKind = serde_json::from_str(&keychain)?;
let script: Vec<u8> = row.get(2)?;
let script: Script = script.into();
Ok(Some((value, keychain, script)))
}
None => Ok(None),
}
}
fn select_transactions(&self) -> Result<Vec<Transaction>, Error> {
let mut statement = self
.connection
.prepare_cached("SELECT raw_tx FROM transactions")?;
let mut txs: Vec<Transaction> = vec![];
let mut rows = statement.query([])?;
while let Some(row) = rows.next()? {
let raw_tx: Vec<u8> = row.get(0)?;
let tx: Transaction = deserialize(&raw_tx)?;
txs.push(tx);
}
Ok(txs)
}
fn select_transaction_by_txid(&self, txid: &[u8]) -> Result<Option<Transaction>, Error> {
let mut statement = self
.connection
.prepare_cached("SELECT raw_tx FROM transactions WHERE txid=:txid")?;
let mut rows = statement.query(named_params! {":txid": txid})?;
match rows.next()? {
Some(row) => {
let raw_tx: Vec<u8> = row.get(0)?;
let tx: Transaction = deserialize(&raw_tx)?;
Ok(Some(tx))
}
None => Ok(None),
}
}
fn select_transaction_details_with_raw(&self) -> Result<Vec<TransactionDetails>, Error> {
let mut statement = self.connection.prepare_cached("SELECT transaction_details.txid, transaction_details.timestamp, transaction_details.received, transaction_details.sent, transaction_details.fee, transaction_details.height, transaction_details.verified, transactions.raw_tx FROM transaction_details, transactions WHERE transaction_details.txid = transactions.txid")?;
let mut transaction_details: Vec<TransactionDetails> = vec![];
let mut rows = statement.query([])?;
while let Some(row) = rows.next()? {
let txid: Vec<u8> = row.get(0)?;
let txid: Txid = deserialize(&txid)?;
let timestamp: Option<u64> = row.get(1)?;
let received: u64 = row.get(2)?;
let sent: u64 = row.get(3)?;
let fee: Option<u64> = row.get(4)?;
let height: Option<u32> = row.get(5)?;
let verified: bool = row.get(6)?;
let raw_tx: Option<Vec<u8>> = row.get(7)?;
let tx: Option<Transaction> = match raw_tx {
Some(raw_tx) => {
let tx: Transaction = deserialize(&raw_tx)?;
Some(tx)
}
None => None,
};
let confirmation_time = match (height, timestamp) {
(Some(height), Some(timestamp)) => Some(ConfirmationTime { height, timestamp }),
_ => None,
};
transaction_details.push(TransactionDetails {
transaction: tx,
txid,
received,
sent,
fee,
confirmation_time,
verified,
});
}
Ok(transaction_details)
}
fn select_transaction_details(&self) -> Result<Vec<TransactionDetails>, Error> {
let mut statement = self.connection.prepare_cached(
"SELECT txid, timestamp, received, sent, fee, height, verified FROM transaction_details",
)?;
let mut transaction_details: Vec<TransactionDetails> = vec![];
let mut rows = statement.query([])?;
while let Some(row) = rows.next()? {
let txid: Vec<u8> = row.get(0)?;
let txid: Txid = deserialize(&txid)?;
let timestamp: Option<u64> = row.get(1)?;
let received: u64 = row.get(2)?;
let sent: u64 = row.get(3)?;
let fee: Option<u64> = row.get(4)?;
let height: Option<u32> = row.get(5)?;
let verified: bool = row.get(6)?;
let confirmation_time = match (height, timestamp) {
(Some(height), Some(timestamp)) => Some(ConfirmationTime { height, timestamp }),
_ => None,
};
transaction_details.push(TransactionDetails {
transaction: None,
txid,
received,
sent,
fee,
confirmation_time,
verified,
});
}
Ok(transaction_details)
}
fn select_transaction_details_by_txid(
&self,
txid: &[u8],
) -> Result<Option<TransactionDetails>, Error> {
let mut statement = self.connection.prepare_cached("SELECT transaction_details.timestamp, transaction_details.received, transaction_details.sent, transaction_details.fee, transaction_details.height, transaction_details.verified, transactions.raw_tx FROM transaction_details, transactions WHERE transaction_details.txid=transactions.txid AND transaction_details.txid=:txid")?;
let mut rows = statement.query(named_params! { ":txid": txid })?;
match rows.next()? {
Some(row) => {
let timestamp: Option<u64> = row.get(0)?;
let received: u64 = row.get(1)?;
let sent: u64 = row.get(2)?;
let fee: Option<u64> = row.get(3)?;
let height: Option<u32> = row.get(4)?;
let verified: bool = row.get(5)?;
let raw_tx: Option<Vec<u8>> = row.get(6)?;
let tx: Option<Transaction> = match raw_tx {
Some(raw_tx) => {
let tx: Transaction = deserialize(&raw_tx)?;
Some(tx)
}
None => None,
};
let confirmation_time = match (height, timestamp) {
(Some(height), Some(timestamp)) => Some(ConfirmationTime { height, timestamp }),
_ => None,
};
Ok(Some(TransactionDetails {
transaction: tx,
txid: deserialize(txid)?,
received,
sent,
fee,
confirmation_time,
verified,
}))
}
None => Ok(None),
}
}
fn select_last_derivation_index_by_keychain(
&self,
keychain: String,
) -> Result<Option<u32>, Error> {
let mut statement = self
.connection
.prepare_cached("SELECT value FROM last_derivation_indices WHERE keychain=:keychain")?;
let mut rows = statement.query(named_params! {":keychain": keychain})?;
match rows.next()? {
Some(row) => {
let value: u32 = row.get(0)?;
Ok(Some(value))
}
None => Ok(None),
}
}
fn select_checksum_by_keychain(&self, keychain: String) -> Result<Option<Vec<u8>>, Error> {
let mut statement = self
.connection
.prepare_cached("SELECT checksum FROM checksums WHERE keychain=:keychain")?;
let mut rows = statement.query(named_params! {":keychain": keychain})?;
match rows.next()? {
Some(row) => {
let checksum: Vec<u8> = row.get(0)?;
Ok(Some(checksum))
}
None => Ok(None),
}
}
fn delete_script_pubkey_by_path(&self, keychain: String, child: u32) -> Result<(), Error> {
let mut statement = self.connection.prepare_cached(
"DELETE FROM script_pubkeys WHERE keychain=:keychain AND child=:child",
)?;
statement.execute(named_params! {
":keychain": keychain,
":child": child
})?;
Ok(())
}
fn delete_script_pubkey_by_script(&self, script: &[u8]) -> Result<(), Error> {
let mut statement = self
.connection
.prepare_cached("DELETE FROM script_pubkeys WHERE script=:script")?;
statement.execute(named_params! {
":script": script
})?;
Ok(())
}
fn delete_utxo_by_outpoint(&self, txid: &[u8], vout: u32) -> Result<(), Error> {
let mut statement = self
.connection
.prepare_cached("DELETE FROM utxos WHERE txid=:txid AND vout=:vout")?;
statement.execute(named_params! {
":txid": txid,
":vout": vout
})?;
Ok(())
}
fn delete_transaction_by_txid(&self, txid: &[u8]) -> Result<(), Error> {
let mut statement = self
.connection
.prepare_cached("DELETE FROM transactions WHERE txid=:txid")?;
statement.execute(named_params! {":txid": txid})?;
Ok(())
}
fn delete_transaction_details_by_txid(&self, txid: &[u8]) -> Result<(), Error> {
let mut statement = self
.connection
.prepare_cached("DELETE FROM transaction_details WHERE txid=:txid")?;
statement.execute(named_params! {":txid": txid})?;
Ok(())
}
fn delete_last_derivation_index_by_keychain(&self, keychain: String) -> Result<(), Error> {
let mut statement = self
.connection
.prepare_cached("DELETE FROM last_derivation_indices WHERE keychain=:keychain")?;
statement.execute(named_params! {
":keychain": &keychain
})?;
Ok(())
}
}
impl BatchOperations for SqliteDatabase {
    /// Store the script pubkey derived at `(keychain, child)`.
    fn set_script_pubkey(
        &mut self,
        script: &Script,
        keychain: KeychainKind,
        child: u32,
    ) -> Result<(), Error> {
        // Keychains are persisted as their JSON encoding so they round-trip exactly.
        let keychain_json = serde_json::to_string(&keychain)?;
        self.insert_script_pubkey(keychain_json, child, script.as_bytes())
            .map(|_| ())
    }
    /// Store a UTXO, keyed by its outpoint.
    fn set_utxo(&mut self, utxo: &LocalUtxo) -> Result<(), Error> {
        let LocalUtxo {
            outpoint,
            txout,
            keychain,
        } = utxo;
        self.insert_utxo(
            txout.value,
            serde_json::to_string(keychain)?,
            outpoint.vout,
            &outpoint.txid,
            txout.script_pubkey.as_bytes(),
        )?;
        Ok(())
    }
    /// Store a raw transaction, updating in place when the txid already exists.
    fn set_raw_tx(&mut self, transaction: &Transaction) -> Result<(), Error> {
        let txid = transaction.txid();
        let raw_tx = serialize(transaction);
        if self.select_transaction_by_txid(&txid)?.is_some() {
            self.update_transaction(&txid, &raw_tx)?;
        } else {
            self.insert_transaction(&txid, &raw_tx)?;
        }
        Ok(())
    }
    /// Store transaction details (upsert), and the raw transaction too when
    /// the caller supplied one.
    fn set_tx(&mut self, transaction: &TransactionDetails) -> Result<(), Error> {
        if self
            .select_transaction_details_by_txid(&transaction.txid)?
            .is_some()
        {
            self.update_transaction_details(transaction)?;
        } else {
            self.insert_transaction_details(transaction)?;
        }
        if let Some(raw) = &transaction.transaction {
            self.set_raw_tx(raw)?;
        }
        Ok(())
    }
    /// Record the last derivation index used for a keychain (upsert).
    fn set_last_index(&mut self, keychain: KeychainKind, value: u32) -> Result<(), Error> {
        let keychain_json = serde_json::to_string(&keychain)?;
        self.update_last_derivation_index(keychain_json, value)
    }
    /// Delete and return the script pubkey at `(keychain, child)`, if any.
    fn del_script_pubkey_from_path(
        &mut self,
        keychain: KeychainKind,
        child: u32,
    ) -> Result<Option<Script>, Error> {
        let keychain_json = serde_json::to_string(&keychain)?;
        let found = self.select_script_pubkey_by_path(keychain_json.clone(), child)?;
        if found.is_some() {
            self.delete_script_pubkey_by_path(keychain_json, child)?;
        }
        Ok(found)
    }
    /// Delete a script pubkey row and return its `(keychain, child)` path, if any.
    fn del_path_from_script_pubkey(
        &mut self,
        script: &Script,
    ) -> Result<Option<(KeychainKind, u32)>, Error> {
        let script_bytes = script.as_bytes();
        let found = self.select_script_pubkey_by_script(script_bytes)?;
        if found.is_some() {
            self.delete_script_pubkey_by_script(script_bytes)?;
        }
        Ok(found)
    }
    /// Delete and return the UTXO stored at `outpoint`, if any.
    fn del_utxo(&mut self, outpoint: &OutPoint) -> Result<Option<LocalUtxo>, Error> {
        let (value, keychain, script_pubkey) =
            match self.select_utxo_by_outpoint(&outpoint.txid, outpoint.vout)? {
                Some(row) => row,
                None => return Ok(None),
            };
        self.delete_utxo_by_outpoint(&outpoint.txid, outpoint.vout)?;
        Ok(Some(LocalUtxo {
            outpoint: *outpoint,
            txout: TxOut {
                value,
                script_pubkey,
            },
            keychain,
        }))
    }
    /// Delete and return the raw transaction for `txid`, if any.
    fn del_raw_tx(&mut self, txid: &Txid) -> Result<Option<Transaction>, Error> {
        let found = self.select_transaction_by_txid(txid)?;
        if found.is_some() {
            self.delete_transaction_by_txid(txid)?;
        }
        Ok(found)
    }
    /// Delete and return the details for `txid`; when `include_raw` is set the
    /// raw transaction row is removed as well.
    fn del_tx(
        &mut self,
        txid: &Txid,
        include_raw: bool,
    ) -> Result<Option<TransactionDetails>, Error> {
        let details = match self.select_transaction_details_by_txid(txid)? {
            Some(details) => details,
            None => return Ok(None),
        };
        self.delete_transaction_details_by_txid(txid)?;
        if include_raw {
            self.delete_transaction_by_txid(txid)?;
        }
        Ok(Some(details))
    }
    /// Delete and return the last derivation index for a keychain, if any.
    fn del_last_index(&mut self, keychain: KeychainKind) -> Result<Option<u32>, Error> {
        let keychain_json = serde_json::to_string(&keychain)?;
        let found = self.select_last_derivation_index_by_keychain(keychain_json.clone())?;
        if found.is_some() {
            self.delete_last_derivation_index_by_keychain(keychain_json)?;
        }
        Ok(found)
    }
}
impl Database for SqliteDatabase {
    /// Verify the descriptor checksum for a keychain, storing it on first use.
    fn check_descriptor_checksum<B: AsRef<[u8]>>(
        &mut self,
        keychain: KeychainKind,
        bytes: B,
    ) -> Result<(), Error> {
        let keychain_json = serde_json::to_string(&keychain)?;
        if let Some(stored) = self.select_checksum_by_keychain(keychain_json.clone())? {
            // A checksum was already recorded: the descriptor must not have changed.
            if stored == bytes.as_ref() {
                Ok(())
            } else {
                Err(Error::ChecksumMismatch)
            }
        } else {
            // First time this keychain is seen: persist the checksum for later checks.
            self.insert_checksum(keychain_json, bytes.as_ref())?;
            Ok(())
        }
    }
    /// List stored script pubkeys, optionally restricted to one keychain.
    fn iter_script_pubkeys(&self, keychain: Option<KeychainKind>) -> Result<Vec<Script>, Error> {
        if let Some(keychain) = keychain {
            self.select_script_pubkeys_by_keychain(serde_json::to_string(&keychain)?)
        } else {
            self.select_script_pubkeys()
        }
    }
    /// List all stored UTXOs.
    fn iter_utxos(&self) -> Result<Vec<LocalUtxo>, Error> {
        self.select_utxos()
    }
    /// List all stored raw transactions.
    fn iter_raw_txs(&self) -> Result<Vec<Transaction>, Error> {
        self.select_transactions()
    }
    /// List all transaction details, with or without the raw transactions.
    fn iter_txs(&self, include_raw: bool) -> Result<Vec<TransactionDetails>, Error> {
        if include_raw {
            self.select_transaction_details_with_raw()
        } else {
            self.select_transaction_details()
        }
    }
    /// Fetch the script pubkey derived at `(keychain, child)`, if stored.
    fn get_script_pubkey_from_path(
        &self,
        keychain: KeychainKind,
        child: u32,
    ) -> Result<Option<Script>, Error> {
        let keychain_json = serde_json::to_string(&keychain)?;
        self.select_script_pubkey_by_path(keychain_json, child)
    }
    /// Reverse lookup: the `(keychain, child)` path of a script pubkey, if stored.
    fn get_path_from_script_pubkey(
        &self,
        script: &Script,
    ) -> Result<Option<(KeychainKind, u32)>, Error> {
        self.select_script_pubkey_by_script(script.as_bytes())
    }
    /// Fetch the UTXO stored at `outpoint`, if any.
    fn get_utxo(&self, outpoint: &OutPoint) -> Result<Option<LocalUtxo>, Error> {
        Ok(self
            .select_utxo_by_outpoint(&outpoint.txid, outpoint.vout)?
            .map(|(value, keychain, script_pubkey)| LocalUtxo {
                outpoint: *outpoint,
                txout: TxOut {
                    value,
                    script_pubkey,
                },
                keychain,
            }))
    }
    /// Fetch the raw transaction for `txid`, if stored.
    fn get_raw_tx(&self, txid: &Txid) -> Result<Option<Transaction>, Error> {
        self.select_transaction_by_txid(txid)
    }
    /// Fetch the details for `txid`; strip the raw transaction unless requested.
    fn get_tx(&self, txid: &Txid, include_raw: bool) -> Result<Option<TransactionDetails>, Error> {
        Ok(self
            .select_transaction_details_by_txid(txid)?
            .map(|mut details| {
                if !include_raw {
                    details.transaction = None;
                }
                details
            }))
    }
    /// Fetch the last derivation index recorded for a keychain, if any.
    fn get_last_index(&self, keychain: KeychainKind) -> Result<Option<u32>, Error> {
        self.select_last_derivation_index_by_keychain(serde_json::to_string(&keychain)?)
    }
    /// Advance the keychain's derivation index by one (starting at 0 when the
    /// keychain has no index yet) and return the new value.
    fn increment_last_index(&mut self, keychain: KeychainKind) -> Result<u32, Error> {
        let keychain_json = serde_json::to_string(&keychain)?;
        let next = match self.get_last_index(keychain)? {
            Some(current) => {
                let next = current + 1;
                self.update_last_derivation_index(keychain_json, next)?;
                next
            }
            None => {
                self.insert_last_derivation_index(keychain_json, 0)?;
                0
            }
        };
        Ok(next)
    }
    /// Every write already hits sqlite directly, so there is nothing to flush.
    fn flush(&mut self) -> Result<(), Error> {
        Ok(())
    }
}
impl BatchDatabase for SqliteDatabase {
    type Batch = SqliteDatabase;
    /// Open a batch: a second connection to the same file with a transaction
    /// started on it. Writes through the batch land on commit_batch.
    fn begin_batch(&self) -> Self::Batch {
        let batch = SqliteDatabase::new(self.path.clone());
        // The trait signature cannot return an error here, hence the unwrap.
        batch.connection.execute("BEGIN TRANSACTION", []).unwrap();
        batch
    }
    /// Commit the batch's open transaction, making its writes visible.
    fn commit_batch(&mut self, batch: Self::Batch) -> Result<(), Error> {
        batch.connection.execute("COMMIT TRANSACTION", [])?;
        Ok(())
    }
}
/// Open (creating if necessary) the sqlite file at `path` and bring its
/// schema up to date before handing the connection back.
pub fn get_connection(path: &str) -> Result<Connection, Error> {
    let conn = Connection::open(path)?;
    migrate(&conn)?;
    Ok(conn)
}
pub fn get_schema_version(conn: &Connection) -> rusqlite::Result<i32> {
let statement = conn.prepare_cached("SELECT version FROM version");
match statement {
Err(rusqlite::Error::SqliteFailure(e, Some(msg))) => {
if msg == "no such table: version" {
Ok(0)
} else {
Err(rusqlite::Error::SqliteFailure(e, Some(msg)))
}
}
Ok(mut stmt) => {
let mut rows = stmt.query([])?;
match rows.next()? {
Some(row) => {
let version: i32 = row.get(0)?;
Ok(version)
}
None => Ok(0),
}
}
_ => Ok(0),
}
}
/// Record `version` as the current schema version, returning the number of
/// rows changed. Assumes the single-row `version` table already exists
/// (created by the migrations), since an UPDATE cannot insert the row.
pub fn set_schema_version(conn: &Connection, version: i32) -> rusqlite::Result<usize> {
    conn.execute(
        "UPDATE version SET version=:version",
        named_params! {":version": version},
    )
}
pub fn migrate(conn: &Connection) -> rusqlite::Result<()> {
let version = get_schema_version(conn)?;
let stmts = &MIGRATIONS[(version as usize)..];
let mut i: i32 = version;
if version == MIGRATIONS.len() as i32 {
log::info!("db up to date, no migration needed");
return Ok(());
}
for stmt in stmts {
let res = conn.execute(stmt, []);
if res.is_err() {
println!("migration failed on:\n{}\n{:?}", stmt, res);
break;
}
i += 1;
}
set_schema_version(conn, i)?;
Ok(())
}
#[cfg(test)]
pub mod test {
    use crate::database::SqliteDatabase;
    use std::time::{SystemTime, UNIX_EPOCH};
    /// Build a throw-away database under the system temp directory, named by
    /// the current time in nanoseconds so concurrent test runs never collide.
    fn get_database() -> SqliteDatabase {
        let nanos = SystemTime::now()
            .duration_since(UNIX_EPOCH)
            .unwrap()
            .as_nanos();
        let db_path = std::env::temp_dir().join(format!("bdk_{}", nanos));
        SqliteDatabase::new(db_path.to_str().unwrap().to_string())
    }
    #[test]
    fn test_script_pubkey() {
        crate::database::test::test_script_pubkey(get_database());
    }
    #[test]
    fn test_batch_script_pubkey() {
        crate::database::test::test_batch_script_pubkey(get_database());
    }
    #[test]
    fn test_iter_script_pubkey() {
        crate::database::test::test_iter_script_pubkey(get_database());
    }
    #[test]
    fn test_del_script_pubkey() {
        crate::database::test::test_del_script_pubkey(get_database());
    }
    #[test]
    fn test_utxo() {
        crate::database::test::test_utxo(get_database());
    }
    #[test]
    fn test_raw_tx() {
        crate::database::test::test_raw_tx(get_database());
    }
    #[test]
    fn test_tx() {
        crate::database::test::test_tx(get_database());
    }
    #[test]
    fn test_last_index() {
        crate::database::test::test_last_index(get_database());
    }
}

View File

@@ -571,8 +571,9 @@ macro_rules! fragment {
( pk ( $key:expr ) ) => ({
$crate::fragment!(c:pk_k ( $key ))
});
( pk_h ( $key_hash:expr ) ) => ({
$crate::impl_leaf_opcode_value!(PkH, $key_hash)
( pk_h ( $key:expr ) ) => ({
let secp = $crate::bitcoin::secp256k1::Secp256k1::new();
$crate::keys::make_pkh($key, &secp)
});
( after ( $value:expr ) ) => ({
$crate::impl_leaf_opcode_value!(After, $value)
@@ -601,6 +602,9 @@ macro_rules! fragment {
( and_or ( $( $inner:tt )* ) ) => ({
$crate::impl_node_opcode_three!(AndOr, $( $inner )*)
});
( andor ( $( $inner:tt )* ) ) => ({
$crate::impl_node_opcode_three!(AndOr, $( $inner )*)
});
( or_b ( $( $inner:tt )* ) ) => ({
$crate::impl_node_opcode_two!(OrB, $( $inner )*)
});

View File

@@ -47,14 +47,12 @@ use bitcoin::util::bip32::Fingerprint;
use bitcoin::PublicKey;
use miniscript::descriptor::{DescriptorPublicKey, ShInner, SortedMultiVec, WshInner};
use miniscript::{
Descriptor, Miniscript, MiniscriptKey, Satisfier, ScriptContext, Terminal, ToPublicKey,
};
use miniscript::{Descriptor, Miniscript, MiniscriptKey, Satisfier, ScriptContext, Terminal};
#[allow(unused_imports)]
use log::{debug, error, info, trace};
use crate::descriptor::{DerivedDescriptorKey, ExtractPolicy};
use crate::descriptor::ExtractPolicy;
use crate::wallet::signer::{SignerId, SignersContainer};
use crate::wallet::utils::{self, After, Older, SecpCtx};
@@ -88,13 +86,6 @@ impl PkOrF {
},
}
}
fn from_key_hash(k: hash160::Hash) -> Self {
PkOrF {
pubkey_hash: Some(k),
..Default::default()
}
}
}
/// An item that needs to be satisfied
@@ -779,25 +770,6 @@ fn signature_in_psbt(psbt: &Psbt, key: &DescriptorPublicKey, secp: &SecpCtx) ->
})
}
fn signature_key(
key: &<DescriptorPublicKey as MiniscriptKey>::Hash,
signers: &SignersContainer,
secp: &SecpCtx,
) -> Policy {
let key_hash = DerivedDescriptorKey::new(key.clone(), secp)
.to_public_key()
.to_pubkeyhash();
let mut policy: Policy = SatisfiableItem::Signature(PkOrF::from_key_hash(key_hash)).into();
if signers.find(SignerId::PkHash(key_hash)).is_some() {
policy.contribution = Satisfaction::Complete {
condition: Default::default(),
}
}
policy
}
impl<Ctx: ScriptContext> ExtractPolicy for Miniscript<DescriptorPublicKey, Ctx> {
fn extract_policy(
&self,
@@ -809,7 +781,7 @@ impl<Ctx: ScriptContext> ExtractPolicy for Miniscript<DescriptorPublicKey, Ctx>
// Leaves
Terminal::True | Terminal::False => None,
Terminal::PkK(pubkey) => Some(signature(pubkey, signers, build_sat, secp)),
Terminal::PkH(pubkey_hash) => Some(signature_key(pubkey_hash, signers, secp)),
Terminal::PkH(pubkey_hash) => Some(signature(pubkey_hash, signers, build_sat, secp)),
Terminal::After(value) => {
let mut policy: Policy = SatisfiableItem::AbsoluteTimelock { value: *value }.into();
policy.contribution = Satisfaction::Complete {
@@ -1444,6 +1416,7 @@ mod test {
const ALICE_TPRV_STR:&str = "tprv8ZgxMBicQKsPf6T5X327efHnvJDr45Xnb8W4JifNWtEoqXu9MRYS4v1oYe6DFcMVETxy5w3bqpubYRqvcVTqovG1LifFcVUuJcbwJwrhYzP";
const BOB_TPRV_STR:&str = "tprv8ZgxMBicQKsPeinZ155cJAn117KYhbaN6MV3WeG6sWhxWzcvX1eg1awd4C9GpUN1ncLEM2rzEvunAg3GizdZD4QPPCkisTz99tXXB4wZArp";
const CAROL_TPRV_STR:&str = "tprv8ZgxMBicQKsPdC3CicFifuLCEyVVdXVUNYorxUWj3iGZ6nimnLAYAY9SYB7ib8rKzRxrCKFcEytCt6szwd2GHnGPRCBLAEAoSVDefSNk4Bt";
const ALICE_BOB_PATH: &str = "m/0'";
#[test]
@@ -1602,4 +1575,28 @@ mod test {
);
//println!("{}", serde_json::to_string(&policy_expired_signed).unwrap());
}
#[test]
fn test_extract_pkh() {
let secp = Secp256k1::new();
let (prvkey_alice, _, _) = setup_keys(ALICE_TPRV_STR, ALICE_BOB_PATH, &secp);
let (prvkey_bob, _, _) = setup_keys(BOB_TPRV_STR, ALICE_BOB_PATH, &secp);
let (prvkey_carol, _, _) = setup_keys(CAROL_TPRV_STR, ALICE_BOB_PATH, &secp);
let desc = descriptor!(wsh(c: andor(
pk(prvkey_alice),
pk_k(prvkey_bob),
pk_h(prvkey_carol),
)))
.unwrap();
let (wallet_desc, keymap) = desc
.into_wallet_descriptor(&secp, Network::Testnet)
.unwrap();
let signers_container = Arc::new(SignersContainer::from(keymap));
let policy = wallet_desc.extract_policy(&signers_container, BuildSatisfaction::None, &secp);
assert!(policy.is_ok());
}
}

View File

@@ -140,6 +140,9 @@ pub enum Error {
#[cfg(feature = "rpc")]
/// Rpc client error
Rpc(core_rpc::Error),
#[cfg(feature = "sqlite")]
/// Rusqlite client error
Rusqlite(rusqlite::Error),
}
impl fmt::Display for Error {
@@ -194,6 +197,8 @@ impl_error!(electrum_client::Error, Electrum);
impl_error!(sled::Error, Sled);
#[cfg(feature = "rpc")]
impl_error!(core_rpc::Error, Rpc);
#[cfg(feature = "sqlite")]
impl_error!(rusqlite::Error, Rusqlite);
#[cfg(feature = "compact_filters")]
impl From<crate::blockchain::compact_filters::CompactFiltersError> for Error {

View File

@@ -753,6 +753,20 @@ pub fn make_pk<Pk: IntoDescriptorKey<Ctx>, Ctx: ScriptContext>(
Ok((minisc, key_map, valid_networks))
}
// Used internally by `bdk::fragment!` to build `pk_h()` fragments
#[doc(hidden)]
pub fn make_pkh<Pk: IntoDescriptorKey<Ctx>, Ctx: ScriptContext>(
descriptor_key: Pk,
secp: &SecpCtx,
) -> Result<(Miniscript<DescriptorPublicKey, Ctx>, KeyMap, ValidNetworks), DescriptorError> {
let (key, key_map, valid_networks) = descriptor_key.into_descriptor_key()?.extract(secp)?;
let minisc = Miniscript::from_ast(Terminal::PkH(key))?;
minisc.check_minsicript()?;
Ok((minisc, key_map, valid_networks))
}
// Used internally by `bdk::fragment!` to build `multi()` fragments
#[doc(hidden)]
pub fn make_multi<Pk: IntoDescriptorKey<Ctx>, Ctx: ScriptContext>(

View File

@@ -40,7 +40,7 @@
//! interact with the bitcoin P2P network.
//!
//! ```toml
//! bdk = "0.11.0"
//! bdk = "0.12.0"
//! ```
#![cfg_attr(
feature = "electrum",
@@ -244,6 +244,9 @@ pub extern crate electrum_client;
#[cfg(feature = "key-value-db")]
pub extern crate sled;
#[cfg(feature = "sqlite")]
pub extern crate rusqlite;
#[allow(unused_imports)]
#[macro_use]
pub(crate) mod error;

View File

@@ -100,8 +100,8 @@ impl TranslateDescriptor for Descriptor<DescriptorPublicKey> {
#[macro_export]
macro_rules! testutils {
( @external $descriptors:expr, $child:expr ) => ({
use bitcoin::secp256k1::Secp256k1;
use miniscript::descriptor::{Descriptor, DescriptorPublicKey, DescriptorTrait};
use $crate::bitcoin::secp256k1::Secp256k1;
use $crate::miniscript::descriptor::{Descriptor, DescriptorPublicKey, DescriptorTrait};
use $crate::testutils::TranslateDescriptor;
@@ -111,15 +111,15 @@ macro_rules! testutils {
parsed.derive_translated(&secp, $child).address(bitcoin::Network::Regtest).expect("No address form")
});
( @internal $descriptors:expr, $child:expr ) => ({
use bitcoin::secp256k1::Secp256k1;
use miniscript::descriptor::{Descriptor, DescriptorPublicKey, DescriptorTrait};
use $crate::bitcoin::secp256k1::Secp256k1;
use $crate::miniscript::descriptor::{Descriptor, DescriptorPublicKey, DescriptorTrait};
use $crate::testutils::TranslateDescriptor;
let secp = Secp256k1::new();
let parsed = Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, &$descriptors.1.expect("Missing internal descriptor")).expect("Failed to parse descriptor in `testutils!(@internal)`").0;
parsed.derive_translated(&secp, $child).address(bitcoin::Network::Regtest).expect("No address form")
parsed.derive_translated(&secp, $child).address($crate::bitcoin::Network::Regtest).expect("No address form")
});
( @e $descriptors:expr, $child:expr ) => ({ testutils!(@external $descriptors, $child) });
( @i $descriptors:expr, $child:expr ) => ({ testutils!(@internal $descriptors, $child) });
@@ -145,8 +145,8 @@ macro_rules! testutils {
let mut seed = [0u8; 32];
rand::thread_rng().fill(&mut seed[..]);
let key = bitcoin::util::bip32::ExtendedPrivKey::new_master(
bitcoin::Network::Testnet,
let key = $crate::bitcoin::util::bip32::ExtendedPrivKey::new_master(
$crate::bitcoin::Network::Testnet,
&seed,
);
@@ -158,13 +158,13 @@ macro_rules! testutils {
( @generate_wif ) => ({
use rand::Rng;
let mut key = [0u8; bitcoin::secp256k1::constants::SECRET_KEY_SIZE];
let mut key = [0u8; $crate::bitcoin::secp256k1::constants::SECRET_KEY_SIZE];
rand::thread_rng().fill(&mut key[..]);
(bitcoin::PrivateKey {
($crate::bitcoin::PrivateKey {
compressed: true,
network: bitcoin::Network::Testnet,
key: bitcoin::secp256k1::SecretKey::from_slice(&key).unwrap(),
network: $crate::bitcoin::Network::Testnet,
key: $crate::bitcoin::secp256k1::SecretKey::from_slice(&key).unwrap(),
}.to_string(), None::<String>, None::<String>)
});
@@ -181,8 +181,8 @@ macro_rules! testutils {
( @descriptors ( $external_descriptor:expr ) $( ( $internal_descriptor:expr ) )? $( ( @keys $( $keys:tt )* ) )* ) => ({
use std::str::FromStr;
use std::collections::HashMap;
use miniscript::descriptor::Descriptor;
use miniscript::TranslatePk;
use $crate::miniscript::descriptor::Descriptor;
use $crate::miniscript::TranslatePk;
#[allow(unused_assignments, unused_mut)]
let mut keys: HashMap<&'static str, (String, Option<String>, Option<String>)> = HashMap::new();