Merge branch 'upstream/master' into search-bar-network-regex

commit 6cea6ceb7e
Author: natsee
Date: 2023-12-30 19:18:49 +01:00
GPG Key ID: 233CF3150A89BED8
476 changed files with 121475 additions and 73248 deletions


@ -1,24 +1,42 @@
version: 2
updates:
- package-ecosystem: npm
versioning-strategy: increase
directory: "/backend"
schedule:
interval: daily
open-pull-requests-limit: 10
ignore:
- dependency-name: "*"
update-types: ["version-update:semver-major"]
update-types:
["version-update:semver-major", "version-update:semver-patch"]
allow:
- dependency-type: "production"
- package-ecosystem: npm
directory: "/frontend"
versioning-strategy: increase
groups:
frontend-angular-dependencies:
patterns:
- "@angular*"
- "@ng-*"
- "ngx-*"
frontend-jest-dependencies:
patterns:
- "@types/jest"
- "jest"
frontend-eslint-dependencies:
patterns:
- "@typescript-eslint*"
- "eslint"
schedule:
interval: daily
open-pull-requests-limit: 10
ignore:
- dependency-name: "*"
update-types: ["version-update:semver-major"]
update-types:
["version-update:semver-major", "version-update:semver-patch"]
allow:
- dependency-type: "production"
@ -28,7 +46,8 @@ updates:
interval: weekly
ignore:
- dependency-name: "*"
update-types: ["version-update:semver-major"]
update-types:
["version-update:semver-major", "version-update:semver-patch"]
- package-ecosystem: docker
directory: "/docker/frontend"
@ -36,7 +55,8 @@ updates:
interval: weekly
ignore:
- dependency-name: "*"
update-types: ["version-update:semver-major"]
update-types:
["version-update:semver-major", "version-update:semver-patch"]
- package-ecosystem: "github-actions"
directory: "/"
@ -44,4 +64,5 @@ updates:
interval: weekly
ignore:
- dependency-name: "*"
update-types: ["version-update:semver-major"]
update-types:
["version-update:semver-major", "version-update:semver-patch"]


@ -9,7 +9,7 @@ jobs:
if: "!contains(github.event.pull_request.labels.*.name, 'ops') && !contains(github.head_ref, 'ops/')"
strategy:
matrix:
node: ["16", "17", "18"]
node: ["18", "20"]
flavor: ["dev", "prod"]
fail-fast: false
runs-on: "ubuntu-latest"
@ -27,10 +27,17 @@ jobs:
node-version: ${{ matrix.node }}
registry-url: "https://registry.npmjs.org"
- name: Install 1.70.x Rust toolchain
uses: actions-rs/toolchain@v1
- name: Read rust-toolchain file from repository
id: gettoolchain
run: echo "::set-output name=toolchain::$(cat rust-toolchain)"
working-directory: ${{ matrix.node }}/${{ matrix.flavor }}
- name: Install ${{ steps.gettoolchain.outputs.toolchain }} Rust toolchain
# Latest version available on this commit is 1.71.1
# Commit date is Aug 3, 2023
uses: dtolnay/rust-toolchain@be73d7920c329f220ce78e0234b8f96b7ae60248
with:
toolchain: 1.70
toolchain: ${{ steps.gettoolchain.outputs.toolchain }}
- name: Install
if: ${{ matrix.flavor == 'dev'}}
@ -49,7 +56,7 @@ jobs:
- name: Unit Tests
if: ${{ matrix.flavor == 'dev'}}
run: npm run test
run: npm run test:ci
working-directory: ${{ matrix.node }}/${{ matrix.flavor }}/backend
- name: Build
@ -60,7 +67,7 @@ jobs:
if: "!contains(github.event.pull_request.labels.*.name, 'ops') && !contains(github.head_ref, 'ops/')"
strategy:
matrix:
node: ["16", "17", "18"]
node: ["18", "20"]
flavor: ["dev", "prod"]
fail-fast: false
runs-on: "ubuntu-latest"
@ -99,3 +106,6 @@ jobs:
- name: Build
run: npm run build
working-directory: ${{ matrix.node }}/${{ matrix.flavor }}/frontend
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}


@ -38,7 +38,7 @@ jobs:
- name: Setup node
uses: actions/setup-node@v3
with:
node-version: 16.15.0
node-version: 20
cache: "npm"
cache-dependency-path: ${{ matrix.module }}/frontend/package-lock.json

.github/workflows/get_backend_hash.yml (vendored, new file, 19 lines)

@ -0,0 +1,19 @@
name: 'Print backend hashes'
on: [workflow_dispatch]
jobs:
print-backend-sha:
runs-on: 'ubuntu-latest'
name: Print backend hashes
steps:
- name: Checkout
uses: actions/checkout@v3
with:
path: repo
- name: Run script
working-directory: repo
run: |
chmod +x ./scripts/get_backend_hash.sh
sh ./scripts/get_backend_hash.sh


@ -68,17 +68,17 @@ jobs:
run: echo "${{ secrets.DOCKER_PASSWORD }}" | docker login -u "${{ secrets.DOCKER_USERNAME }}" --password-stdin
- name: Checkout project
uses: actions/checkout@v3
uses: actions/checkout@v4
- name: Init repo for Dockerization
run: docker/init.sh "$TAG"
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
uses: docker/setup-qemu-action@v3
id: qemu
- name: Setup Docker buildx action
uses: docker/setup-buildx-action@v2
uses: docker/setup-buildx-action@v3
id: buildx
- name: Available platforms
@ -98,7 +98,7 @@ jobs:
docker buildx build \
--cache-from "type=local,src=/tmp/.buildx-cache" \
--cache-to "type=local,dest=/tmp/.buildx-cache" \
--platform linux/amd64,linux/arm64,linux/arm/v7 \
--platform linux/amd64,linux/arm64 \
--tag ${{ secrets.DOCKER_HUB_USER }}/${{ matrix.service }}:$TAG \
--tag ${{ secrets.DOCKER_HUB_USER }}/${{ matrix.service }}:latest \
--output "type=registry" ./${{ matrix.service }}/ \

.nvmrc (2 changes)

@ -1 +1 @@
v16.16.0
v20.8.0


@ -1,47 +0,0 @@
# If you see pwd_unknown showing up check permissions
PWD ?= pwd_unknown
# DATABASE DEPLOY FOLDER CONFIG - default ./data
ifeq ($(data),)
DATA := data
export DATA
else
DATA := $(data)
export DATA
endif
.PHONY: help
help:
@echo ''
@echo ''
@echo ' Usage: make [COMMAND]'
@echo ''
@echo ' make all # build init mempool and electrs'
@echo ' make init # setup some useful configs'
@echo ' make mempool # build a dockerized mempool.space'
@echo ' make electrs # build a docker electrs image'
@echo ''
.PHONY: init
init:
@echo ''
mkdir -p $(DATA) $(DATA)/mysql $(DATA)/mysql/data
#REF: https://github.com/mempool/mempool/blob/master/docker/README.md
cat docker/docker-compose.yml > docker-compose.yml
cat backend/mempool-config.sample.json > backend/mempool-config.json
.PHONY: mempool
mempool: init
@echo ''
docker-compose up --force-recreate --always-recreate-deps
@echo ''
.PHONY: electrs
electrum:
#REF: https://hub.docker.com/r/beli/electrum
@echo ''
docker build -f docker/electrum/Dockerfile .
@echo ''
.PHONY: all
all: init
make mempool
#######################
-include Makefile

LICENSE (35 changes)

@ -1,29 +1,28 @@
-The Mempool Open Source Project
-Copyright (c) 2019-2023 The Mempool Open Source Project Developers
+The Mempool Open Source Project®
+Copyright (c) 2019-2023 Mempool Space K.K. and other shadowy super-coders
 
 This program is free software; you can redistribute it and/or modify it under
-the terms of (at your option) either:
-
-1) the GNU Affero General Public License as published by the Free Software
-Foundation, either version 3 of the License or any later version approved by a
-proxy statement published on <https://mempool.space/about>; or
-
-2) the GNU General Public License as published by the Free Software
-Foundation, either version 3 of the License or any later version approved by a
-proxy statement published on <https://mempool.space/about>.
+the terms of the GNU Affero General Public License as published by the Free
+Software Foundation, either version 3 of the License or any later version
+approved by a proxy statement published on <https://mempool.space/about>.
 
-However, this copyright license does not include an implied right or license to
-use our trademarks: The Mempool Open Source Project™, mempool.space™, the
-mempool Logo™, the mempool.space Vertical Logo™, the mempool.space Horizontal
-Logo™, the mempool Square Logo™, and the mempool Blocks logo™ are registered
-trademarks or trademarks of Mempool Space K.K in Japan, the United States,
-and/or other countries. See our full Trademark Policy and Guidelines for more
-details, published on <https://mempool.space/trademark-policy>.
+However, this copyright license does not include an implied right or license
+to use any trademarks, service marks, logos, or trade names of Mempool Space K.K.
+or any other contributor to The Mempool Open Source Project.
+
+The Mempool Open Source Project®, Mempool Accelerator™, Mempool Enterprise®,
+Mempool Liquidity™, mempool.space®, Be your own explorer™, Explore the full
+Bitcoin ecosystem™, the mempool Logo, the mempool Square logo, the mempool Blocks
+logo, the mempool Blocks 3 | 2 logo, the mempool.space Vertical Logo, and the
+mempool.space Horizontal logo are registered trademarks or trademarks of Mempool
+Space K.K in Japan, the United States, and/or other countries.
+
+See our full Trademark Policy and Guidelines for more details, published on
+<https://mempool.space/trademark-policy>.
 
 This program is distributed in the hope that it will be useful, but WITHOUT ANY
 WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
 PARTICULAR PURPOSE. See the full license terms for more details.
 
-You should have received a copy of both the GNU Affero General Public License
-and the GNU General Public License along with this program. If not, see
-<http://www.gnu.org/licenses/>.
+You should have received a copy of the GNU Affero General Public License
+along with this program. If not, see <http://www.gnu.org/licenses/>.


@ -1 +0,0 @@
# For additional configs/scripting


@ -1,4 +1,4 @@
# The Mempool Open Source Project [![mempool](https://img.shields.io/endpoint?url=https://dashboard.cypress.io/badge/simple/ry4br7/master&style=flat-square)](https://dashboard.cypress.io/projects/ry4br7/runs)
# The Mempool Open Source Project® [![mempool](https://img.shields.io/endpoint?url=https://dashboard.cypress.io/badge/simple/ry4br7/master&style=flat-square)](https://dashboard.cypress.io/projects/ry4br7/runs)
https://user-images.githubusercontent.com/93150691/226236121-375ea64f-b4a1-4cc0-8fad-a6fb33226840.mp4
@ -12,7 +12,9 @@ It is an open-source project developed and operated for the benefit of the Bitco
Mempool can be self-hosted on a wide variety of your own hardware, ranging from a simple one-click installation on a Raspberry Pi full-node distro all the way to a robust production instance on a powerful FreeBSD server.
**Most people should use a one-click install method.** Other install methods are meant for developers and others with experience managing servers.
Most people should use a <a href="#one-click-installation">one-click install method</a>.
Other install methods are meant for developers and others with experience managing servers. If you want support for your own production instance of Mempool, or if you'd like to have your own instance of Mempool run by the mempool.space team on their own global ISP infrastructure—check out <a href="https://mempool.space/enterprise" target="_blank">Mempool Enterprise®</a>.
<a id="one-click-installation"></a>
## One-Click Installation
@ -22,7 +24,8 @@ Mempool can be conveniently installed on the following full-node distros:
- [RaspiBlitz](https://github.com/rootzoll/raspiblitz)
- [RoninDojo](https://code.samourai.io/ronindojo/RoninDojo)
- [myNode](https://github.com/mynodebtc/mynode)
- [Start9](https://github.com/Start9Labs/embassy-os)
- [StartOS](https://github.com/Start9Labs/start-os)
- [nix-bitcoin](https://github.com/fort-nix/nix-bitcoin/blob/a1eacce6768ca4894f365af8f79be5bbd594e1c3/examples/configuration.nix#L129)
**We highly recommend you deploy your own Mempool instance this way.** No matter which option you pick, you'll be able to get your own fully-sovereign instance of Mempool up quickly without needing to fiddle with any settings.

backend/.dockerignore (new file, 1 line)

@ -0,0 +1 @@
Dockerfile

backend/.gitignore (vendored, 3 changes)

@ -45,3 +45,6 @@ testem.log
#System Files
.DS_Store
Thumbs.db
# package folder (npm run package output)
/package


@ -2,7 +2,7 @@
These instructions are mostly intended for developers.
If you choose to use these instructions for a production setup, be aware that you will still probably need to do additional configuration for your specific OS, environment, use-case, etc. We do our best here to provide a good starting point, but only proceed if you know what you're doing. Mempool only provides support for custom setups to [enterprise sponsors](https://mempool.space/enterprise).
If you choose to use these instructions for a production setup, be aware that you will still probably need to do additional configuration for your specific OS, environment, use-case, etc. We do our best here to provide a good starting point, but only proceed if you know what you're doing. Mempool only provides support for custom setups to project sponsors through [Mempool Enterprise®](https://mempool.space/enterprise).
See other ways to set up Mempool on [the main README](/../../#installation-methods).
@ -85,7 +85,7 @@ Install dependencies with `npm` and build the backend:
```
cd backend
npm install
npm install --no-install-links # npm@9.4.2 and later can omit the --no-install-links
npm run build
```


@ -8,6 +8,7 @@
"API_URL_PREFIX": "/api/v1/",
"POLL_RATE_MS": 2000,
"CACHE_DIR": "./cache",
"CACHE_ENABLED": true,
"CLEAR_PROTECTION_MINUTES": 20,
"RECOMMENDED_FEE_PERCENTILE": 50,
"BLOCK_WEIGHT_UNITS": 4000000,
@ -31,14 +32,17 @@
"CPFP_INDEXING": false,
"DISK_CACHE_BLOCK_INTERVAL": 6,
"MAX_PUSH_TX_SIZE_WEIGHT": 4000000,
"ALLOW_UNREACHABLE": true
"ALLOW_UNREACHABLE": true,
"PRICE_UPDATES_PER_HOUR": 1
},
"CORE_RPC": {
"HOST": "127.0.0.1",
"PORT": 8332,
"USERNAME": "mempool",
"PASSWORD": "mempool",
"TIMEOUT": 60000
"TIMEOUT": 60000,
"COOKIE": false,
"COOKIE_PATH": "/path/to/bitcoin/.cookie"
},
"ELECTRUM": {
"HOST": "127.0.0.1",
@ -48,14 +52,20 @@
"ESPLORA": {
"REST_API_URL": "http://127.0.0.1:3000",
"UNIX_SOCKET_PATH": "/tmp/esplora-bitcoin-mainnet",
"RETRY_UNIX_SOCKET_AFTER": 30000
"BATCH_QUERY_BASE_SIZE": 1000,
"RETRY_UNIX_SOCKET_AFTER": 30000,
"REQUEST_TIMEOUT": 10000,
"FALLBACK_TIMEOUT": 5000,
"FALLBACK": []
},
"SECOND_CORE_RPC": {
"HOST": "127.0.0.1",
"PORT": 8332,
"USERNAME": "mempool",
"PASSWORD": "mempool",
"TIMEOUT": 60000
"TIMEOUT": 60000,
"COOKIE": false,
"COOKIE_PATH": "/path/to/bitcoin/.cookie"
},
"DATABASE": {
"ENABLED": true,
@ -65,7 +75,8 @@
"DATABASE": "mempool",
"USERNAME": "mempool",
"PASSWORD": "mempool",
"TIMEOUT": 180000
"TIMEOUT": 180000,
"PID_DIR": ""
},
"SYSLOG": {
"ENABLED": true,
@ -114,10 +125,6 @@
"USERNAME": "",
"PASSWORD": ""
},
"PRICE_DATA_SERVER": {
"TOR_URL": "http://wizpriceje6q5tdrxkyiazsgu7irquiqjy2dptezqhrtu7l2qelqktid.onion/getAllMarketPrices",
"CLEARNET_URL": "https://price.bisq.wiz.biz/getAllMarketPrices"
},
"EXTERNAL_DATA_SERVER": {
"MEMPOOL_API": "https://mempool.space/api/v1",
"MEMPOOL_ONION": "http://mempoolhqx4isw62xs7abwphsq7ldayuidyx2v2oethdhhj6mlo2r6ad.onion/api/v1",
@ -126,6 +133,11 @@
"BISQ_URL": "https://bisq.markets/api",
"BISQ_ONION": "http://bisqmktse2cabavbr2xjq7xw3h6g5ottemo5rolfcwt6aly6tp5fdryd.onion/api"
},
"REDIS": {
"ENABLED": false,
"UNIX_SOCKET_PATH": "/tmp/redis.sock",
"BATCH_QUERY_BASE_SIZE": 5000
},
"REPLICATION": {
"ENABLED": false,
"AUDIT": false,
@ -136,5 +148,9 @@
"trusted",
"servers"
]
},
"MEMPOOL_SERVICES": {
"API": "https://mempool.space/api",
"ACCELERATIONS": false
}
}
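
The new CORE_RPC COOKIE and COOKIE_PATH keys (mirrored in SECOND_CORE_RPC) point at Bitcoin Core's cookie-file authentication. Below is a minimal TypeScript sketch of how a consumer could resolve credentials from these keys; CoreRpcConfig and rpcCredentials are illustrative names, since the backend's actual wiring is not part of this diff:

```ts
import { readFileSync } from 'fs';

interface CoreRpcConfig {
  HOST: string;
  PORT: number;
  USERNAME: string;
  PASSWORD: string;
  TIMEOUT: number;
  COOKIE: boolean;
  COOKIE_PATH: string;
}

// Bitcoin Core writes "<user>:<password>" into its .cookie file on startup,
// so cookie auth avoids hardcoding RPC credentials in mempool-config.json.
function rpcCredentials(cfg: CoreRpcConfig): { user: string; pass: string } {
  if (cfg.COOKIE) {
    const [user, pass] = readFileSync(cfg.COOKIE_PATH, 'utf8').trim().split(':');
    return { user, pass };
  }
  return { user: cfg.USERNAME, pass: cfg.PASSWORD };
}
```

Bitcoin Core regenerates the cookie on every restart, so reading it lazily at connect time, as sketched here, is the usual pattern.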

backend/npm_package.sh (new executable file, 17 lines)

@ -0,0 +1,17 @@
#!/bin/sh
set -e
# Remove previous dist folder
rm -rf dist
# Build new dist folder
npm run build
# Remove previous package folder
rm -rf package
# Move JS and deps
mv dist package
cp -R node_modules package
# Remove symlink for rust-gbt and insert real folder
rm package/node_modules/rust-gbt
cp -R rust-gbt package/node_modules
# Clean up deps
npm run package-rm-build-deps


@ -0,0 +1,12 @@
#!/bin/sh
set -e
# Cleaning up inside the node_modules folder
cd package/node_modules
rm -r \
typescript \
@typescript-eslint \
@napi-rs \
./rust-gbt/src \
./rust-gbt/Cargo.toml \
./rust-gbt/build.rs

File diff suppressed because it is too large.


@ -22,38 +22,40 @@
"main": "index.ts",
"scripts": {
"tsc": "./node_modules/typescript/bin/tsc -p tsconfig.build.json",
"build": "npm run build-rust && npm run tsc && npm run create-resources",
"build": "npm run rust-build && npm run tsc && npm run create-resources",
"create-resources": "cp ./src/tasks/price-feeds/mtgox-weekly.json ./dist/tasks && node dist/api/fetch-version.js",
"package": "npm run build && rm -rf package && mv dist package && mv node_modules package && mv rust-gbt package && npm run package-rm-build-deps",
"package-rm-build-deps": "(cd package/node_modules; rm -r typescript @typescript-eslint @napi-rs ../rust-gbt/target ../rust-gbt/node_modules ../rust-gbt/src)",
"package": "./npm_package.sh",
"package-rm-build-deps": "./npm_package_rm_build_deps.sh",
"start": "node --max-old-space-size=2048 dist/index.js",
"start-production": "node --max-old-space-size=16384 dist/index.js",
"reindex-updated-pools": "npm run start-production --update-pools",
"reindex-all-blocks": "npm run start-production --update-pools --reindex-blocks",
"test": "./node_modules/.bin/jest --coverage",
"test:ci": "CI=true ./node_modules/.bin/jest --coverage",
"lint": "./node_modules/.bin/eslint . --ext .ts",
"lint:fix": "./node_modules/.bin/eslint . --ext .ts --fix",
"prettier": "./node_modules/.bin/prettier --write \"src/**/*.{js,ts}\"",
"build-rust": "cd rust-gbt && npm install"
"rust-build": "cd rust-gbt && npm run build-release"
},
"dependencies": {
"@babel/core": "^7.21.3",
"@babel/core": "^7.23.2",
"@mempool/electrum-client": "1.1.9",
"@types/node": "^18.15.3",
"axios": "~1.4.0",
"axios": "~1.6.1",
"bitcoinjs-lib": "~6.1.3",
"crypto-js": "~4.1.1",
"crypto-js": "~4.2.0",
"express": "~4.18.2",
"maxmind": "~4.3.11",
"mysql2": "~3.5.2",
"mysql2": "~3.6.0",
"rust-gbt": "file:./rust-gbt",
"redis": "^4.6.6",
"socks-proxy-agent": "~7.0.0",
"typescript": "~4.9.3",
"ws": "~8.13.0"
},
"devDependencies": {
"@babel/core": "^7.21.3",
"@babel/code-frame": "^7.18.6",
"@babel/core": "^7.23.2",
"@types/compression": "^1.7.2",
"@types/crypto-js": "^4.1.1",
"@types/express": "^4.17.17",


@ -12,6 +12,10 @@ export interface ThreadTransaction {
effectiveFeePerVsize: number
inputs: Array<number>
}
export interface ThreadAcceleration {
uid: number
delta: number
}
export class GbtGenerator {
constructor()
/**
@ -19,13 +23,13 @@ export class GbtGenerator {
*
* Rejects if the thread panics or if the Mutex is poisoned.
*/
make(mempool: Array<ThreadTransaction>, maxUid: number): Promise<GbtResult>
make(mempool: Array<ThreadTransaction>, accelerations: Array<ThreadAcceleration>, maxUid: number): Promise<GbtResult>
/**
* # Errors
*
* Rejects if the thread panics or if the Mutex is poisoned.
*/
update(newTxs: Array<ThreadTransaction>, removeTxs: Array<number>, maxUid: number): Promise<GbtResult>
update(newTxs: Array<ThreadTransaction>, removeTxs: Array<number>, accelerations: Array<ThreadAcceleration>, maxUid: number): Promise<GbtResult>
}
/**
* The result from calling the gbt function.


@ -1,6 +1,6 @@
use crate::{
u32_hasher_types::{u32hashset_new, U32HasherState},
ThreadTransaction,
ThreadTransaction, thread_acceleration::ThreadAcceleration,
};
use std::{
cmp::Ordering,
@ -88,44 +88,49 @@ impl Ord for AuditTransaction {
}
#[inline]
fn calc_fee_rate(fee: f64, vsize: f64) -> f64 {
fee / (if vsize == 0.0 { 1.0 } else { vsize })
fn calc_fee_rate(fee: u64, vsize: f64) -> f64 {
(fee as f64) / (if vsize == 0.0 { 1.0 } else { vsize })
}
impl AuditTransaction {
pub fn from_thread_transaction(tx: &ThreadTransaction) -> Self {
pub fn from_thread_transaction(tx: &ThreadTransaction, maybe_acceleration: Option<Option<&ThreadAcceleration>>) -> Self {
let fee_delta = match maybe_acceleration {
Some(Some(acceleration)) => acceleration.delta,
_ => 0.0
};
let fee = (tx.fee as u64) + (fee_delta as u64);
// rounded up to the nearest integer
let is_adjusted = tx.weight < (tx.sigops * 20);
let sigop_adjusted_vsize = ((tx.weight + 3) / 4).max(tx.sigops * 5);
let sigop_adjusted_weight = tx.weight.max(tx.sigops * 20);
let effective_fee_per_vsize = if is_adjusted {
calc_fee_rate(tx.fee, f64::from(sigop_adjusted_weight) / 4.0)
let effective_fee_per_vsize = if is_adjusted || fee_delta > 0.0 {
calc_fee_rate(fee, f64::from(sigop_adjusted_weight) / 4.0)
} else {
tx.effective_fee_per_vsize
};
Self {
uid: tx.uid,
order: tx.order,
fee: tx.fee as u64,
fee,
weight: tx.weight,
sigop_adjusted_weight,
sigop_adjusted_vsize,
sigops: tx.sigops,
adjusted_fee_per_vsize: calc_fee_rate(tx.fee, f64::from(sigop_adjusted_vsize)),
adjusted_fee_per_vsize: calc_fee_rate(fee, f64::from(sigop_adjusted_vsize)),
effective_fee_per_vsize,
dependency_rate: f64::INFINITY,
inputs: tx.inputs.clone(),
relatives_set_flag: false,
ancestors: u32hashset_new(),
children: u32hashset_new(),
ancestor_fee: tx.fee as u64,
ancestor_fee: fee,
ancestor_sigop_adjusted_weight: sigop_adjusted_weight,
ancestor_sigop_adjusted_vsize: sigop_adjusted_vsize,
ancestor_sigops: tx.sigops,
score: 0.0,
used: false,
modified: false,
dirty: effective_fee_per_vsize != tx.effective_fee_per_vsize,
dirty: effective_fee_per_vsize != tx.effective_fee_per_vsize || fee_delta > 0.0,
}
}
@ -156,7 +161,7 @@ impl AuditTransaction {
// grows, so if we think of 0 as "grew infinitely" then dependency_rate would be
// the smaller of the two. If either side is NaN, the other side is returned.
self.dependency_rate.min(calc_fee_rate(
self.ancestor_fee as f64,
self.ancestor_fee,
f64::from(self.ancestor_sigop_adjusted_weight) / 4.0,
))
}
@ -172,7 +177,7 @@ impl AuditTransaction {
#[inline]
fn calc_new_score(&mut self) {
self.score = self.adjusted_fee_per_vsize.min(calc_fee_rate(
self.ancestor_fee as f64,
self.ancestor_fee,
f64::from(self.ancestor_sigop_adjusted_vsize),
));
}
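
The net effect of this change: an accelerated transaction's fee delta is folded into every rate the ranking computes, and the transaction is marked dirty so the precomputed effective rate is not reused. A worked example in TypeScript, where calcFeeRate mirrors the Rust calc_fee_rate above and the 5000-sat delta is hypothetical:

```ts
// Mirror of the Rust helper: vsize 0 is treated as 1 to avoid division by zero.
function calcFeeRate(fee: number, vsize: number): number {
  return fee / (vsize === 0 ? 1 : vsize);
}

const baseFee = 584;   // sats paid on-chain
const vsize = 143;     // sigop-adjusted vsize
const delta = 5000;    // accelerator fee delta (hypothetical)

calcFeeRate(baseFee, vsize);          // ~4.08 sat/vB without acceleration
calcFeeRate(baseFee + delta, vsize);  // ~39.05 sat/vB as gbt now ranks it
```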


@ -5,7 +5,7 @@ use tracing::{info, trace};
use crate::{
audit_transaction::{partial_cmp_uid_score, AuditTransaction},
u32_hasher_types::{u32hashset_new, u32priority_queue_with_capacity, U32HasherState},
GbtResult, ThreadTransactionsMap,
GbtResult, ThreadTransactionsMap, thread_acceleration::ThreadAcceleration,
};
const MAX_BLOCK_WEIGHT_UNITS: u32 = 4_000_000 - 4_000;
@ -53,7 +53,13 @@ impl Ord for TxPriority {
// TODO: Make gbt smaller to fix these lints.
#[allow(clippy::too_many_lines)]
#[allow(clippy::cognitive_complexity)]
pub fn gbt(mempool: &mut ThreadTransactionsMap, max_uid: usize) -> GbtResult {
pub fn gbt(mempool: &mut ThreadTransactionsMap, accelerations: &[ThreadAcceleration], max_uid: usize) -> GbtResult {
let mut indexed_accelerations = Vec::with_capacity(max_uid + 1);
indexed_accelerations.resize(max_uid + 1, None);
for acceleration in accelerations {
indexed_accelerations[acceleration.uid as usize] = Some(acceleration);
}
let mempool_len = mempool.len();
let mut audit_pool: AuditPool = Vec::with_capacity(max_uid + 1);
audit_pool.resize(max_uid + 1, None);
@ -63,7 +69,8 @@ pub fn gbt(mempool: &mut ThreadTransactionsMap, max_uid: usize) -> GbtResult {
info!("Initializing working structs");
for (uid, tx) in &mut *mempool {
let audit_tx = AuditTransaction::from_thread_transaction(tx);
let acceleration = indexed_accelerations.get(*uid as usize);
let audit_tx = AuditTransaction::from_thread_transaction(tx, acceleration.copied());
// Safety: audit_pool and mempool_stack must always contain the same transactions
audit_pool[*uid as usize] = Some(ManuallyDrop::new(audit_tx));
mempool_stack.push(*uid);
@ -328,13 +335,15 @@ fn set_relatives(txid: u32, audit_pool: &mut AuditPool) {
let mut total_sigops: u32 = 0;
for ancestor_id in &ancestors {
let Some(ancestor) = audit_pool
if let Some(ancestor) = audit_pool
.get(*ancestor_id as usize)
.expect("audit_pool contains all ancestors") else { todo!() };
total_fee += ancestor.fee;
total_sigop_adjusted_weight += ancestor.sigop_adjusted_weight;
total_sigop_adjusted_vsize += ancestor.sigop_adjusted_vsize;
total_sigops += ancestor.sigops;
.expect("audit_pool contains all ancestors")
{
total_fee += ancestor.fee;
total_sigop_adjusted_weight += ancestor.sigop_adjusted_weight;
total_sigop_adjusted_vsize += ancestor.sigop_adjusted_vsize;
total_sigops += ancestor.sigops;
} else { todo!() };
}
if let Some(Some(tx)) = audit_pool.get_mut(txid as usize) {
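
The indexed_accelerations vector built above trades a little memory (one slot per possible uid) for O(1) lookups inside the per-transaction loop. The same idea restated as a TypeScript sketch:

```ts
interface ThreadAcceleration { uid: number; delta: number; }

function indexAccelerations(
  accelerations: ThreadAcceleration[],
  maxUid: number,
): (ThreadAcceleration | null)[] {
  // Dense array keyed by uid; slots without an acceleration stay null.
  const indexed: (ThreadAcceleration | null)[] = new Array(maxUid + 1).fill(null);
  for (const acc of accelerations) {
    indexed[acc.uid] = acc; // assumes every uid <= maxUid
  }
  return indexed;
}
// Inside the loop: const acc = indexed[tx.uid]; // null when not accelerated
```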


@ -9,6 +9,7 @@
use napi::bindgen_prelude::Result;
use napi_derive::napi;
use thread_transaction::ThreadTransaction;
use thread_acceleration::ThreadAcceleration;
use tracing::{debug, info, trace};
use tracing_log::LogTracer;
use tracing_subscriber::{EnvFilter, FmtSubscriber};
@ -19,6 +20,7 @@ use std::sync::{Arc, Mutex};
mod audit_transaction;
mod gbt;
mod thread_transaction;
mod thread_acceleration;
mod u32_hasher_types;
use u32_hasher_types::{u32hashmap_with_capacity, U32HasherState};
@ -74,10 +76,11 @@ impl GbtGenerator {
///
/// Rejects if the thread panics or if the Mutex is poisoned.
#[napi]
pub async fn make(&self, mempool: Vec<ThreadTransaction>, max_uid: u32) -> Result<GbtResult> {
pub async fn make(&self, mempool: Vec<ThreadTransaction>, accelerations: Vec<ThreadAcceleration>, max_uid: u32) -> Result<GbtResult> {
trace!("make: Current State {:#?}", self.thread_transactions);
run_task(
Arc::clone(&self.thread_transactions),
accelerations,
max_uid as usize,
move |map| {
for tx in mempool {
@ -96,11 +99,13 @@ impl GbtGenerator {
&self,
new_txs: Vec<ThreadTransaction>,
remove_txs: Vec<u32>,
accelerations: Vec<ThreadAcceleration>,
max_uid: u32,
) -> Result<GbtResult> {
trace!("update: Current State {:#?}", self.thread_transactions);
run_task(
Arc::clone(&self.thread_transactions),
accelerations,
max_uid as usize,
move |map| {
for tx in new_txs {
@ -141,6 +146,7 @@ pub struct GbtResult {
/// to the `HashMap` as the only argument. (A move closure is recommended to meet the bounds)
async fn run_task<F>(
thread_transactions: Arc<Mutex<ThreadTransactionsMap>>,
accelerations: Vec<ThreadAcceleration>,
max_uid: usize,
callback: F,
) -> Result<GbtResult>
@ -159,7 +165,7 @@ where
callback(&mut map);
info!("Starting gbt algorithm for {} elements...", map.len());
let result = gbt::gbt(&mut map, max_uid);
let result = gbt::gbt(&mut map, &accelerations, max_uid);
info!("Finished gbt algorithm for {} elements...", map.len());
debug!(


@ -0,0 +1,8 @@
use napi_derive::napi;
#[derive(Debug)]
#[napi(object)]
pub struct ThreadAcceleration {
pub uid: u32,
pub delta: f64, // fee delta
}


@ -10,6 +10,7 @@
"AUTOMATIC_BLOCK_REINDEXING": false,
"POLL_RATE_MS": 3,
"CACHE_DIR": "__MEMPOOL_CACHE_DIR__",
"CACHE_ENABLED": true,
"CLEAR_PROTECTION_MINUTES": 4,
"RECOMMENDED_FEE_PERCENTILE": 5,
"BLOCK_WEIGHT_UNITS": 6,
@ -22,8 +23,8 @@
"USER_AGENT": "__MEMPOOL_USER_AGENT__",
"STDOUT_LOG_MIN_PRIORITY": "__MEMPOOL_STDOUT_LOG_MIN_PRIORITY__",
"INDEXING_BLOCKS_AMOUNT": 14,
"POOLS_JSON_TREE_URL": "__POOLS_JSON_TREE_URL__",
"POOLS_JSON_URL": "__POOLS_JSON_URL__",
"POOLS_JSON_TREE_URL": "__MEMPOOL_POOLS_JSON_TREE_URL__",
"POOLS_JSON_URL": "__MEMPOOL_POOLS_JSON_URL__",
"AUDIT": true,
"ADVANCED_GBT_AUDIT": true,
"ADVANCED_GBT_MEMPOOL": true,
@ -32,14 +33,17 @@
"MAX_BLOCKS_BULK_QUERY": 999,
"DISK_CACHE_BLOCK_INTERVAL": 999,
"MAX_PUSH_TX_SIZE_WEIGHT": 4000000,
"ALLOW_UNREACHABLE": true
"ALLOW_UNREACHABLE": true,
"PRICE_UPDATES_PER_HOUR": 1
},
"CORE_RPC": {
"HOST": "__CORE_RPC_HOST__",
"PORT": 15,
"USERNAME": "__CORE_RPC_USERNAME__",
"PASSWORD": "__CORE_RPC_PASSWORD__",
"TIMEOUT": 1000
"TIMEOUT": 1000,
"COOKIE": false,
"COOKIE_PATH": "__CORE_RPC_COOKIE_PATH__"
},
"ELECTRUM": {
"HOST": "__ELECTRUM_HOST__",
@ -49,14 +53,20 @@
"ESPLORA": {
"REST_API_URL": "__ESPLORA_REST_API_URL__",
"UNIX_SOCKET_PATH": "__ESPLORA_UNIX_SOCKET_PATH__",
"RETRY_UNIX_SOCKET_AFTER": 888
"BATCH_QUERY_BASE_SIZE": 1000,
"RETRY_UNIX_SOCKET_AFTER": 888,
"REQUEST_TIMEOUT": 10000,
"FALLBACK_TIMEOUT": 5000,
"FALLBACK": []
},
"SECOND_CORE_RPC": {
"HOST": "__SECOND_CORE_RPC_HOST__",
"PORT": 17,
"USERNAME": "__SECOND_CORE_RPC_USERNAME__",
"PASSWORD": "__SECOND_CORE_RPC_PASSWORD__",
"TIMEOUT": 2000
"TIMEOUT": 2000,
"COOKIE": false,
"COOKIE_PATH": "__SECOND_CORE_RPC_COOKIE_PATH__"
},
"DATABASE": {
"ENABLED": false,
@ -66,6 +76,7 @@
"DATABASE": "__DATABASE_DATABASE__",
"USERNAME": "__DATABASE_USERNAME__",
"PASSWORD": "__DATABASE_PASSWORD__",
"PID_DIR": "__DATABASE_PID_FILE__",
"TIMEOUT": 3000
},
"SYSLOG": {
@ -91,10 +102,6 @@
"USERNAME": "__SOCKS5PROXY_USERNAME__",
"PASSWORD": "__SOCKS5PROXY_PASSWORD__"
},
"PRICE_DATA_SERVER": {
"TOR_URL": "__PRICE_DATA_SERVER_TOR_URL__",
"CLEARNET_URL": "__PRICE_DATA_SERVER_CLEARNET_URL__"
},
"EXTERNAL_DATA_SERVER": {
"MEMPOOL_API": "__EXTERNAL_DATA_SERVER_MEMPOOL_API__",
"MEMPOOL_ONION": "__EXTERNAL_DATA_SERVER_MEMPOOL_ONION__",
@ -127,5 +134,14 @@
"AUDIT": false,
"AUDIT_START_HEIGHT": 774000,
"SERVERS": []
},
"MEMPOOL_SERVICES": {
"API": "",
"ACCELERATIONS": false
},
"REDIS": {
"ENABLED": false,
"UNIX_SOCKET_PATH": "/tmp/redis.sock",
"BATCH_QUERY_BASE_SIZE": 5000
}
}


@ -0,0 +1,24 @@
import { Common } from '../../api/common';
import { MempoolTransactionExtended } from '../../mempool.interfaces';
const randomTransactions = require('./test-data/transactions-random.json');
const replacedTransactions = require('./test-data/transactions-replaced.json');
const rbfTransactions = require('./test-data/transactions-rbfs.json');
describe('Mempool Utils', () => {
test('should detect RBF transactions with fast method', () => {
const newTransactions = rbfTransactions.concat(randomTransactions);
const result: { [txid: string]: MempoolTransactionExtended[] } = Common.findRbfTransactions(newTransactions, replacedTransactions);
expect(Object.values(result).length).toEqual(2);
expect(result).toHaveProperty('7219d95161f3718335991ac6d967d24eedec370908c9879bb1e192e6d797d0a6');
expect(result).toHaveProperty('5387881d695d4564d397026dc5f740f816f8390b4b2c5ec8c20309122712a875');
});
test('should detect RBF transactions with scalable method', () => {
const newTransactions = rbfTransactions.concat(randomTransactions);
const result: { [txid: string]: MempoolTransactionExtended[] } = Common.findRbfTransactions(newTransactions, replacedTransactions, true);
expect(Object.values(result).length).toEqual(2);
expect(result).toHaveProperty('7219d95161f3718335991ac6d967d24eedec370908c9879bb1e192e6d797d0a6');
expect(result).toHaveProperty('5387881d695d4564d397026dc5f740f816f8390b4b2c5ec8c20309122712a875');
});
});
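
For context, both tests exercise the same core idea: a replacement is flagged when an incoming transaction spends an outpoint already spent by a known transaction. A self-contained sketch of that check; findReplacements is an illustrative name, and the real logic lives in Common.findRbfTransactions, whose scalable variant is toggled by the third argument:

```ts
type Outpoint = string; // `${txid}:${vout}`

interface TxLike { txid: string; vin: { txid: string; vout: number }[]; }

function findReplacements<T extends TxLike>(newTxs: T[], known: T[]): Record<string, T[]> {
  // Index every outpoint spent by a known (potentially replaced) transaction.
  const spentBy = new Map<Outpoint, T>();
  for (const tx of known) {
    for (const vin of tx.vin) spentBy.set(`${vin.txid}:${vin.vout}`, tx);
  }
  const replaced: Record<string, T[]> = {};
  for (const tx of newTxs) {
    for (const vin of tx.vin) {
      const victim = spentBy.get(`${vin.txid}:${vin.vout}`);
      if (victim && victim.txid !== tx.txid) {
        (replaced[tx.txid] ??= []).push(victim); // keyed by the replacing txid
      }
    }
  }
  return replaced;
}
```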


@ -1,4 +1,8 @@
import { calcDifficultyAdjustment, DifficultyAdjustment } from '../../api/difficulty-adjustment';
import {
calcBitsDifference,
calcDifficultyAdjustment,
DifficultyAdjustment,
} from '../../api/difficulty-adjustment';
describe('Mempool Difficulty Adjustment', () => {
test('should calculate Difficulty Adjustments properly', () => {
@ -86,4 +90,46 @@ describe('Mempool Difficulty Adjustment', () => {
expect(result).toStrictEqual(vector[1]);
}
});
test('should calculate Difficulty change from bits fields of two blocks', () => {
// Check same exponent + check min max for output
expect(calcBitsDifference(0x1d000200, 0x1d000100)).toEqual(100);
expect(calcBitsDifference(0x1d000400, 0x1d000100)).toEqual(300);
expect(calcBitsDifference(0x1d000800, 0x1d000100)).toEqual(300); // Actually 700
expect(calcBitsDifference(0x1d000100, 0x1d000200)).toEqual(-50);
expect(calcBitsDifference(0x1d000100, 0x1d000400)).toEqual(-75);
expect(calcBitsDifference(0x1d000100, 0x1d000800)).toEqual(-75); // Actually -87.5
// Check new higher exponent
expect(calcBitsDifference(0x1c000200, 0x1d000001)).toEqual(100);
expect(calcBitsDifference(0x1c000400, 0x1d000001)).toEqual(300);
expect(calcBitsDifference(0x1c000800, 0x1d000001)).toEqual(300);
expect(calcBitsDifference(0x1c000100, 0x1d000002)).toEqual(-50);
expect(calcBitsDifference(0x1c000100, 0x1d000004)).toEqual(-75);
expect(calcBitsDifference(0x1c000100, 0x1d000008)).toEqual(-75);
// Check new lower exponent
expect(calcBitsDifference(0x1d000002, 0x1c000100)).toEqual(100);
expect(calcBitsDifference(0x1d000004, 0x1c000100)).toEqual(300);
expect(calcBitsDifference(0x1d000008, 0x1c000100)).toEqual(300);
expect(calcBitsDifference(0x1d000001, 0x1c000200)).toEqual(-50);
expect(calcBitsDifference(0x1d000001, 0x1c000400)).toEqual(-75);
expect(calcBitsDifference(0x1d000001, 0x1c000800)).toEqual(-75);
// Check error when exponents are too far apart
expect(() => calcBitsDifference(0x1d000001, 0x1a000800)).toThrow(
/Impossible exponent difference/
);
// Check invalid inputs
expect(() => calcBitsDifference(0x7f000001, 0x1a000800)).toThrow(
/Invalid bits/
);
expect(() => calcBitsDifference(0, 0x1a000800)).toThrow(/Invalid bits/);
expect(() => calcBitsDifference(100.2783, 0x1a000800)).toThrow(
/Invalid bits/
);
expect(() => calcBitsDifference(0x00800000, 0x1a000800)).toThrow(
/Invalid bits/
);
expect(() => calcBitsDifference(0x1c000000, 0x1a000800)).toThrow(
/Invalid bits/
);
});
});
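
These vectors pin down the semantics fairly precisely: bits encode target = mantissa * 256^(exponent - 3), difficulty moves inversely to the target, and the result is clamped to [-75%, +300%] (hence the "Actually 700" and "Actually -87.5" comments). A sketch consistent with the happy-path cases above; it omits the input validation and exponent-distance errors, and is not the backend's actual implementation:

```ts
function bitsDifferenceSketch(oldBits: number, newBits: number): number {
  const exponent = (bits: number) => bits >>> 24;
  const mantissa = (bits: number) => bits & 0x00ffffff;
  // difficulty_new / difficulty_old = target_old / target_new
  const ratio =
    (mantissa(oldBits) * 256 ** (exponent(oldBits) - 3)) /
    (mantissa(newBits) * 256 ** (exponent(newBits) - 3));
  const percentChange = (ratio - 1) * 100;
  return Math.min(300, Math.max(-75, percentChange));
}

bitsDifferenceSketch(0x1d000200, 0x1d000100); // 100: target halved, difficulty doubled
bitsDifferenceSketch(0x1c000200, 0x1d000001); // 100: same ratio across exponents
bitsDifferenceSketch(0x1d000100, 0x1d000800); // -75: really -87.5, clamped
```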


@ -0,0 +1,277 @@
[
{
"txid": "13f007241d78e8b0b4e57d2ae3fd37bcfe3226534d7cadeba5a549860d960db0",
"version": 2,
"locktime": 0,
"vin": [
{
"txid": "cb8f206f4e88bec97107089f3e9e61d50cde53d4541992ae19759b71103cf75c",
"vout": 0,
"prevout": {
"scriptpubkey": "0014fd6d15ff832c12f1ff04a5ccd5039f7227b260bd",
"scriptpubkey_asm": "OP_0 OP_PUSHBYTES_20 fd6d15ff832c12f1ff04a5ccd5039f7227b260bd",
"scriptpubkey_type": "v0_p2wpkh",
"scriptpubkey_address": "bc1ql4k3tlur9sf0rlcy5hxd2qulwgnmyc9akehvth",
"value": 610677
},
"scriptsig": "",
"scriptsig_asm": "",
"witness": [
"304302205c430b36ebd2bb327951d83440af1f58f127871b2baada4c4dde2bc0b6721f56021f3445099f1a40e35baeda32e8e3727b505ffba0d882b11f498c7762f4184e9901",
"0236b5edd4fbbcfb045960e42ec8a9968944084785932e32940e8cd2583b37da67"
],
"is_coinbase": false,
"sequence": 2147483648
}
],
"vout": [
{
"scriptpubkey": "76a9149d32ef812385f3811634e0c0117dd153a5de10a488ac",
"scriptpubkey_asm": "OP_DUP OP_HASH160 OP_PUSHBYTES_20 9d32ef812385f3811634e0c0117dd153a5de10a4 OP_EQUALVERIFY OP_CHECKSIG",
"scriptpubkey_type": "p2pkh",
"scriptpubkey_address": "1FLC7Bag7okAkKPCyZbgZZg3Hh1EuGZ5Rd",
"value": 344697
},
{
"scriptpubkey": "00147dee8a7a38abbfb00dbfba365c8d6712934cc491",
"scriptpubkey_asm": "OP_0 OP_PUSHBYTES_20 7dee8a7a38abbfb00dbfba365c8d6712934cc491",
"scriptpubkey_type": "v0_p2wpkh",
"scriptpubkey_address": "bc1q0hhg573c4wlmqrdlhgm9ert8z2f5e3y3lf9hvx",
"value": 265396
}
],
"size": 224,
"weight": 572,
"fee": 584,
"status": {
"confirmed": false
},
"order": 2953680397,
"vsize": 143,
"adjustedVsize": 143,
"sigops": 5,
"feePerVsize": 4.083916083916084,
"adjustedFeePerVsize": 4.083916083916084,
"effectiveFeePerVsize": 4.083916083916084,
"firstSeen": 1691222538,
"uid": 526973,
"inputs": [
526728
],
"position": {
"block": 7,
"vsize": 21429708.5
},
"bestDescendant": null,
"cpfpChecked": true
},
{
"txid": "8e89b20f8a7fadb0e4cdbe57a00eee224f5076bac5387fc276916724e7c4a16a",
"version": 2,
"locktime": 800571,
"vin": [
{
"txid": "35e16762459539f3a8e52c5dee6a9ccaa9e9268efed33aa2c6e1b7805e849f24",
"vout": 0,
"prevout": {
"scriptpubkey": "0014d4f16ef275b3e1c4a4ecbef55a164933e0f6460f",
"scriptpubkey_asm": "OP_0 OP_PUSHBYTES_20 d4f16ef275b3e1c4a4ecbef55a164933e0f6460f",
"scriptpubkey_type": "v0_p2wpkh",
"scriptpubkey_address": "bc1q6nckaun4k0suff8vhm6459jfx0s0v3s0ff4ukl",
"value": 1528924
},
"scriptsig": "",
"scriptsig_asm": "",
"witness": [
"3044022019008b26e885bb43da25a11ffac147a057722072eedb68411f114f6e7eb82ebc02201b618264bb97756b88fc3bbc365b73044ac18b33b1067e31cfd5bcd0f50ed2c701",
"039b71145070bd3e8af28e27fa577f2e12ab6bb4e212d3eeaef08b4bc39e8cbc13"
],
"is_coinbase": false,
"sequence": 4294967293
},
{
"txid": "67c27ed0f767526234bcd5f795a31fab8ec4d0251bf12c68f2746951f4110d90",
"vout": 3,
"prevout": {
"scriptpubkey": "0014a7c3d613b321375054b2ac9b6114367bc034ad6f",
"scriptpubkey_asm": "OP_0 OP_PUSHBYTES_20 a7c3d613b321375054b2ac9b6114367bc034ad6f",
"scriptpubkey_type": "v0_p2wpkh",
"scriptpubkey_address": "bc1q5lpavyanyym4q49j4jdkz9pk00qrftt0yqzvk3",
"value": 436523
},
"scriptsig": "",
"scriptsig_asm": "",
"witness": [
"304402204e67285fc656bc45ed082499b076d5dba2fa21d0d7e64a0ae52b19d69a11760002200f037d81ee540b74397844513b72b08ed92b06db76bd20b08f7a0a3b36ab13d501",
"02a3ebae85f0225b6fbb5ff060afce683a4683507a57544605a29ee7d287e591b4"
],
"is_coinbase": false,
"sequence": 4294967293
},
{
"txid": "21c38fb9a2521e438c614f53b19ddd7a5594bcc4b77480e762fd4b702fad3374",
"vout": 1,
"prevout": {
"scriptpubkey": "00149660e34ef88106536c816c037b5b28dd64a812e2",
"scriptpubkey_asm": "OP_0 OP_PUSHBYTES_20 9660e34ef88106536c816c037b5b28dd64a812e2",
"scriptpubkey_type": "v0_p2wpkh",
"scriptpubkey_address": "bc1qjeswxnhcsyr9xmypdsphkkegm4j2syhztgzxv4",
"value": 758149
},
"scriptsig": "",
"scriptsig_asm": "",
"witness": [
"3044022021b556f0aa99329076bcc435338aceaf534963efcab306931b1b2b0461e16e0c02203a78942a3745c4da656bddfd8cf16b85dc04d652904e88682127cdd9ca63339001",
"0298963be4a8f66aca9fcf1c6dc95547aeaa82347543190c91e094c2321142b9f0"
],
"is_coinbase": false,
"sequence": 4294967293
},
{
"txid": "aa998dbae65240a7386bf7d468459551d99c3de8e2f9057ff5f2d38e17daf788",
"vout": 0,
"prevout": {
"scriptpubkey": "00147bb7413a39943b21ded98ad5e6ad7a222d273e17",
"scriptpubkey_asm": "OP_0 OP_PUSHBYTES_20 7bb7413a39943b21ded98ad5e6ad7a222d273e17",
"scriptpubkey_type": "v0_p2wpkh",
"scriptpubkey_address": "bc1q0wm5zw3ejsajrhke3t27dtt6ygkjw0sh9lltg6",
"value": 1067200
},
"scriptsig": "",
"scriptsig_asm": "",
"witness": [
"304402205e2269f7d4ee0513b34354c38e920aef2dabac6f4350afb2dd105ff3ee43ae7b02202870322f2cb85cb0b2b0e38152f018bfff271dc3ec5aed0515854d0b259aaf3d01",
"03b87320cf3263a644a0d3f89c1b4a7304d9dfda9eb8c891560716abcb73e88b99"
],
"is_coinbase": false,
"sequence": 4294967293
},
{
"txid": "230253d195d779d4688ba16993985cd27b2e7a687d8b889b3bc63f19ece36f20",
"vout": 0,
"prevout": {
"scriptpubkey": "001439647bd997819d12dfc72b0fb9ff9ffcb84946f8",
"scriptpubkey_asm": "OP_0 OP_PUSHBYTES_20 39647bd997819d12dfc72b0fb9ff9ffcb84946f8",
"scriptpubkey_type": "v0_p2wpkh",
"scriptpubkey_address": "bc1q89j8hkvhsxw39h789v8mnlulljuyj3hc9zve97",
"value": 361950
},
"scriptsig": "",
"scriptsig_asm": "",
"witness": [
"304402204f7ca868bb9b92a07fecdc6b9dd56e4e1d007ca1405952f98ed6bc416345b5f2022055320a97791417abf6628fcf6513ac5785b06c630f854d8595e96ea06c3841d301",
"03a3ffe8e3ef2eea129b227e9658164bae0a6d21c17da6de9973ba34d9e04b21a0"
],
"is_coinbase": false,
"sequence": 4294967293
},
{
"txid": "670771e265a0b62dbd3c1fec2b865177eaf0bafd0ae49dd40a1c9fcd9a847a81",
"vout": 0,
"prevout": {
"scriptpubkey": "0014d45d1b0022c7387e42c5452ced561bdb8fd4b521",
"scriptpubkey_asm": "OP_0 OP_PUSHBYTES_20 d45d1b0022c7387e42c5452ced561bdb8fd4b521",
"scriptpubkey_type": "v0_p2wpkh",
"scriptpubkey_address": "bc1q63w3kqpzcuu8usk9g5kw64smmw8afdfpxmc2m0",
"value": 453275
},
"scriptsig": "",
"scriptsig_asm": "",
"witness": [
"3044022071312921800441903b2099e723add8702dd0f92ec11526ff87acf6967ec64cbd02203deabe7ed56d5daaa9a95c5a607b1ab705ff1c46bc6984a6dca120e63a91768601",
"0257302ac8d9c4c8f9b1744f19bb432359326b9cc7bdddeeab9202749a6d92be58"
],
"is_coinbase": false,
"sequence": 4294967293
},
{
"txid": "0af82159eee2b69242f2ff032636e410b67ec1ace52e55fb0d20ed814cd64803",
"vout": 0,
"prevout": {
"scriptpubkey": "001459e4d6bfefc6b45f955a69c4aeca26348e9d54ed",
"scriptpubkey_asm": "OP_0 OP_PUSHBYTES_20 59e4d6bfefc6b45f955a69c4aeca26348e9d54ed",
"scriptpubkey_type": "v0_p2wpkh",
"scriptpubkey_address": "bc1qt8jdd0l0c669l926d8z2aj3xxj8f648dtyn7tc",
"value": 439961
},
"scriptsig": "",
"scriptsig_asm": "",
"witness": [
"3044022027540322e92c23c5513aa2587e7feb56a8ce82f879269d6b3cbd425634b44f8e022045572dee7262b02130bfe32d8aa8abbfaa64e101abfc819bba5380c78876692d01",
"03fe02262d87f4a5289d3dd66e3d9a74cd49fa1cad0249284a7451896a827249a5"
],
"is_coinbase": false,
"sequence": 4294967293
},
{
"txid": "68cf9c784870a4f888f044755f7ce318557f652461db8ef887d279672f186018",
"vout": 0,
"prevout": {
"scriptpubkey": "001454822b2d5d52597a78b630921cf439a41e32f2f9",
"scriptpubkey_asm": "OP_0 OP_PUSHBYTES_20 54822b2d5d52597a78b630921cf439a41e32f2f9",
"scriptpubkey_type": "v0_p2wpkh",
"scriptpubkey_address": "bc1q2jpzkt2a2fvh579kxzfpeape5s0r9uhewhl5n4",
"value": 227639
},
"scriptsig": "",
"scriptsig_asm": "",
"witness": [
"304402203ad511d6a8730748b8828bc38897d360451adf620ebdc1d229c08c097c80bef202202f50c793d95b5200cf2258e03896a3be7720df0eb3b8c810c86db74341a7e83e01",
"0294992e9f4546e6e119741f908411ae531e9d1ff732d69b4dff8172aaf2a4b216"
],
"is_coinbase": false,
"sequence": 4294967293
},
{
"txid": "793f01dfdb19bf41f958fd917c16d9c4dd5d5e1a5c0434bfdb367212659d1b5b",
"vout": 0,
"prevout": {
"scriptpubkey": "0014f54edf8ae647b5300e2674523254e923d93d169f",
"scriptpubkey_asm": "OP_0 OP_PUSHBYTES_20 f54edf8ae647b5300e2674523254e923d93d169f",
"scriptpubkey_type": "v0_p2wpkh",
"scriptpubkey_address": "bc1q748dlzhxg76nqr3xw3fry48fy0vn695lvhlkxv",
"value": 227070
},
"scriptsig": "",
"scriptsig_asm": "",
"witness": [
"304402206e807ab616f4f2887ba703ae744d856142d9aca8128698419bbb67fb4fad8177022060fc65c7cd66baa88ad1e1d317a6edd5f6cb52fe8bff6e5405ffa1acf9d945d901",
"02a0ad0167c6e9edf62677404d74d3b80ea276e47e758ffaa6ca17bd65ac79f7aa"
],
"is_coinbase": false,
"sequence": 4294967293
}
],
"vout": [
{
"scriptpubkey": "00148a5c45ccfc29d209940d94525e2edb7743a1ad8a",
"scriptpubkey_asm": "OP_0 OP_PUSHBYTES_20 8a5c45ccfc29d209940d94525e2edb7743a1ad8a",
"scriptpubkey_type": "v0_p2wpkh",
"scriptpubkey_address": "bc1q3fwytn8u98fqn9qdj3f9utkmwap6rtv2ym33zm",
"value": 5500000
}
],
"size": 1375,
"weight": 2605,
"fee": 691,
"status": {
"confirmed": false
},
"order": 1788986599,
"vsize": 651,
"adjustedVsize": 651.25,
"sigops": 9,
"feePerVsize": 1.0610364683301343,
"adjustedFeePerVsize": 1.0610364683301343,
"effectiveFeePerVsize": 1.0610364683301343,
"firstSeen": 1691163298,
"uid": 120494,
"inputs": [],
"position": {
"block": 7,
"vsize": 93780091.5
},
"bestDescendant": null,
"cpfpChecked": true
}
]


@ -0,0 +1,121 @@
[
{
"txid": "7219d95161f3718335991ac6d967d24eedec370908c9879bb1e192e6d797d0a6",
"version": 1,
"locktime": 0,
"vin": [
{
"txid": "d863deb706de5a611028f7547e16ea81d7819e44beb640fb30a9ba30c585140f",
"vout": 0,
"prevout": {
"scriptpubkey": "76a914cd5b6566b455d043558829f6932edaae5d8f0ad388ac",
"scriptpubkey_asm": "OP_DUP OP_HASH160 OP_PUSHBYTES_20 cd5b6566b455d043558829f6932edaae5d8f0ad3 OP_EQUALVERIFY OP_CHECKSIG",
"scriptpubkey_type": "p2pkh",
"scriptpubkey_address": "1Kiq1dyVBzYLWGrBPWjChvKyzB2H95x5RJ",
"value": 799995000
},
"scriptsig": "483045022100aeeddfb9785c5a4b70e90d0445785c68b7a44e28853441134a70ddc4da39527602203dfe1ec1a377aaacb64ae65c7c944caf1398d2dc063f712251b4cf696d44d3cb01210314338e3e191aea3ac9e9292611faeedf0379bbe62c30fd76c7450722a1ac47c6",
"scriptsig_asm": "OP_PUSHBYTES_72 3045022100aeeddfb9785c5a4b70e90d0445785c68b7a44e28853441134a70ddc4da39527602203dfe1ec1a377aaacb64ae65c7c944caf1398d2dc063f712251b4cf696d44d3cb01 OP_PUSHBYTES_33 0314338e3e191aea3ac9e9292611faeedf0379bbe62c30fd76c7450722a1ac47c6",
"is_coinbase": false,
"sequence": 4294967293
}
],
"vout": [
{
"scriptpubkey": "6a4c5058325b8669baa9259e082f064005bc92274b559337ac317798f5d76f2d0577ed5a96042fce8c33d841b6c47a99f9597000ab04a10b34cd419fc19784d9e36f1a33fd7b000c3bce00b6000c1d1e00614b",
"scriptpubkey_asm": "OP_RETURN OP_PUSHDATA1 58325b8669baa9259e082f064005bc92274b559337ac317798f5d76f2d0577ed5a96042fce8c33d841b6c47a99f9597000ab04a10b34cd419fc19784d9e36f1a33fd7b000c3bce00b6000c1d1e00614b",
"scriptpubkey_type": "op_return",
"value": 0
},
{
"scriptpubkey": "a9144890aae025c84cb72a9730b49ca12595d6f6088d87",
"scriptpubkey_asm": "OP_HASH160 OP_PUSHBYTES_20 4890aae025c84cb72a9730b49ca12595d6f6088d OP_EQUAL",
"scriptpubkey_type": "p2sh",
"scriptpubkey_address": "38Jht2bzmJL4EwoFvvyFzejhfEb4J7KxLb",
"value": 155000
},
{
"scriptpubkey": "76a91486e7dad6617303942a448b7f8afe9653e5624a5e88ac",
"scriptpubkey_asm": "OP_DUP OP_HASH160 OP_PUSHBYTES_20 86e7dad6617303942a448b7f8afe9653e5624a5e OP_EQUALVERIFY OP_CHECKSIG",
"scriptpubkey_type": "p2pkh",
"scriptpubkey_address": "1DJKJGApgX4W8BSQ8FRPLqX78UaCskT4r2",
"value": 155000
},
{
"scriptpubkey": "76a914cd5b6566b455d043558829f6932edaae5d8f0ad388ac",
"scriptpubkey_asm": "OP_DUP OP_HASH160 OP_PUSHBYTES_20 cd5b6566b455d043558829f6932edaae5d8f0ad3 OP_EQUALVERIFY OP_CHECKSIG",
"scriptpubkey_type": "p2pkh",
"scriptpubkey_address": "1Kiq1dyVBzYLWGrBPWjChvKyzB2H95x5RJ",
"value": 799675549
}
],
"size": 350,
"weight": 1400,
"fee": 9451,
"status": {
"confirmed": false
},
"order": 2798688215,
"vsize": 350,
"adjustedVsize": 350,
"sigops": 8,
"feePerVsize": 27.002857142857142,
"adjustedFeePerVsize": 27.002857142857142,
"effectiveFeePerVsize": 27.002857142857142,
"firstSeen": 1691218536,
"uid": 513598,
"inputs": [],
"position": {
"block": 0,
"vsize": 22166
},
"bestDescendant": null,
"cpfpChecked": true
},
{
"txid": "5387881d695d4564d397026dc5f740f816f8390b4b2c5ec8c20309122712a875",
"version": 2,
"locktime": 0,
"vin": [
{
"txid": "b50225a04a1d6fbbfa7a2122bc0580396f614027b3957f476229633576f06130",
"vout": 0,
"prevout": {
"scriptpubkey": "0014a24f913f8a9c30a4c302c2c78f2fd7addb08fd07",
"scriptpubkey_asm": "OP_0 OP_PUSHBYTES_20 a24f913f8a9c30a4c302c2c78f2fd7addb08fd07",
"scriptpubkey_type": "v0_p2wpkh",
"scriptpubkey_address": "bc1q5f8ez0u2nsc2fsczctrc7t7h4hds3lg82ewqhz",
"value": 612917
},
"scriptsig": "",
"scriptsig_asm": "",
"witness": [
"3045022100a0c23953ace5d022b7a6d45d1ae1730bf20a4d594bb5d4fa7aa80e4881b44d320220008f9b144805bb91995fc0f452a56e09f4ad16fa149d71ae9b5d57c742e8e2cc01",
"03dc2c7b687019b40a68d713322675206cc266e34e5340ec982c13ff0222c3b2b6"
],
"is_coinbase": false,
"sequence": 2147483649
}
],
"vout": [
{
"scriptpubkey": "0014199a98f9589364ffe5ef5bbae45ce5dfcbb873bd",
"scriptpubkey_asm": "OP_0 OP_PUSHBYTES_20 199a98f9589364ffe5ef5bbae45ce5dfcbb873bd",
"scriptpubkey_type": "v0_p2wpkh",
"scriptpubkey_address": "bc1qrxdf372cjdj0le00twawgh89ml9msuaau62gk4",
"value": 611909
}
],
"size": 192,
"weight": 438,
"fee": 1008,
"status": {
"confirmed": false
},
"bestDescendant": null,
"descendants": null,
"adjustedFeePerVsize": 10.2283,
"sigops": 1,
"adjustedVsize": 109.5
}
]


@ -0,0 +1,139 @@
[
{
"txid": "008592364e21c1e3d62ba9538ac78a81779897b52100af5707ab063df98964f2",
"version": 1,
"locktime": 0,
"vin": [
{
"txid": "d863deb706de5a611028f7547e16ea81d7819e44beb640fb30a9ba30c585140f",
"vout": 0,
"prevout": {
"scriptpubkey": "76a914cd5b6566b455d043558829f6932edaae5d8f0ad388ac",
"scriptpubkey_asm": "OP_DUP OP_HASH160 OP_PUSHBYTES_20 cd5b6566b455d043558829f6932edaae5d8f0ad3 OP_EQUALVERIFY OP_CHECKSIG",
"scriptpubkey_type": "p2pkh",
"scriptpubkey_address": "1Kiq1dyVBzYLWGrBPWjChvKyzB2H95x5RJ",
"value": 799995000
},
"scriptsig": "483045022100c1fb331d155a7d299a0451d14fa1122b328e0e239afc9ba8dc2aff449ddc5a3a02201c1e19030d1efa432f5069cd369d7ad09a67f68501345e4db35f7b799605f55601210314338e3e191aea3ac9e9292611faeedf0379bbe62c30fd76c7450722a1ac47c6",
"scriptsig_asm": "OP_PUSHBYTES_72 3045022100c1fb331d155a7d299a0451d14fa1122b328e0e239afc9ba8dc2aff449ddc5a3a02201c1e19030d1efa432f5069cd369d7ad09a67f68501345e4db35f7b799605f55601 OP_PUSHBYTES_33 0314338e3e191aea3ac9e9292611faeedf0379bbe62c30fd76c7450722a1ac47c6",
"is_coinbase": false,
"sequence": 4294967293
}
],
"vout": [
{
"scriptpubkey": "6a4c5058325b78064160b631b5a15d9078d99c0db066449fb4c59bbfa4d987ba906e2990088b2fce8c33d841b6c47a99f9597000ab04a10b34cd419fc19784d9e36f1a33fd7b000c3bce00b6000c1d1e00614b",
"scriptpubkey_asm": "OP_RETURN OP_PUSHDATA1 58325b78064160b631b5a15d9078d99c0db066449fb4c59bbfa4d987ba906e2990088b2fce8c33d841b6c47a99f9597000ab04a10b34cd419fc19784d9e36f1a33fd7b000c3bce00b6000c1d1e00614b",
"scriptpubkey_type": "op_return",
"value": 0
},
{
"scriptpubkey": "a9144890aae025c84cb72a9730b49ca12595d6f6088d87",
"scriptpubkey_asm": "OP_HASH160 OP_PUSHBYTES_20 4890aae025c84cb72a9730b49ca12595d6f6088d OP_EQUAL",
"scriptpubkey_type": "p2sh",
"scriptpubkey_address": "38Jht2bzmJL4EwoFvvyFzejhfEb4J7KxLb",
"value": 155000
},
{
"scriptpubkey": "76a91486e7dad6617303942a448b7f8afe9653e5624a5e88ac",
"scriptpubkey_asm": "OP_DUP OP_HASH160 OP_PUSHBYTES_20 86e7dad6617303942a448b7f8afe9653e5624a5e OP_EQUALVERIFY OP_CHECKSIG",
"scriptpubkey_type": "p2pkh",
"scriptpubkey_address": "1DJKJGApgX4W8BSQ8FRPLqX78UaCskT4r2",
"value": 155000
},
{
"scriptpubkey": "76a914cd5b6566b455d043558829f6932edaae5d8f0ad388ac",
"scriptpubkey_asm": "OP_DUP OP_HASH160 OP_PUSHBYTES_20 cd5b6566b455d043558829f6932edaae5d8f0ad3 OP_EQUALVERIFY OP_CHECKSIG",
"scriptpubkey_type": "p2pkh",
"scriptpubkey_address": "1Kiq1dyVBzYLWGrBPWjChvKyzB2H95x5RJ",
"value": 799676250
}
],
"size": 350,
"weight": 1400,
"fee": 8750,
"status": {
"confirmed": false
},
"order": 4066675193,
"vsize": 350,
"adjustedVsize": 350,
"sigops": 8,
"feePerVsize": 25,
"adjustedFeePerVsize": 25,
"effectiveFeePerVsize": 25,
"firstSeen": 1691218516,
"uid": 512584,
"inputs": [],
"position": {
"block": 0,
"vsize": 13846
},
"bestDescendant": null,
"cpfpChecked": true
},
{
"txid": "b7981a624e4261c11f1246314d41e74be56af82eb557bcd054a5e0f94c023668",
"version": 2,
"locktime": 0,
"vin": [
{
"txid": "b50225a04a1d6fbbfa7a2122bc0580396f614027b3957f476229633576f06130",
"vout": 0,
"prevout": {
"scriptpubkey": "0014a24f913f8a9c30a4c302c2c78f2fd7addb08fd07",
"scriptpubkey_asm": "OP_0 OP_PUSHBYTES_20 a24f913f8a9c30a4c302c2c78f2fd7addb08fd07",
"scriptpubkey_type": "v0_p2wpkh",
"scriptpubkey_address": "bc1q5f8ez0u2nsc2fsczctrc7t7h4hds3lg82ewqhz",
"value": 612917
},
"scriptsig": "",
"scriptsig_asm": "",
"witness": [
"304402204dd10f14afa41bc76d8278140ff1ec3d3f87f2c207bbb5418cc76dab30d7f6a402207877cc9c6a2c724b6ea7a1c24ac00022469f194fd1a4bd8030bbca1787d3f5f301",
"03dc2c7b687019b40a68d713322675206cc266e34e5340ec982c13ff0222c3b2b6"
],
"is_coinbase": false,
"sequence": 2147483648
}
],
"vout": [
{
"scriptpubkey": "76a9149d32ef812385f3811634e0c0117dd153a5de10a488ac",
"scriptpubkey_asm": "OP_DUP OP_HASH160 OP_PUSHBYTES_20 9d32ef812385f3811634e0c0117dd153a5de10a4 OP_EQUALVERIFY OP_CHECKSIG",
"scriptpubkey_type": "p2pkh",
"scriptpubkey_address": "1FLC7Bag7okAkKPCyZbgZZg3Hh1EuGZ5Rd",
"value": 344697
},
{
"scriptpubkey": "00144c2671336ca8761863b4c68d64d4672491fec1b9",
"scriptpubkey_asm": "OP_0 OP_PUSHBYTES_20 4c2671336ca8761863b4c68d64d4672491fec1b9",
"scriptpubkey_type": "v0_p2wpkh",
"scriptpubkey_address": "bc1qfsn8zvmv4pmpsca5c6xkf4r8yjglasdesrawcx",
"value": 267636
}
],
"size": 225,
"weight": 573,
"fee": 584,
"status": {
"confirmed": false
},
"order": 1748369996,
"vsize": 143,
"adjustedVsize": 143.25,
"sigops": 5,
"feePerVsize": 4.076788830715532,
"adjustedFeePerVsize": 4.076788830715532,
"effectiveFeePerVsize": 4.076788830715532,
"firstSeen": 1691222376,
"uid": 526515,
"inputs": [],
"position": {
"block": 7,
"vsize": 22021095.5
},
"bestDescendant": null,
"cpfpChecked": true
}
]


@ -23,6 +23,7 @@ describe('Mempool Backend Config', () => {
AUTOMATIC_BLOCK_REINDEXING: false,
POLL_RATE_MS: 2000,
CACHE_DIR: './cache',
CACHE_ENABLED: true,
CLEAR_PROTECTION_MINUTES: 20,
RECOMMENDED_FEE_PERCENTILE: 50,
BLOCK_WEIGHT_UNITS: 4000000,
@ -46,18 +47,29 @@ describe('Mempool Backend Config', () => {
DISK_CACHE_BLOCK_INTERVAL: 6,
MAX_PUSH_TX_SIZE_WEIGHT: 400000,
ALLOW_UNREACHABLE: true,
PRICE_UPDATES_PER_HOUR: 1,
});
expect(config.ELECTRUM).toStrictEqual({ HOST: '127.0.0.1', PORT: 3306, TLS_ENABLED: true });
expect(config.ESPLORA).toStrictEqual({ REST_API_URL: 'http://127.0.0.1:3000', UNIX_SOCKET_PATH: null, RETRY_UNIX_SOCKET_AFTER: 30000 });
expect(config.ESPLORA).toStrictEqual({
REST_API_URL: 'http://127.0.0.1:3000',
UNIX_SOCKET_PATH: null,
BATCH_QUERY_BASE_SIZE: 1000,
RETRY_UNIX_SOCKET_AFTER: 30000,
REQUEST_TIMEOUT: 10000,
FALLBACK_TIMEOUT: 5000,
FALLBACK: [],
});
expect(config.CORE_RPC).toStrictEqual({
HOST: '127.0.0.1',
PORT: 8332,
USERNAME: 'mempool',
PASSWORD: 'mempool',
TIMEOUT: 60000
TIMEOUT: 60000,
COOKIE: false,
COOKIE_PATH: '/bitcoin/.cookie'
});
expect(config.SECOND_CORE_RPC).toStrictEqual({
@ -65,7 +77,9 @@ describe('Mempool Backend Config', () => {
PORT: 8332,
USERNAME: 'mempool',
PASSWORD: 'mempool',
TIMEOUT: 60000
TIMEOUT: 60000,
COOKIE: false,
COOKIE_PATH: '/bitcoin/.cookie'
});
expect(config.DATABASE).toStrictEqual({
@ -77,6 +91,7 @@ describe('Mempool Backend Config', () => {
USERNAME: 'mempool',
PASSWORD: 'mempool',
TIMEOUT: 180000,
PID_DIR: ''
});
expect(config.SYSLOG).toStrictEqual({
@ -100,11 +115,6 @@ describe('Mempool Backend Config', () => {
PASSWORD: ''
});
expect(config.PRICE_DATA_SERVER).toStrictEqual({
TOR_URL: 'http://wizpriceje6q5tdrxkyiazsgu7irquiqjy2dptezqhrtu7l2qelqktid.onion/getAllMarketPrices',
CLEARNET_URL: 'https://price.bisq.wiz.biz/getAllMarketPrices'
});
expect(config.EXTERNAL_DATA_SERVER).toStrictEqual({
MEMPOOL_API: 'https://mempool.space/api/v1',
MEMPOOL_ONION: 'http://mempoolhqx4isw62xs7abwphsq7ldayuidyx2v2oethdhhj6mlo2r6ad.onion/api/v1',
@ -127,6 +137,17 @@ describe('Mempool Backend Config', () => {
AUDIT_START_HEIGHT: 774000,
SERVERS: []
});
expect(config.MEMPOOL_SERVICES).toStrictEqual({
API: "",
ACCELERATIONS: false,
});
expect(config.REDIS).toStrictEqual({
ENABLED: false,
UNIX_SOCKET_PATH: '',
BATCH_QUERY_BASE_SIZE: 5000,
});
});
});
@ -157,9 +178,11 @@ describe('Mempool Backend Config', () => {
expect(config.SOCKS5PROXY).toStrictEqual(fixture.SOCKS5PROXY);
expect(config.PRICE_DATA_SERVER).toStrictEqual(fixture.PRICE_DATA_SERVER);
expect(config.EXTERNAL_DATA_SERVER).toStrictEqual(fixture.EXTERNAL_DATA_SERVER);
expect(config.MEMPOOL_SERVICES).toStrictEqual(fixture.MEMPOOL_SERVICES);
expect(config.REDIS).toStrictEqual(fixture.REDIS);
});
});
@ -172,41 +195,50 @@ describe('Mempool Backend Config', () => {
for (const [key, value] of Object.entries(jsonObj)) {
// We have a few cases where we can't follow the pattern
if (root === 'MEMPOOL' && key === 'HTTP_PORT') {
console.log('skipping check for MEMPOOL_HTTP_PORT');
return;
}
switch (typeof value) {
case 'object': {
if (Array.isArray(value)) {
return;
} else {
parseJson(value, key);
}
break;
if (process.env.CI) {
console.log('skipping check for MEMPOOL_HTTP_PORT');
}
default: {
continue;
}
if (root) {
//The flattened string, i.e, __MEMPOOL_ENABLED__
const replaceStr = `${root ? '__' + root + '_' : '__'}${key}__`;
//The string used as the environment variable, i.e, MEMPOOL_ENABLED
const envVarStr = `${root ? root : ''}_${key}`;
let defaultEntry;
//The string used as the default value, to be checked as a regex, i.e, __MEMPOOL_ENABLED__=${MEMPOOL_ENABLED:=(.*)}
const defaultEntry = replaceStr + '=' + '\\${' + envVarStr + ':=(.*)' + '}';
console.log(`looking for ${defaultEntry} in the start.sh script`);
const re = new RegExp(defaultEntry);
expect(startSh).toMatch(re);
if (Array.isArray(value)) {
defaultEntry = `${replaceStr}=\${${envVarStr}:=[]}`;
if (process.env.CI) {
console.log(`looking for ${defaultEntry} in the start.sh script`);
}
//Regex matching does not work with the array values
expect(startSh).toContain(defaultEntry);
} else {
defaultEntry = replaceStr + '=' + '\\${' + envVarStr + ':=(.*)' + '}';
if (process.env.CI) {
console.log(`looking for ${defaultEntry} in the start.sh script`);
}
const re = new RegExp(defaultEntry);
expect(startSh).toMatch(re);
}
//The string that actually replaces the values in the config file
const sedStr = 'sed -i "s!' + replaceStr + '!${' + replaceStr + '}!g" mempool-config.json';
console.log(`looking for ${sedStr} in the start.sh script`);
if (process.env.CI) {
console.log(`looking for ${sedStr} in the start.sh script`);
}
expect(startSh).toContain(sedStr);
break;
}
else {
parseJson(value, key);
}
}
}
parseJson(fixture);
});
});


@ -1,5 +1,5 @@
import fs from 'fs';
import { GbtGenerator, ThreadTransaction } from '../../../rust-gbt';
import { GbtGenerator, ThreadTransaction } from 'rust-gbt';
import path from 'path';
const baseline = require('./test-data/target-template.json');
@ -15,7 +15,7 @@ describe('Rust GBT', () => {
test('should produce the same template as getBlockTemplate from Bitcoin Core', async () => {
const rustGbt = new GbtGenerator();
const { mempool, maxUid } = mempoolFromArrayBuffer(vectorBuffer.buffer);
const result = await rustGbt.make(mempool, maxUid);
const result = await rustGbt.make(mempool, [], maxUid);
const blocks: [string, number][][] = result.blocks.map(block => {
return block.map(uid => [vectorUidMap.get(uid) || 'missing', uid]);


@ -6,16 +6,17 @@ import rbfCache from './rbf-cache';
const PROPAGATION_MARGIN = 180; // in seconds, time since a transaction is first seen after which it is assumed to have propagated to all miners
class Audit {
auditBlock(transactions: MempoolTransactionExtended[], projectedBlocks: MempoolBlockWithTransactions[], mempool: { [txId: string]: MempoolTransactionExtended })
: { censored: string[], added: string[], fresh: string[], sigop: string[], fullrbf: string[], score: number, similarity: number } {
auditBlock(transactions: MempoolTransactionExtended[], projectedBlocks: MempoolBlockWithTransactions[], mempool: { [txId: string]: MempoolTransactionExtended }, useAccelerations: boolean = false)
: { censored: string[], added: string[], fresh: string[], sigop: string[], fullrbf: string[], accelerated: string[], score: number, similarity: number } {
if (!projectedBlocks?.[0]?.transactionIds || !mempool) {
return { censored: [], added: [], fresh: [], sigop: [], fullrbf: [], score: 0, similarity: 1 };
return { censored: [], added: [], fresh: [], sigop: [], fullrbf: [], accelerated: [], score: 1, similarity: 1 };
}
const matches: string[] = []; // present in both mined block and template
const added: string[] = []; // present in mined block, not in template
const fresh: string[] = []; // missing, but firstSeen or lastBoosted within PROPAGATION_MARGIN
const fullrbf: string[] = []; // either missing or present, and part of a fullrbf replacement
const rbf: string[] = []; // either missing or present, and either part of a full-rbf replacement, or a conflict with the mined block
const accelerated: string[] = []; // prioritized by the mempool accelerator
const isCensored = {}; // missing, without excuse
const isDisplaced = {};
let displacedWeight = 0;
@ -28,6 +29,9 @@ class Audit {
const now = Math.round((Date.now() / 1000));
for (const tx of transactions) {
inBlock[tx.txid] = tx;
if (mempool[tx.txid] && mempool[tx.txid].acceleration) {
accelerated.push(tx.txid);
}
}
// coinbase is always expected
if (transactions[0]) {
@ -36,8 +40,9 @@ class Audit {
// look for transactions that were expected in the template, but missing from the mined block
for (const txid of projectedBlocks[0].transactionIds) {
if (!inBlock[txid]) {
if (rbfCache.isFullRbf(txid)) {
fullrbf.push(txid);
// allow missing transactions which either belong to a full rbf tree, or conflict with any transaction in the mined block
if (rbfCache.has(txid) && (rbfCache.isFullRbf(txid) || rbfCache.anyInSameTree(txid, (tx) => inBlock[tx.txid]))) {
rbf.push(txid);
} else if (mempool[txid]?.firstSeen != null && (now - (mempool[txid]?.firstSeen || 0)) <= PROPAGATION_MARGIN) {
// tx is recent, may have reached the miner too late for inclusion
fresh.push(txid);
@ -98,8 +103,8 @@ class Audit {
if (inTemplate[tx.txid]) {
matches.push(tx.txid);
} else {
if (rbfCache.isFullRbf(tx.txid)) {
fullrbf.push(tx.txid);
if (rbfCache.has(tx.txid)) {
rbf.push(tx.txid);
} else if (!isDisplaced[tx.txid]) {
added.push(tx.txid);
}
@ -139,7 +144,12 @@ class Audit {
const numCensored = Object.keys(isCensored).length;
const numMatches = matches.length - 1; // adjust for coinbase tx
const score = numMatches > 0 ? (numMatches / (numMatches + numCensored)) : 0;
let score = 0;
if (numMatches <= 0 && numCensored <= 0) {
score = 1;
} else if (numMatches > 0) {
score = (numMatches / (numMatches + numCensored));
}
const similarity = projectedWeight ? matchedWeight / projectedWeight : 1;
return {
@ -147,7 +157,8 @@ class Audit {
added,
fresh,
sigop: [],
fullrbf,
fullrbf: rbf,
accelerated,
score,
similarity,
};
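To make the scoring change concrete: a block with no matches and no censored transactions now audits as a perfect 1 instead of 0. The formula in isolation, with invented counts:

// score = matches / (matches + censored); the empty comparison counts as perfect
function auditScore(numMatches: number, numCensored: number): number {
  if (numMatches <= 0 && numCensored <= 0) {
    return 1; // nothing to compare — previously this case scored 0
  }
  return numMatches > 0 ? numMatches / (numMatches + numCensored) : 0;
}
console.log(auditScore(99, 1)); // 0.99
console.log(auditScore(0, 0));  // 1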

View File

@ -3,10 +3,14 @@ import { IEsploraApi } from './esplora-api.interface';
export interface AbstractBitcoinApi {
$getRawMempool(): Promise<IEsploraApi.Transaction['txid'][]>;
$getRawTransaction(txId: string, skipConversion?: boolean, addPrevout?: boolean, lazyPrevouts?: boolean): Promise<IEsploraApi.Transaction>;
$getRawTransactions(txids: string[]): Promise<IEsploraApi.Transaction[]>;
$getMempoolTransactions(txids: string[]): Promise<IEsploraApi.Transaction[]>;
$getAllMempoolTransactions(lastTxid?: string, max_txs?: number): Promise<IEsploraApi.Transaction[]>;
$getTransactionHex(txId: string): Promise<string>;
$getBlockHeightTip(): Promise<number>;
$getBlockHashTip(): Promise<string>;
$getTxIdsForBlock(hash: string): Promise<string[]>;
$getTxsForBlock(hash: string): Promise<IEsploraApi.Transaction[]>;
$getBlockHash(height: number): Promise<string>;
$getBlockHeader(hash: string): Promise<string>;
$getBlock(hash: string): Promise<IEsploraApi.Block>;
@ -14,10 +18,16 @@ export interface AbstractBitcoinApi {
$getAddress(address: string): Promise<IEsploraApi.Address>;
$getAddressTransactions(address: string, lastSeenTxId: string): Promise<IEsploraApi.Transaction[]>;
$getAddressPrefix(prefix: string): string[];
$getScriptHash(scripthash: string): Promise<IEsploraApi.ScriptHash>;
$getScriptHashTransactions(address: string, lastSeenTxId: string): Promise<IEsploraApi.Transaction[]>;
$sendRawTransaction(rawTransaction: string): Promise<string>;
$getOutspend(txId: string, vout: number): Promise<IEsploraApi.Outspend>;
$getOutspends(txId: string): Promise<IEsploraApi.Outspend[]>;
$getBatchedOutspends(txId: string[]): Promise<IEsploraApi.Outspend[][]>;
$getBatchedOutspendsInternal(txId: string[]): Promise<IEsploraApi.Outspend[][]>;
$getOutSpendsByOutpoint(outpoints: { txid: string, vout: number }[]): Promise<IEsploraApi.Outspend[]>;
startHealthChecks(): void;
}
export interface BitcoinRpcCredentials {
host: string;
@ -25,4 +35,5 @@ export interface BitcoinRpcCredentials {
user: string;
pass: string;
timeout: number;
cookie?: string;
}

View File

@ -5,6 +5,7 @@ import { IEsploraApi } from './esplora-api.interface';
import blocks from '../blocks';
import mempool from '../mempool';
import { TransactionExtended } from '../../mempool.interfaces';
import transactionUtils from '../transaction-utils';
class BitcoinApi implements AbstractBitcoinApi {
private rawMempoolCache: IBitcoinApi.RawMempool | null = null;
@ -59,9 +60,38 @@ class BitcoinApi implements AbstractBitcoinApi {
});
}
$getTransactionHex(txId: string): Promise<string> {
return this.$getRawTransaction(txId, true)
.then((tx) => tx.hex || '');
async $getRawTransactions(txids: string[]): Promise<IEsploraApi.Transaction[]> {
const txs: IEsploraApi.Transaction[] = [];
for (const txid of txids) {
try {
const tx = await this.$getRawTransaction(txid, false, true);
txs.push(tx);
} catch (err) {
// skip failures
}
}
return txs;
}
$getMempoolTransactions(txids: string[]): Promise<IEsploraApi.Transaction[]> {
throw new Error('Method getMempoolTransactions not supported by the Bitcoin RPC API.');
}
$getAllMempoolTransactions(lastTxid?: string, max_txs?: number): Promise<IEsploraApi.Transaction[]> {
throw new Error('Method getAllMempoolTransactions not supported by the Bitcoin RPC API.');
}
async $getTransactionHex(txId: string): Promise<string> {
const txInMempool = mempool.getMempool()[txId];
if (txInMempool && txInMempool.hex) {
return txInMempool.hex;
}
return this.bitcoindClient.getRawTransaction(txId, true)
.then((transaction: IBitcoinApi.Transaction) => {
return transaction.hex;
});
}
$getBlockHeightTip(): Promise<number> {
@ -77,6 +107,10 @@ class BitcoinApi implements AbstractBitcoinApi {
.then((rpcBlock: IBitcoinApi.Block) => rpcBlock.tx);
}
$getTxsForBlock(hash: string): Promise<IEsploraApi.Transaction[]> {
throw new Error('Method getTxsForBlock not supported by the Bitcoin RPC API.');
}
$getRawBlock(hash: string): Promise<Buffer> {
return this.bitcoindClient.getBlock(hash, 0)
.then((raw: string) => Buffer.from(raw, "hex"));
@ -108,6 +142,14 @@ class BitcoinApi implements AbstractBitcoinApi {
throw new Error('Method getAddressTransactions not supported by the Bitcoin RPC API.');
}
$getScriptHash(scripthash: string): Promise<IEsploraApi.ScriptHash> {
throw new Error('Method getScriptHash not supported by the Bitcoin RPC API.');
}
$getScriptHashTransactions(scripthash: string, lastSeenTxId: string): Promise<IEsploraApi.Transaction[]> {
throw new Error('Method getScriptHashTransactions not supported by the Bitcoin RPC API.');
}
$getRawMempool(): Promise<IEsploraApi.Transaction['txid'][]> {
return this.bitcoindClient.getRawMemPool();
}
@ -169,6 +211,19 @@ class BitcoinApi implements AbstractBitcoinApi {
return outspends;
}
async $getBatchedOutspendsInternal(txId: string[]): Promise<IEsploraApi.Outspend[][]> {
return this.$getBatchedOutspends(txId);
}
async $getOutSpendsByOutpoint(outpoints: { txid: string, vout: number }[]): Promise<IEsploraApi.Outspend[]> {
const outspends: IEsploraApi.Outspend[] = [];
for (const outpoint of outpoints) {
const outspend = await this.$getOutspend(outpoint.txid, outpoint.vout);
outspends.push(outspend);
}
return outspends;
}
$getEstimatedHashrate(blockHeight: number): Promise<number> {
// 120 is the default block span in Core
return this.bitcoindClient.getNetworkHashPs(120, blockHeight);
@ -193,7 +248,7 @@ class BitcoinApi implements AbstractBitcoinApi {
scriptpubkey: vout.scriptPubKey.hex,
scriptpubkey_address: vout.scriptPubKey && vout.scriptPubKey.address ? vout.scriptPubKey.address
: vout.scriptPubKey.addresses ? vout.scriptPubKey.addresses[0] : '',
scriptpubkey_asm: vout.scriptPubKey.asm ? this.convertScriptSigAsm(vout.scriptPubKey.hex) : '',
scriptpubkey_asm: vout.scriptPubKey.asm ? transactionUtils.convertScriptSigAsm(vout.scriptPubKey.hex) : '',
scriptpubkey_type: this.translateScriptPubKeyType(vout.scriptPubKey.type),
};
});
@ -203,7 +258,7 @@ class BitcoinApi implements AbstractBitcoinApi {
is_coinbase: !!vin.coinbase,
prevout: null,
scriptsig: vin.scriptSig && vin.scriptSig.hex || vin.coinbase || '',
scriptsig_asm: vin.scriptSig && this.convertScriptSigAsm(vin.scriptSig.hex) || '',
scriptsig_asm: vin.scriptSig && transactionUtils.convertScriptSigAsm(vin.scriptSig.hex) || '',
sequence: vin.sequence,
txid: vin.txid || '',
vout: vin.vout || 0,
@ -275,7 +330,7 @@ class BitcoinApi implements AbstractBitcoinApi {
}
const innerTx = await this.$getRawTransaction(vin.txid, false, false);
vin.prevout = innerTx.vout[vin.vout];
this.addInnerScriptsToVin(vin);
transactionUtils.addInnerScriptsToVin(vin);
}
return transaction;
}
@ -314,7 +369,7 @@ class BitcoinApi implements AbstractBitcoinApi {
}
const innerTx = await this.$getRawTransaction(transaction.vin[i].txid, false, false);
transaction.vin[i].prevout = innerTx.vout[transaction.vin[i].vout];
this.addInnerScriptsToVin(transaction.vin[i]);
transactionUtils.addInnerScriptsToVin(transaction.vin[i]);
totalIn += innerTx.vout[transaction.vin[i].vout].value;
}
if (lazyPrevouts && transaction.vin.length > 12) {
@ -326,122 +381,7 @@ class BitcoinApi implements AbstractBitcoinApi {
return transaction;
}
private convertScriptSigAsm(hex: string): string {
const buf = Buffer.from(hex, 'hex');
const b: string[] = [];
let i = 0;
while (i < buf.length) {
const op = buf[i];
if (op >= 0x01 && op <= 0x4e) {
i++;
let push: number;
if (op === 0x4c) {
push = buf.readUInt8(i);
b.push('OP_PUSHDATA1');
i += 1;
} else if (op === 0x4d) {
push = buf.readUInt16LE(i);
b.push('OP_PUSHDATA2');
i += 2;
} else if (op === 0x4e) {
push = buf.readUInt32LE(i);
b.push('OP_PUSHDATA4');
i += 4;
} else {
push = op;
b.push('OP_PUSHBYTES_' + push);
}
const data = buf.slice(i, i + push);
if (data.length !== push) {
break;
}
b.push(data.toString('hex'));
i += data.length;
} else {
if (op === 0x00) {
b.push('OP_0');
} else if (op === 0x4f) {
b.push('OP_PUSHNUM_NEG1');
} else if (op === 0xb1) {
b.push('OP_CLTV');
} else if (op === 0xb2) {
b.push('OP_CSV');
} else if (op === 0xba) {
b.push('OP_CHECKSIGADD');
} else {
const opcode = bitcoinjs.script.toASM([ op ]);
if (opcode && op < 0xfd) {
if (/^OP_(\d+)$/.test(opcode)) {
b.push(opcode.replace(/^OP_(\d+)$/, 'OP_PUSHNUM_$1'));
} else {
b.push(opcode);
}
} else {
b.push('OP_RETURN_' + op);
}
}
i += 1;
}
}
return b.join(' ');
}
private addInnerScriptsToVin(vin: IEsploraApi.Vin): void {
if (!vin.prevout) {
return;
}
if (vin.prevout.scriptpubkey_type === 'p2sh') {
const redeemScript = vin.scriptsig_asm.split(' ').reverse()[0];
vin.inner_redeemscript_asm = this.convertScriptSigAsm(redeemScript);
if (vin.witness && vin.witness.length > 2) {
const witnessScript = vin.witness[vin.witness.length - 1];
vin.inner_witnessscript_asm = this.convertScriptSigAsm(witnessScript);
}
}
if (vin.prevout.scriptpubkey_type === 'v0_p2wsh' && vin.witness) {
const witnessScript = vin.witness[vin.witness.length - 1];
vin.inner_witnessscript_asm = this.convertScriptSigAsm(witnessScript);
}
if (vin.prevout.scriptpubkey_type === 'v1_p2tr' && vin.witness) {
const witnessScript = this.witnessToP2TRScript(vin.witness);
if (witnessScript !== null) {
vin.inner_witnessscript_asm = this.convertScriptSigAsm(witnessScript);
}
}
}
/**
* This function must only be called when we know the witness we are parsing
* is a taproot witness.
* @param witness An array of hex strings that represents the witness stack of
* the input.
* @returns null if the witness is not a script spend, and the hex string of
* the script item if it is a script spend.
*/
private witnessToP2TRScript(witness: string[]): string | null {
if (witness.length < 2) return null;
// Note: see BIP341 for parsing details of witness stack
// If there are at least two witness elements, and the first byte of the
// last element is 0x50, this last element is called annex a and
// is removed from the witness stack.
const hasAnnex = witness[witness.length - 1].substring(0, 2) === '50';
// If there are at least two witness elements left, script path spending is used.
// Call the second-to-last stack element s, the script.
// (Note: this phrasing from BIP341 assumes we've *removed* the annex from the stack)
if (hasAnnex && witness.length < 3) return null;
const positionOfScript = hasAnnex ? witness.length - 3 : witness.length - 2;
return witness[positionOfScript];
}
public startHealthChecks(): void {};
}
export default BitcoinApi;
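As a worked illustration of the witnessToP2TRScript rules above, with invented hex items rather than a real transaction:

// [signature, script, control block, annex] — the annex is flagged by a leading 0x50 byte
const witness = ['c0ffee', '51', 'c1aa', '50beef'];
const hasAnnex = witness[witness.length - 1].substring(0, 2) === '50';
// the script sits third-from-last when an annex is present, else second-from-last
const script = hasAnnex ? witness[witness.length - 3] : witness[witness.length - 2];
console.log(script); // '51' — this stack is a script path spend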

View File

@ -8,6 +8,7 @@ const nodeRpcCredentials: BitcoinRpcCredentials = {
user: config.CORE_RPC.USERNAME,
pass: config.CORE_RPC.PASSWORD,
timeout: config.CORE_RPC.TIMEOUT,
cookie: config.CORE_RPC.COOKIE ? config.CORE_RPC.COOKIE_PATH : undefined,
};
export default new bitcoin.Client(nodeRpcCredentials);
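For background on the new cookie option: when run without rpcuser/rpcpassword, Bitcoin Core writes ephemeral credentials as a single user:password line to a .cookie file in its datadir. A hedged sketch of consuming it — readCookie is illustrative, not the actual bitcoind client's internals:

import fs from 'fs';
// Core's cookie file holds one "user:password" line, e.g. "__cookie__:<random>"
function readCookie(cookiePath: string): { user: string, pass: string } {
  const [user, pass] = fs.readFileSync(cookiePath, 'utf-8').trim().split(':');
  return { user, pass };
}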

View File

@ -8,6 +8,7 @@ const nodeRpcCredentials: BitcoinRpcCredentials = {
user: config.SECOND_CORE_RPC.USERNAME,
pass: config.SECOND_CORE_RPC.PASSWORD,
timeout: config.SECOND_CORE_RPC.TIMEOUT,
cookie: config.SECOND_CORE_RPC.COOKIE ? config.SECOND_CORE_RPC.COOKIE_PATH : undefined,
};
export default new bitcoin.Client(nodeRpcCredentials);

View File

@ -6,7 +6,7 @@ import websocketHandler from '../websocket-handler';
import mempool from '../mempool';
import feeApi from '../fee-api';
import mempoolBlocks from '../mempool-blocks';
import bitcoinApi, { bitcoinCoreApi } from './bitcoin-api-factory';
import bitcoinApi from './bitcoin-api-factory';
import { Common } from '../common';
import backendInfo from '../backend-info';
import transactionUtils from '../transaction-utils';
@ -24,7 +24,6 @@ class BitcoinRoutes {
public initRoutes(app: Application) {
app
.get(config.MEMPOOL.API_URL_PREFIX + 'transaction-times', this.getTransactionTimes)
.get(config.MEMPOOL.API_URL_PREFIX + 'outspends', this.$getBatchedOutspends)
.get(config.MEMPOOL.API_URL_PREFIX + 'cpfp/:txId', this.$getCpfpInfo)
.get(config.MEMPOOL.API_URL_PREFIX + 'difficulty-adjustment', this.getDifficultyChange)
.get(config.MEMPOOL.API_URL_PREFIX + 'fees/recommended', this.getRecommendedFees)
@ -112,6 +111,7 @@ class BitcoinRoutes {
.get(config.MEMPOOL.API_URL_PREFIX + 'tx/:txId/hex', this.getRawTransaction)
.get(config.MEMPOOL.API_URL_PREFIX + 'tx/:txId/status', this.getTransactionStatus)
.get(config.MEMPOOL.API_URL_PREFIX + 'tx/:txId/outspends', this.getTransactionOutspends)
.get(config.MEMPOOL.API_URL_PREFIX + 'txs/outspends', this.$getBatchedOutspends)
.get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash/header', this.getBlockHeader)
.get(config.MEMPOOL.API_URL_PREFIX + 'blocks/tip/hash', this.getBlockTipHash)
.get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash/raw', this.getRawBlock)
@ -121,6 +121,8 @@ class BitcoinRoutes {
.get(config.MEMPOOL.API_URL_PREFIX + 'block-height/:height', this.getBlockHeight)
.get(config.MEMPOOL.API_URL_PREFIX + 'address/:address', this.getAddress)
.get(config.MEMPOOL.API_URL_PREFIX + 'address/:address/txs', this.getAddressTransactions)
.get(config.MEMPOOL.API_URL_PREFIX + 'scripthash/:scripthash', this.getScriptHash)
.get(config.MEMPOOL.API_URL_PREFIX + 'scripthash/:scripthash/txs', this.getScriptHashTransactions)
.get(config.MEMPOOL.API_URL_PREFIX + 'address-prefix/:prefix', this.getAddressPrefix)
;
}
@ -172,24 +174,20 @@ class BitcoinRoutes {
res.json(times);
}
private async $getBatchedOutspends(req: Request, res: Response) {
if (!Array.isArray(req.query.txId)) {
res.status(500).send('Not an array');
private async $getBatchedOutspends(req: Request, res: Response): Promise<IEsploraApi.Outspend[][] | void> {
const txids_csv = req.query.txids;
if (!txids_csv || typeof txids_csv !== 'string') {
res.status(500).send('Invalid txids format');
return;
}
if (req.query.txId.length > 50) {
const txids = txids_csv.split(',');
if (txids.length > 50) {
res.status(400).send('Too many txids requested');
return;
}
const txIds: string[] = [];
for (const _txId in req.query.txId) {
if (typeof req.query.txId[_txId] === 'string') {
txIds.push(req.query.txId[_txId].toString());
}
}
try {
const batchedOutspends = await bitcoinApi.$getBatchedOutspends(txIds);
const batchedOutspends = await bitcoinApi.$getBatchedOutspends(txids);
res.json(batchedOutspends);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
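// Editor's note: batched outspends are now requested as one CSV query parameter,
// capped at 50 txids, rather than repeated ?txId= array entries. Illustrative
// call shape (placeholder txids; the prefix depends on API_URL_PREFIX):
//   GET {API_URL_PREFIX}txs/outspends?txids=<txidA>,<txidB>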
@ -212,6 +210,7 @@ class BitcoinRoutes {
effectiveFeePerVsize: tx.effectiveFeePerVsize || null,
sigops: tx.sigops,
adjustedVsize: tx.adjustedVsize,
acceleration: tx.acceleration
});
return;
}
@ -248,7 +247,7 @@ class BitcoinRoutes {
private async getTransaction(req: Request, res: Response) {
try {
const transaction = await transactionUtils.$getTransactionExtended(req.params.txId, true);
const transaction = await transactionUtils.$getTransactionExtended(req.params.txId, true, false, false, true);
res.json(transaction);
} catch (e) {
let statusCode = 500;
@ -475,13 +474,13 @@ class BitcoinRoutes {
}
let nextHash = startFromHash;
for (let i = 0; i < 10 && nextHash; i++) {
for (let i = 0; i < 15 && nextHash; i++) {
const localBlock = blocks.getBlocks().find((b) => b.id === nextHash);
if (localBlock) {
returnBlocks.push(localBlock);
nextHash = localBlock.previousblockhash;
} else {
const block = await bitcoinCoreApi.$getBlock(nextHash);
const block = await bitcoinApi.$getBlock(nextHash);
returnBlocks.push(block);
nextHash = block.previousblockhash;
}
@ -567,6 +566,49 @@ class BitcoinRoutes {
}
}
private async getScriptHash(req: Request, res: Response) {
if (config.MEMPOOL.BACKEND === 'none') {
res.status(405).send('Address lookups cannot be used with bitcoind as backend.');
return;
}
try {
// electrum expects scripthashes in little-endian
const electrumScripthash = req.params.scripthash.match(/../g)?.reverse().join('') ?? '';
const addressData = await bitcoinApi.$getScriptHash(electrumScripthash);
res.json(addressData);
} catch (e) {
if (e instanceof Error && e.message && (e.message.indexOf('too long') > 0 || e.message.indexOf('confirmed status') > 0)) {
return res.status(413).send(e instanceof Error ? e.message : e);
}
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async getScriptHashTransactions(req: Request, res: Response): Promise<void> {
if (config.MEMPOOL.BACKEND === 'none') {
res.status(405).send('Address lookups cannot be used with bitcoind as backend.');
return;
}
try {
// electrum expects scripthashes in little-endian
const electrumScripthash = req.params.scripthash.match(/../g)?.reverse().join('') ?? '';
let lastTxId: string = '';
if (req.query.after_txid && typeof req.query.after_txid === 'string') {
lastTxId = req.query.after_txid;
}
const transactions = await bitcoinApi.$getScriptHashTransactions(electrumScripthash, lastTxId);
res.json(transactions);
} catch (e) {
if (e instanceof Error && e.message && (e.message.indexOf('too long') > 0 || e.message.indexOf('confirmed status') > 0)) {
res.status(413).send(e instanceof Error ? e.message : e);
return;
}
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async getAddressPrefix(req: Request, res: Response) {
try {
const blockHash = await bitcoinApi.$getAddressPrefix(req.params.prefix);

View File

@ -126,6 +126,77 @@ class BitcoindElectrsApi extends BitcoinApi implements AbstractBitcoinApi {
}
}
async $getScriptHash(scripthash: string): Promise<IEsploraApi.ScriptHash> {
try {
const balance = await this.electrumClient.blockchainScripthash_getBalance(scripthash);
let history = memoryCache.get<IElectrumApi.ScriptHashHistory[]>('Scripthash_getHistory', scripthash);
if (!history) {
history = await this.electrumClient.blockchainScripthash_getHistory(scripthash);
memoryCache.set('Scripthash_getHistory', scripthash, history, 2);
}
const unconfirmed = history ? history.filter((h) => h.fee).length : 0;
return {
'scripthash': scripthash,
'chain_stats': {
'funded_txo_count': 0,
'funded_txo_sum': balance.confirmed ? balance.confirmed : 0,
'spent_txo_count': 0,
'spent_txo_sum': balance.confirmed < 0 ? balance.confirmed : 0,
'tx_count': (history?.length || 0) - unconfirmed,
},
'mempool_stats': {
'funded_txo_count': 0,
'funded_txo_sum': balance.unconfirmed > 0 ? balance.unconfirmed : 0,
'spent_txo_count': 0,
'spent_txo_sum': balance.unconfirmed < 0 ? -balance.unconfirmed : 0,
'tx_count': unconfirmed,
},
'electrum': true,
};
} catch (e: any) {
throw new Error(typeof e === 'string' ? e : e && e.message || e);
}
}
async $getScriptHashTransactions(scripthash: string, lastSeenTxId?: string): Promise<IEsploraApi.Transaction[]> {
try {
loadingIndicators.setProgress('address-' + scripthash, 0);
const transactions: IEsploraApi.Transaction[] = [];
let history = memoryCache.get<IElectrumApi.ScriptHashHistory[]>('Scripthash_getHistory', scripthash);
if (!history) {
history = await this.electrumClient.blockchainScripthash_getHistory(scripthash);
memoryCache.set('Scripthash_getHistory', scripthash, history, 2);
}
if (!history) {
throw new Error('failed to get scripthash history');
}
history.sort((a, b) => (b.height || 9999999) - (a.height || 9999999));
let startingIndex = 0;
if (lastSeenTxId) {
const pos = history.findIndex((historicalTx) => historicalTx.tx_hash === lastSeenTxId);
if (pos !== -1) { // findIndex yields -1 when absent; position 0 is valid
startingIndex = pos + 1;
}
}
const endIndex = Math.min(startingIndex + 10, history.length);
for (let i = startingIndex; i < endIndex; i++) {
const tx = await this.$getRawTransaction(history[i].tx_hash, false, true);
transactions.push(tx);
loadingIndicators.setProgress('address-' + scripthash, (i + 1) / endIndex * 100);
}
return transactions;
} catch (e: any) {
loadingIndicators.setProgress('address-' + scripthash, 100);
throw new Error(typeof e === 'string' ? e : e && e.message || e);
}
}
private $getScriptHashBalance(scriptHash: string): Promise<IElectrumApi.ScriptHashBalance> {
return this.electrumClient.blockchainScripthash_getBalance(this.encodeScriptHash(scriptHash));
}
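The reversals in the scripthash routes exist because the Electrum protocol identifies scripts by sha256(scriptPubKey) serialized in reverse (little-endian) byte order, while callers pass the big-endian form. A small sketch of the full conversion; the input hex is a placeholder:

import { createHash } from 'crypto';
function toElectrumScripthash(scriptPubKeyHex: string): string {
  const hash = createHash('sha256').update(Buffer.from(scriptPubKeyHex, 'hex')).digest('hex');
  return hash.match(/../g)!.reverse().join(''); // byte-reverse for electrum
}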

View File

@ -6,6 +6,7 @@ export namespace IEsploraApi {
size: number;
weight: number;
fee: number;
sigops?: number;
vin: Vin[];
vout: Vout[];
status: Status;
@ -99,6 +100,13 @@ export namespace IEsploraApi {
electrum?: boolean;
}
export interface ScriptHash {
scripthash: string;
chain_stats: ChainStats;
mempool_stats: MempoolStats;
electrum?: boolean;
}
export interface ChainStats {
funded_txo_count: number;
funded_txo_sum: number;

View File

@ -1,104 +1,276 @@
import config from '../../config';
import axios, { AxiosRequestConfig } from 'axios';
import axios, { AxiosResponse } from 'axios';
import http from 'http';
import { AbstractBitcoinApi } from './bitcoin-api-abstract-factory';
import { IEsploraApi } from './esplora-api.interface';
import logger from '../../logger';
const axiosConnection = axios.create({
httpAgent: new http.Agent({ keepAlive: true, })
});
interface FailoverHost {
host: string,
rtts: number[],
rtt: number,
failures: number,
latestHeight?: number,
socket?: boolean,
outOfSync?: boolean,
unreachable?: boolean,
preferred?: boolean,
}
class ElectrsApi implements AbstractBitcoinApi {
private axiosConfigWithUnixSocket: AxiosRequestConfig = config.ESPLORA.UNIX_SOCKET_PATH ? {
socketPath: config.ESPLORA.UNIX_SOCKET_PATH,
timeout: 10000,
} : {
timeout: 10000,
};
private axiosConfigTcpSocketOnly: AxiosRequestConfig = {
timeout: 10000,
};
unixSocketRetryTimeout;
activeAxiosConfig;
class FailoverRouter {
activeHost: FailoverHost;
fallbackHost: FailoverHost;
hosts: FailoverHost[];
multihost: boolean;
pollInterval: number = 60000;
pollTimer: NodeJS.Timeout | null = null;
pollConnection = axios.create();
requestConnection = axios.create({
httpAgent: new http.Agent({ keepAlive: true })
});
constructor() {
this.activeAxiosConfig = this.axiosConfigWithUnixSocket;
// setup list of hosts
this.hosts = (config.ESPLORA.FALLBACK || []).map(domain => {
return {
host: domain,
rtts: [],
rtt: Infinity,
failures: 0,
};
});
this.activeHost = {
host: config.ESPLORA.UNIX_SOCKET_PATH || config.ESPLORA.REST_API_URL,
rtts: [],
rtt: 0,
failures: 0,
socket: !!config.ESPLORA.UNIX_SOCKET_PATH,
preferred: true,
};
this.fallbackHost = this.activeHost;
this.hosts.unshift(this.activeHost);
this.multihost = this.hosts.length > 1;
}
fallbackToTcpSocket() {
if (!this.unixSocketRetryTimeout) {
logger.err(`Unable to connect to esplora unix socket. Falling back to tcp socket. Retrying unix socket in ${config.ESPLORA.RETRY_UNIX_SOCKET_AFTER / 1000} seconds`);
// Retry the unix socket after a few seconds
this.unixSocketRetryTimeout = setTimeout(() => {
logger.info(`Retrying to use unix socket for esplora now (applied for the next query)`);
this.activeAxiosConfig = this.axiosConfigWithUnixSocket;
this.unixSocketRetryTimeout = undefined;
}, config.ESPLORA.RETRY_UNIX_SOCKET_AFTER);
public startHealthChecks(): void {
// use axios interceptors to measure request rtt
this.pollConnection.interceptors.request.use((config) => {
config['meta'] = { startTime: Date.now() };
return config;
});
this.pollConnection.interceptors.response.use((response) => {
response.config['meta'].rtt = Date.now() - response.config['meta'].startTime;
return response;
});
if (this.multihost) {
this.pollHosts();
}
}
// start polling hosts to measure availability & rtt
private async pollHosts(): Promise<void> {
if (this.pollTimer) {
clearTimeout(this.pollTimer);
}
// Use the TCP socket (reach a different esplora instance through nginx)
this.activeAxiosConfig = this.axiosConfigTcpSocketOnly;
const results = await Promise.allSettled(this.hosts.map(async (host) => {
if (host.socket) {
return this.pollConnection.get<number>('/blocks/tip/height', { socketPath: host.host, timeout: config.ESPLORA.FALLBACK_TIMEOUT });
} else {
return this.pollConnection.get<number>(host.host + '/blocks/tip/height', { timeout: config.ESPLORA.FALLBACK_TIMEOUT });
}
}));
const maxHeight = results.reduce((max, result) => Math.max(max, result.status === 'fulfilled' ? result.value?.data || 0 : 0), 0);
// update rtts & sync status
for (let i = 0; i < results.length; i++) {
const host = this.hosts[i];
const result = results[i].status === 'fulfilled' ? (results[i] as PromiseFulfilledResult<AxiosResponse<number, any>>).value : null;
if (result) {
const height = result.data;
const rtt = result.config['meta'].rtt;
host.rtts.unshift(rtt);
host.rtts = host.rtts.slice(0, 5); // slice() does not mutate; keep only the 5 most recent samples
host.rtt = host.rtts.reduce((acc, l) => acc + l, 0) / host.rtts.length;
host.latestHeight = height;
if (height == null || isNaN(height) || (maxHeight - height > 2)) {
host.outOfSync = true;
} else {
host.outOfSync = false;
}
host.unreachable = false;
} else {
host.outOfSync = true;
host.unreachable = true;
}
}
this.sortHosts();
logger.debug(`Tomahawk ranking:\n${this.hosts.map((host, index) => this.formatRanking(index, host, this.activeHost, maxHeight)).join('\n')}`);
// switch if the current host is out of sync or significantly slower than the next best alternative
if (this.activeHost.outOfSync || this.activeHost.unreachable || (this.activeHost !== this.hosts[0] && this.hosts[0].preferred) || (!this.activeHost.preferred && this.activeHost.rtt > (this.hosts[0].rtt * 2) + 50)) {
if (this.activeHost.unreachable) {
logger.warn(`🚨🚨🚨 Unable to reach ${this.activeHost.host}, failing over to next best alternative 🚨🚨🚨`);
} else if (this.activeHost.outOfSync) {
logger.warn(`🚨🚨🚨 ${this.activeHost.host} has fallen behind, failing over to next best alternative 🚨🚨🚨`);
} else {
logger.debug(`🛠️ ${this.activeHost.host} is no longer the best esplora host 🛠️`);
}
this.electHost();
}
this.pollTimer = setTimeout(() => { this.pollHosts(); }, this.pollInterval);
}
$queryWrapper<T>(url, responseType = 'json'): Promise<T> {
return axiosConnection.get<T>(url, { ...this.activeAxiosConfig, responseType: responseType })
.then((response) => response.data)
private formatRanking(index: number, host: FailoverHost, active: FailoverHost, maxHeight: number): string {
const heightStatus = host.outOfSync ? '🚫' : (host.latestHeight && host.latestHeight < maxHeight ? '🟧' : '✅');
return `${host === active ? '⭐️' : ' '} ${host.rtt < Infinity ? Math.round(host.rtt).toString().padStart(5, ' ') + 'ms' : ' - '} ${host.unreachable ? '🔥' : '✅'} | block: ${host.latestHeight || '??????'} ${heightStatus} | ${host.host} ${host === active ? '⭐️' : ' '}`;
}
// sort hosts by connection quality, and update default fallback
private sortHosts(): void {
// sort by connection quality
this.hosts.sort((a, b) => {
if ((a.unreachable || a.outOfSync) === (b.unreachable || b.outOfSync)) {
if (a.preferred === b.preferred) {
// lower rtt is best
return a.rtt - b.rtt;
} else { // unless we have a preferred host
return a.preferred ? -1 : 1;
}
} else { // unreachable or out-of-sync hosts always sort last
return (a.unreachable || a.outOfSync) ? 1 : -1;
}
});
if (this.hosts.length > 1 && this.hosts[0] === this.activeHost) {
this.fallbackHost = this.hosts[1];
} else {
this.fallbackHost = this.hosts[0];
}
}
// depose the active host and choose the next best replacement
private electHost(): void {
this.activeHost.outOfSync = true;
this.activeHost.failures = 0;
this.sortHosts();
this.activeHost = this.hosts[0];
logger.warn(`Switching esplora host to ${this.activeHost.host}`);
}
private addFailure(host: FailoverHost): FailoverHost {
host.failures++;
if (host.failures > 5 && this.multihost) {
logger.warn(`🚨🚨🚨 Too many esplora failures on ${this.activeHost.host}, falling back to next best alternative 🚨🚨🚨`);
this.electHost();
return this.activeHost;
} else {
return this.fallbackHost;
}
}
private async $query<T>(method: 'get' | 'post', path, data: any, responseType = 'json', host = this.activeHost, retry: boolean = true): Promise<T> {
let axiosConfig;
let url;
if (host.socket) {
axiosConfig = { socketPath: host.host, timeout: config.ESPLORA.REQUEST_TIMEOUT, responseType };
url = path;
} else {
axiosConfig = { timeout: config.ESPLORA.REQUEST_TIMEOUT, responseType };
url = host.host + path;
}
if (data?.params) {
axiosConfig.params = data.params;
}
return (method === 'post'
? this.requestConnection.post<T>(url, data, axiosConfig)
: this.requestConnection.get<T>(url, axiosConfig)
).then((response) => { host.failures = Math.max(0, host.failures - 1); return response.data; })
.catch((e) => {
if (e?.code === 'ECONNREFUSED') {
this.fallbackToTcpSocket();
let fallbackHost = this.fallbackHost;
if (e?.response?.status !== 404) {
logger.warn(`esplora request failed ${e?.response?.status} ${host.host}${path}`);
logger.warn(e instanceof Error ? e.message : e);
fallbackHost = this.addFailure(host);
}
if (retry && e?.code === 'ECONNREFUSED' && this.multihost) {
// Retry immediately
return axiosConnection.get<T>(url, this.activeAxiosConfig)
.then((response) => response.data)
.catch((e) => {
logger.warn(`Cannot query esplora through the unix socket nor the tcp socket. Exception ${e}`);
throw e;
});
return this.$query(method, path, data, responseType, fallbackHost, false);
} else {
throw e;
}
});
}
public async $get<T>(path, responseType = 'json', params: any = null): Promise<T> {
return this.$query<T>('get', path, params ? { params } : null, responseType);
}
public async $post<T>(path, data: any, responseType = 'json'): Promise<T> {
return this.$query<T>('post', path, data, responseType);
}
}
class ElectrsApi implements AbstractBitcoinApi {
private failoverRouter = new FailoverRouter();
$getRawMempool(): Promise<IEsploraApi.Transaction['txid'][]> {
return this.$queryWrapper<IEsploraApi.Transaction['txid'][]>(config.ESPLORA.REST_API_URL + '/mempool/txids');
return this.failoverRouter.$get<IEsploraApi.Transaction['txid'][]>('/mempool/txids');
}
$getRawTransaction(txId: string): Promise<IEsploraApi.Transaction> {
return this.$queryWrapper<IEsploraApi.Transaction>(config.ESPLORA.REST_API_URL + '/tx/' + txId);
return this.failoverRouter.$get<IEsploraApi.Transaction>('/tx/' + txId);
}
async $getRawTransactions(txids: string[]): Promise<IEsploraApi.Transaction[]> {
return this.failoverRouter.$post<IEsploraApi.Transaction[]>('/internal/txs', txids, 'json');
}
async $getMempoolTransactions(txids: string[]): Promise<IEsploraApi.Transaction[]> {
return this.failoverRouter.$post<IEsploraApi.Transaction[]>('/internal/mempool/txs', txids, 'json');
}
async $getAllMempoolTransactions(lastSeenTxid?: string, max_txs?: number): Promise<IEsploraApi.Transaction[]> {
return this.failoverRouter.$get<IEsploraApi.Transaction[]>('/internal/mempool/txs' + (lastSeenTxid ? '/' + lastSeenTxid : ''), 'json', max_txs ? { max_txs } : null);
}
$getTransactionHex(txId: string): Promise<string> {
return this.$queryWrapper<string>(config.ESPLORA.REST_API_URL + '/tx/' + txId + '/hex');
return this.failoverRouter.$get<string>('/tx/' + txId + '/hex');
}
$getBlockHeightTip(): Promise<number> {
return this.$queryWrapper<number>(config.ESPLORA.REST_API_URL + '/blocks/tip/height');
return this.failoverRouter.$get<number>('/blocks/tip/height');
}
$getBlockHashTip(): Promise<string> {
return this.$queryWrapper<string>(config.ESPLORA.REST_API_URL + '/blocks/tip/hash');
return this.failoverRouter.$get<string>('/blocks/tip/hash');
}
$getTxIdsForBlock(hash: string): Promise<string[]> {
return this.$queryWrapper<string[]>(config.ESPLORA.REST_API_URL + '/block/' + hash + '/txids');
return this.failoverRouter.$get<string[]>('/block/' + hash + '/txids');
}
$getTxsForBlock(hash: string): Promise<IEsploraApi.Transaction[]> {
return this.failoverRouter.$get<IEsploraApi.Transaction[]>('/internal/block/' + hash + '/txs');
}
$getBlockHash(height: number): Promise<string> {
return this.$queryWrapper<string>(config.ESPLORA.REST_API_URL + '/block-height/' + height);
return this.failoverRouter.$get<string>('/block-height/' + height);
}
$getBlockHeader(hash: string): Promise<string> {
return this.$queryWrapper<string>(config.ESPLORA.REST_API_URL + '/block/' + hash + '/header');
return this.failoverRouter.$get<string>('/block/' + hash + '/header');
}
$getBlock(hash: string): Promise<IEsploraApi.Block> {
return this.$queryWrapper<IEsploraApi.Block>(config.ESPLORA.REST_API_URL + '/block/' + hash);
return this.failoverRouter.$get<IEsploraApi.Block>('/block/' + hash);
}
$getRawBlock(hash: string): Promise<Buffer> {
return this.$queryWrapper<any>(config.ESPLORA.REST_API_URL + '/block/' + hash + "/raw", 'arraybuffer')
return this.failoverRouter.$get<any>('/block/' + hash + '/raw', 'arraybuffer')
.then((rawData) => { return Buffer.from(rawData); }); // the router already unwraps response.data
}
@ -110,6 +282,14 @@ class ElectrsApi implements AbstractBitcoinApi {
throw new Error('Method getAddressTransactions not implemented.');
}
$getScriptHash(scripthash: string): Promise<IEsploraApi.ScriptHash> {
throw new Error('Method getScriptHash not implemented.');
}
$getScriptHashTransactions(scripthash: string, txId?: string): Promise<IEsploraApi.Transaction[]> {
throw new Error('Method getScriptHashTransactions not implemented.');
}
$getAddressPrefix(prefix: string): string[] {
throw new Error('Method not implemented.');
}
@ -119,20 +299,27 @@ class ElectrsApi implements AbstractBitcoinApi {
}
$getOutspend(txId: string, vout: number): Promise<IEsploraApi.Outspend> {
return this.$queryWrapper<IEsploraApi.Outspend>(config.ESPLORA.REST_API_URL + '/tx/' + txId + '/outspend/' + vout);
return this.failoverRouter.$get<IEsploraApi.Outspend>('/tx/' + txId + '/outspend/' + vout);
}
$getOutspends(txId: string): Promise<IEsploraApi.Outspend[]> {
return this.$queryWrapper<IEsploraApi.Outspend[]>(config.ESPLORA.REST_API_URL + '/tx/' + txId + '/outspends');
return this.failoverRouter.$get<IEsploraApi.Outspend[]>('/tx/' + txId + '/outspends');
}
async $getBatchedOutspends(txId: string[]): Promise<IEsploraApi.Outspend[][]> {
const outspends: IEsploraApi.Outspend[][] = [];
for (const tx of txId) {
const outspend = await this.$getOutspends(tx);
outspends.push(outspend);
}
return outspends;
async $getBatchedOutspends(txids: string[]): Promise<IEsploraApi.Outspend[][]> {
throw new Error('Method not implemented.');
}
async $getBatchedOutspendsInternal(txids: string[]): Promise<IEsploraApi.Outspend[][]> {
return this.failoverRouter.$post<IEsploraApi.Outspend[][]>('/internal/txs/outspends/by-txid', txids, 'json');
}
async $getOutSpendsByOutpoint(outpoints: { txid: string, vout: number }[]): Promise<IEsploraApi.Outspend[]> {
return this.failoverRouter.$post<IEsploraApi.Outspend[]>('/internal/txs/outspends/by-outpoint', outpoints.map(out => `${out.txid}:${out.vout}`), 'json');
}
public startHealthChecks(): void {
this.failoverRouter.startHealthChecks();
}
}
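The host-switching rule above, restated with invented round-trip times: a non-preferred active host is deposed once it is more than twice as slow as the best candidate plus a 50 ms grace margin, or as soon as it is unreachable or out of sync:

const best = { rtt: 40 };
const active = { rtt: 135, preferred: false, outOfSync: false, unreachable: false };
const shouldSwitch = active.outOfSync || active.unreachable ||
  (!active.preferred && active.rtt > (best.rtt * 2) + 50);
console.log(shouldSwitch); // true — 135ms exceeds 2 × 40ms + 50ms = 130ms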

View File

@ -26,12 +26,15 @@ import PricesRepository from '../repositories/PricesRepository';
import priceUpdater from '../tasks/price-updater';
import chainTips from './chain-tips';
import websocketHandler from './websocket-handler';
import redisCache from './redis-cache';
import rbfCache from './rbf-cache';
import { calcBitsDifference } from './difficulty-adjustment';
class Blocks {
private blocks: BlockExtended[] = [];
private blockSummaries: BlockSummary[] = [];
private currentBlockHeight = 0;
private currentDifficulty = 0;
private currentBits = 0;
private lastDifficultyAdjustmentTime = 0;
private previousDifficultyRetarget = 0;
private newBlockCallbacks: ((block: BlockExtended, txIds: string[], transactions: TransactionExtended[]) => void)[] = [];
@ -70,72 +73,118 @@ class Blocks {
* @param blockHash
* @param blockHeight
* @param onlyCoinbase - Set to true if you only need the coinbase transaction
* @param txIds - optional ordered list of transaction ids if already known
* @param quiet - don't print non-essential logs
* @param addMempoolData - calculate sigops etc
* @returns Promise<TransactionExtended[]>
*/
private async $getTransactionsExtended(
blockHash: string,
blockHeight: number,
blockTime: number,
onlyCoinbase: boolean,
txIds: string[] | null = null,
quiet: boolean = false,
addMempoolData: boolean = false,
): Promise<TransactionExtended[]> {
const transactions: TransactionExtended[] = [];
const isEsplora = config.MEMPOOL.BACKEND === 'esplora';
const transactionMap: { [txid: string]: TransactionExtended } = {};
if (!txIds) {
txIds = await bitcoinApi.$getTxIdsForBlock(blockHash);
}
const mempool = memPool.getMempool();
let transactionsFound = 0;
let transactionsFetched = 0;
let foundInMempool = 0;
let totalFound = 0;
for (let i = 0; i < txIds.length; i++) {
if (mempool[txIds[i]]) {
// We update blocks before the mempool (index.ts), therefore we can
// optimize here by directly fetching txs in the "outdated" mempool
transactions.push(mempool[txIds[i]]);
transactionsFound++;
} else if (config.MEMPOOL.BACKEND === 'esplora' || !memPool.hasPriority() || i === 0) {
// Otherwise we fetch the tx data through backend services (esplora, electrum, core rpc...)
if (!quiet && (i % (Math.round((txIds.length) / 10)) === 0 || i + 1 === txIds.length)) { // Avoid log spam
logger.debug(`Indexing tx ${i + 1} of ${txIds.length} in block #${blockHeight}`);
}
try {
const tx = await transactionUtils.$getTransactionExtended(txIds[i], false, false, false, addMempoolData);
transactions.push(tx);
transactionsFetched++;
} catch (e) {
try {
if (config.MEMPOOL.BACKEND === 'esplora') {
// Try again with core
const tx = await transactionUtils.$getTransactionExtended(txIds[i], false, false, true, addMempoolData);
transactions.push(tx);
transactionsFetched++;
} else {
throw e;
}
} catch (e) {
if (i === 0) {
const msg = `Cannot fetch coinbase tx ${txIds[i]}. Reason: ` + (e instanceof Error ? e.message : e);
logger.err(msg);
throw new Error(msg);
} else {
logger.err(`Cannot fetch tx ${txIds[i]}. Reason: ` + (e instanceof Error ? e.message : e));
}
}
// Copy existing transactions from the mempool
if (!onlyCoinbase) {
for (const txid of txIds) {
if (mempool[txid]) {
mempool[txid].status = {
confirmed: true,
block_height: blockHeight,
block_hash: blockHash,
block_time: blockTime,
};
transactionMap[txid] = mempool[txid];
foundInMempool++;
totalFound++;
}
}
}
if (onlyCoinbase === true) {
break; // Fetch the first transaction and exit
if (onlyCoinbase) {
try {
const coinbase = await transactionUtils.$getTransactionExtendedRetry(txIds[0], false, false, false, addMempoolData);
if (coinbase && coinbase.vin[0].is_coinbase) {
return [coinbase];
} else {
const msg = `Expected a coinbase tx, but the backend API returned something else`;
logger.err(msg);
throw new Error(msg);
}
} catch (e) {
const msg = `Cannot fetch coinbase tx ${txIds[0]}. Reason: ` + (e instanceof Error ? e.message : e);
logger.err(msg);
throw new Error(msg);
}
}
// Fetch remaining txs in bulk
if (isEsplora && (txIds.length - totalFound > 500)) {
try {
const rawTransactions = await bitcoinApi.$getTxsForBlock(blockHash);
for (const tx of rawTransactions) {
if (!transactionMap[tx.txid]) {
transactionMap[tx.txid] = addMempoolData ? transactionUtils.extendMempoolTransaction(tx) : transactionUtils.extendTransaction(tx);
totalFound++;
}
}
} catch (e) {
logger.err(`Cannot fetch bulk txs for block ${blockHash}. Reason: ` + (e instanceof Error ? e.message : e));
}
}
// Fetch remaining txs individually
for (const txid of txIds.filter(txid => !transactionMap[txid])) {
if (!quiet && (totalFound % (Math.round((txIds.length) / 10)) === 0 || totalFound + 1 === txIds.length)) { // Avoid log spam
logger.debug(`Indexing tx ${totalFound + 1} of ${txIds.length} in block #${blockHeight}`);
}
try {
const tx = await transactionUtils.$getTransactionExtendedRetry(txid, false, false, false, addMempoolData);
transactionMap[txid] = tx;
totalFound++;
} catch (e) {
const msg = `Cannot fetch tx ${txid}. Reason: ` + (e instanceof Error ? e.message : e);
logger.err(msg);
throw new Error(msg);
}
}
if (!quiet) {
logger.debug(`${transactionsFound} of ${txIds.length} found in mempool. ${transactionsFetched} fetched through backend service.`);
logger.debug(`${foundInMempool} of ${txIds.length} found in mempool. ${totalFound - foundInMempool} fetched through backend service.`);
}
return transactions;
// Require the first transaction to be a coinbase
const coinbase = transactionMap[txIds[0]];
if (!coinbase || !coinbase.vin[0].is_coinbase) {
const msg = `Expected first tx in a block to be a coinbase, but found something else`;
logger.err(msg);
throw new Error(msg);
}
// Require all transactions to be present
// (we should have thrown an error already if a tx request failed)
if (txIds.some(txid => !transactionMap[txid])) {
const msg = `Failed to fetch ${txIds.length - totalFound} transactions from block`;
logger.err(msg);
throw new Error(msg);
}
// Return list of transactions, preserving block order
return txIds.map(txid => transactionMap[txid]);
}
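// Editor's sketch: collecting results in a map keyed by txid and re-projecting
// through txIds preserves block order regardless of which source supplied each
// transaction or in what order, e.g. with invented ids:
//   txIds = ['a', 'b', 'c']; map filled as c, a, b;
//   txIds.map(txid => transactionMap[txid]) still yields [txA, txB, txC]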
/**
@ -171,7 +220,9 @@ class Blocks {
private convertLiquidFees(block: IBitcoinApi.VerboseBlock): IBitcoinApi.VerboseBlock {
block.tx.forEach(tx => {
tx.fee = Object.values(tx.fee || {}).reduce((total, output) => total + output, 0);
if (!isFinite(Number(tx.fee))) {
tx.fee = Object.values(tx.fee || {}).reduce((total, output) => total + output, 0);
}
});
return block;
}
@ -376,8 +427,8 @@ class Blocks {
let newlyIndexed = 0;
let totalIndexed = indexedBlockSummariesHashesArray.length;
let indexedThisRun = 0;
let timer = new Date().getTime() / 1000;
const startedAt = new Date().getTime() / 1000;
let timer = Date.now() / 1000;
const startedAt = Date.now() / 1000;
for (const block of indexedBlocks) {
if (indexedBlockSummariesHashes[block.hash] === true) {
@ -385,17 +436,24 @@ class Blocks {
}
// Logging
const elapsedSeconds = Math.max(1, Math.round((new Date().getTime() / 1000) - timer));
const elapsedSeconds = (Date.now() / 1000) - timer;
if (elapsedSeconds > 5) {
const runningFor = Math.max(1, Math.round((new Date().getTime() / 1000) - startedAt));
const blockPerSeconds = Math.max(1, indexedThisRun / elapsedSeconds);
const runningFor = (Date.now() / 1000) - startedAt;
const blockPerSeconds = indexedThisRun / elapsedSeconds;
const progress = Math.round(totalIndexed / indexedBlocks.length * 10000) / 100;
logger.debug(`Indexing block summary for #${block.height} | ~${blockPerSeconds.toFixed(2)} blocks/sec | total: ${totalIndexed}/${indexedBlocks.length} (${progress}%) | elapsed: ${runningFor} seconds`, logger.tags.mining);
timer = new Date().getTime() / 1000;
logger.debug(`Indexing block summary for #${block.height} | ~${blockPerSeconds.toFixed(2)} blocks/sec | total: ${totalIndexed}/${indexedBlocks.length} (${progress}%) | elapsed: ${runningFor.toFixed(2)} seconds`, logger.tags.mining);
timer = Date.now() / 1000;
indexedThisRun = 0;
}
await this.$getStrippedBlockTransactions(block.hash, true, true); // This will index the block summary
if (config.MEMPOOL.BACKEND === 'esplora') {
const txs = (await bitcoinApi.$getTxsForBlock(block.hash)).map(tx => transactionUtils.extendTransaction(tx));
const cpfpSummary = await this.$indexCPFP(block.hash, block.height, txs);
await this.$getStrippedBlockTransactions(block.hash, true, true, cpfpSummary, block.height); // This will index the block summary
} else {
await this.$getStrippedBlockTransactions(block.hash, true, true); // This will index the block summary
}
// Logging
indexedThisRun++;
@ -434,18 +492,18 @@ class Blocks {
// Logging
let count = 0;
let countThisRun = 0;
let timer = new Date().getTime() / 1000;
const startedAt = new Date().getTime() / 1000;
let timer = Date.now() / 1000;
const startedAt = Date.now() / 1000;
for (const height of unindexedBlockHeights) {
// Logging
const hash = await bitcoinApi.$getBlockHash(height);
const elapsedSeconds = Math.max(1, new Date().getTime() / 1000 - timer);
const elapsedSeconds = (Date.now() / 1000) - timer;
if (elapsedSeconds > 5) {
const runningFor = Math.max(1, Math.round((new Date().getTime() / 1000) - startedAt));
const blockPerSeconds = (countThisRun / elapsedSeconds);
const runningFor = (Date.now() / 1000) - startedAt;
const blockPerSeconds = countThisRun / elapsedSeconds;
const progress = Math.round(count / unindexedBlockHeights.length * 10000) / 100;
logger.debug(`Indexing cpfp clusters for #${height} | ~${blockPerSeconds.toFixed(2)} blocks/sec | total: ${count}/${unindexedBlockHeights.length} (${progress}%) | elapsed: ${runningFor} seconds`);
timer = new Date().getTime() / 1000;
logger.debug(`Indexing cpfp clusters for #${height} | ~${blockPerSeconds.toFixed(2)} blocks/sec | total: ${count}/${unindexedBlockHeights.length} (${progress}%) | elapsed: ${runningFor.toFixed(2)} seconds`);
timer = Date.now() / 1000;
countThisRun = 0;
}
@ -524,8 +582,8 @@ class Blocks {
let totalIndexed = await blocksRepository.$blockCountBetweenHeight(currentBlockHeight, lastBlockToIndex);
let indexedThisRun = 0;
let newlyIndexed = 0;
const startedAt = new Date().getTime() / 1000;
let timer = new Date().getTime() / 1000;
const startedAt = Date.now() / 1000;
let timer = Date.now() / 1000;
while (currentBlockHeight >= lastBlockToIndex) {
const endBlock = Math.max(0, lastBlockToIndex, currentBlockHeight - chunkSize + 1);
@ -545,19 +603,19 @@ class Blocks {
}
++indexedThisRun;
++totalIndexed;
const elapsedSeconds = Math.max(1, new Date().getTime() / 1000 - timer);
const elapsedSeconds = (Date.now() / 1000) - timer;
if (elapsedSeconds > 5 || blockHeight === lastBlockToIndex) {
const runningFor = Math.max(1, Math.round((new Date().getTime() / 1000) - startedAt));
const blockPerSeconds = Math.max(1, indexedThisRun / elapsedSeconds);
const runningFor = (Date.now() / 1000) - startedAt;
const blockPerSeconds = indexedThisRun / elapsedSeconds;
const progress = Math.round(totalIndexed / indexingBlockAmount * 10000) / 100;
logger.debug(`Indexing block #${blockHeight} | ~${blockPerSeconds.toFixed(2)} blocks/sec | total: ${totalIndexed}/${indexingBlockAmount} (${progress}%) | elapsed: ${runningFor} seconds`, logger.tags.mining);
timer = new Date().getTime() / 1000;
logger.debug(`Indexing block #${blockHeight} | ~${blockPerSeconds.toFixed(2)} blocks/sec | total: ${totalIndexed}/${indexingBlockAmount} (${progress.toFixed(2)}%) | elapsed: ${runningFor.toFixed(2)} seconds`, logger.tags.mining);
timer = Date.now() / 1000;
indexedThisRun = 0;
loadingIndicators.setProgress('block-indexing', progress, false);
}
const blockHash = await bitcoinApi.$getBlockHash(blockHeight);
const block: IEsploraApi.Block = await bitcoinCoreApi.$getBlock(blockHash);
const transactions = await this.$getTransactionsExtended(blockHash, block.height, true, null, true);
const block: IEsploraApi.Block = await bitcoinApi.$getBlock(blockHash);
const transactions = await this.$getTransactionsExtended(blockHash, block.height, block.timestamp, true, null, true);
const blockExtended = await this.$getBlockExtended(block, transactions);
newlyIndexed++;
@ -613,17 +671,21 @@ class Blocks {
const heightDiff = blockHeightTip % 2016;
const blockHash = await bitcoinApi.$getBlockHash(blockHeightTip - heightDiff);
this.updateTimerProgress(timer, 'got block hash for initial difficulty adjustment');
const block: IEsploraApi.Block = await bitcoinCoreApi.$getBlock(blockHash);
const block: IEsploraApi.Block = await bitcoinApi.$getBlock(blockHash);
this.updateTimerProgress(timer, 'got block for initial difficulty adjustment');
this.lastDifficultyAdjustmentTime = block.timestamp;
this.currentDifficulty = block.difficulty;
this.currentBits = block.bits;
if (blockHeightTip >= 2016) {
const previousPeriodBlockHash = await bitcoinApi.$getBlockHash(blockHeightTip - heightDiff - 2016);
this.updateTimerProgress(timer, 'got previous block hash for initial difficulty adjustment');
const previousPeriodBlock: IEsploraApi.Block = await bitcoinCoreApi.$getBlock(previousPeriodBlockHash);
const previousPeriodBlock: IEsploraApi.Block = await bitcoinApi.$getBlock(previousPeriodBlockHash);
this.updateTimerProgress(timer, 'got previous block for initial difficulty adjustment');
this.previousDifficultyRetarget = (block.difficulty - previousPeriodBlock.difficulty) / previousPeriodBlock.difficulty * 100;
if (['liquid', 'liquidtestnet'].includes(config.MEMPOOL.NETWORK)) {
this.previousDifficultyRetarget = NaN;
} else {
this.previousDifficultyRetarget = calcBitsDifference(previousPeriodBlock.bits, block.bits);
}
logger.debug(`Initial difficulty adjustment data set.`);
}
} else {
@ -646,15 +708,15 @@ class Blocks {
const verboseBlock = await bitcoinClient.getBlock(blockHash, 2);
const block = BitcoinApi.convertBlock(verboseBlock);
const txIds: string[] = verboseBlock.tx.map(tx => tx.txid);
const transactions = await this.$getTransactionsExtended(blockHash, block.height, false, txIds, false, true) as MempoolTransactionExtended[];
if (config.MEMPOOL.BACKEND !== 'esplora') {
// fill in missing transaction fee data from verboseBlock
for (let i = 0; i < transactions.length; i++) {
if (!transactions[i].fee && transactions[i].txid === verboseBlock.tx[i].txid) {
transactions[i].fee = verboseBlock.tx[i].fee * 100_000_000;
}
const transactions = await this.$getTransactionsExtended(blockHash, block.height, block.timestamp, false, txIds, false, true) as MempoolTransactionExtended[];
// fill in missing transaction fee data from verboseBlock
for (let i = 0; i < transactions.length; i++) {
if (!transactions[i].fee && transactions[i].txid === verboseBlock.tx[i].txid) {
transactions[i].fee = (verboseBlock.tx[i].fee * 100_000_000) || 0;
}
}
const cpfpSummary: CpfpSummary = Common.calculateCpfp(block.height, transactions);
const blockExtended: BlockExtended = await this.$getBlockExtended(block, cpfpSummary.transactions);
const blockSummary: BlockSummary = this.summarizeBlockTransactions(block.id, cpfpSummary.transactions);
@ -699,8 +761,13 @@ class Blocks {
this.updateTimerProgress(timer, `saved ${this.currentBlockHeight} to database`);
if (!fastForwarded) {
const latestPriceId = await PricesRepository.$getLatestPriceId();
this.updateTimerProgress(timer, `got latest price id ${this.currentBlockHeight}`);
let latestPriceId;
try {
latestPriceId = await PricesRepository.$getLatestPriceId();
this.updateTimerProgress(timer, `got latest price id ${this.currentBlockHeight}`);
} catch (e) {
logger.debug('failed to fetch latest price id from db: ' + (e instanceof Error ? e.message : e));
}
if (priceUpdater.historyInserted === true && latestPriceId !== null) {
await blocksRepository.$saveBlockPrices([{
height: blockExtended.height,
@ -709,9 +776,7 @@ class Blocks {
this.updateTimerProgress(timer, `saved prices for ${this.currentBlockHeight}`);
} else {
logger.debug(`Cannot save block price for ${blockExtended.height} because the price updater hasn't completed yet. Trying again in 10 seconds.`, logger.tags.mining);
setTimeout(() => {
indexer.runSingleTask('blocksPrices');
}, 10000);
indexer.scheduleSingleTask('blocksPrices', 10000);
}
// Save blocks summary for visualization if it's enabled
@ -732,18 +797,33 @@ class Blocks {
if (block.height % 2016 === 0) {
if (Common.indexingEnabled()) {
let adjustment;
if (['liquid', 'liquidtestnet'].includes(config.MEMPOOL.NETWORK)) {
adjustment = NaN;
} else {
adjustment = Math.round(
// calcBitsDifference returns +- percentage, +100 returns to positive, /100 returns to ratio.
// Instead of actually doing /100, just reduce the multiplier.
(calcBitsDifference(this.currentBits, block.bits) + 100) * 10000
) / 1000000; // Remove float point noise
}
await DifficultyAdjustmentsRepository.$saveAdjustments({
time: block.timestamp,
height: block.height,
difficulty: block.difficulty,
adjustment: Math.round((block.difficulty / this.currentDifficulty) * 1000000) / 1000000, // Remove float point noise
adjustment,
});
this.updateTimerProgress(timer, `saved difficulty adjustment for ${this.currentBlockHeight}`);
}
this.previousDifficultyRetarget = (block.difficulty - this.currentDifficulty) / this.currentDifficulty * 100;
if (['liquid', 'liquidtestnet'].includes(config.MEMPOOL.NETWORK)) {
this.previousDifficultyRetarget = NaN;
} else {
this.previousDifficultyRetarget = calcBitsDifference(this.currentBits, block.bits);
}
this.lastDifficultyAdjustmentTime = block.timestamp;
this.currentDifficulty = block.difficulty;
this.currentBits = block.bits;
}
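// Editor's sketch, assuming calcBitsDifference(fromBits, toBits) returns the
// signed percentage change implied by the two compact targets (as its usage here
// and in the initial-setup path suggests). With an invented +3.2% retarget:
//   (3.2 + 100) * 10000 = 1032000; Math.round(1032000) / 1000000 = 1.032 — the stored ratio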
// wait for pending async callbacks to finish
@ -763,10 +843,18 @@ class Blocks {
if (this.newBlockCallbacks.length) {
this.newBlockCallbacks.forEach((cb) => cb(blockExtended, txIds, transactions));
}
if (!memPool.hasPriority() && (block.height % config.MEMPOOL.DISK_CACHE_BLOCK_INTERVAL === 0)) {
if (config.MEMPOOL.CACHE_ENABLED && !memPool.hasPriority() && (block.height % config.MEMPOOL.DISK_CACHE_BLOCK_INTERVAL === 0)) {
diskCache.$saveCacheToDisk();
}
// Update Redis cache
if (config.REDIS.ENABLED) {
await redisCache.$updateBlocks(this.blocks);
await redisCache.$updateBlockSummaries(this.blockSummaries);
await redisCache.$removeTransactions(txIds);
await rbfCache.updateCache();
}
handledBlocks++;
}
@ -811,8 +899,8 @@ class Blocks {
}
const blockHash = await bitcoinApi.$getBlockHash(height);
const block: IEsploraApi.Block = await bitcoinCoreApi.$getBlock(blockHash);
const transactions = await this.$getTransactionsExtended(blockHash, block.height, true);
const block: IEsploraApi.Block = await bitcoinApi.$getBlock(blockHash);
const transactions = await this.$getTransactionsExtended(blockHash, block.height, block.timestamp, true);
const blockExtended = await this.$getBlockExtended(block, transactions);
if (Common.indexingEnabled()) {
@ -823,8 +911,8 @@ class Blocks {
}
public async $indexStaleBlock(hash: string): Promise<BlockExtended> {
const block: IEsploraApi.Block = await bitcoinCoreApi.$getBlock(hash);
const transactions = await this.$getTransactionsExtended(hash, block.height, true);
const block: IEsploraApi.Block = await bitcoinApi.$getBlock(hash);
const transactions = await this.$getTransactionsExtended(hash, block.height, block.timestamp, true);
const blockExtended = await this.$getBlockExtended(block, transactions);
blockExtended.canonical = await bitcoinApi.$getBlockHash(block.height);
@ -848,7 +936,7 @@ class Blocks {
}
// Bitcoin network, add our custom data on top
const block: IEsploraApi.Block = await bitcoinCoreApi.$getBlock(hash);
const block: IEsploraApi.Block = await bitcoinApi.$getBlock(hash);
if (block.stale) {
return await this.$indexStaleBlock(hash);
} else {
@ -877,13 +965,13 @@ class Blocks {
let height = blockHeight;
let summary: BlockSummary;
if (cpfpSummary) {
if (cpfpSummary && !Common.isLiquid()) {
summary = {
id: hash,
transactions: cpfpSummary.transactions.map(tx => {
return {
txid: tx.txid,
fee: tx.fee,
fee: tx.fee || 0,
vsize: tx.vsize,
value: Math.round(tx.vout.reduce((acc, vout) => acc + (vout.value ? vout.value : 0), 0)),
rate: tx.effectiveFeePerVsize
@ -891,10 +979,15 @@ class Blocks {
}),
};
} else {
// Call Core RPC
const block = await bitcoinClient.getBlock(hash, 2);
summary = this.summarizeBlock(block);
height = block.height;
if (config.MEMPOOL.BACKEND === 'esplora') {
const txs = (await bitcoinApi.$getTxsForBlock(hash)).map(tx => transactionUtils.extendTransaction(tx));
summary = this.summarizeBlockTransactions(hash, txs);
} else {
// Call Core RPC
const block = await bitcoinClient.getBlock(hash, 2);
summary = this.summarizeBlock(block);
height = block.height;
}
}
if (height == null) {
const block = await bitcoinApi.$getBlock(hash);
@ -1017,8 +1110,17 @@ class Blocks {
if (Common.blocksSummariesIndexingEnabled() && cleanBlock.fee_amt_percentiles === null) {
cleanBlock.fee_amt_percentiles = await BlocksSummariesRepository.$getFeePercentilesByBlockId(cleanBlock.hash);
if (cleanBlock.fee_amt_percentiles === null) {
const block = await bitcoinClient.getBlock(cleanBlock.hash, 2);
const summary = this.summarizeBlock(block);
let summary;
if (config.MEMPOOL.BACKEND === 'esplora') {
const txs = (await bitcoinApi.$getTxsForBlock(cleanBlock.hash)).map(tx => transactionUtils.extendTransaction(tx));
summary = this.summarizeBlockTransactions(cleanBlock.hash, txs);
} else {
// Call Core RPC
const block = await bitcoinClient.getBlock(cleanBlock.hash, 2);
summary = this.summarizeBlock(block);
}
await BlocksSummariesRepository.$saveTransactions(cleanBlock.height, cleanBlock.hash, summary.transactions);
cleanBlock.fee_amt_percentiles = await BlocksSummariesRepository.$getFeePercentilesByBlockId(cleanBlock.hash);
}
@ -1078,19 +1180,29 @@ class Blocks {
return this.currentBlockHeight;
}
public async $indexCPFP(hash: string, height: number): Promise<void> {
const block = await bitcoinClient.getBlock(hash, 2);
const transactions = block.tx.map(tx => {
tx.fee *= 100_000_000;
return tx;
});
public async $indexCPFP(hash: string, height: number, txs?: TransactionExtended[]): Promise<CpfpSummary> {
let transactions = txs;
if (!transactions) {
if (config.MEMPOOL.BACKEND === 'esplora') {
transactions = (await bitcoinApi.$getTxsForBlock(hash)).map(tx => transactionUtils.extendTransaction(tx));
}
if (!transactions) {
const block = await bitcoinClient.getBlock(hash, 2);
transactions = block.tx.map(tx => {
tx.fee *= 100_000_000;
return tx;
});
}
}
const summary = Common.calculateCpfp(height, transactions);
const summary = Common.calculateCpfp(height, transactions as TransactionExtended[]);
await this.$saveCpfp(hash, height, summary);
const effectiveFeeStats = Common.calcEffectiveFeeStatistics(summary.transactions);
await blocksRepository.$saveEffectiveFeeStats(hash, effectiveFeeStats);
return summary;
}
public async $saveCpfp(hash: string, height: number, cpfpSummary: CpfpSummary): Promise<void> {

View File

@ -1,9 +1,12 @@
import * as bitcoinjs from 'bitcoinjs-lib';
import { Request } from 'express';
import { Ancestor, CpfpInfo, CpfpSummary, CpfpCluster, EffectiveFeeStats, MempoolBlockWithTransactions, TransactionExtended, MempoolTransactionExtended, TransactionStripped, WorkingEffectiveFeeStats } from '../mempool.interfaces';
import { Ancestor, CpfpInfo, CpfpSummary, CpfpCluster, EffectiveFeeStats, MempoolBlockWithTransactions, TransactionExtended, MempoolTransactionExtended, TransactionStripped, WorkingEffectiveFeeStats, TransactionClassified, TransactionFlags } from '../mempool.interfaces';
import config from '../config';
import { NodeSocket } from '../repositories/NodesSocketsRepository';
import { isIP } from 'net';
import rbfCache from './rbf-cache';
import transactionUtils from './transaction-utils';
import { isPoint } from '../utils/secp256k1';
export class Common {
static nativeAssetId = config.MEMPOOL.NETWORK === 'liquidtestnet' ?
'144c654344aa716d6f3abcc1ca90e5641e4e2a7f633bc09fe3baf64585819a49'
@ -59,10 +62,12 @@ export class Common {
return arr;
}
static findRbfTransactions(added: MempoolTransactionExtended[], deleted: MempoolTransactionExtended[]): { [txid: string]: MempoolTransactionExtended[] } {
static findRbfTransactions(added: MempoolTransactionExtended[], deleted: MempoolTransactionExtended[], forceScalable = false): { [txid: string]: MempoolTransactionExtended[] } {
const matches: { [txid: string]: MempoolTransactionExtended[] } = {};
added
.forEach((addedTx) => {
// For small N, a naive nested loop is extremely fast, but it doesn't scale
if (added.length < 1000 && deleted.length < 50 && !forceScalable) {
added.forEach((addedTx) => {
const foundMatches = deleted.filter((deletedTx) => {
// The new tx must pay at least as much total fee, in absolute terms, as the replaced tx.
return addedTx.fee > deletedTx.fee
@ -73,9 +78,40 @@ export class Common {
addedTx.vin.some((vin) => vin.txid === deletedVin.txid && vin.vout === deletedVin.vout));
});
if (foundMatches?.length) {
matches[addedTx.txid] = foundMatches;
matches[addedTx.txid] = [...new Set(foundMatches)];
}
});
} else {
// For large N, build a lookup table of prevouts we can check in ~constant time
const deletedSpendMap: { [txid: string]: { [vout: number]: MempoolTransactionExtended } } = {};
for (const tx of deleted) {
for (const vin of tx.vin) {
if (!deletedSpendMap[vin.txid]) {
deletedSpendMap[vin.txid] = {};
}
deletedSpendMap[vin.txid][vin.vout] = tx;
}
}
for (const addedTx of added) {
const foundMatches = new Set<MempoolTransactionExtended>();
for (const vin of addedTx.vin) {
const deletedTx = deletedSpendMap[vin.txid]?.[vin.vout];
if (deletedTx && deletedTx.txid !== addedTx.txid
// The new tx must pay at least as much total fee, in absolute terms, as the replaced tx.
&& addedTx.fee > deletedTx.fee
// The new tx must pay a higher effective fee rate (per vbyte) than the replaced tx.
&& addedTx.adjustedFeePerVsize > deletedTx.adjustedFeePerVsize
) {
foundMatches.add(deletedTx);
}
if (foundMatches.size) {
matches[addedTx.txid] = [...foundMatches];
}
}
}
}
return matches;
}
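// Editor's sketch of the prevout-index idea used above, under simplified assumptions
// (SimpleTx is an illustrative type, not the repo's MempoolTransactionExtended):
// the naive branch is O(|added| * |deleted| * inputs); the map is ~O(total inputs).
type SimpleTx = { txid: string; fee: number; vin: { txid: string; vout: number }[] };

function findReplaced(added: SimpleTx[], deleted: SimpleTx[]): Map<string, SimpleTx[]> {
  // index every outpoint spent by the deleted set: "txid:vout" -> spending tx
  const spendMap = new Map<string, SimpleTx>();
  for (const tx of deleted) {
    for (const vin of tx.vin) {
      spendMap.set(`${vin.txid}:${vin.vout}`, tx);
    }
  }
  // each added tx probes the map once per input instead of scanning all deleted txs
  const matches = new Map<string, SimpleTx[]>();
  for (const addedTx of added) {
    const found = new Set<SimpleTx>();
    for (const vin of addedTx.vin) {
      const conflict = spendMap.get(`${vin.txid}:${vin.vout}`);
      if (conflict && conflict.txid !== addedTx.txid && addedTx.fee > conflict.fee) {
        found.add(conflict);
      }
    }
    if (found.size) {
      matches.set(addedTx.txid, [...found]);
    }
  }
  return matches;
}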
@ -105,12 +141,224 @@ export class Common {
return matches;
}
static setSchnorrSighashFlags(flags: bigint, witness: string[]): bigint {
// no witness items
if (!witness?.length) {
return flags;
}
const hasAnnex = witness.length > 1 && witness[witness.length - 1].startsWith('50');
if (witness?.length === (hasAnnex ? 2 : 1)) {
// keypath spend: the signature is the only witness item (aside from any annex)
if (witness[0].length === 130) {
flags |= this.setSighashFlags(flags, witness[0]);
} else {
flags |= TransactionFlags.sighash_default;
}
} else {
// scriptpath spend, all items except for the script, control block and annex could be signatures
for (let i = 0; i < witness.length - (hasAnnex ? 3 : 2); i++) {
// handle probable signatures
if (witness[i].length === 130) {
flags |= this.setSighashFlags(flags, witness[i]);
} else if (witness[i].length === 128) {
flags |= TransactionFlags.sighash_default;
}
}
}
return flags;
}
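// For reference, the length checks above follow BIP 340/341 signature encoding:
//   64-byte Schnorr signature -> 128 hex chars -> implicit SIGHASH_DEFAULT (0x00)
//   65-byte Schnorr signature -> 130 hex chars -> explicit sighash byte appended,
//     e.g. a witness item ending in '81' signals SIGHASH_ALL | SIGHASH_ANYONECANPAY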
static isDERSig(w: string): boolean {
// heuristic to detect probable DER signatures
return (w.length >= 18
&& w.startsWith('30') // minimum DER signature length is 8 bytes + sighash flag (see https://mempool.space/testnet/tx/c6c232a36395fa338da458b86ff1327395a9afc28c5d2daa4273e410089fd433)
&& ['01', '02', '03', '81', '82', '83'].includes(w.slice(-2)) // signature must end with a valid sighash flag
&& (w.length === (2 * parseInt(w.slice(2, 4), 16)) + 6) // second byte encodes the combined length of the R and S components
);
}
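// Worked example of the length check above (values hypothetical):
//   w = '30' + '45' + <69 bytes of R/S data> + '01'
//   '45' hex = 69, so the formula expects 2 * 69 + 6 = 144 hex chars:
//   2 ('30') + 2 ('45') + 138 (payload) + 2 (sighash '01') = 144 -> isDERSig(w) === true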
static setSegwitSighashFlags(flags: bigint, witness: string[]): bigint {
for (const w of witness) {
if (this.isDERSig(w)) {
flags |= this.setSighashFlags(flags, w);
}
}
return flags;
}
static setLegacySighashFlags(flags: bigint, scriptsig_asm: string): bigint {
for (const item of scriptsig_asm.split(' ')) {
// skip op_codes
if (item.startsWith('OP_')) {
continue;
}
// check pushed data
if (this.isDERSig(item)) {
flags |= this.setSighashFlags(flags, item);
}
}
return flags;
}
static setSighashFlags(flags: bigint, signature: string): bigint {
switch(signature.slice(-2)) {
case '01': return flags | TransactionFlags.sighash_all;
case '02': return flags | TransactionFlags.sighash_none;
case '03': return flags | TransactionFlags.sighash_single;
case '81': return flags | TransactionFlags.sighash_all | TransactionFlags.sighash_acp;
case '82': return flags | TransactionFlags.sighash_none | TransactionFlags.sighash_acp;
case '83': return flags | TransactionFlags.sighash_single | TransactionFlags.sighash_acp;
default: return flags | TransactionFlags.sighash_default; // taproot only
}
}
static isBurnKey(pubkey: string): boolean {
return [
'022222222222222222222222222222222222222222222222222222222222222222',
'033333333333333333333333333333333333333333333333333333333333333333',
'020202020202020202020202020202020202020202020202020202020202020202',
'030303030303030303030303030303030303030303030303030303030303030303',
].includes(pubkey);
}
static getTransactionFlags(tx: TransactionExtended): number {
let flags = tx.flags ? BigInt(tx.flags) : 0n;
// Update variable flags (CPFP, RBF)
if (tx.ancestors?.length) {
flags |= TransactionFlags.cpfp_child;
}
if (tx.descendants?.length) {
flags |= TransactionFlags.cpfp_parent;
}
if (tx.replacement) {
flags |= TransactionFlags.replacement;
}
// Already processed static flags, no need to do it again
if (tx.flags) {
return Number(flags);
}
// Process static flags
if (tx.version === 1) {
flags |= TransactionFlags.v1;
} else if (tx.version === 2) {
flags |= TransactionFlags.v2;
}
const reusedAddresses: { [address: string]: number } = {};
const inValues = {};
const outValues = {};
let rbf = false;
for (const vin of tx.vin) {
if (vin.sequence < 0xfffffffe) {
rbf = true;
}
switch (vin.prevout?.scriptpubkey_type) {
case 'p2pk': flags |= TransactionFlags.p2pk; break;
case 'multisig': flags |= TransactionFlags.p2ms; break;
case 'p2pkh': flags |= TransactionFlags.p2pkh; break;
case 'p2sh': flags |= TransactionFlags.p2sh; break;
case 'v0_p2wpkh': flags |= TransactionFlags.p2wpkh; break;
case 'v0_p2wsh': flags |= TransactionFlags.p2wsh; break;
case 'v1_p2tr': {
flags |= TransactionFlags.p2tr;
if (vin.witness.length > 2) {
const asm = vin.inner_witnessscript_asm || transactionUtils.convertScriptSigAsm(vin.witness[vin.witness.length - 2]);
if (asm?.includes('OP_0 OP_IF')) {
flags |= TransactionFlags.inscription;
}
}
} break;
}
// sighash flags
if (vin.prevout?.scriptpubkey_type === 'v1_p2tr') {
flags |= this.setSchnorrSighashFlags(flags, vin.witness);
} else if (vin.witness) {
flags |= this.setSegwitSighashFlags(flags, vin.witness);
} else if (vin.scriptsig?.length) {
flags |= this.setLegacySighashFlags(flags, vin.scriptsig_asm || transactionUtils.convertScriptSigAsm(vin.scriptsig));
}
if (vin.prevout?.scriptpubkey_address) {
reusedAddresses[vin.prevout?.scriptpubkey_address] = (reusedAddresses[vin.prevout?.scriptpubkey_address] || 0) + 1;
}
// missing prevout values fall back to a random key so each one is counted as unique
inValues[vin.prevout?.value || Math.random()] = (inValues[vin.prevout?.value || Math.random()] || 0) + 1;
}
if (rbf) {
flags |= TransactionFlags.rbf;
} else {
flags |= TransactionFlags.no_rbf;
}
let hasFakePubkey = false;
for (const vout of tx.vout) {
switch (vout.scriptpubkey_type) {
case 'p2pk': {
flags |= TransactionFlags.p2pk;
// detect fake pubkey (i.e. not a valid point on the secp256k1 curve)
hasFakePubkey = hasFakePubkey || !isPoint(vout.scriptpubkey.slice(2, -2));
} break;
case 'multisig': {
flags |= TransactionFlags.p2ms;
// detect fake pubkeys (i.e. not valid points on the secp256k1 curve)
const asm = vout.scriptpubkey_asm || transactionUtils.convertScriptSigAsm(vout.scriptpubkey);
for (const key of (asm?.split(' ') || [])) {
if (!hasFakePubkey && !key.startsWith('OP_')) {
hasFakePubkey = hasFakePubkey || this.isBurnKey(key) || !isPoint(key);
}
}
} break;
case 'p2pkh': flags |= TransactionFlags.p2pkh; break;
case 'p2sh': flags |= TransactionFlags.p2sh; break;
case 'v0_p2wpkh': flags |= TransactionFlags.p2wpkh; break;
case 'v0_p2wsh': flags |= TransactionFlags.p2wsh; break;
case 'v1_p2tr': flags |= TransactionFlags.p2tr; break;
case 'op_return': flags |= TransactionFlags.op_return; break;
}
if (vout.scriptpubkey_address) {
reusedAddresses[vout.scriptpubkey_address] = (reusedAddresses[vout.scriptpubkey_address] || 0) + 1;
}
outValues[vout.value || Math.random()] = (outValues[vout.value || Math.random()] || 0) + 1;
}
if (hasFakePubkey) {
flags |= TransactionFlags.fake_pubkey;
}
// fast but crude heuristic to detect possible coinjoins
// (at least 5 inputs and 5 outputs, less than half of which are unique amounts, with no address reuse)
const addressReuse = Object.values(reusedAddresses).reduce((acc, count) => Math.max(acc, count), 0) > 1;
if (!addressReuse && tx.vin.length >= 5 && tx.vout.length >= 5 && (Object.keys(inValues).length + Object.keys(outValues).length) <= (tx.vin.length + tx.vout.length) / 2 ) {
flags |= TransactionFlags.coinjoin;
}
// input:output ratio of at least 5:1
if (tx.vin.length / tx.vout.length >= 5) {
flags |= TransactionFlags.consolidation;
}
// input:output ratio of at most 1:5
if (tx.vin.length / tx.vout.length <= 0.2) {
flags |= TransactionFlags.batch_payout;
}
return Number(flags);
}
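// Worked examples of the ratio heuristics above (transactions hypothetical):
//   consolidation: 10 inputs, 2 outputs -> 10 / 2 = 5    >= 5   -> consolidation flag
//   batch payout:   1 input, 20 outputs -> 1 / 20 = 0.05 <= 0.2 -> batch_payout flag
//   coinjoin:       5 inputs, 5 outputs, no reused address, and
//                   (unique in-amounts + unique out-amounts) <= (5 + 5) / 2 = 5 -> coinjoin flag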
static classifyTransaction(tx: TransactionExtended): TransactionClassified {
const flags = this.getTransactionFlags(tx);
tx.flags = flags;
return {
...this.stripTransaction(tx),
flags,
};
}
static stripTransaction(tx: TransactionExtended): TransactionStripped {
return {
txid: tx.txid,
fee: tx.fee,
fee: tx.fee || 0,
vsize: tx.weight / 4,
value: tx.vout.reduce((acc, vout) => acc + (vout.value ? vout.value : 0), 0),
acc: tx.acceleration || undefined,
rate: tx.effectiveFeePerVsize,
};
}
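// Editor's sketch of consuming the classification downstream (tx: TransactionExtended is
// assumed to be in scope; TransactionFlags members are bigint bitmasks, as imported above):
//   const classified = Common.classifyTransaction(tx);
//   const flags = BigInt(classified.flags);
//   const isRbf = (flags & TransactionFlags.rbf) !== 0n;
//   const isCoinjoin = (flags & TransactionFlags.coinjoin) !== 0n;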
@ -460,7 +708,7 @@ export class Common {
};
}
static calcEffectiveFeeStatistics(transactions: { weight: number, fee: number, effectiveFeePerVsize?: number, txid: string }[]): EffectiveFeeStats {
static calcEffectiveFeeStatistics(transactions: { weight: number, fee: number, effectiveFeePerVsize?: number, txid: string, acceleration?: boolean }[]): EffectiveFeeStats {
const sortedTxs = transactions.map(tx => { return { txid: tx.txid, weight: tx.weight, rate: tx.effectiveFeePerVsize || ((tx.fee || 0) / (tx.weight / 4)) }; }).sort((a, b) => a.rate - b.rate);
let weightCount = 0;

View File

@ -7,7 +7,7 @@ import cpfpRepository from '../repositories/CpfpRepository';
import { RowDataPacket } from 'mysql2';
class DatabaseMigration {
private static currentVersion = 64;
private static currentVersion = 66;
private queryTimeout = 3600_000;
private statisticsAddedIndexed = false;
private uniqueLogs: string[] = [];
@ -548,6 +548,16 @@ class DatabaseMigration {
await this.$executeQuery('ALTER TABLE `nodes` ADD features text NULL');
await this.updateToSchemaVersion(64);
}
if (databaseSchemaVersion < 65 && isBitcoin === true) {
await this.$executeQuery('ALTER TABLE `blocks_audits` ADD accelerated_txs JSON DEFAULT "[]"');
await this.updateToSchemaVersion(65);
}
if (databaseSchemaVersion < 66) {
await this.$executeQuery('ALTER TABLE `statistics` ADD min_fee FLOAT UNSIGNED DEFAULT NULL');
await this.updateToSchemaVersion(66);
}
}
/**

View File

@ -16,6 +16,68 @@ export interface DifficultyAdjustment {
expectedBlocks: number; // Block count
}
/**
* Calculate the difficulty increase/decrease by using the `bits` integer contained in two
* block headers.
*
* Warning: Only compare `bits` from blocks in two adjacent difficulty periods. This code
* assumes the maximum difference is x4 or /4 (as per the protocol) and will throw an
* error if an exponent difference of 2 or more is seen.
*
* @param {number} oldBits The 32 bit `bits` integer from a block header.
* @param {number} newBits The 32 bit `bits` integer from a block header in the next difficulty period.
* @returns {number} A floating point decimal of the difficulty change from old to new.
* (i.e. 21.3 means a 21.3% increase in difficulty; -21.3 means a 21.3% decrease)
*/
export function calcBitsDifference(oldBits: number, newBits: number): number {
// Must be
// - integer
// - highest exponent is 0x20, so max value (as integer) is 0x207fffff
// - min value is 1 (exponent = 0)
// - highest bit of the number-part is +- sign, it must not be 1
const verifyBits = (bits: number): void => {
if (
Math.floor(bits) !== bits ||
bits > 0x207fffff ||
bits < 1 ||
(bits & 0x00800000) !== 0 ||
(bits & 0x007fffff) === 0
) {
throw new Error('Invalid bits');
}
};
verifyBits(oldBits);
verifyBits(newBits);
// No need to mask exponents because we checked the bounds above
const oldExp = oldBits >> 24;
const newExp = newBits >> 24;
const oldNum = oldBits & 0x007fffff;
const newNum = newBits & 0x007fffff;
// The diff can only possibly be 1, 0, -1
// (because maximum difficulty change is x4 or /4 (2 bits up or down))
let result: number;
switch (newExp - oldExp) {
// New less than old, target lowered, difficulty increased
case -1:
result = ((oldNum << 8) * 100) / newNum - 100;
break;
// Same exponent, compare numbers as is.
case 0:
result = (oldNum * 100) / newNum - 100;
break;
// Old less than new, target raised, difficulty decreased
case 1:
result = (oldNum * 100) / (newNum << 8) - 100;
break;
default:
throw new Error('Impossible exponent difference');
}
// Min/Max values
return result > 300 ? 300 : result < -75 ? -75 : result;
}
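// A worked example with hypothetical same-exponent headers:
//   oldBits = 0x1b0404cb -> exponent 0x1b, mantissa 0x0404cb = 263371
//   newBits = 0x1b038000 -> exponent 0x1b, mantissa 0x038000 = 229376
//   same exponent, so result = (263371 * 100) / 229376 - 100 ≈ 14.82,
//   i.e. calcBitsDifference(0x1b0404cb, 0x1b038000) reports a ~14.8% difficulty increase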
export function calcDifficultyAdjustment(
DATime: number,
nowSeconds: number,

View File

@ -29,7 +29,7 @@ class DiskCache {
};
constructor() {
if (!cluster.isPrimary) {
if (!cluster.isPrimary || !config.MEMPOOL.CACHE_ENABLED) {
return;
}
process.on('SIGINT', (e) => {
@ -39,7 +39,7 @@ class DiskCache {
}
async $saveCacheToDisk(sync: boolean = false): Promise<void> {
if (!cluster.isPrimary) {
if (!cluster.isPrimary || !config.MEMPOOL.CACHE_ENABLED) {
return;
}
if (this.isWritingCache) {
@ -175,10 +175,11 @@ class DiskCache {
}
async $loadMempoolCache(): Promise<void> {
if (!fs.existsSync(DiskCache.FILE_NAME)) {
if (!config.MEMPOOL.CACHE_ENABLED || !fs.existsSync(DiskCache.FILE_NAME)) {
return;
}
try {
const start = Date.now();
let data: any = {};
const cacheData = fs.readFileSync(DiskCache.FILE_NAME, 'utf8');
if (cacheData) {
@ -220,6 +221,8 @@ class DiskCache {
}
}
logger.info(`Loaded mempool from disk cache in ${Date.now() - start} ms`);
await memPool.$setMempool(data.mempool);
if (!this.ignoreBlocksCache) {
blocks.setBlocks(data.blocks);
@ -249,7 +252,12 @@ class DiskCache {
}
if (rbfData?.rbf) {
rbfCache.load(rbfData.rbf);
rbfCache.load({
txs: rbfData.rbf.txs.map(([txid, entry]) => ({ value: entry })),
trees: rbfData.rbf.trees,
expiring: rbfData.rbf.expiring.map(([txid, value]) => ({ key: txid, value })),
mempool: memPool.getMempool(),
});
}
} catch (e) {
logger.warn('Failed to parse rbf cache. Skipping. Reason: ' + (e instanceof Error ? e.message : e));

View File

@ -80,7 +80,13 @@ class ChannelsApi {
public async $searchChannelsById(search: string): Promise<any[]> {
try {
const searchStripped = search.replace('%', '') + '%';
// restrict search to valid id/short_id prefix formats
let searchStripped = search.match(/[0-9]+[0-9x]*/)?.[0] || '';
if (!searchStripped.length) {
return [];
}
// add wildcard to search by prefix
searchStripped += '%';
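// examples of the sanitization above (inputs hypothetical):
//   '880123x1456x0' -> match '880123x1456x0' -> LIKE '880123x1456x0%'
//   '%880123'       -> match '880123'        -> LIKE '880123%' (wildcard injection stripped)
//   'abc'           -> no match              -> returns [] without touching the database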
const query = `SELECT id, short_id, capacity, status FROM channels WHERE id LIKE ? OR short_id LIKE ? LIMIT 10`;
const [rows]: any = await DB.query(query, [searchStripped, searchStripped]);
return rows;

View File

@ -42,6 +42,12 @@ class NodesRoutes {
switch (config.MEMPOOL.NETWORK) {
case 'testnet':
nodesList = [
'0259db43b4e4ac0ff12a805f2d81e521253ba2317f6739bc611d8e2fa156d64256',
'0352b9944b9a52bd2116c91f1ba70c4ef851ac5ba27e1b20f1d92da3ade010dd10',
'03424f5a7601eaa47482cb17100b31a84a04d14fb44b83a57eeceffd8e299878e3',
'032850492ee61a5f7006a2fda6925e4b4ec3782f2b6de2ff0e439ef5a38c3b2470',
'022c80bace98831c44c32fb69755f2b353434e0ee9e7fbda29507f7ef8abea1421',
'02c3559c833e6f99f9ca05fe503e0b4e7524dea9121344edfd3e811101e0c28680',
'032c7c7819276c4f706a04df1a0f1e10a5495994a7be4c1d3d28ca766e5a2b957b',
'025a7e38c2834dd843591a4d23d5f09cdeb77ddca85f673c2d944a14220ff14cf7',
'0395e2731a1673ef21d7a16a727c4fc4d4c35a861c428ce2c819c53d2b81c8bd55',
@ -64,6 +70,12 @@ class NodesRoutes {
break;
case 'signet':
nodesList = [
'029fe3621fc0c6e08056a14b868f8fb9acca1aa28a129512f6cea0f0d7654d9f92',
'02f60cd7a3a4f1c953dd9554a6ebd51a34f8b10b8124b7fc43a0b381139b55c883',
'03cbbf581774700865eebd1be42d022bc004ba30881274ab304e088a25d70e773d',
'0243348cb3741cfe2d8485fa8375c29c7bc7cbb67577c363cb6987a5e5fd0052cc',
'02cb73e631af44bee600d80f8488a9194c9dc5c7590e575c421a070d1be05bc8e9',
'0306f55ee631aa1e2cd4d9b2bfcbc14404faec5c541cef8b2e6f779061029d09c4',
'03ddab321b760433cbf561b615ef62ac7d318630c5f51d523aaf5395b90b751956',
'033d92c7bfd213ef1b34c90e985fb5dc77f9ec2409d391492484e57a44c4aca1de',
'02ad010dda54253c1eb9efe38b0760657a3b43ecad62198c359c051c9d99d45781',
@ -86,6 +98,12 @@ class NodesRoutes {
break;
default:
nodesList = [
'02b12b889fe3c943cb05645921040ef13d6d397a2e7a4ad000e28500c505ff26d6',
'0302240ac9d71b39617cbde2764837ec3d6198bd6074b15b75d2ff33108e89d2e1',
'03364a8ace313376e5e4b68c954e287c6388e16df9e9fdbaf0363ecac41105cbf6',
'03229ab4b7f692753e094b93df90530150680f86b535b5183b0cffd75b3df583fc',
'03a696eb7acde991c1be97a58a9daef416659539ae462b897f5e9ae361f990228e',
'0248bf26cf3a63ab8870f34dc0ec9e6c8c6288cdba96ba3f026f34ec0f13ac4055',
'03fbc17549ec667bccf397ababbcb4cdc0e3394345e4773079ab2774612ec9be61',
'03da9a8623241ccf95f19cd645c6cecd4019ac91570e976eb0a128bebbc4d8a437',
'03ca5340cf85cb2e7cf076e489f785410838de174e40be62723e8a60972ad75144',

View File

@ -1,23 +1,35 @@
import { MempoolBlock } from '../mempool.interfaces';
import { Common } from './common';
import config from '../config';
import mempool from './mempool';
import projectedBlocks from './mempool-blocks';
const isLiquid = config.MEMPOOL.NETWORK === 'liquid' || config.MEMPOOL.NETWORK === 'liquidtestnet';
interface RecommendedFees {
fastestFee: number,
halfHourFee: number,
hourFee: number,
economyFee: number,
minimumFee: number,
}
class FeeApi {
constructor() { }
defaultFee = Common.isLiquid() ? 0.1 : 1;
defaultFee = isLiquid ? 0.1 : 1;
minimumIncrement = isLiquid ? 0.1 : 1;
public getRecommendedFee() {
public getRecommendedFee(): RecommendedFees {
const pBlocks = projectedBlocks.getMempoolBlocks();
const mPool = mempool.getMempoolInfo();
const minimumFee = Math.ceil(mPool.mempoolminfee * 100000);
const minimumFee = this.roundUpToNearest(mPool.mempoolminfee * 100000, this.minimumIncrement);
const defaultMinFee = Math.max(minimumFee, this.defaultFee);
if (!pBlocks.length) {
return {
'fastestFee': this.defaultFee,
'halfHourFee': this.defaultFee,
'hourFee': this.defaultFee,
'fastestFee': defaultMinFee,
'halfHourFee': defaultMinFee,
'hourFee': defaultMinFee,
'economyFee': minimumFee,
'minimumFee': minimumFee,
};
@ -27,11 +39,15 @@ class FeeApi {
const secondMedianFee = pBlocks[1] ? this.optimizeMedianFee(pBlocks[1], pBlocks[2], firstMedianFee) : this.defaultFee;
const thirdMedianFee = pBlocks[2] ? this.optimizeMedianFee(pBlocks[2], pBlocks[3], secondMedianFee) : this.defaultFee;
// explicitly enforce a minimum of ceil(mempoolminfee) on all recommendations.
// simply rounding up recommended rates is insufficient, as the purging rate
// can exceed the median rate of projected blocks in some extreme scenarios
// (see https://bitcoin.stackexchange.com/a/120024)
return {
'fastestFee': firstMedianFee,
'halfHourFee': secondMedianFee,
'hourFee': thirdMedianFee,
'economyFee': Math.min(2 * minimumFee, thirdMedianFee),
'fastestFee': Math.max(minimumFee, firstMedianFee),
'halfHourFee': Math.max(minimumFee, secondMedianFee),
'hourFee': Math.max(minimumFee, thirdMedianFee),
'economyFee': Math.max(minimumFee, Math.min(2 * minimumFee, thirdMedianFee)),
'minimumFee': minimumFee,
};
}
@ -45,7 +61,11 @@ class FeeApi {
const multiplier = (pBlock.blockVSize - 500000) / 500000;
return Math.max(Math.round(useFee * multiplier), this.defaultFee);
}
return Math.ceil(useFee);
return this.roundUpToNearest(useFee, this.minimumIncrement);
}
private roundUpToNearest(value: number, nearest: number): number {
return Math.ceil(value / nearest) * nearest;
}
}
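// A quick check of the fee floor with hypothetical numbers, mempoolminfee = 0.00012 BTC/kvB:
//   minimumFee  = roundUpToNearest(0.00012 * 100000, 1) = 12 sat/vB
//   with projected block medians of 10 / 8 / 5 sat/vB:
//   fastestFee  = max(12, 10) = 12
//   halfHourFee = max(12, 8)  = 12
//   hourFee     = max(12, 5)  = 12
//   economyFee  = max(12, min(2 * 12, 5)) = 12
//   on Liquid the increment is 0.1 sat/vB, e.g. roundUpToNearest(0.11, 0.1) = 0.2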

View File

@ -44,9 +44,13 @@ export enum FeatureBits {
KeysendOptional = 55,
ScriptEnforcedLeaseRequired = 2022,
ScriptEnforcedLeaseOptional = 2023,
SimpleTaprootChannelsRequiredFinal = 80,
SimpleTaprootChannelsOptionalFinal = 81,
SimpleTaprootChannelsRequiredStaging = 180,
SimpleTaprootChannelsOptionalStaging = 181,
MaxBolt11Feature = 5114,
};
export const FeaturesMap = new Map<FeatureBits, string>([
[FeatureBits.DataLossProtectRequired, 'data-loss-protect'],
[FeatureBits.DataLossProtectOptional, 'data-loss-protect'],
@ -85,6 +89,10 @@ export const FeaturesMap = new Map<FeatureBits, string>([
[FeatureBits.ZeroConfOptional, 'zero-conf'],
[FeatureBits.ShutdownAnySegwitRequired, 'shutdown-any-segwit'],
[FeatureBits.ShutdownAnySegwitOptional, 'shutdown-any-segwit'],
[FeatureBits.SimpleTaprootChannelsRequiredFinal, 'taproot-channels'],
[FeatureBits.SimpleTaprootChannelsOptionalFinal, 'taproot-channels'],
[FeatureBits.SimpleTaprootChannelsRequiredStaging, 'taproot-channels-staging'],
[FeatureBits.SimpleTaprootChannelsOptionalStaging, 'taproot-channels-staging'],
]);
/**
@ -217,7 +225,7 @@ async function buildFullChannel(clChannelA: any, clChannelB: any): Promise<ILigh
return {
channel_id: Common.channelShortIdToIntegerId(clChannelA.short_channel_id),
capacity: clChannelA.satoshis,
capacity: (clChannelA.amount_msat / 1000).toString(),
last_update: lastUpdate,
node1_policy: convertPolicy(clChannelA),
node2_policy: convertPolicy(clChannelB),
@ -241,7 +249,7 @@ async function buildIncompleteChannel(clChannel: any): Promise<ILightningApi.Cha
return {
channel_id: Common.channelShortIdToIntegerId(clChannel.short_channel_id),
capacity: clChannel.satoshis,
capacity: (clChannel.amount_msat / 1000).toString(),
last_update: clChannel.last_update ?? 0,
node1_policy: convertPolicy(clChannel),
node2_policy: null,
@ -257,8 +265,8 @@ async function buildIncompleteChannel(clChannel: any): Promise<ILightningApi.Cha
function convertPolicy(clChannel: any): ILightningApi.RoutingPolicy {
return {
time_lock_delta: clChannel.delay,
min_htlc: clChannel.htlc_minimum_msat.slice(0, -4),
max_htlc_msat: clChannel.htlc_maximum_msat.slice(0, -4),
min_htlc: clChannel.htlc_minimum_msat.toString(),
max_htlc_msat: clChannel.htlc_maximum_msat.toString(),
fee_base_msat: clChannel.base_fee_millisatoshi,
fee_rate_milli_msat: clChannel.fee_per_millionth,
disabled: !clChannel.active,

View File

@ -31,7 +31,7 @@ class MemoryCache {
}
private cleanup() {
this.cache = this.cache.filter((cache) => cache.expires < (new Date()));
this.cache = this.cache.filter((cache) => cache.expires > (new Date()));
}
}

View File

@ -1,10 +1,11 @@
import { GbtGenerator, GbtResult, ThreadTransaction as RustThreadTransaction } from '../../rust-gbt';
import { GbtGenerator, GbtResult, ThreadTransaction as RustThreadTransaction, ThreadAcceleration as RustThreadAcceleration } from 'rust-gbt';
import logger from '../logger';
import { MempoolBlock, MempoolTransactionExtended, TransactionStripped, MempoolBlockWithTransactions, MempoolBlockDelta, Ancestor, CompactThreadTransaction, EffectiveFeeStats } from '../mempool.interfaces';
import { MempoolBlock, MempoolTransactionExtended, TransactionStripped, MempoolBlockWithTransactions, MempoolBlockDelta, Ancestor, CompactThreadTransaction, EffectiveFeeStats, PoolTag, TransactionClassified } from '../mempool.interfaces';
import { Common, OnlineFeeStatsCalculator } from './common';
import config from '../config';
import { Worker } from 'worker_threads';
import path from 'path';
import mempool from './mempool';
const MAX_UINT32 = Math.pow(2, 32) - 1;
@ -168,9 +169,9 @@ class MempoolBlocks {
private calculateMempoolDeltas(prevBlocks: MempoolBlockWithTransactions[], mempoolBlocks: MempoolBlockWithTransactions[]): MempoolBlockDelta[] {
const mempoolBlockDeltas: MempoolBlockDelta[] = [];
for (let i = 0; i < Math.max(mempoolBlocks.length, prevBlocks.length); i++) {
let added: TransactionStripped[] = [];
let added: TransactionClassified[] = [];
let removed: string[] = [];
const changed: { txid: string, rate: number | undefined }[] = [];
const changed: { txid: string, rate: number | undefined, acc: boolean | undefined }[] = [];
if (mempoolBlocks[i] && !prevBlocks[i]) {
added = mempoolBlocks[i].transactions;
} else if (!mempoolBlocks[i] && prevBlocks[i]) {
@ -192,8 +193,8 @@ class MempoolBlocks {
mempoolBlocks[i].transactions.forEach(tx => {
if (!prevIds[tx.txid]) {
added.push(tx);
} else if (tx.rate !== prevIds[tx.txid].rate) {
changed.push({ txid: tx.txid, rate: tx.rate });
} else if (tx.rate !== prevIds[tx.txid].rate || tx.acc !== prevIds[tx.txid].acc) {
changed.push({ txid: tx.txid, rate: tx.rate, acc: tx.acc });
}
});
}
@ -206,14 +207,19 @@ class MempoolBlocks {
return mempoolBlockDeltas;
}
public async $makeBlockTemplates(newMempool: { [txid: string]: MempoolTransactionExtended }, saveResults: boolean = false): Promise<MempoolBlockWithTransactions[]> {
public async $makeBlockTemplates(newMempool: { [txid: string]: MempoolTransactionExtended }, saveResults: boolean = false, useAccelerations: boolean = false, accelerationPool?: number): Promise<MempoolBlockWithTransactions[]> {
const start = Date.now();
// reset mempool short ids
this.resetUids();
for (const tx of Object.values(newMempool)) {
this.setUid(tx);
if (saveResults) {
this.resetUids();
}
// set missing short ids
for (const tx of Object.values(newMempool)) {
this.setUid(tx, !saveResults);
}
const accelerations = useAccelerations ? mempool.getAccelerations() : {};
// prepare a stripped down version of the mempool with only the minimum necessary data
// to reduce the overhead of passing this data to the worker thread
@ -222,7 +228,7 @@ class MempoolBlocks {
if (entry.uid !== null && entry.uid !== undefined) {
const stripped = {
uid: entry.uid,
fee: entry.fee,
fee: entry.fee + (useAccelerations && (!accelerationPool || accelerations[entry.txid]?.pools?.includes(accelerationPool)) ? (accelerations[entry.txid]?.feeDelta || 0) : 0),
weight: (entry.adjustedVsize * 4),
sigops: entry.sigops,
feePerVsize: entry.adjustedFeePerVsize || entry.feePerVsize,
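// Editor's note on the feeDelta term above (hypothetical numbers): a 250 vB transaction
// paying 1000 sats with a 5000-sat acceleration is fed to GBT as if it paid 6000 sats,
// so its input rate becomes (1000 + 5000) / 250 = 24 sat/vB instead of 4 sat/vB
// (before any ancestor/descendant adjustments applied by the selection algorithm)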
@ -262,7 +268,7 @@ class MempoolBlocks {
// clean up thread error listener
this.txSelectionWorker?.removeListener('error', threadErrorListener);
const processed = this.processBlockTemplates(newMempool, blocks, null, Object.entries(rates), Object.values(clusters), saveResults);
const processed = this.processBlockTemplates(newMempool, blocks, null, Object.entries(rates), Object.values(clusters), accelerations, accelerationPool, saveResults);
logger.debug(`makeBlockTemplates completed in ${(Date.now() - start)/1000} seconds`);
@ -273,25 +279,29 @@ class MempoolBlocks {
return this.mempoolBlocks;
}
public async $updateBlockTemplates(newMempool: { [txid: string]: MempoolTransactionExtended }, added: MempoolTransactionExtended[], removed: MempoolTransactionExtended[], saveResults: boolean = false): Promise<void> {
public async $updateBlockTemplates(newMempool: { [txid: string]: MempoolTransactionExtended }, added: MempoolTransactionExtended[], removed: MempoolTransactionExtended[], accelerationDelta: string[] = [], saveResults: boolean = false, useAccelerations: boolean = false): Promise<void> {
if (!this.txSelectionWorker) {
// need to reset the worker
await this.$makeBlockTemplates(newMempool, saveResults);
await this.$makeBlockTemplates(newMempool, saveResults, useAccelerations);
return;
}
const start = Date.now();
for (const tx of Object.values(added)) {
const accelerations = useAccelerations ? mempool.getAccelerations() : {};
const addedAndChanged: MempoolTransactionExtended[] = useAccelerations ? accelerationDelta.map(txid => newMempool[txid]).filter(tx => tx != null).concat(added) : added;
for (const tx of addedAndChanged) {
this.setUid(tx, true);
}
const removedUids = removed.map(tx => this.getUid(tx)).filter(uid => (uid !== null && uid !== undefined)) as number[];
const removedUids = removed.map(tx => this.getUid(tx)).filter(uid => uid != null) as number[];
// prepare a stripped down version of the mempool with only the minimum necessary data
// to reduce the overhead of passing this data to the worker thread
const addedStripped: CompactThreadTransaction[] = added.filter(entry => (entry.uid !== null && entry.uid !== undefined)).map(entry => {
const addedStripped: CompactThreadTransaction[] = addedAndChanged.filter(entry => entry.uid != null).map(entry => {
return {
uid: entry.uid || 0,
fee: entry.fee,
fee: entry.fee + (useAccelerations ? (accelerations[entry.txid]?.feeDelta || 0) : 0),
weight: (entry.adjustedVsize * 4),
sigops: entry.sigops,
feePerVsize: entry.adjustedFeePerVsize || entry.feePerVsize,
@ -318,7 +328,7 @@ class MempoolBlocks {
// clean up thread error listener
this.txSelectionWorker?.removeListener('error', threadErrorListener);
this.processBlockTemplates(newMempool, blocks, null, Object.entries(rates), Object.values(clusters), saveResults);
this.processBlockTemplates(newMempool, blocks, null, Object.entries(rates), Object.values(clusters), accelerations, null, saveResults);
logger.debug(`updateBlockTemplates completed in ${(Date.now() - start) / 1000} seconds`);
} catch (e) {
logger.err('updateBlockTemplates failed. ' + (e instanceof Error ? e.message : e));
@ -330,7 +340,7 @@ class MempoolBlocks {
this.rustGbtGenerator = new GbtGenerator();
}
private async $rustMakeBlockTemplates(newMempool: { [txid: string]: MempoolTransactionExtended }, saveResults: boolean = false): Promise<MempoolBlockWithTransactions[]> {
public async $rustMakeBlockTemplates(newMempool: { [txid: string]: MempoolTransactionExtended }, saveResults: boolean = false, useAccelerations: boolean = false, accelerationPool?: number): Promise<MempoolBlockWithTransactions[]> {
const start = Date.now();
// reset mempool short ids
@ -346,16 +356,25 @@ class MempoolBlocks {
tx.inputs = tx.vin.map(v => this.getUid(newMempool[v.txid])).filter(uid => (uid !== null && uid !== undefined)) as number[];
}
const accelerations = useAccelerations ? mempool.getAccelerations() : {};
const acceleratedList = accelerationPool ? Object.values(accelerations).filter(acc => newMempool[acc.txid] && acc.pools.includes(accelerationPool)) : Object.values(accelerations).filter(acc => newMempool[acc.txid]);
const convertedAccelerations = acceleratedList.map(acc => {
return {
uid: this.getUid(newMempool[acc.txid]),
delta: acc.feeDelta,
};
});
// run the block construction algorithm in a separate thread, and wait for a result
const rustGbt = saveResults ? this.rustGbtGenerator : new GbtGenerator();
try {
const { blocks, blockWeights, rates, clusters } = this.convertNapiResultTxids(
await rustGbt.make(Object.values(newMempool) as RustThreadTransaction[], this.nextUid),
await rustGbt.make(Object.values(newMempool) as RustThreadTransaction[], convertedAccelerations as RustThreadAcceleration[], this.nextUid),
);
if (saveResults) {
this.rustInitialized = true;
}
const processed = this.processBlockTemplates(newMempool, blocks, blockWeights, rates, clusters, saveResults);
const processed = this.processBlockTemplates(newMempool, blocks, blockWeights, rates, clusters, accelerations, accelerationPool, saveResults);
logger.debug(`RUST makeBlockTemplates completed in ${(Date.now() - start)/1000} seconds`);
return processed;
} catch (e) {
@ -367,20 +386,20 @@ class MempoolBlocks {
return this.mempoolBlocks;
}
public async $oneOffRustBlockTemplates(newMempool: { [txid: string]: MempoolTransactionExtended }): Promise<MempoolBlockWithTransactions[]> {
return this.$rustMakeBlockTemplates(newMempool, false);
public async $oneOffRustBlockTemplates(newMempool: { [txid: string]: MempoolTransactionExtended }, useAccelerations: boolean, accelerationPool?: number): Promise<MempoolBlockWithTransactions[]> {
return this.$rustMakeBlockTemplates(newMempool, false, useAccelerations, accelerationPool);
}
public async $rustUpdateBlockTemplates(newMempool: { [txid: string]: MempoolTransactionExtended }, mempoolSize: number, added: MempoolTransactionExtended[], removed: MempoolTransactionExtended[]): Promise<void> {
public async $rustUpdateBlockTemplates(newMempool: { [txid: string]: MempoolTransactionExtended }, mempoolSize: number, added: MempoolTransactionExtended[], removed: MempoolTransactionExtended[], useAccelerations: boolean, accelerationPool?: number): Promise<MempoolBlockWithTransactions[]> {
// GBT optimization requires that uids never get too sparse
// as a sanity check, we should also explicitly prevent uint32 uid overflow
if (this.nextUid + added.length >= Math.min(Math.max(262144, 2 * mempoolSize), MAX_UINT32)) {
this.resetRustGbt();
}
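// worked example (hypothetical sizes): with mempoolSize = 100_000 and 500 additions,
// the threshold is min(max(262144, 2 * 100_000), 2^32 - 1) = 262144, so the Rust GBT
// state is rebuilt as soon as nextUid + 500 would reach 262144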
if (!this.rustInitialized) {
// need to reset the worker
await this.$rustMakeBlockTemplates(newMempool, true);
return;
return this.$rustMakeBlockTemplates(newMempool, true, useAccelerations, accelerationPool);
}
const start = Date.now();
@ -394,12 +413,22 @@ class MempoolBlocks {
}
const removedUids = removed.map(tx => this.getUid(tx)).filter(uid => (uid !== null && uid !== undefined)) as number[];
const accelerations = useAccelerations ? mempool.getAccelerations() : {};
const acceleratedList = accelerationPool ? Object.values(accelerations).filter(acc => newMempool[acc.txid] && acc.pools.includes(accelerationPool)) : Object.values(accelerations).filter(acc => newMempool[acc.txid]);
const convertedAccelerations = acceleratedList.map(acc => {
return {
uid: this.getUid(newMempool[acc.txid]),
delta: acc.feeDelta,
};
});
// run the block construction algorithm in a separate thread, and wait for a result
try {
const { blocks, blockWeights, rates, clusters } = this.convertNapiResultTxids(
await this.rustGbtGenerator.update(
added as RustThreadTransaction[],
removedUids,
convertedAccelerations as RustThreadAcceleration[],
this.nextUid,
),
);
@ -407,19 +436,22 @@ class MempoolBlocks {
if (mempoolSize !== resultMempoolSize) {
throw new Error('GBT returned wrong number of transactions, cache is probably out of sync');
} else {
this.processBlockTemplates(newMempool, blocks, blockWeights, rates, clusters, true);
const processed = this.processBlockTemplates(newMempool, blocks, blockWeights, rates, clusters, accelerations, accelerationPool, true);
this.removeUids(removedUids);
logger.debug(`RUST updateBlockTemplates completed in ${(Date.now() - start)/1000} seconds`);
return processed;
}
this.removeUids(removedUids);
logger.debug(`RUST updateBlockTemplates completed in ${(Date.now() - start)/1000} seconds`);
} catch (e) {
logger.err('RUST updateBlockTemplates failed. ' + (e instanceof Error ? e.message : e));
this.resetRustGbt();
return this.mempoolBlocks;
}
}
private processBlockTemplates(mempool: { [txid: string]: MempoolTransactionExtended }, blocks: string[][], blockWeights: number[] | null, rates: [string, number][], clusters: string[][], saveResults): MempoolBlockWithTransactions[] {
private processBlockTemplates(mempool: { [txid: string]: MempoolTransactionExtended }, blocks: string[][], blockWeights: number[] | null, rates: [string, number][], clusters: string[][], accelerations, accelerationPool, saveResults): MempoolBlockWithTransactions[] {
for (const [txid, rate] of rates) {
if (txid in mempool) {
mempool[txid].cpfpDirty = (rate !== mempool[txid].effectiveFeePerVsize);
mempool[txid].effectiveFeePerVsize = rate;
mempool[txid].cpfpChecked = false;
}
@ -463,11 +495,16 @@ class MempoolBlocks {
}
}
});
if (mempoolTx.ancestors?.length !== ancestors.length || mempoolTx.descendants?.length !== descendants.length) {
mempoolTx.cpfpDirty = true;
}
Object.assign(mempoolTx, {ancestors, descendants, bestDescendant: null, cpfpChecked: true});
}
}
}
const isAccelerated : { [txid: string]: boolean } = {};
const sizeLimit = (config.MEMPOOL.BLOCK_WEIGHT_UNITS / 4) * 1.2;
// update this thread's mempool with the results
let mempoolTx: MempoolTransactionExtended;
@ -496,6 +533,26 @@ class MempoolBlocks {
mempoolTx.cpfpChecked = true;
}
const acceleration = accelerations[txid];
if (isAccelerated[txid] || (acceleration && (!accelerationPool || acceleration.pools.includes(accelerationPool)))) {
if (!mempoolTx.acceleration) {
mempoolTx.cpfpDirty = true;
}
mempoolTx.acceleration = true;
for (const ancestor of mempoolTx.ancestors || []) {
if (!mempool[ancestor.txid].acceleration) {
mempool[ancestor.txid].cpfpDirty = true;
}
mempool[ancestor.txid].acceleration = true;
isAccelerated[ancestor.txid] = true;
}
} else {
if (mempoolTx.acceleration) {
mempoolTx.cpfpDirty = true;
}
delete mempoolTx.acceleration;
}
// online calculation of stack-of-blocks fee stats
if (hasBlockStack && blockIndex === lastBlockIndex && feeStatsCalculator) {
feeStatsCalculator.processNext(mempoolTx);
@ -525,6 +582,7 @@ class MempoolBlocks {
const deltas = this.calculateMempoolDeltas(this.mempoolBlocks, mempoolBlocks);
this.mempoolBlocks = mempoolBlocks;
this.mempoolBlockDeltas = deltas;
}
return mempoolBlocks;
@ -532,7 +590,7 @@ class MempoolBlocks {
private dataToMempoolBlocks(transactionIds: string[], transactions: MempoolTransactionExtended[], totalSize: number, totalWeight: number, totalFees: number, feeStats?: EffectiveFeeStats ): MempoolBlockWithTransactions {
if (!feeStats) {
feeStats = Common.calcEffectiveFeeStatistics(transactions);
feeStats = Common.calcEffectiveFeeStatistics(transactions.filter(tx => !tx.acceleration));
}
return {
blockSize: totalSize,
@ -542,7 +600,7 @@ class MempoolBlocks {
medianFee: feeStats.medianFee, // Common.percentile(transactions.map((tx) => tx.effectiveFeePerVsize), config.MEMPOOL.RECOMMENDED_FEE_PERCENTILE),
feeRange: feeStats.feeRange, //Common.getFeesInRange(transactions, rangeLength),
transactionIds: transactionIds,
transactions: transactions.map((tx) => Common.stripTransaction(tx)),
transactions: transactions.map((tx) => Common.classifyTransaction(tx)),
};
}

View File

@ -9,6 +9,8 @@ import loadingIndicators from './loading-indicators';
import bitcoinClient from './bitcoin/bitcoin-client';
import bitcoinSecondClient from './bitcoin/bitcoin-second-client';
import rbfCache from './rbf-cache';
import { Acceleration } from './services/acceleration';
import redisCache from './redis-cache';
class Mempool {
private inSync: boolean = false;
@ -16,11 +18,13 @@ class Mempool {
private mempoolCache: { [txId: string]: MempoolTransactionExtended } = {};
private spendMap = new Map<string, MempoolTransactionExtended>();
private mempoolInfo: IBitcoinApi.MempoolInfo = { loaded: false, size: 0, bytes: 0, usage: 0, total_fee: 0,
maxmempool: 300000000, mempoolminfee: 0.00001000, minrelaytxfee: 0.00001000 };
maxmempool: 300000000, mempoolminfee: Common.isLiquid() ? 0.00000100 : 0.00001000, minrelaytxfee: Common.isLiquid() ? 0.00000100 : 0.00001000 };
private mempoolChangedCallback: ((newMempool: {[txId: string]: MempoolTransactionExtended; }, newTransactions: MempoolTransactionExtended[],
deletedTransactions: MempoolTransactionExtended[]) => void) | undefined;
deletedTransactions: MempoolTransactionExtended[], accelerationDelta: string[]) => void) | undefined;
private $asyncMempoolChangedCallback: ((newMempool: {[txId: string]: MempoolTransactionExtended; }, mempoolSize: number, newTransactions: MempoolTransactionExtended[],
deletedTransactions: MempoolTransactionExtended[]) => Promise<void>) | undefined;
deletedTransactions: MempoolTransactionExtended[], accelerationDelta: string[]) => Promise<void>) | undefined;
private accelerations: { [txId: string]: Acceleration } = {};
private txPerSecondArray: number[] = [];
private txPerSecond: number = 0;
@ -65,12 +69,12 @@ class Mempool {
}
public setMempoolChangedCallback(fn: (newMempool: { [txId: string]: MempoolTransactionExtended; },
newTransactions: MempoolTransactionExtended[], deletedTransactions: MempoolTransactionExtended[]) => void): void {
newTransactions: MempoolTransactionExtended[], deletedTransactions: MempoolTransactionExtended[], accelerationDelta: string[]) => void): void {
this.mempoolChangedCallback = fn;
}
public setAsyncMempoolChangedCallback(fn: (newMempool: { [txId: string]: MempoolTransactionExtended; }, mempoolSize: number,
newTransactions: MempoolTransactionExtended[], deletedTransactions: MempoolTransactionExtended[]) => Promise<void>): void {
newTransactions: MempoolTransactionExtended[], deletedTransactions: MempoolTransactionExtended[], accelerationDelta: string[]) => Promise<void>): void {
this.$asyncMempoolChangedCallback = fn;
}
@ -85,24 +89,76 @@ class Mempool {
public async $setMempool(mempoolData: { [txId: string]: MempoolTransactionExtended }) {
this.mempoolCache = mempoolData;
let count = 0;
const redisTimer = Date.now();
if (config.MEMPOOL.CACHE_ENABLED && config.REDIS.ENABLED) {
logger.debug(`Migrating ${Object.keys(this.mempoolCache).length} transactions from disk cache to Redis cache`);
}
for (const txid of Object.keys(this.mempoolCache)) {
if (this.mempoolCache[txid].sigops == null || this.mempoolCache[txid].effectiveFeePerVsize == null) {
if (!this.mempoolCache[txid].adjustedVsize || this.mempoolCache[txid].sigops == null || this.mempoolCache[txid].effectiveFeePerVsize == null) {
this.mempoolCache[txid] = transactionUtils.extendMempoolTransaction(this.mempoolCache[txid]);
}
if (this.mempoolCache[txid].order == null) {
this.mempoolCache[txid].order = transactionUtils.txidToOrdering(txid);
}
for (const vin of this.mempoolCache[txid].vin) {
transactionUtils.addInnerScriptsToVin(vin);
}
count++;
if (config.MEMPOOL.CACHE_ENABLED && config.REDIS.ENABLED) {
await redisCache.$addTransaction(this.mempoolCache[txid]);
}
}
if (config.MEMPOOL.CACHE_ENABLED && config.REDIS.ENABLED) {
await redisCache.$flushTransactions();
logger.debug(`Finished migrating cache transactions in ${((Date.now() - redisTimer) / 1000).toFixed(2)} seconds`);
}
if (this.mempoolChangedCallback) {
this.mempoolChangedCallback(this.mempoolCache, [], []);
this.mempoolChangedCallback(this.mempoolCache, [], [], []);
}
if (this.$asyncMempoolChangedCallback) {
await this.$asyncMempoolChangedCallback(this.mempoolCache, count, [], []);
await this.$asyncMempoolChangedCallback(this.mempoolCache, count, [], [], []);
}
this.addToSpendMap(Object.values(this.mempoolCache));
}
public async $reloadMempool(expectedCount: number): Promise<MempoolTransactionExtended[]> {
let count = 0;
let done = false;
let last_txid;
const newTransactions: MempoolTransactionExtended[] = [];
loadingIndicators.setProgress('mempool', count / expectedCount * 100);
while (!done) {
try {
const result = await bitcoinApi.$getAllMempoolTransactions(last_txid, config.ESPLORA.BATCH_QUERY_BASE_SIZE);
if (result) {
for (const tx of result) {
const extendedTransaction = transactionUtils.extendMempoolTransaction(tx);
if (!this.mempoolCache[extendedTransaction.txid]) {
newTransactions.push(extendedTransaction);
this.mempoolCache[extendedTransaction.txid] = extendedTransaction;
}
count++;
}
logger.info(`Fetched ${count} of ${expectedCount} mempool transactions from esplora`);
if (result.length > 0) {
last_txid = result[result.length - 1].txid;
} else {
done = true;
}
if (Math.floor((count / expectedCount) * 100) < 100) {
loadingIndicators.setProgress('mempool', count / expectedCount * 100);
}
} else {
done = true;
}
} catch(err) {
logger.err('failed to fetch bulk mempool transactions from esplora');
}
}
logger.info(`Done inserting loaded mempool transactions into local cache`);
return newTransactions;
}
public async $updateMemPoolInfo() {
this.mempoolInfo = await this.$getMempoolInfo();
}
@ -132,7 +188,7 @@ class Mempool {
return txTimes;
}
public async $updateMempool(transactions: string[]): Promise<void> {
public async $updateMempool(transactions: string[], accelerations: Acceleration[] | null, pollRate: number): Promise<void> {
logger.debug(`Updating mempool...`);
// warn if this run stalls the main loop for more than 2 minutes
@ -143,7 +199,7 @@ class Mempool {
const currentMempoolSize = Object.keys(this.mempoolCache).length;
this.updateTimerProgress(timer, 'got raw mempool');
const diff = transactions.length - currentMempoolSize;
const newTransactions: MempoolTransactionExtended[] = [];
let newTransactions: MempoolTransactionExtended[] = [];
this.mempoolCacheDelta = Math.abs(diff);
@ -162,12 +218,35 @@ class Mempool {
};
let intervalTimer = Date.now();
for (const txid of transactions) {
if (!this.mempoolCache[txid]) {
try {
const transaction = await transactionUtils.$getMempoolTransactionExtended(txid, false, false, false);
this.updateTimerProgress(timer, 'fetched new transaction');
this.mempoolCache[txid] = transaction;
let loaded = false;
if (config.MEMPOOL.BACKEND === 'esplora' && currentMempoolSize < transactions.length * 0.5 && transactions.length > 20_000) {
this.inSync = false;
logger.info(`Missing ${transactions.length - currentMempoolSize} mempool transactions, attempting to reload in bulk from esplora`);
try {
newTransactions = await this.$reloadMempool(transactions.length);
if (config.REDIS.ENABLED) {
for (const tx of newTransactions) {
await redisCache.$addTransaction(tx);
}
}
loaded = true;
} catch (e) {
logger.err('failed to load mempool in bulk from esplora, falling back to fetching individual transactions');
}
}
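// hypothetical example of the bulk-reload trigger above: bitcoind reports 80_000 txids
// while the local cache holds 30_000; since 30_000 < 80_000 * 0.5 and 80_000 > 20_000,
// the whole mempool is refetched in batches instead of one transaction at a time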
if (!loaded) {
const remainingTxids = transactions.filter(txid => !this.mempoolCache[txid]);
const sliceLength = config.ESPLORA.BATCH_QUERY_BASE_SIZE;
for (let i = 0; i < Math.ceil(remainingTxids.length / sliceLength); i++) {
const slice = remainingTxids.slice(i * sliceLength, (i + 1) * sliceLength);
const txs = await transactionUtils.$getMempoolTransactionsExtended(slice, false, false, false);
logger.debug(`fetched ${txs.length} transactions`);
this.updateTimerProgress(timer, 'fetched new transactions');
for (const transaction of txs) {
this.mempoolCache[transaction.txid] = transaction;
if (this.inSync) {
this.txPerSecondArray.push(new Date().getTime());
this.vBytesPerSecondArray.push({
@ -177,26 +256,34 @@ class Mempool {
}
hasChange = true;
newTransactions.push(transaction);
} catch (e: any) {
if (config.MEMPOOL.BACKEND === 'esplora' && e.response?.status === 404) {
this.missingTxCount++;
}
logger.debug(`Error finding transaction '${txid}' in the mempool: ` + (e instanceof Error ? e.message : e));
}
}
if (Date.now() - intervalTimer > 5_000) {
if (this.inSync) {
// Break and restart the mempool loop if we spend too much time processing
// new transactions, otherwise we risk falling behind on block height
logger.debug('Breaking mempool loop because the 5s time limit was exceeded.');
break;
} else {
const progress = (currentMempoolSize + newTransactions.length) / transactions.length * 100;
logger.debug(`Mempool is synchronizing. Processed ${newTransactions.length}/${diff} txs (${Math.round(progress)}%)`);
loadingIndicators.setProgress('mempool', progress);
intervalTimer = Date.now()
if (config.REDIS.ENABLED) {
await redisCache.$addTransaction(transaction);
}
}
if (txs.length < slice.length) {
const missing = slice.length - txs.length;
if (config.MEMPOOL.BACKEND === 'esplora') {
this.missingTxCount += missing;
}
logger.debug(`Error finding ${missing} transactions in the mempool`);
}
if (Date.now() - intervalTimer > Math.max(pollRate * 2, 5_000)) {
if (this.inSync) {
// Break and restart the mempool loop if we spend too much time processing
// new transactions, otherwise we risk falling behind on block height
logger.debug('Breaking mempool loop because the processing time limit was exceeded.');
break;
} else {
const progress = (currentMempoolSize + newTransactions.length) / transactions.length * 100;
logger.debug(`Mempool is synchronizing. Processed ${newTransactions.length}/${diff} txs (${Math.round(progress)}%)`);
if (Math.floor(progress) < 100) {
loadingIndicators.setProgress('mempool', progress);
}
intervalTimer = Date.now();
}
}
}
}
@ -219,7 +306,7 @@ class Mempool {
logger.warn(`Mempool clear protection triggered because transactions.length: ${transactions.length} and currentMempoolSize: ${currentMempoolSize}.`);
setTimeout(() => {
this.mempoolProtection = 2;
logger.warn('Mempool clear protection resumed.');
logger.warn('Mempool clear protection ended, normal operation resumed.');
}, 1000 * 60 * config.MEMPOOL.CLEAR_PROTECTION_MINUTES);
}
@ -246,21 +333,33 @@ class Mempool {
const newTransactionsStripped = newTransactions.map((tx) => Common.stripTransaction(tx));
this.latestTransactions = newTransactionsStripped.concat(this.latestTransactions).slice(0, 6);
const accelerationDelta = accelerations != null ? await this.$updateAccelerations(accelerations) : [];
if (accelerationDelta.length) {
hasChange = true;
}
this.mempoolCacheDelta = Math.abs(transactions.length - newMempoolSize);
if (this.mempoolChangedCallback && (hasChange || deletedTransactions.length)) {
this.mempoolChangedCallback(this.mempoolCache, newTransactions, deletedTransactions, accelerationDelta);
}
if (this.$asyncMempoolChangedCallback && (hasChange || deletedTransactions.length)) {
this.updateTimerProgress(timer, 'running async mempool callback');
await this.$asyncMempoolChangedCallback(this.mempoolCache, newMempoolSize, newTransactions, deletedTransactions, accelerationDelta);
this.updateTimerProgress(timer, 'completed async mempool callback');
}
if (!this.inSync && transactions.length === newMempoolSize) {
this.inSync = true;
logger.notice('The mempool is now in sync!');
loadingIndicators.setProgress('mempool', 100);
}
this.mempoolCacheDelta = Math.abs(transactions.length - newMempoolSize);
if (this.mempoolChangedCallback && (hasChange || deletedTransactions.length)) {
this.mempoolChangedCallback(this.mempoolCache, newTransactions, deletedTransactions);
}
if (this.$asyncMempoolChangedCallback && (hasChange || deletedTransactions.length)) {
this.updateTimerProgress(timer, 'running async mempool callback');
await this.$asyncMempoolChangedCallback(this.mempoolCache, newMempoolSize, newTransactions, deletedTransactions);
this.updateTimerProgress(timer, 'completed async mempool callback');
// Update Redis cache
if (config.REDIS.ENABLED) {
await redisCache.$flushTransactions();
await redisCache.$removeTransactions(deletedTransactions.map(tx => tx.txid));
await rbfCache.updateCache();
}
const end = new Date().getTime();
@ -270,6 +369,68 @@ class Mempool {
this.clearTimer(timer);
}
public getAccelerations(): { [txid: string]: Acceleration } {
return this.accelerations;
}
public $updateAccelerations(newAccelerations: Acceleration[]): string[] {
if (!config.MEMPOOL_SERVICES.ACCELERATIONS) {
return [];
}
try {
const changed: string[] = [];
const newAccelerationMap: { [txid: string]: Acceleration } = {};
for (const acceleration of newAccelerations) {
newAccelerationMap[acceleration.txid] = acceleration;
if (this.accelerations[acceleration.txid] == null) {
// new acceleration
changed.push(acceleration.txid);
} else {
if (this.accelerations[acceleration.txid].feeDelta !== acceleration.feeDelta) {
// feeDelta changed
changed.push(acceleration.txid);
} else if (this.accelerations[acceleration.txid].pools?.length) {
let poolsChanged = false;
const pools = new Set();
this.accelerations[acceleration.txid].pools.forEach(pool => {
pools.add(pool);
});
acceleration.pools.forEach(pool => {
if (!pools.has(pool)) {
poolsChanged = true;
} else {
pools.delete(pool);
}
});
if (pools.size > 0) {
poolsChanged = true;
}
if (poolsChanged) {
// pools changed
changed.push(acceleration.txid);
}
}
}
}
for (const oldTxid of Object.keys(this.accelerations)) {
if (!newAccelerationMap[oldTxid]) {
// removed
changed.push(oldTxid);
}
}
this.accelerations = newAccelerationMap;
return changed;
} catch (e: any) {
logger.debug(`Failed to update accelerations: ` + (e instanceof Error ? e.message : e));
return [];
}
}
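// The pools comparison above is a symmetric set difference; e.g. with hypothetical pool
// ids, old pools [1, 2] vs incoming pools [2, 3]:
//   3 is not in the old set                 -> poolsChanged = true
//   1 is left over after matching deletions -> pools.size > 0 -> poolsChanged = true
// either condition alone is enough to push the txid into `changed`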
private startTimer() {
const state: any = {
start: Date.now(),

View File

@ -12,6 +12,7 @@ import PricesRepository from '../../repositories/PricesRepository';
class MiningRoutes {
public initRoutes(app: Application) {
app
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/pools', this.$listPools)
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/pools/:interval', this.$getPools)
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/pool/:slug/hashrate', this.$getPoolHistoricalHashrate)
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/pool/:slug/blocks', this.$getPoolBlocks)
@ -41,6 +42,10 @@ class MiningRoutes {
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 1000 * 300).toUTCString());
if (['testnet', 'signet', 'liquidtestnet'].includes(config.MEMPOOL.NETWORK)) {
res.status(400).send('Prices are not available on testnets.');
return;
}
if (req.query.timestamp) {
res.status(200).send(await PricesRepository.$getNearestHistoricalPrice(
parseInt(<string>req.query.timestamp ?? 0, 10)
@ -88,6 +93,29 @@ class MiningRoutes {
}
}
private async $listPools(req: Request, res: Response): Promise<void> {
try {
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
const pools = await mining.$listPools();
if (!pools) {
res.status(500).end();
return;
}
res.header('X-total-count', pools.length.toString());
if (pools.length === 0) {
res.status(204).send();
} else {
res.json(pools);
}
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async $getPools(req: Request, res: Response) {
try {
const stats = await mining.$getPoolsStats(req.params.interval);

View File

@ -11,10 +11,17 @@ import DifficultyAdjustmentsRepository from '../../repositories/DifficultyAdjust
import config from '../../config';
import BlocksAuditsRepository from '../../repositories/BlocksAuditsRepository';
import PricesRepository from '../../repositories/PricesRepository';
import { bitcoinCoreApi } from '../bitcoin/bitcoin-api-factory';
import bitcoinApi from '../bitcoin/bitcoin-api-factory';
import { IEsploraApi } from '../bitcoin/esplora-api.interface';
import database from '../../database';
interface DifficultyBlock {
timestamp: number,
height: number,
bits: number,
difficulty: number,
}
class Mining {
private blocksPriceIndexingRunning = false;
public lastHashrateIndexingDate: number | null = null;
@ -26,7 +33,7 @@ class Mining {
/**
* Get historical blocks health
*/
public async $getBlocksHealthHistory(interval: string | null = null): Promise<any> {
return await BlocksAuditsRepository.$getBlocksHealthHistory(
this.getTimeRange(interval),
Common.getSqlInterval(interval)
@ -56,7 +63,7 @@ class Mining {
/**
* Get historical block fee rates percentiles
*/
public async $getHistoricalBlockFeeRates(interval: string | null = null): Promise<any> {
return await BlocksRepository.$getHistoricalBlockFeeRates(
this.getTimeRange(interval),
Common.getSqlInterval(interval)
@ -66,7 +73,7 @@ class Mining {
/**
* Get historical block sizes
*/
public async $getHistoricalBlockSizes(interval: string | null = null): Promise<any> {
return await BlocksRepository.$getHistoricalBlockSizes(
this.getTimeRange(interval),
Common.getSqlInterval(interval)
@ -76,7 +83,7 @@ class Mining {
/**
* Get historical block weights
*/
public async $getHistoricalBlockWeights(interval: string | null = null): Promise<any> {
return await BlocksRepository.$getHistoricalBlockWeights(
this.getTimeRange(interval),
Common.getSqlInterval(interval)
@ -107,6 +114,7 @@ class Mining {
slug: poolInfo.slug,
avgMatchRate: poolInfo.avgMatchRate !== null ? Math.round(100 * poolInfo.avgMatchRate) / 100 : null,
avgFeeDelta: poolInfo.avgFeeDelta,
poolUniqueId: poolInfo.poolUniqueId
};
poolsStats.push(poolStat);
});
@ -201,7 +209,7 @@ class Mining {
try {
const oldestConsecutiveBlockTimestamp = 1000 * (await BlocksRepository.$getOldestConsecutiveBlock()).timestamp;
const genesisBlock: IEsploraApi.Block = await bitcoinCoreApi.$getBlock(await bitcoinClient.getBlockHash(0));
const genesisBlock: IEsploraApi.Block = await bitcoinApi.$getBlock(await bitcoinApi.$getBlockHash(0));
const genesisTimestamp = genesisBlock.timestamp * 1000;
const indexedTimestamp = await HashratesRepository.$getWeeklyHashrateTimestamps();
@ -312,7 +320,7 @@ class Mining {
const oldestConsecutiveBlockTimestamp = 1000 * (await BlocksRepository.$getOldestConsecutiveBlock()).timestamp;
try {
const genesisBlock: IEsploraApi.Block = await bitcoinCoreApi.$getBlock(await bitcoinClient.getBlockHash(0));
const genesisBlock: IEsploraApi.Block = await bitcoinApi.$getBlock(await bitcoinApi.$getBlockHash(0));
const genesisTimestamp = genesisBlock.timestamp * 1000;
const indexedTimestamp = (await HashratesRepository.$getRawNetworkDailyHashrate(null)).map(hashrate => hashrate.timestamp);
const lastMidnight = this.getDateMidnight(new Date());
@ -420,9 +428,11 @@ class Mining {
indexedHeights[height] = true;
}
// gets {time, height, difficulty, bits} of blocks in ascending order of height
const blocks: any = await BlocksRepository.$getBlocksDifficulty();
const genesisBlock: IEsploraApi.Block = await bitcoinCoreApi.$getBlock(await bitcoinClient.getBlockHash(0));
const genesisBlock: IEsploraApi.Block = await bitcoinApi.$getBlock(await bitcoinApi.$getBlockHash(0));
let currentDifficulty = genesisBlock.difficulty;
let currentBits = genesisBlock.bits;
let totalIndexed = 0;
if (config.MEMPOOL.INDEXING_BLOCKS_AMOUNT === -1 && indexedHeights[0] !== true) {
@ -434,38 +444,45 @@ class Mining {
});
}
const oldestConsecutiveBlock = await BlocksRepository.$getOldestConsecutiveBlock();
if (config.MEMPOOL.INDEXING_BLOCKS_AMOUNT !== -1) {
currentDifficulty = oldestConsecutiveBlock.difficulty;
if (!blocks?.length) {
// no blocks in database yet
return;
}
const oldestConsecutiveBlock = this.getOldestConsecutiveBlock(blocks);
currentBits = oldestConsecutiveBlock.bits;
currentDifficulty = oldestConsecutiveBlock.difficulty;
let totalBlockChecked = 0;
let timer = new Date().getTime() / 1000;
for (const block of blocks) {
if (block.difficulty !== currentDifficulty) {
if (indexedHeights[block.height] === true) { // Already indexed
if (block.height >= oldestConsecutiveBlock.height) {
currentDifficulty = block.difficulty;
}
continue;
// skip until the first block after the oldest consecutive block
if (block.height <= oldestConsecutiveBlock.height) {
continue;
}
// difficulty has changed between two consecutive blocks!
if (block.bits !== currentBits) {
// skip if already indexed
if (indexedHeights[block.height] !== true) {
let adjustment = block.difficulty / currentDifficulty;
adjustment = Math.round(adjustment * 1000000) / 1000000; // Remove floating-point noise
await DifficultyAdjustmentsRepository.$saveAdjustments({
time: block.time,
height: block.height,
difficulty: block.difficulty,
adjustment: adjustment,
});
totalIndexed++;
}
let adjustment = block.difficulty / currentDifficulty;
adjustment = Math.round(adjustment * 1000000) / 1000000; // Remove floating-point noise
await DifficultyAdjustmentsRepository.$saveAdjustments({
time: block.time,
height: block.height,
difficulty: block.difficulty,
adjustment: adjustment,
});
totalIndexed++;
if (block.height >= oldestConsecutiveBlock.height) {
currentDifficulty = block.difficulty;
}
}
// update the current difficulty
currentDifficulty = block.difficulty;
currentBits = block.bits;
}
totalBlockChecked++;
const elapsedSeconds = Math.max(1, Math.round((new Date().getTime() / 1000) - timer));
@ -590,6 +607,20 @@ class Mining {
}
}
/**
* List existing mining pools
*/
public async $listPools(): Promise<{name: string, slug: string, unique_id: number}[] | null> {
const [rows] = await database.query(`
SELECT
name,
slug,
unique_id
FROM pools`
);
return rows as {name: string, slug: string, unique_id: number}[];
}
private getDateMidnight(date: Date): Date {
date.setUTCHours(0);
date.setUTCMinutes(0);
@ -614,6 +645,17 @@ class Mining {
default: return 86400 * scale;
}
}
// Finds the oldest block in a consecutive chain back from the tip
// assumes `blocks` is sorted in ascending height order
private getOldestConsecutiveBlock(blocks: DifficultyBlock[]): DifficultyBlock {
for (let i = blocks.length - 1; i > 0; i--) {
if ((blocks[i].height - blocks[i - 1].height) > 1) {
return blocks[i];
}
}
return blocks[0];
}
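For example, with indexed heights [0, 1, 2, 5, 6, 7], the backwards scan stops at the 5 → 2 gap and returns the block at height 5, the oldest block still in a consecutive run back from the tip (field values illustrative):
const blocks: DifficultyBlock[] = [0, 1, 2, 5, 6, 7].map(height => (
  { timestamp: 0, height, bits: 0x1d00ffff, difficulty: 1 }
));
// this.getOldestConsecutiveBlock(blocks).height === 5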
}
export default new Mining();

View File

@ -0,0 +1,19 @@
import { Application, Request, Response } from 'express';
import config from '../../config';
import pricesUpdater from '../../tasks/price-updater';
class PricesRoutes {
public initRoutes(app: Application): void {
app.get(config.MEMPOOL.API_URL_PREFIX + 'prices', this.$getCurrentPrices.bind(this));
}
private $getCurrentPrices(req: Request, res: Response): void {
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 3_600_000 / config.MEMPOOL.PRICE_UPDATES_PER_HOUR).toUTCString());
res.json(pricesUpdater.getLatestPrices());
}
}
export default new PricesRoutes();
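The Expires arithmetic above divides one hour in milliseconds by the configured update frequency, so the cache lifetime matches the refresh period. A quick check, assuming PRICE_UPDATES_PER_HOUR values of 1 and 6:
const msPerHour = 3_600_000;
msPerHour / 1; // 3600000 ms = 60 minutes between updates
msPerHour / 6; //  600000 ms = 10 minutes between updates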

View File

@ -1,15 +1,18 @@
import config from "../config";
import logger from "../logger";
import { MempoolTransactionExtended, TransactionStripped } from "../mempool.interfaces";
import bitcoinApi from './bitcoin/bitcoin-api-factory';
import { IEsploraApi } from "./bitcoin/esplora-api.interface";
import { Common } from "./common";
import redisCache from "./redis-cache";
interface RbfTransaction extends TransactionStripped {
export interface RbfTransaction extends TransactionStripped {
rbf?: boolean;
mined?: boolean;
fullRbf?: boolean;
}
interface RbfTree {
export interface RbfTree {
tx: RbfTransaction;
time: number;
interval?: number;
@ -28,6 +31,19 @@ export interface ReplacementInfo {
newVsize: number;
}
enum CacheOp {
Remove = 0,
Add = 1,
Change = 2,
}
interface CacheEvent {
op: CacheOp;
type: 'tx' | 'tree' | 'exp';
txid: string,
value?: any,
}
class RbfCache {
private replacedBy: Map<string, string> = new Map();
private replaces: Map<string, string[]> = new Map();
@ -36,20 +52,57 @@ class RbfCache {
private treeMap: Map<string, string> = new Map(); // map of txids to sequence ids
private txs: Map<string, MempoolTransactionExtended> = new Map();
private expiring: Map<string, number> = new Map();
private cacheQueue: CacheEvent[] = [];
private evictionCount = 0;
private staleCount = 0;
constructor() {
setInterval(this.cleanup.bind(this), 1000 * 60 * 10);
}
private addTx(txid: string, tx: MempoolTransactionExtended): void {
this.txs.set(txid, tx);
this.cacheQueue.push({ op: CacheOp.Add, type: 'tx', txid });
}
private addTree(txid: string, tree: RbfTree): void {
this.rbfTrees.set(txid, tree);
this.dirtyTrees.add(txid);
this.cacheQueue.push({ op: CacheOp.Add, type: 'tree', txid });
}
private addExpiration(txid: string, expiry: number): void {
this.expiring.set(txid, expiry);
this.cacheQueue.push({ op: CacheOp.Add, type: 'exp', txid, value: expiry });
}
private removeTx(txid: string): void {
this.txs.delete(txid);
this.cacheQueue.push({ op: CacheOp.Remove, type: 'tx', txid });
}
private removeTree(txid: string): void {
this.rbfTrees.delete(txid);
this.cacheQueue.push({ op: CacheOp.Remove, type: 'tree', txid });
}
private removeExpiration(txid: string): void {
this.expiring.delete(txid);
this.cacheQueue.push({ op: CacheOp.Remove, type: 'exp', txid });
}
public add(replaced: MempoolTransactionExtended[], newTxExtended: MempoolTransactionExtended): void {
if (!newTxExtended || !replaced?.length || this.txs.has(newTxExtended.txid)) {
return;
}
newTxExtended.replacement = true;
const newTx = Common.stripTransaction(newTxExtended) as RbfTransaction;
const newTime = newTxExtended.firstSeen || (Date.now() / 1000);
newTx.rbf = newTxExtended.vin.some((v) => v.sequence < 0xfffffffe);
this.txs.set(newTx.txid, newTxExtended);
this.addTx(newTx.txid, newTxExtended);
// maintain rbf trees
let txFullRbf = false;
@ -66,7 +119,7 @@ class RbfCache {
const treeId = this.treeMap.get(replacedTx.txid);
if (treeId) {
const tree = this.rbfTrees.get(treeId);
this.rbfTrees.delete(treeId);
this.removeTree(treeId);
if (tree) {
tree.interval = newTime - tree?.time;
replacedTrees.push(tree);
@ -83,7 +136,7 @@ class RbfCache {
replaces: [],
});
treeFullRbf = treeFullRbf || !replacedTx.rbf;
this.txs.set(replacedTx.txid, replacedTxExtended);
this.addTx(replacedTx.txid, replacedTxExtended);
}
}
newTx.fullRbf = txFullRbf;
@ -94,10 +147,27 @@ class RbfCache {
fullRbf: treeFullRbf,
replaces: replacedTrees
};
this.rbfTrees.set(treeId, newTree);
this.addTree(treeId, newTree);
this.updateTreeMap(treeId, newTree);
this.replaces.set(newTx.txid, replacedTrees.map(tree => tree.tx.txid));
this.dirtyTrees.add(treeId);
}
public has(txId: string): boolean {
return this.txs.has(txId);
}
public anyInSameTree(txId: string, predicate: (tx: RbfTransaction) => boolean): boolean {
const tree = this.getRbfTree(txId);
if (!tree) {
return false;
}
const txs = this.getTransactionsInTree(tree);
for (const tx of txs) {
if (predicate(tx)) {
return true;
}
}
return false;
}
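A hypothetical call site for anyInSameTree, checking whether any replacement sharing a tree with a given transaction was full-RBF:
const hasFullRbf = rbfCache.anyInSameTree(txid, (tx) => tx.fullRbf === true);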
public getReplacedBy(txId: string): string | undefined {
@ -173,6 +243,7 @@ class RbfCache {
this.setTreeMined(tree, txid);
tree.mined = true;
this.dirtyTrees.add(treeId);
this.cacheQueue.push({ op: CacheOp.Change, type: 'tree', txid: treeId });
}
}
this.evict(txid);
@ -180,8 +251,10 @@ class RbfCache {
// flag a transaction as removed from the mempool
public evict(txid: string, fast: boolean = false): void {
this.evictionCount++;
if (this.txs.has(txid) && (fast || !this.expiring.has(txid))) {
this.expiring.set(txid, fast ? Date.now() + (1000 * 60 * 10) : Date.now() + (1000 * 86400)); // 24 hours
const expiryTime = fast ? Date.now() + (1000 * 60 * 10) : Date.now() + (1000 * 86400); // 24 hours
this.addExpiration(txid, expiryTime);
}
}
@ -202,28 +275,33 @@ class RbfCache {
const now = Date.now();
for (const txid of this.expiring.keys()) {
if ((this.expiring.get(txid) || 0) < now) {
this.expiring.delete(txid);
this.removeExpiration(txid);
this.remove(txid);
}
}
logger.debug(`rbf cache contains ${this.txs.size} txs, ${this.expiring.size} due to expire`);
logger.debug(`rbf cache contains ${this.txs.size} txs, ${this.rbfTrees.size} trees, ${this.expiring.size} due to expire (${this.evictionCount} newly expired)`);
this.evictionCount = 0;
}
// remove a transaction & all previous versions from the cache
private remove(txid: string): void {
// don't remove a transaction if a newer version remains in the mempool
if (!this.replacedBy.has(txid)) {
const root = this.treeMap.get(txid);
const replaces = this.replaces.get(txid);
this.replaces.delete(txid);
this.treeMap.delete(txid);
this.txs.delete(txid);
this.expiring.delete(txid);
this.removeTx(txid);
this.removeExpiration(txid);
if (root === txid) {
this.removeTree(txid);
}
for (const tx of (replaces || [])) {
// recursively remove prior versions from the cache
this.replacedBy.delete(tx);
// if this is the id of a tree, remove that too
if (this.treeMap.get(tx) === tx) {
this.rbfTrees.delete(tx);
this.removeTree(tx);
}
this.remove(tx);
}
@ -255,6 +333,33 @@ class RbfCache {
}
}
public async updateCache(): Promise<void> {
if (!config.REDIS.ENABLED) {
return;
}
// Update the Redis cache by replaying queued events
for (const e of this.cacheQueue) {
if (e.op === CacheOp.Add || e.op === CacheOp.Change) {
let value = e.value;
switch(e.type) {
case 'tx': {
value = this.txs.get(e.txid);
} break;
case 'tree': {
const tree = this.rbfTrees.get(e.txid);
value = tree ? this.exportTree(tree) : null;
} break;
}
if (value != null) {
await redisCache.$setRbfEntry(e.type, e.txid, value);
}
} else if (e.op === CacheOp.Remove) {
await redisCache.$removeRbfEntry(e.type, e.txid);
}
}
this.cacheQueue = [];
}
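Replaying queued CacheOp events keeps Redis writes off the hot path while preserving ordering, so an Add followed by a Remove for the same txid lands in Redis in that order. Some caller is expected to drain the queue periodically; a minimal sketch, with the interval assumed:
setInterval(() => {
  rbfCache.updateCache().catch((e) => {
    logger.err('failed to flush rbf cache to Redis: ' + (e instanceof Error ? e.message : e));
  });
}, 1000 * 60);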
public dump(): any {
const trees = Array.from(this.rbfTrees.values()).map((tree: RbfTree) => { return this.exportTree(tree); });
@ -265,19 +370,28 @@ class RbfCache {
};
}
public async load({ txs, trees, expiring }): Promise<void> {
txs.forEach(txEntry => {
this.txs.set(txEntry[0], txEntry[1]);
});
for (const deflatedTree of trees) {
await this.importTree(deflatedTree.root, deflatedTree.root, deflatedTree, this.txs);
}
expiring.forEach(expiringEntry => {
if (this.txs.has(expiringEntry[0])) {
this.expiring.set(expiringEntry[0], new Date(expiringEntry[1]).getTime());
public async load({ txs, trees, expiring, mempool }): Promise<void> {
try {
txs.forEach(txEntry => {
this.txs.set(txEntry.value.txid, txEntry.value);
});
this.staleCount = 0;
for (const deflatedTree of trees) {
await this.importTree(mempool, deflatedTree.root, deflatedTree.root, deflatedTree, this.txs);
}
});
this.cleanup();
expiring.forEach(expiringEntry => {
if (this.txs.has(expiringEntry.key)) {
this.expiring.set(expiringEntry.key, new Date(expiringEntry.value).getTime());
}
});
this.staleCount = 0;
await this.checkTrees();
logger.debug(`loaded ${txs.length} txs, ${trees.length} trees into rbf cache, ${expiring.length} due to expire, ${this.staleCount} were stale`);
this.cleanup();
} catch (e) {
logger.err('failed to restore RBF cache: ' + (e instanceof Error ? e.message : e));
}
}
exportTree(tree: RbfTree, deflated: any = null) {
@ -301,40 +415,25 @@ class RbfCache {
return deflated;
}
async importTree(root, txid, deflated, txs: Map<string, MempoolTransactionExtended>, mined: boolean = false): Promise<RbfTree | void> {
async importTree(mempool, root, txid, deflated, txs: Map<string, MempoolTransactionExtended>, mined: boolean = false): Promise<RbfTree | void> {
const treeInfo = deflated[txid];
const replaces: RbfTree[] = [];
// check if any transactions in this tree have already been confirmed
mined = mined || treeInfo.mined;
let exists = mined;
if (!mined) {
try {
const apiTx = await bitcoinApi.$getRawTransaction(txid);
if (apiTx) {
exists = true;
}
if (apiTx?.status?.confirmed) {
mined = true;
treeInfo.txMined = true;
this.evict(txid, true);
}
} catch (e) {
// most transactions do not exist
}
}
// if the root tx is not in the mempool or the blockchain
// evict this tree as soon as possible
if (root === txid && !exists) {
this.evict(txid, true);
// if the root tx is unknown, remove this tree and return early
if (root === txid && !txs.has(txid)) {
this.staleCount++;
this.removeTree(deflated.key);
return;
}
// recursively reconstruct child trees
for (const childId of treeInfo.replaces) {
const replaced = await this.importTree(root, childId, deflated, txs, mined);
const replaced = await this.importTree(mempool, root, childId, deflated, txs, mined);
if (replaced) {
this.replacedBy.set(replaced.tx.txid, txid);
if (mempool[replaced.tx.txid]) {
mempool[replaced.tx.txid].replacement = true;
}
replaces.push(replaced);
if (replaced.mined) {
mined = true;
@ -360,12 +459,65 @@ class RbfCache {
};
this.treeMap.set(txid, root);
if (root === txid) {
this.rbfTrees.set(root, tree);
this.dirtyTrees.add(root);
this.addTree(root, tree);
}
return tree;
}
private async checkTrees(): Promise<void> {
const found: { [txid: string]: boolean } = {};
const txids = Array.from(this.txs.values()).map(tx => tx.txid).filter(txid => {
return !this.expiring.has(txid) && !this.getRbfTree(txid)?.mined;
});
const processTxs = (txs: IEsploraApi.Transaction[]): void => {
for (const tx of txs) {
found[tx.txid] = true;
if (tx.status?.confirmed) {
const tree = this.getRbfTree(tx.txid);
if (tree) {
this.setTreeMined(tree, tx.txid);
tree.mined = true;
this.evict(tx.txid, false);
}
}
}
};
if (config.MEMPOOL.BACKEND === 'esplora') {
let processedCount = 0;
const sliceLength = Math.ceil(config.ESPLORA.BATCH_QUERY_BASE_SIZE / 40);
for (let i = 0; i < Math.ceil(txids.length / sliceLength); i++) {
const slice = txids.slice(i * sliceLength, (i + 1) * sliceLength);
processedCount += slice.length;
try {
const txs = await bitcoinApi.$getRawTransactions(slice);
processTxs(txs);
logger.debug(`fetched and processed ${processedCount} of ${txids.length} cached rbf transactions (${(processedCount / txids.length * 100).toFixed(2)}%)`);
} catch (err) {
logger.err(`failed to fetch or process ${slice.length} cached rbf transactions`);
}
}
} else {
const txs: IEsploraApi.Transaction[] = [];
for (const txid of txids) {
try {
const tx = await bitcoinApi.$getRawTransaction(txid, true, false);
txs.push(tx);
} catch (err) {
// some 404s are expected, so continue quietly
}
}
processTxs(txs);
}
for (const txid of txids) {
if (!found[txid]) {
this.evict(txid, false);
}
}
}
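With the default ESPLORA.BATCH_QUERY_BASE_SIZE of 1000 (see the config defaults later in this diff), the batch size works out to Math.ceil(1000 / 40) = 25 txids per request:
const sliceLength = Math.ceil(1000 / 40);         // 25 txids per batch
const requests = Math.ceil(10_000 / sliceLength); // 400 requests for 10k cached txs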
public getLatestRbfSummary(): ReplacementInfo[] {
const rbfList = this.getRbfTrees(false);
return rbfList.slice(0, 6).map(rbfTree => {

View File

@ -0,0 +1,278 @@
import { createClient } from 'redis';
import memPool from './mempool';
import blocks from './blocks';
import logger from '../logger';
import config from '../config';
import { BlockExtended, BlockSummary, MempoolTransactionExtended } from '../mempool.interfaces';
import rbfCache from './rbf-cache';
import transactionUtils from './transaction-utils';
enum NetworkDB {
mainnet = 0,
testnet,
signet,
liquid,
liquidtestnet,
}
class RedisCache {
private client;
private connected = false;
private schemaVersion = 1;
private cacheQueue: MempoolTransactionExtended[] = [];
private txFlushLimit: number = 10000;
constructor() {
if (config.REDIS.ENABLED) {
const redisConfig = {
socket: {
path: config.REDIS.UNIX_SOCKET_PATH
},
database: NetworkDB[config.MEMPOOL.NETWORK],
};
this.client = createClient(redisConfig);
this.client.on('error', (e) => {
logger.err(`Error in Redis client: ${e instanceof Error ? e.message : e}`);
});
this.$ensureConnected();
}
}
private async $ensureConnected(): Promise<void> {
if (!this.connected && config.REDIS.ENABLED) {
return this.client.connect().then(async () => {
this.connected = true;
logger.info(`Redis client connected`);
const version = parseInt(await this.client.get('schema_version') ?? '0', 10);
if (version !== this.schemaVersion) {
// schema changed
// perform migrations or flush DB if necessary
logger.info(`Redis schema version changed from ${version} to ${this.schemaVersion}`);
await this.client.set('schema_version', this.schemaVersion);
}
});
}
}
async $updateBlocks(blocks: BlockExtended[]) {
try {
await this.$ensureConnected();
await this.client.set('blocks', JSON.stringify(blocks));
logger.debug(`Saved latest blocks to Redis cache`);
} catch (e) {
logger.warn(`Failed to update blocks in Redis cache: ${e instanceof Error ? e.message : e}`);
}
}
async $updateBlockSummaries(summaries: BlockSummary[]) {
try {
await this.$ensureConnected();
await this.client.set('block-summaries', JSON.stringify(summaries));
logger.debug(`Saved latest block summaries to Redis cache`);
} catch (e) {
logger.warn(`Failed to update block summaries in Redis cache: ${e instanceof Error ? e.message : e}`);
}
}
async $addTransaction(tx: MempoolTransactionExtended) {
this.cacheQueue.push(tx);
if (this.cacheQueue.length >= this.txFlushLimit) {
await this.$flushTransactions();
}
}
async $flushTransactions() {
const success = await this.$addTransactions(this.cacheQueue);
if (success) {
logger.debug(`Saved ${this.cacheQueue.length} transactions to Redis cache`);
this.cacheQueue = [];
} else {
logger.err(`Failed to save ${this.cacheQueue.length} transactions to Redis cache`);
}
}
private async $addTransactions(newTransactions: MempoolTransactionExtended[]): Promise<boolean> {
if (!newTransactions.length) {
return true;
}
try {
await this.$ensureConnected();
const msetData = newTransactions.map(tx => {
const minified: any = { ...tx };
delete minified.hex;
for (const vin of minified.vin) {
delete vin.inner_redeemscript_asm;
delete vin.inner_witnessscript_asm;
delete vin.scriptsig_asm;
}
for (const vout of minified.vout) {
delete vout.scriptpubkey_asm;
}
return [`mempool:tx:${tx.txid}`, JSON.stringify(minified)];
});
await this.client.MSET(msetData);
return true;
} catch (e) {
logger.warn(`Failed to add ${newTransactions.length} transactions to Redis cache: ${e instanceof Error ? e.message : e}`);
return false;
}
}
async $removeTransactions(transactions: string[]) {
try {
await this.$ensureConnected();
const sliceLength = config.REDIS.BATCH_QUERY_BASE_SIZE;
for (let i = 0; i < Math.ceil(transactions.length / sliceLength); i++) {
const slice = transactions.slice(i * sliceLength, (i + 1) * sliceLength);
await this.client.unlink(slice.map(txid => `mempool:tx:${txid}`));
logger.debug(`Deleted ${slice.length} transactions from the Redis cache`);
}
} catch (e) {
logger.warn(`Failed to remove ${transactions.length} transactions from Redis cache: ${e instanceof Error ? e.message : e}`);
}
}
async $setRbfEntry(type: string, txid: string, value: any): Promise<void> {
try {
await this.$ensureConnected();
await this.client.set(`rbf:${type}:${txid}`, JSON.stringify(value));
} catch (e) {
logger.warn(`Failed to set RBF ${type} in Redis cache: ${e instanceof Error ? e.message : e}`);
}
}
async $removeRbfEntry(type: string, txid: string): Promise<void> {
try {
await this.$ensureConnected();
await this.client.unlink(`rbf:${type}:${txid}`);
} catch (e) {
logger.warn(`Failed to remove RBF ${type} from Redis cache: ${e instanceof Error ? e.message : e}`);
}
}
async $getBlocks(): Promise<BlockExtended[]> {
try {
await this.$ensureConnected();
const json = await this.client.get('blocks');
return JSON.parse(json);
} catch (e) {
logger.warn(`Failed to retrieve blocks from Redis cache: ${e instanceof Error ? e.message : e}`);
return [];
}
}
async $getBlockSummaries(): Promise<BlockSummary[]> {
try {
await this.$ensureConnected();
const json = await this.client.get('block-summaries');
return JSON.parse(json);
} catch (e) {
logger.warn(`Failed to retrieve block summaries from Redis cache: ${e instanceof Error ? e.message : e}`);
return [];
}
}
async $getMempool(): Promise<{ [txid: string]: MempoolTransactionExtended }> {
const start = Date.now();
const mempool = {};
try {
await this.$ensureConnected();
const mempoolList = await this.scanKeys<MempoolTransactionExtended>('mempool:tx:*');
for (const tx of mempoolList) {
mempool[tx.key] = tx.value;
}
logger.info(`Loaded mempool from Redis cache in ${Date.now() - start} ms`);
return mempool || {};
} catch (e) {
logger.warn(`Failed to retrieve mempool from Redis cache: ${e instanceof Error ? e.message : e}`);
}
return {};
}
async $getRbfEntries(type: string): Promise<any[]> {
try {
await this.$ensureConnected();
const rbfEntries = await this.scanKeys<MempoolTransactionExtended[]>(`rbf:${type}:*`);
return rbfEntries;
} catch (e) {
logger.warn(`Failed to retrieve Rbf ${type}s from Redis cache: ${e instanceof Error ? e.message : e}`);
return [];
}
}
async $loadCache() {
logger.info('Restoring mempool and blocks data from Redis cache');
// Load block data
const loadedBlocks = await this.$getBlocks();
const loadedBlockSummaries = await this.$getBlockSummaries();
// Load mempool
const loadedMempool = await this.$getMempool();
this.inflateLoadedTxs(loadedMempool);
// Load rbf data
const rbfTxs = await this.$getRbfEntries('tx');
const rbfTrees = await this.$getRbfEntries('tree');
const rbfExpirations = await this.$getRbfEntries('exp');
// Set loaded data
blocks.setBlocks(loadedBlocks || []);
blocks.setBlockSummaries(loadedBlockSummaries || []);
await memPool.$setMempool(loadedMempool);
await rbfCache.load({
txs: rbfTxs,
trees: rbfTrees.map(loadedTree => { loadedTree.value.key = loadedTree.key; return loadedTree.value; }),
expiring: rbfExpirations,
mempool: memPool.getMempool(),
});
}
private inflateLoadedTxs(mempool: { [txid: string]: MempoolTransactionExtended }) {
for (const tx of Object.values(mempool)) {
for (const vin of tx.vin) {
if (vin.scriptsig) {
vin.scriptsig_asm = transactionUtils.convertScriptSigAsm(vin.scriptsig);
transactionUtils.addInnerScriptsToVin(vin);
}
}
for (const vout of tx.vout) {
if (vout.scriptpubkey) {
vout.scriptpubkey_asm = transactionUtils.convertScriptSigAsm(vout.scriptpubkey);
}
}
}
}
private async scanKeys<T>(pattern): Promise<{ key: string, value: T }[]> {
logger.info(`loading Redis entries for ${pattern}`);
let keys: string[] = [];
const result: { key: string, value: T }[] = [];
const patternLength = pattern.length - 1;
let count = 0;
const processValues = async (keys): Promise<void> => {
const values = await this.client.MGET(keys);
for (let i = 0; i < values.length; i++) {
if (values[i]) {
result.push({ key: keys[i].slice(patternLength), value: JSON.parse(values[i]) });
count++;
}
}
logger.info(`loaded ${count} entries from Redis cache`);
};
for await (const key of this.client.scanIterator({
MATCH: pattern,
COUNT: 100
})) {
keys.push(key);
if (keys.length >= 10000) {
await processValues(keys);
keys = [];
}
}
if (keys.length) {
await processValues(keys);
}
return result;
}
}
export default new RedisCache();
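This hunk does not show where the cache is wired in; a plausible startup sketch, assuming the server's init path calls it before accepting connections:
if (config.REDIS.ENABLED) {
  await redisCache.$loadCache(); // restore blocks, mempool, and RBF data
}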

View File

@ -0,0 +1,36 @@
import config from '../../config';
import logger from '../../logger';
import { BlockExtended, PoolTag } from '../../mempool.interfaces';
import axios from 'axios';
export interface Acceleration {
txid: string,
feeDelta: number,
pools: number[],
}
class AccelerationApi {
public async $fetchAccelerations(): Promise<Acceleration[] | null> {
if (config.MEMPOOL_SERVICES.ACCELERATIONS) {
try {
const response = await axios.get(`${config.MEMPOOL_SERVICES.API}/accelerator/accelerations`, { responseType: 'json', timeout: 10000 });
return response.data as Acceleration[];
} catch (e) {
logger.warn('Failed to fetch current accelerations from the mempool services backend: ' + (e instanceof Error ? e.message : e));
return null;
}
} else {
return [];
}
}
public isAcceleratedBlock(block: BlockExtended, accelerations: Acceleration[]): boolean {
let anyAccelerated = false;
for (let i = 0; i < accelerations.length && !anyAccelerated; i++) {
anyAccelerated = anyAccelerated || accelerations[i].pools?.includes(block.extras.pool.id);
}
return anyAccelerated;
}
}
export default new AccelerationApi();
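A plausible polling loop tying this client to the $updateAccelerations delta method at the top of this diff (interval and owning object assumed):
const accelerations = await accelerationApi.$fetchAccelerations();
if (accelerations !== null) { // null means the fetch failed; keep the previous state
  const changedTxids = memPool.$updateAccelerations(accelerations);
}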

View File

@ -15,6 +15,7 @@ class StatisticsApi {
mempool_byte_weight,
fee_data,
total_fee,
min_fee,
vsize_1,
vsize_2,
vsize_3,
@ -54,7 +55,7 @@ class StatisticsApi {
vsize_1800,
vsize_2000
)
VALUES (NOW(), 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
VALUES (NOW(), 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)`;
const [result]: any = await DB.query(query);
return result.insertId;
@ -73,6 +74,7 @@ class StatisticsApi {
mempool_byte_weight,
fee_data,
total_fee,
min_fee,
vsize_1,
vsize_2,
vsize_3,
@ -112,7 +114,7 @@ class StatisticsApi {
vsize_1800,
vsize_2000
)
VALUES (${statistics.added}, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?,
VALUES (${statistics.added}, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?,
?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`;
const params: (string | number)[] = [
@ -122,6 +124,7 @@ class StatisticsApi {
statistics.mempool_byte_weight,
statistics.fee_data,
statistics.total_fee,
statistics.min_fee,
statistics.vsize_1,
statistics.vsize_2,
statistics.vsize_3,
@ -171,7 +174,9 @@ class StatisticsApi {
private getQueryForDaysAvg(div: number, interval: string) {
return `SELECT
UNIX_TIMESTAMP(added) as added,
CAST(avg(unconfirmed_transactions) as DOUBLE) as unconfirmed_transactions,
CAST(avg(vbytes_per_second) as DOUBLE) as vbytes_per_second,
CAST(avg(min_fee) as DOUBLE) as min_fee,
CAST(avg(vsize_1) as DOUBLE) as vsize_1,
CAST(avg(vsize_2) as DOUBLE) as vsize_2,
CAST(avg(vsize_3) as DOUBLE) as vsize_3,
@ -219,7 +224,9 @@ class StatisticsApi {
private getQueryForDays(div: number, interval: string) {
return `SELECT
UNIX_TIMESTAMP(added) as added,
CAST(avg(unconfirmed_transactions) as DOUBLE) as unconfirmed_transactions,
CAST(avg(vbytes_per_second) as DOUBLE) as vbytes_per_second,
CAST(avg(min_fee) as DOUBLE) as min_fee,
vsize_1,
vsize_2,
vsize_3,
@ -401,9 +408,11 @@ class StatisticsApi {
return statistic.map((s) => {
return {
added: s.added,
count: s.unconfirmed_transactions,
vbytes_per_second: s.vbytes_per_second,
mempool_byte_weight: s.mempool_byte_weight,
total_fee: s.total_fee,
min_fee: s.min_fee,
vsizes: [
s.vsize_1,
s.vsize_2,

View File

@ -89,6 +89,9 @@ class Statistics {
}
});
// get minFee (Core reports mempoolminfee in BTC/kvB) and convert to sats/vB: × 1e8 sat/BTC ÷ 1e3 vB/kvB
const minFee = memPool.getMempoolInfo().mempoolminfee * 100000;
try {
const insertId = await statisticsApi.$create({
added: 'NOW()',
@ -98,6 +101,7 @@ class Statistics {
mempool_byte_weight: totalWeight,
total_fee: totalFee,
fee_data: '',
min_fee: minFee,
vsize_1: weightVsizeFees['1'] || 0,
vsize_2: weightVsizeFees['2'] || 0,
vsize_3: weightVsizeFees['3'] || 0,

View File

@ -3,6 +3,9 @@ import { IEsploraApi } from './bitcoin/esplora-api.interface';
import { Common } from './common';
import bitcoinApi, { bitcoinCoreApi } from './bitcoin/bitcoin-api-factory';
import * as bitcoinjs from 'bitcoinjs-lib';
import logger from '../logger';
import config from '../config';
import pLimit from '../utils/p-limit';
class TransactionUtils {
constructor() { }
@ -22,6 +25,23 @@ class TransactionUtils {
};
}
// Wrapper for $getTransactionExtended with an automatic retry direct to Core if the first API request fails.
// Propagates any error from the retry request.
public async $getTransactionExtendedRetry(txid: string, addPrevouts = false, lazyPrevouts = false, forceCore = false, addMempoolData = false): Promise<TransactionExtended> {
try {
const result = await this.$getTransactionExtended(txid, addPrevouts, lazyPrevouts, forceCore, addMempoolData);
if (result) {
return result;
} else {
logger.err(`Cannot fetch tx ${txid}. Reason: backend returned null data`);
}
} catch (e) {
logger.err(`Cannot fetch tx ${txid}. Reason: ` + (e instanceof Error ? e.message : e));
}
// retry direct from Core if first request failed
return this.$getTransactionExtended(txid, addPrevouts, lazyPrevouts, true, addMempoolData);
}
/**
* @param txId
* @param addPrevouts
@ -31,10 +51,17 @@ class TransactionUtils {
public async $getTransactionExtended(txId: string, addPrevouts = false, lazyPrevouts = false, forceCore = false, addMempoolData = false): Promise<TransactionExtended> {
let transaction: IEsploraApi.Transaction;
if (forceCore === true) {
transaction = await bitcoinCoreApi.$getRawTransaction(txId, true);
transaction = await bitcoinCoreApi.$getRawTransaction(txId, false, addPrevouts, lazyPrevouts);
} else {
transaction = await bitcoinApi.$getRawTransaction(txId, false, addPrevouts, lazyPrevouts);
}
if (Common.isLiquid()) {
if (!isFinite(Number(transaction.fee))) {
transaction.fee = Object.values(transaction.fee || {}).reduce((total, output) => total + output, 0);
}
}
if (addMempoolData || !transaction?.status?.confirmed) {
return this.extendMempoolTransaction(transaction);
} else {
@ -46,14 +73,35 @@ class TransactionUtils {
return (await this.$getTransactionExtended(txId, addPrevouts, lazyPrevouts, forceCore, true)) as MempoolTransactionExtended;
}
private extendTransaction(transaction: IEsploraApi.Transaction): TransactionExtended {
public async $getMempoolTransactionsExtended(txids: string[], addPrevouts = false, lazyPrevouts = false, forceCore = false): Promise<MempoolTransactionExtended[]> {
if (forceCore || config.MEMPOOL.BACKEND !== 'esplora') {
const limiter = pLimit(8); // Run 8 requests at a time
const results = await Promise.allSettled(txids.map(
txid => limiter(() => this.$getMempoolTransactionExtended(txid, addPrevouts, lazyPrevouts, forceCore))
));
return results.filter(reply => reply.status === 'fulfilled')
.map(r => (r as PromiseFulfilledResult<MempoolTransactionExtended>).value);
} else {
const transactions = await bitcoinApi.$getMempoolTransactions(txids);
return transactions.map(transaction => {
if (Common.isLiquid()) {
if (!isFinite(Number(transaction.fee))) {
transaction.fee = Object.values(transaction.fee || {}).reduce((total, output) => total + output, 0);
}
}
return this.extendMempoolTransaction(transaction);
});
}
}
public extendTransaction(transaction: IEsploraApi.Transaction): TransactionExtended {
// @ts-ignore
if (transaction.vsize) {
// @ts-ignore
return transaction;
}
const feePerVbytes = Math.max(Common.isLiquid() ? 0.1 : 1,
(transaction.fee || 0) / (transaction.weight / 4));
const feePerVbytes = (transaction.fee || 0) / (transaction.weight / 4);
const transactionExtended: TransactionExtended = Object.assign({
vsize: Math.round(transaction.weight / 4),
feePerVsize: feePerVbytes,
@ -68,13 +116,11 @@ class TransactionUtils {
public extendMempoolTransaction(transaction: IEsploraApi.Transaction): MempoolTransactionExtended {
const vsize = Math.ceil(transaction.weight / 4);
const fractionalVsize = (transaction.weight / 4);
const sigops = this.countSigops(transaction);
let sigops = Common.isLiquid() ? 0 : (transaction.sigops != null ? transaction.sigops : this.countSigops(transaction));
// https://github.com/bitcoin/bitcoin/blob/e9262ea32a6e1d364fb7974844fadc36f931f8c6/src/policy/policy.cpp#L295-L298
const adjustedVsize = Math.max(fractionalVsize, sigops * 5); // adjusted vsize = Max(weight, sigops * bytes_per_sigop) / witness_scale_factor
const feePerVbytes = Math.max(Common.isLiquid() ? 0.1 : 1,
(transaction.fee || 0) / fractionalVsize);
const adjustedFeePerVsize = Math.max(Common.isLiquid() ? 0.1 : 1,
(transaction.fee || 0) / adjustedVsize);
const feePerVbytes = (transaction.fee || 0) / fractionalVsize;
const adjustedFeePerVsize = (transaction.fee || 0) / adjustedVsize;
const transactionExtended: MempoolTransactionExtended = Object.assign(transaction, {
order: this.txidToOrdering(transaction.txid),
vsize: Math.round(transaction.weight / 4),
@ -109,7 +155,7 @@ class TransactionUtils {
sigops += 20 * (script.match(/OP_CHECKMULTISIG/g)?.length || 0);
} else {
// in redeem scripts and witnesses, worth N if preceded by OP_N, 20 otherwise
const matches = script.matchAll(/(?:OP_(\d+))? OP_CHECKMULTISIG/g);
const matches = script.matchAll(/(?:OP_(?:PUSHNUM_)?(\d+))? OP_CHECKMULTISIG/g);
for (const match of matches) {
const n = parseInt(match[1]);
if (Number.isInteger(n)) {
@ -143,6 +189,12 @@ class TransactionUtils {
sigops += this.countScriptSigops(bitcoinjs.script.toASM(Buffer.from(input.witness[input.witness.length - 1], 'hex')), false, true);
}
break;
case input.prevout.scriptpubkey_type === 'p2sh':
if (input.inner_redeemscript_asm) {
sigops += this.countScriptSigops(input.inner_redeemscript_asm);
}
break;
}
}
}
@ -166,6 +218,122 @@ class TransactionUtils {
16
);
}
public addInnerScriptsToVin(vin: IEsploraApi.Vin): void {
if (!vin.prevout) {
return;
}
if (vin.prevout.scriptpubkey_type === 'p2sh') {
const redeemScript = vin.scriptsig_asm.split(' ').reverse()[0];
vin.inner_redeemscript_asm = this.convertScriptSigAsm(redeemScript);
if (vin.witness && vin.witness.length > 2) {
const witnessScript = vin.witness[vin.witness.length - 1];
vin.inner_witnessscript_asm = this.convertScriptSigAsm(witnessScript);
}
}
if (vin.prevout.scriptpubkey_type === 'v0_p2wsh' && vin.witness) {
const witnessScript = vin.witness[vin.witness.length - 1];
vin.inner_witnessscript_asm = this.convertScriptSigAsm(witnessScript);
}
if (vin.prevout.scriptpubkey_type === 'v1_p2tr' && vin.witness) {
const witnessScript = this.witnessToP2TRScript(vin.witness);
if (witnessScript !== null) {
vin.inner_witnessscript_asm = this.convertScriptSigAsm(witnessScript);
}
}
}
public convertScriptSigAsm(hex: string): string {
const buf = Buffer.from(hex, 'hex');
const b: string[] = [];
let i = 0;
while (i < buf.length) {
const op = buf[i];
if (op >= 0x01 && op <= 0x4e) {
i++;
let push: number;
if (op === 0x4c) {
push = buf.readUInt8(i);
b.push('OP_PUSHDATA1');
i += 1;
} else if (op === 0x4d) {
push = buf.readUInt16LE(i);
b.push('OP_PUSHDATA2');
i += 2;
} else if (op === 0x4e) {
push = buf.readUInt32LE(i);
b.push('OP_PUSHDATA4');
i += 4;
} else {
push = op;
b.push('OP_PUSHBYTES_' + push);
}
const data = buf.slice(i, i + push);
if (data.length !== push) {
break;
}
b.push(data.toString('hex'));
i += data.length;
} else {
if (op === 0x00) {
b.push('OP_0');
} else if (op === 0x4f) {
b.push('OP_PUSHNUM_NEG1');
} else if (op === 0xb1) {
b.push('OP_CLTV');
} else if (op === 0xb2) {
b.push('OP_CSV');
} else if (op === 0xba) {
b.push('OP_CHECKSIGADD');
} else {
const opcode = bitcoinjs.script.toASM([ op ]);
if (opcode && op < 0xfd) {
if (/^OP_(\d+)$/.test(opcode)) {
b.push(opcode.replace(/^OP_(\d+)$/, 'OP_PUSHNUM_$1'));
} else {
b.push(opcode);
}
} else {
b.push('OP_RETURN_' + op);
}
}
i += 1;
}
}
return b.join(' ');
}
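For instance, a standard P2PKH output script decodes as follows (the 20-byte hash is illustrative):
const asm = transactionUtils.convertScriptSigAsm('76a914' + '00'.repeat(20) + '88ac');
// 'OP_DUP OP_HASH160 OP_PUSHBYTES_20 00…00 OP_EQUALVERIFY OP_CHECKSIG'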
/**
* This function must only be called when we know the witness we are parsing
* is a taproot witness.
* @param witness An array of hex strings that represents the witness stack of
* the input.
* @returns null if the witness is not a script spend, and the hex string of
* the script item if it is a script spend.
*/
public witnessToP2TRScript(witness: string[]): string | null {
if (witness.length < 2) return null;
// Note: see BIP341 for parsing details of witness stack
// If there are at least two witness elements, and the first byte of the
// last element is 0x50, this last element is called annex a and
// is removed from the witness stack.
const hasAnnex = witness[witness.length - 1].substring(0, 2) === '50';
// If there are at least two witness elements left, script path spending is used.
// Call the second-to-last stack element s, the script.
// (Note: this phrasing from BIP341 assumes we've *removed* the annex from the stack)
if (hasAnnex && witness.length < 3) return null;
const positionOfScript = hasAnnex ? witness.length - 3 : witness.length - 2;
return witness[positionOfScript];
}
}
export default new TransactionUtils();
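A worked example of the taproot parsing: in a three-element script-path witness with no annex, the script is the second-to-last element; if a 0x50-prefixed annex is present, everything shifts one position left (witness values are placeholders, not real hex):
const witness = ['<stack element>', '<tapscript>', '<control block>'];
transactionUtils.witnessToP2TRScript(witness); // '<tapscript>' (index length - 2)
// with an annex ('50…') appended, the script would sit at index length - 3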

View File

@ -21,6 +21,8 @@ import Audit from './audit';
import { deepClone } from '../utils/clone';
import priceUpdater from '../tasks/price-updater';
import { ApiPrice } from '../repositories/PricesRepository';
import accelerationApi from './services/acceleration';
import mempool from './mempool';
// valid 'want' subscriptions
const wantable = [
@ -92,9 +94,13 @@ class WebsocketHandler {
throw new Error('WebSocket.Server is not set');
}
this.wss.on('connection', (client: WebSocket) => {
this.wss.on('connection', (client: WebSocket, req) => {
this.numConnected++;
client.on('error', logger.info);
client['remoteAddress'] = req.headers['x-forwarded-for'] || req.socket?.remoteAddress || 'unknown';
client.on('error', (e) => {
logger.info(`websocket client error from ${client['remoteAddress']}: ` + (e instanceof Error ? e.message : e));
client.close();
});
client.on('close', () => {
this.numDisconnected++;
});
@ -172,9 +178,15 @@ class WebsocketHandler {
}
const tx = memPool.getMempool()[trackTxid];
if (tx && tx.position) {
const position: { block: number, vsize: number, accelerated?: boolean } = {
...tx.position
};
if (tx.acceleration) {
position.accelerated = tx.acceleration;
}
response['txPosition'] = JSON.stringify({
txid: trackTxid,
position: tx.position,
position
});
}
} else {
@ -183,13 +195,19 @@ class WebsocketHandler {
}
if (parsedMessage && parsedMessage['track-address']) {
if (/^([a-km-zA-HJ-NP-Z1-9]{26,35}|[a-km-zA-HJ-NP-Z1-9]{80}|[a-z]{2,5}1[ac-hj-np-z02-9]{8,100}|[A-Z]{2,5}1[AC-HJ-NP-Z02-9]{8,100})$/
if (/^([a-km-zA-HJ-NP-Z1-9]{26,35}|[a-km-zA-HJ-NP-Z1-9]{80}|[a-z]{2,5}1[ac-hj-np-z02-9]{8,100}|[A-Z]{2,5}1[AC-HJ-NP-Z02-9]{8,100}|04[a-fA-F0-9]{128}|(02|03)[a-fA-F0-9]{64})$/
.test(parsedMessage['track-address'])) {
let matchedAddress = parsedMessage['track-address'];
if (/^[A-Z]{2,5}1[AC-HJ-NP-Z02-9]{8,100}$/.test(parsedMessage['track-address'])) {
matchedAddress = matchedAddress.toLowerCase();
}
client['track-address'] = matchedAddress;
if (/^04[a-fA-F0-9]{128}$/.test(parsedMessage['track-address'])) {
client['track-address'] = '41' + matchedAddress + 'ac';
} else if (/^(02|03)[a-fA-F0-9]{64}$/.test(parsedMessage['track-address'])) {
client['track-address'] = '21' + matchedAddress + 'ac';
} else {
client['track-address'] = matchedAddress;
}
} else {
client['track-address'] = null;
}
@ -268,7 +286,8 @@ class WebsocketHandler {
client.send(serializedResponse);
}
} catch (e) {
logger.debug('Error parsing websocket message: ' + (e instanceof Error ? e.message : e));
logger.debug(`Error parsing websocket message from ${client['remoteAddress']}: ` + (e instanceof Error ? e.message : e));
client.close();
}
});
});
@ -380,7 +399,7 @@ class WebsocketHandler {
}
async $handleMempoolChange(newMempool: { [txid: string]: MempoolTransactionExtended }, mempoolSize: number,
newTransactions: MempoolTransactionExtended[], deletedTransactions: MempoolTransactionExtended[]): Promise<void> {
newTransactions: MempoolTransactionExtended[], deletedTransactions: MempoolTransactionExtended[], accelerationDelta: string[]): Promise<void> {
if (!this.wss) {
throw new Error('WebSocket.Server is not set');
}
@ -389,9 +408,9 @@ class WebsocketHandler {
if (config.MEMPOOL.ADVANCED_GBT_MEMPOOL) {
if (config.MEMPOOL.RUST_GBT) {
await mempoolBlocks.$rustUpdateBlockTemplates(newMempool, mempoolSize, newTransactions, deletedTransactions);
await mempoolBlocks.$rustUpdateBlockTemplates(newMempool, mempoolSize, newTransactions, deletedTransactions, config.MEMPOOL_SERVICES.ACCELERATIONS);
} else {
await mempoolBlocks.$updateBlockTemplates(newMempool, newTransactions, deletedTransactions, true);
await mempoolBlocks.$updateBlockTemplates(newMempool, newTransactions, deletedTransactions, accelerationDelta, true, config.MEMPOOL_SERVICES.ACCELERATIONS);
}
} else {
mempoolBlocks.updateMempoolBlocks(newMempool, true);
@ -470,6 +489,10 @@ class WebsocketHandler {
}
}
// pre-compute address transactions
const addressCache = this.makeAddressCache(newTransactions);
const removedAddressCache = this.makeAddressCache(deletedTransactions);
this.wss.clients.forEach(async (client) => {
if (client.readyState !== WebSocket.OPEN) {
return;
@ -509,40 +532,17 @@ class WebsocketHandler {
}
if (client['track-address']) {
const foundTransactions: TransactionExtended[] = [];
const newTransactions = Array.from(addressCache[client['track-address']]?.values() || []);
const removedTransactions = Array.from(removedAddressCache[client['track-address']]?.values() || []);
// txs may be missing prevouts in non-esplora backends
// so fetch the full transactions now
const fullTransactions = (config.MEMPOOL.BACKEND !== 'esplora') ? await this.getFullTransactions(newTransactions) : newTransactions;
for (const tx of newTransactions) {
const someVin = tx.vin.some((vin) => !!vin.prevout && vin.prevout.scriptpubkey_address === client['track-address']);
if (someVin) {
if (config.MEMPOOL.BACKEND !== 'esplora') {
try {
const fullTx = await transactionUtils.$getMempoolTransactionExtended(tx.txid, true);
foundTransactions.push(fullTx);
} catch (e) {
logger.debug('Error finding transaction in mempool: ' + (e instanceof Error ? e.message : e));
}
} else {
foundTransactions.push(tx);
}
return;
}
const someVout = tx.vout.some((vout) => vout.scriptpubkey_address === client['track-address']);
if (someVout) {
if (config.MEMPOOL.BACKEND !== 'esplora') {
try {
const fullTx = await transactionUtils.$getMempoolTransactionExtended(tx.txid, true);
foundTransactions.push(fullTx);
} catch (e) {
logger.debug('Error finding transaction in mempool: ' + (e instanceof Error ? e.message : e));
}
} else {
foundTransactions.push(tx);
}
}
if (removedTransactions.length) {
response['address-removed-transactions'] = JSON.stringify(removedTransactions);
}
if (foundTransactions.length) {
response['address-transactions'] = JSON.stringify(foundTransactions);
if (fullTransactions.length) {
response['address-transactions'] = JSON.stringify(fullTransactions);
}
}
@ -550,7 +550,6 @@ class WebsocketHandler {
const foundTransactions: TransactionExtended[] = [];
newTransactions.forEach((tx) => {
if (client['track-asset'] === Common.nativeAssetId) {
if (tx.vin.some((vin) => !!vin.is_pegin)) {
foundTransactions.push(tx);
@ -583,7 +582,7 @@ class WebsocketHandler {
response['utxoSpent'] = JSON.stringify(outspends);
}
const rbfReplacedBy = rbfCache.getReplacedBy(client['track-tx']);
const rbfReplacedBy = rbfChanges.map[client['track-tx']] ? rbfCache.getReplacedBy(client['track-tx']) : false;
if (rbfReplacedBy) {
response['rbfTransaction'] = JSON.stringify({
txid: rbfReplacedBy,
@ -597,14 +596,29 @@ class WebsocketHandler {
const mempoolTx = newMempool[trackTxid];
if (mempoolTx && mempoolTx.position) {
response['txPosition'] = JSON.stringify({
const positionData = {
txid: trackTxid,
position: mempoolTx.position,
});
position: {
...mempoolTx.position,
accelerated: mempoolTx.acceleration || undefined,
}
};
if (mempoolTx.cpfpDirty) {
positionData['cpfp'] = {
ancestors: mempoolTx.ancestors,
bestDescendant: mempoolTx.bestDescendant || null,
descendants: mempoolTx.descendants || null,
effectiveFeePerVsize: mempoolTx.effectiveFeePerVsize || null,
sigops: mempoolTx.sigops,
adjustedVsize: mempoolTx.adjustedVsize,
acceleration: mempoolTx.acceleration
};
}
response['txPosition'] = JSON.stringify(positionData);
}
}
if (client['track-mempool-block'] >= 0) {
if (client['track-mempool-block'] >= 0 && memPool.isInSync()) {
const index = client['track-mempool-block'];
if (mBlockDeltas[index]) {
response['projected-block-transactions'] = getCachedResponse(`projected-block-transactions-${index}`, {
@ -644,9 +658,10 @@ class WebsocketHandler {
memPool.handleMinedRbfTransactions(rbfTransactions);
memPool.removeFromSpendMap(transactions);
if (config.MEMPOOL.AUDIT) {
if (config.MEMPOOL.AUDIT && memPool.isInSync()) {
let projectedBlocks;
let auditMempool = _memPool;
const isAccelerated = config.MEMPOOL_SERVICES.ACCELERATIONS && accelerationApi.isAcceleratedBlock(block, Object.values(mempool.getAccelerations()));
// template calculation functions have mempool side effects, so calculate audits using
// a cloned copy of the mempool if we're running a different algorithm for mempool updates
const separateAudit = config.MEMPOOL.ADVANCED_GBT_AUDIT !== config.MEMPOOL.ADVANCED_GBT_MEMPOOL;
@ -654,19 +669,27 @@ class WebsocketHandler {
auditMempool = deepClone(_memPool);
if (config.MEMPOOL.ADVANCED_GBT_AUDIT) {
if (config.MEMPOOL.RUST_GBT) {
projectedBlocks = await mempoolBlocks.$oneOffRustBlockTemplates(auditMempool);
projectedBlocks = await mempoolBlocks.$oneOffRustBlockTemplates(auditMempool, isAccelerated, block.extras.pool.id);
} else {
projectedBlocks = await mempoolBlocks.$makeBlockTemplates(auditMempool, false);
projectedBlocks = await mempoolBlocks.$makeBlockTemplates(auditMempool, false, isAccelerated, block.extras.pool.id);
}
} else {
projectedBlocks = mempoolBlocks.updateMempoolBlocks(auditMempool, false);
}
} else {
projectedBlocks = mempoolBlocks.getMempoolBlocksWithTransactions();
if (config.MEMPOOL_SERVICES.ACCELERATIONS) {
if (config.MEMPOOL.RUST_GBT) {
projectedBlocks = await mempoolBlocks.$rustUpdateBlockTemplates(auditMempool, Object.keys(auditMempool).length, [], [], isAccelerated, block.extras.pool.id);
} else {
projectedBlocks = await mempoolBlocks.$makeBlockTemplates(auditMempool, false, isAccelerated, block.extras.pool.id);
}
} else {
projectedBlocks = mempoolBlocks.getMempoolBlocksWithTransactions();
}
}
if (Common.indexingEnabled() && memPool.isInSync()) {
const { censored, added, fresh, sigop, fullrbf, score, similarity } = Audit.auditBlock(transactions, projectedBlocks, auditMempool);
if (Common.indexingEnabled()) {
const { censored, added, fresh, sigop, fullrbf, accelerated, score, similarity } = Audit.auditBlock(transactions, projectedBlocks, auditMempool);
const matchRate = Math.round(score * 100 * 100) / 100;
const stripped = projectedBlocks[0]?.transactions ? projectedBlocks[0].transactions : [];
@ -695,6 +718,7 @@ class WebsocketHandler {
freshTxs: fresh,
sigopTxs: sigop,
fullrbfTxs: fullrbf,
acceleratedTxs: accelerated,
matchRate: matchRate,
expectedFees: totalFees,
expectedWeight: totalWeight,
@ -722,9 +746,9 @@ class WebsocketHandler {
if (config.MEMPOOL.ADVANCED_GBT_MEMPOOL) {
if (config.MEMPOOL.RUST_GBT) {
await mempoolBlocks.$rustUpdateBlockTemplates(_memPool, Object.keys(_memPool).length, [], transactions);
await mempoolBlocks.$rustUpdateBlockTemplates(_memPool, Object.keys(_memPool).length, [], transactions, true);
} else {
await mempoolBlocks.$makeBlockTemplates(_memPool, true);
await mempoolBlocks.$makeBlockTemplates(_memPool, true, config.MEMPOOL_SERVICES.ACCELERATIONS);
}
} else {
mempoolBlocks.updateMempoolBlocks(_memPool, true);
@ -736,6 +760,9 @@ class WebsocketHandler {
const fees = feeApi.getRecommendedFee();
const mempoolInfo = memPool.getMempoolInfo();
// pre-compute address transactions
const addressCache = this.makeAddressCache(transactions);
// update init data
this.updateSocketDataFields({
'mempoolInfo': mempoolInfo,
@ -788,24 +815,17 @@ class WebsocketHandler {
if (mempoolTx && mempoolTx.position) {
response['txPosition'] = JSON.stringify({
txid: trackTxid,
position: mempoolTx.position,
position: {
...mempoolTx.position,
accelerated: mempoolTx.acceleration || undefined,
}
});
}
}
}
if (client['track-address']) {
const foundTransactions: TransactionExtended[] = [];
transactions.forEach((tx) => {
if (tx.vin && tx.vin.some((vin) => !!vin.prevout && vin.prevout.scriptpubkey_address === client['track-address'])) {
foundTransactions.push(tx);
return;
}
if (tx.vout && tx.vout.some((vout) => vout.scriptpubkey_address === client['track-address'])) {
foundTransactions.push(tx);
}
});
const foundTransactions: TransactionExtended[] = Array.from(addressCache[client['track-address']]?.values() || []);
if (foundTransactions.length) {
foundTransactions.forEach((tx) => {
@ -858,7 +878,7 @@ class WebsocketHandler {
}
}
if (client['track-mempool-block'] >= 0) {
if (client['track-mempool-block'] >= 0 && memPool.isInSync()) {
const index = client['track-mempool-block'];
if (mBlockDeltas && mBlockDeltas[index]) {
response['projected-block-transactions'] = getCachedResponse(`projected-block-transactions-${index}`, {
@ -883,6 +903,52 @@ class WebsocketHandler {
+ '}';
}
private makeAddressCache(transactions: MempoolTransactionExtended[]): { [address: string]: Set<MempoolTransactionExtended> } {
const addressCache: { [address: string]: Set<MempoolTransactionExtended> } = {};
for (const tx of transactions) {
for (const vin of tx.vin) {
if (vin?.prevout?.scriptpubkey_address) {
if (!addressCache[vin.prevout.scriptpubkey_address]) {
addressCache[vin.prevout.scriptpubkey_address] = new Set();
}
addressCache[vin.prevout.scriptpubkey_address].add(tx);
}
if (vin?.prevout?.scriptpubkey) {
if (!addressCache[vin.prevout.scriptpubkey]) {
addressCache[vin.prevout.scriptpubkey] = new Set();
}
addressCache[vin.prevout.scriptpubkey].add(tx);
}
}
for (const vout of tx.vout) {
if (vout?.scriptpubkey_address) {
if (!addressCache[vout?.scriptpubkey_address]) {
addressCache[vout?.scriptpubkey_address] = new Set();
}
addressCache[vout?.scriptpubkey_address].add(tx);
}
if (vout?.scriptpubkey) {
if (!addressCache[vout.scriptpubkey]) {
addressCache[vout.scriptpubkey] = new Set();
}
addressCache[vout.scriptpubkey].add(tx);
}
}
}
return addressCache;
}
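Note that the cache is keyed by both scriptpubkey_address and the raw scriptpubkey hex, so clients tracking a bare script (such as the pubkey scripts synthesized for track-address earlier in this diff) resolve through the same map, and the per-client cost drops to a single lookup (trackedKey assumed):
const txsForKey = Array.from(addressCache[trackedKey]?.values() || []);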
private async getFullTransactions(transactions: MempoolTransactionExtended[]): Promise<MempoolTransactionExtended[]> {
for (let i = 0; i < transactions.length; i++) {
try {
transactions[i] = await transactionUtils.$getMempoolTransactionExtended(transactions[i].txid, true);
} catch (e) {
logger.debug('Error finding transaction in mempool: ' + (e instanceof Error ? e.message : e));
}
}
return transactions;
}
private printLogs(): void {
if (this.wss) {
const count = this.wss?.clients?.size || 0;

View File

@ -12,6 +12,7 @@ interface IConfig {
API_URL_PREFIX: string;
POLL_RATE_MS: number;
CACHE_DIR: string;
CACHE_ENABLED: boolean;
CLEAR_PROTECTION_MINUTES: number;
RECOMMENDED_FEE_PERCENTILE: number;
BLOCK_WEIGHT_UNITS: number;
@ -37,11 +38,16 @@ interface IConfig {
DISK_CACHE_BLOCK_INTERVAL: number;
MAX_PUSH_TX_SIZE_WEIGHT: number;
ALLOW_UNREACHABLE: boolean;
PRICE_UPDATES_PER_HOUR: number;
};
ESPLORA: {
REST_API_URL: string;
UNIX_SOCKET_PATH: string | void | null;
BATCH_QUERY_BASE_SIZE: number;
RETRY_UNIX_SOCKET_AFTER: number;
REQUEST_TIMEOUT: number;
FALLBACK_TIMEOUT: number;
FALLBACK: string[];
};
LIGHTNING: {
ENABLED: boolean;
@ -73,6 +79,8 @@ interface IConfig {
USERNAME: string;
PASSWORD: string;
TIMEOUT: number;
COOKIE: boolean;
COOKIE_PATH: string;
};
SECOND_CORE_RPC: {
HOST: string;
@ -80,6 +88,8 @@ interface IConfig {
USERNAME: string;
PASSWORD: string;
TIMEOUT: number;
COOKIE: boolean;
COOKIE_PATH: string;
};
DATABASE: {
ENABLED: boolean;
@ -90,6 +100,7 @@ interface IConfig {
USERNAME: string;
PASSWORD: string;
TIMEOUT: number;
PID_DIR: string;
};
SYSLOG: {
ENABLED: boolean;
@ -114,10 +125,6 @@ interface IConfig {
USERNAME: string;
PASSWORD: string;
};
PRICE_DATA_SERVER: {
TOR_URL: string;
CLEARNET_URL: string;
};
EXTERNAL_DATA_SERVER: {
MEMPOOL_API: string;
MEMPOOL_ONION: string;
@ -137,7 +144,16 @@ interface IConfig {
AUDIT: boolean;
AUDIT_START_HEIGHT: number;
SERVERS: string[];
}
},
MEMPOOL_SERVICES: {
API: string;
ACCELERATIONS: boolean;
},
REDIS: {
ENABLED: boolean;
UNIX_SOCKET_PATH: string;
BATCH_QUERY_BASE_SIZE: number;
},
}
const defaults: IConfig = {
@ -150,6 +166,7 @@ const defaults: IConfig = {
'API_URL_PREFIX': '/api/v1/',
'POLL_RATE_MS': 2000,
'CACHE_DIR': './cache',
'CACHE_ENABLED': true,
'CLEAR_PROTECTION_MINUTES': 20,
'RECOMMENDED_FEE_PERCENTILE': 50,
'BLOCK_WEIGHT_UNITS': 4000000,
@ -175,11 +192,16 @@ const defaults: IConfig = {
'DISK_CACHE_BLOCK_INTERVAL': 6,
'MAX_PUSH_TX_SIZE_WEIGHT': 400000,
'ALLOW_UNREACHABLE': true,
'PRICE_UPDATES_PER_HOUR': 1,
},
'ESPLORA': {
'REST_API_URL': 'http://127.0.0.1:3000',
'UNIX_SOCKET_PATH': null,
'BATCH_QUERY_BASE_SIZE': 1000,
'RETRY_UNIX_SOCKET_AFTER': 30000,
'REQUEST_TIMEOUT': 10000,
'FALLBACK_TIMEOUT': 5000,
'FALLBACK': [],
},
'ELECTRUM': {
'HOST': '127.0.0.1',
@ -192,6 +214,8 @@ const defaults: IConfig = {
'USERNAME': 'mempool',
'PASSWORD': 'mempool',
'TIMEOUT': 60000,
'COOKIE': false,
'COOKIE_PATH': '/bitcoin/.cookie'
},
'SECOND_CORE_RPC': {
'HOST': '127.0.0.1',
@ -199,6 +223,8 @@ const defaults: IConfig = {
'USERNAME': 'mempool',
'PASSWORD': 'mempool',
'TIMEOUT': 60000,
'COOKIE': false,
'COOKIE_PATH': '/bitcoin/.cookie'
},
'DATABASE': {
'ENABLED': true,
@ -209,6 +235,7 @@ const defaults: IConfig = {
'USERNAME': 'mempool',
'PASSWORD': 'mempool',
'TIMEOUT': 180000,
'PID_DIR': '',
},
'SYSLOG': {
'ENABLED': true,
@ -252,10 +279,6 @@ const defaults: IConfig = {
'USERNAME': '',
'PASSWORD': ''
},
'PRICE_DATA_SERVER': {
'TOR_URL': 'http://wizpriceje6q5tdrxkyiazsgu7irquiqjy2dptezqhrtu7l2qelqktid.onion/getAllMarketPrices',
'CLEARNET_URL': 'https://price.bisq.wiz.biz/getAllMarketPrices'
},
'EXTERNAL_DATA_SERVER': {
'MEMPOOL_API': 'https://mempool.space/api/v1',
'MEMPOOL_ONION': 'http://mempoolhqx4isw62xs7abwphsq7ldayuidyx2v2oethdhhj6mlo2r6ad.onion/api/v1',
@ -275,7 +298,16 @@ const defaults: IConfig = {
'AUDIT': false,
'AUDIT_START_HEIGHT': 774000,
'SERVERS': [],
}
},
'MEMPOOL_SERVICES': {
'API': '',
'ACCELERATIONS': false,
},
'REDIS': {
'ENABLED': false,
'UNIX_SOCKET_PATH': '',
'BATCH_QUERY_BASE_SIZE': 5000,
},
};
class Config implements IConfig {
@ -292,10 +324,11 @@ class Config implements IConfig {
LND: IConfig['LND'];
CLIGHTNING: IConfig['CLIGHTNING'];
SOCKS5PROXY: IConfig['SOCKS5PROXY'];
PRICE_DATA_SERVER: IConfig['PRICE_DATA_SERVER'];
EXTERNAL_DATA_SERVER: IConfig['EXTERNAL_DATA_SERVER'];
MAXMIND: IConfig['MAXMIND'];
REPLICATION: IConfig['REPLICATION'];
MEMPOOL_SERVICES: IConfig['MEMPOOL_SERVICES'];
REDIS: IConfig['REDIS'];
constructor() {
const configs = this.merge(configFromFile, defaults);
@ -312,10 +345,11 @@ class Config implements IConfig {
this.LND = configs.LND;
this.CLIGHTNING = configs.CLIGHTNING;
this.SOCKS5PROXY = configs.SOCKS5PROXY;
this.PRICE_DATA_SERVER = configs.PRICE_DATA_SERVER;
this.EXTERNAL_DATA_SERVER = configs.EXTERNAL_DATA_SERVER;
this.MAXMIND = configs.MAXMIND;
this.REPLICATION = configs.REPLICATION;
this.MEMPOOL_SERVICES = configs.MEMPOOL_SERVICES;
this.REDIS = configs.REDIS;
}
merge = (...objects: object[]): IConfig => {


@ -1,7 +1,11 @@
import * as fs from 'fs';
import path from 'path';
import config from './config';
import { createPool, Pool, PoolConnection } from 'mysql2/promise';
import { LogLevel } from './logger';
import logger from './logger';
import { FieldPacket, OkPacket, PoolOptions, ResultSetHeader, RowDataPacket } from 'mysql2/typings/mysql';
import { execSync } from 'child_process';
class DB {
constructor() {
@ -30,7 +34,7 @@ import { FieldPacket, OkPacket, PoolOptions, ResultSetHeader, RowDataPacket } fr
}
public async query<T extends RowDataPacket[][] | RowDataPacket[] | OkPacket |
OkPacket[] | ResultSetHeader>(query, params?, connection?: PoolConnection): Promise<[T, FieldPacket[]]>
OkPacket[] | ResultSetHeader>(query, params?, errorLogLevel: LogLevel | 'silent' = 'debug', connection?: PoolConnection): Promise<[T, FieldPacket[]]>
{
this.checkDBFlag();
let hardTimeout;
@ -52,19 +56,38 @@ import { FieldPacket, OkPacket, PoolOptions, ResultSetHeader, RowDataPacket } fr
}).then(result => {
resolve(result);
}).catch(error => {
if (errorLogLevel !== 'silent') {
logger[errorLogLevel](`database query "${query?.sql?.slice(0, 160) || (typeof(query) === 'string' || query instanceof String ? query?.slice(0, 160) : 'unknown query')}" failed!`);
}
reject(error);
}).finally(() => {
clearTimeout(timer);
});
});
} else {
const pool = await this.getPool();
return pool.query(query, params);
try {
const pool = await this.getPool();
return pool.query(query, params);
} catch (e) {
if (errorLogLevel !== 'silent') {
logger[errorLogLevel](`database query "${query?.sql?.slice(0, 160) || (typeof(query) === 'string' || query instanceof String ? query?.slice(0, 160) : 'unknown query')}" failed!`);
}
throw e;
}
}
}
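
The new `errorLogLevel` argument defaults to `'debug'` and accepts `'silent'` for queries whose failure is expected and handled by the caller. A minimal caller sketch (values and import path are illustrative; the shape mirrors the NodesSocketsRepository change further down in this diff):

```ts
import DB from '../database'; // illustrative path

// Illustrative values
const publicKey = '02abcdef...';
const addr = '127.0.0.1:9735';
const network = 'ipv4';

try {
  await DB.query(
    `INSERT INTO nodes_sockets(public_key, socket, type) VALUE (?, ?, ?)`,
    [publicKey, addr, network],
    'silent', // suppress the failure log; duplicate keys are expected here
  );
} catch (e: any) {
  if (e.errno !== 1062) { // anything other than ER_DUP_ENTRY is a real error
    throw e;
  }
}
```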
private async $rollbackAtomic(connection: PoolConnection): Promise<void> {
try {
await connection.rollback();
await connection.release();
} catch (e) {
logger.warn('Failed to rollback incomplete db transaction: ' + (e instanceof Error ? e.message : e));
}
}
public async $atomicQuery<T extends RowDataPacket[][] | RowDataPacket[] | OkPacket |
OkPacket[] | ResultSetHeader>(queries: { query, params }[]): Promise<[T, FieldPacket[]][]>
OkPacket[] | ResultSetHeader>(queries: { query, params }[], errorLogLevel: LogLevel | 'silent' = 'debug'): Promise<[T, FieldPacket[]][]>
{
const pool = await this.getPool();
const connection = await pool.getConnection();
@ -73,7 +96,7 @@ import { FieldPacket, OkPacket, PoolOptions, ResultSetHeader, RowDataPacket } fr
const results: [T, FieldPacket[]][] = [];
for (const query of queries) {
const result = await this.query(query.query, query.params, connection) as [T, FieldPacket[]];
const result = await this.query(query.query, query.params, errorLogLevel, connection) as [T, FieldPacket[]];
results.push(result);
}
@ -81,9 +104,8 @@ import { FieldPacket, OkPacket, PoolOptions, ResultSetHeader, RowDataPacket } fr
return results;
} catch (e) {
logger.err('Could not complete db transaction, rolling back: ' + (e instanceof Error ? e.message : e));
connection.rollback();
connection.release();
logger.warn('Could not complete db transaction, rolling back: ' + (e instanceof Error ? e.message : e));
this.$rollbackAtomic(connection);
throw e;
} finally {
connection.release();
@ -101,6 +123,50 @@ import { FieldPacket, OkPacket, PoolOptions, ResultSetHeader, RowDataPacket } fr
}
}
public getPidLock(): boolean {
const filePath = path.join(config.DATABASE.PID_DIR || __dirname, `/mempool-${config.DATABASE.DATABASE}.pid`);
this.enforcePidLock(filePath);
fs.writeFileSync(filePath, `${process.pid}`);
return true;
}
private enforcePidLock(filePath: string): void {
if (fs.existsSync(filePath)) {
const pid = parseInt(fs.readFileSync(filePath, 'utf-8'));
if (pid === process.pid) {
logger.warn('PID file already exists for this process');
return;
}
let cmd;
try {
cmd = execSync(`ps -p ${pid} -o args=`);
} catch (e) {
logger.warn(`Stale PID file at ${filePath}, but no process running on that PID ${pid}`);
return;
}
if (cmd && cmd.toString()?.includes('node')) {
const msg = `Another mempool nodejs process is already running on PID ${pid}`;
logger.err(msg);
throw new Error(msg);
} else {
logger.warn(`Stale PID file at ${filePath}, but the PID ${pid} does not belong to a running mempool instance`);
}
}
}
public releasePidLock(): void {
const filePath = path.join(config.DATABASE.PID_DIR || __dirname, `/mempool-${config.DATABASE.DATABASE}.pid`);
if (fs.existsSync(filePath)) {
const pid = parseInt(fs.readFileSync(filePath, 'utf-8'));
// only release our own pid file
if (pid === process.pid) {
fs.unlinkSync(filePath);
}
}
}
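
The intended lifecycle of the lock, sketched from the index.ts changes later in this diff:

```ts
// At startup, before opening the database:
if (config.DATABASE.ENABLED) {
  DB.getPidLock(); // throws if a live mempool node process already holds the lock
}

// In every exit path (signal handlers, uncaught exceptions):
if (config.DATABASE.ENABLED) {
  DB.releasePidLock(); // unlinks the file only if it contains our own pid
}
```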
private async getPool(): Promise<Pool> {
if (this.pool === null) {
this.pool = createPool(this.poolConfig);


@ -30,6 +30,7 @@ import generalLightningRoutes from './api/explorer/general.routes';
import lightningStatsUpdater from './tasks/lightning/stats-updater.service';
import networkSyncService from './tasks/lightning/network-sync.service';
import statisticsRoutes from './api/statistics/statistics.routes';
import pricesRoutes from './api/prices/prices.routes';
import miningRoutes from './api/mining/mining-routes';
import bisqRoutes from './api/bisq/bisq.routes';
import liquidRoutes from './api/liquid/liquid.routes';
@ -41,6 +42,8 @@ import chainTips from './api/chain-tips';
import { AxiosError } from 'axios';
import v8 from 'v8';
import { formatBytes, getBytesUnit } from './utils/format';
import redisCache from './api/redis-cache';
import accelerationApi from './api/services/acceleration';
class Server {
private wss: WebSocket.Server | undefined;
@ -89,7 +92,24 @@ class Server {
async startServer(worker = false): Promise<void> {
logger.notice(`Starting Mempool Server${worker ? ' (worker)' : ''}... (${backendInfo.getShortCommitHash()})`);
// Register cleanup listeners for exit events
['exit', 'SIGHUP', 'SIGINT', 'SIGTERM', 'SIGUSR1', 'SIGUSR2'].forEach(event => {
process.on(event, () => { this.onExit(event); });
});
process.on('uncaughtException', (error) => {
this.onUnhandledException('uncaughtException', error);
});
process.on('unhandledRejection', (reason, promise) => {
this.onUnhandledException('unhandledRejection', reason);
});
if (config.MEMPOOL.BACKEND === 'esplora') {
bitcoinApi.startHealthChecks();
}
if (config.DATABASE.ENABLED) {
DB.getPidLock();
await DB.checkDbConnection();
try {
if (process.env.npm_config_reindex_blocks === 'true') { // Re-index requests
@ -122,7 +142,11 @@ class Server {
await poolsUpdater.updatePoolsJson(); // Needs to be done before loading the disk cache because we sometimes wipe it
await syncAssets.syncAssets$();
if (config.MEMPOOL.ENABLED) {
await diskCache.$loadMempoolCache();
if (config.MEMPOOL.CACHE_ENABLED) {
await diskCache.$loadMempoolCache();
} else if (config.REDIS.ENABLED) {
await redisCache.$loadCache();
}
}
if (config.STATISTICS.ENABLED && config.DATABASE.ENABLED && cluster.isPrimary) {
@ -182,15 +206,18 @@ class Server {
}
}
const newMempool = await bitcoinApi.$getRawMempool();
const newAccelerations = await accelerationApi.$fetchAccelerations();
const numHandledBlocks = await blocks.$updateBlocks();
const pollRate = config.MEMPOOL.POLL_RATE_MS * (indexer.indexerIsRunning() ? 10 : 1);
if (numHandledBlocks === 0) {
await memPool.$updateMempool(newMempool);
await memPool.$updateMempool(newMempool, newAccelerations, pollRate);
}
indexer.$run();
priceUpdater.$run();
// rerun immediately if we skipped the mempool update, otherwise wait POLL_RATE_MS
const elapsed = Date.now() - start;
const remainingTime = Math.max(0, config.MEMPOOL.POLL_RATE_MS - elapsed)
const remainingTime = Math.max(0, pollRate - elapsed);
setTimeout(this.runMainUpdateLoop.bind(this), numHandledBlocks > 0 ? 0 : remainingTime);
this.backendRetryCount = 0;
} catch (e: any) {
@ -255,6 +282,7 @@ class Server {
setUpHttpApiRoutes(): void {
bitcoinRoutes.initRoutes(this.app);
pricesRoutes.initRoutes(this.app);
if (config.STATISTICS.ENABLED && config.DATABASE.ENABLED && config.MEMPOOL.ENABLED) {
statisticsRoutes.initRoutes(this.app);
}
@ -293,6 +321,19 @@ class Server {
this.lastHeapLogTime = now;
}
}
onExit(exitEvent, code = 0): void {
logger.debug(`onExit for signal: ${exitEvent}`);
if (config.DATABASE.ENABLED) {
DB.releasePidLock();
}
process.exit(code);
}
onUnhandledException(type, error): void {
console.error(`${type}:`, error);
this.onExit(type, 1);
}
}
((): Server => new Server())();


@ -15,11 +15,18 @@ export interface CoreIndex {
best_block_height: number;
}
type TaskName = 'blocksPrices' | 'coinStatsIndex';
class Indexer {
runIndexer = true;
indexerRunning = false;
tasksRunning: string[] = [];
coreIndexes: CoreIndex[] = [];
private runIndexer = true;
private indexerRunning = false;
private tasksRunning: { [key in TaskName]?: boolean; } = {};
private tasksScheduled: { [key in TaskName]?: NodeJS.Timeout; } = {};
private coreIndexes: CoreIndex[] = [];
public indexerIsRunning(): boolean {
return this.indexerRunning;
}
/**
* Check which core index is available for indexing
@ -69,33 +76,69 @@ class Indexer {
}
}
public async runSingleTask(task: 'blocksPrices' | 'coinStatsIndex'): Promise<void> {
if (!Common.indexingEnabled()) {
return;
}
if (task === 'blocksPrices' && !this.tasksRunning.includes(task) && !['testnet', 'signet'].includes(config.MEMPOOL.NETWORK)) {
this.tasksRunning.push(task);
const lastestPriceId = await PricesRepository.$getLatestPriceId();
if (priceUpdater.historyInserted === false || lastestPriceId === null) {
logger.debug(`Blocks prices indexer is waiting for the price updater to complete`, logger.tags.mining);
setTimeout(() => {
this.tasksRunning = this.tasksRunning.filter(runningTask => runningTask !== task);
this.runSingleTask('blocksPrices');
}, 10000);
} else {
logger.debug(`Blocks prices indexer will run now`, logger.tags.mining);
await mining.$indexBlockPrices();
this.tasksRunning = this.tasksRunning.filter(runningTask => runningTask !== task);
/**
* schedules a single task to run in `timeout` ms
* only one task of each type may be scheduled
*
* @param {TaskName} task - the type of task
* @param {number} timeout - delay in ms
* @param {boolean} replace - `true` replaces any already scheduled task (works like a debounce), `false` ignores subsequent requests (works like a throttle)
*/
public scheduleSingleTask(task: TaskName, timeout: number = 10000, replace = false): void {
if (this.tasksScheduled[task]) {
if (!replace) { //throttle
return;
} else { // debounce
clearTimeout(this.tasksScheduled[task]);
}
}
this.tasksScheduled[task] = setTimeout(async () => {
try {
await this.runSingleTask(task);
} catch (e) {
logger.err(`Unexpected error in scheduled task ${task}: ` + (e instanceof Error ? e.message : e));
} finally {
clearTimeout(this.tasksScheduled[task]);
}
}, timeout);
}
if (task === 'coinStatsIndex' && !this.tasksRunning.includes(task)) {
this.tasksRunning.push(task);
logger.debug(`Indexing coinStatsIndex now`);
await mining.$indexCoinStatsIndex();
this.tasksRunning = this.tasksRunning.filter(runningTask => runningTask !== task);
/**
* Runs a single task immediately
*
* (use `scheduleSingleTask` instead to queue a task to run after some timeout)
*/
public async runSingleTask(task: TaskName): Promise<void> {
if (!Common.indexingEnabled() || this.tasksRunning[task]) {
return;
}
this.tasksRunning[task] = true;
switch (task) {
case 'blocksPrices': {
if (!['testnet', 'signet'].includes(config.MEMPOOL.NETWORK)) {
let lastestPriceId;
try {
lastestPriceId = await PricesRepository.$getLatestPriceId();
} catch (e) {
logger.debug('failed to fetch latest price id from db: ' + (e instanceof Error ? e.message : e));
}
if (priceUpdater.historyInserted === false || lastestPriceId === null) {
logger.debug(`Blocks prices indexer is waiting for the price updater to complete`, logger.tags.mining);
this.scheduleSingleTask(task, 10000);
} else {
logger.debug(`Blocks prices indexer will run now`, logger.tags.mining);
await mining.$indexBlockPrices();
}
}
} break;
case 'coinStatsIndex': {
logger.debug(`Indexing coinStatsIndex now`);
await mining.$indexCoinStatsIndex();
} break;
}
this.tasksRunning[task] = false;
}
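
To make the throttle/debounce distinction concrete, a short usage sketch (call sites and import path are illustrative, not from this diff):

```ts
import indexer from '../indexer'; // illustrative path to the default export instance

// Throttle (replace = false, the default): the first call arms the timer,
// and later calls within the window are ignored.
indexer.scheduleSingleTask('blocksPrices', 10000);
indexer.scheduleSingleTask('blocksPrices', 10000); // no-op, already scheduled

// Debounce (replace = true): each call cancels the pending timer and restarts
// the delay, so the task runs once, 5s after the *last* call.
indexer.scheduleSingleTask('coinStatsIndex', 5000, true);
indexer.scheduleSingleTask('coinStatsIndex', 5000, true); // resets the 5s timer
```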
public async $run(): Promise<void> {
@ -105,6 +148,12 @@ class Indexer {
return;
}
try {
await priceUpdater.$run();
} catch (e) {
logger.err(`Running priceUpdater failed. Reason: ` + (e instanceof Error ? e.message : e));
}
// Do not attempt to index anything unless Bitcoin Core is fully synced
const blockchainInfo = await bitcoinClient.getBlockchainInfo();
if (blockchainInfo.blocks !== blockchainInfo.headers) {
@ -119,8 +168,6 @@ class Indexer {
await this.checkAvailableCoreIndexes();
try {
await priceUpdater.$run();
const chainValid = await blocks.$generateBlockDatabase();
if (chainValid === false) {
// Chain of block hash was invalid, so we need to reindex. Stop here and continue at the next iteration


@ -157,4 +157,6 @@ class Logger {
}
}
export type LogLevel = 'emerg' | 'alert' | 'crit' | 'err' | 'warn' | 'notice' | 'info' | 'debug';
export default new Logger();


@ -20,6 +20,7 @@ export interface PoolInfo {
slug: string;
avgMatchRate: number | null;
avgFeeDelta: number | null;
poolUniqueId: number;
}
export interface PoolStats extends PoolInfo {
@ -36,6 +37,7 @@ export interface BlockAudit {
sigopTxs: string[],
fullrbfTxs: string[],
addedTxs: string[],
acceleratedTxs: string[],
matchRate: number,
expectedFees?: number,
expectedWeight?: number,
@ -59,13 +61,13 @@ export interface MempoolBlock {
export interface MempoolBlockWithTransactions extends MempoolBlock {
transactionIds: string[];
transactions: TransactionStripped[];
transactions: TransactionClassified[];
}
export interface MempoolBlockDelta {
added: TransactionStripped[];
added: TransactionClassified[];
removed: string[];
changed: { txid: string, rate: number | undefined }[];
changed: { txid: string, rate: number | undefined, flags?: number }[];
}
interface VinStrippedToScriptsig {
@ -91,7 +93,10 @@ export interface TransactionExtended extends IEsploraApi.Transaction {
block: number,
vsize: number,
};
acceleration?: boolean;
replacement?: boolean;
uid?: number;
flags?: number;
}
export interface MempoolTransactionExtended extends TransactionExtended {
@ -101,6 +106,7 @@ export interface MempoolTransactionExtended extends TransactionExtended {
adjustedFeePerVsize: number;
inputs?: number[];
lastBoosted?: number;
cpfpDirty?: boolean;
}
export interface AuditTransaction {
@ -182,9 +188,49 @@ export interface TransactionStripped {
fee: number;
vsize: number;
value: number;
acc?: boolean;
rate?: number; // effective fee rate
}
export interface TransactionClassified extends TransactionStripped {
flags: number;
}
// binary flags for transaction classification
export const TransactionFlags = {
// features
rbf: 0b00000001n,
no_rbf: 0b00000010n,
v1: 0b00000100n,
v2: 0b00001000n,
// address types
p2pk: 0b00000001_00000000n,
p2ms: 0b00000010_00000000n,
p2pkh: 0b00000100_00000000n,
p2sh: 0b00001000_00000000n,
p2wpkh: 0b00010000_00000000n,
p2wsh: 0b00100000_00000000n,
p2tr: 0b01000000_00000000n,
// behavior
cpfp_parent: 0b00000001_00000000_00000000n,
cpfp_child: 0b00000010_00000000_00000000n,
replacement: 0b00000100_00000000_00000000n,
// data
op_return: 0b00000001_00000000_00000000_00000000n,
fake_pubkey: 0b00000010_00000000_00000000_00000000n,
inscription: 0b00000100_00000000_00000000_00000000n,
// heuristics
coinjoin: 0b00000001_00000000_00000000_00000000_00000000n,
consolidation: 0b00000010_00000000_00000000_00000000_00000000n,
batch_payout: 0b00000100_00000000_00000000_00000000_00000000n,
// sighash
sighash_all: 0b00000001_00000000_00000000_00000000_00000000_00000000n,
sighash_none: 0b00000010_00000000_00000000_00000000_00000000_00000000n,
sighash_single: 0b00000100_00000000_00000000_00000000_00000000_00000000n,
sighash_default: 0b00001000_00000000_00000000_00000000_00000000_00000000n,
sighash_acp: 0b00010000_00000000_00000000_00000000_00000000_00000000n,
};
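
Because the flags are BigInt bitmasks, classifications compose with bitwise OR and membership is tested with AND. A minimal sketch, assuming `TransactionFlags` is imported from this module:

```ts
import { TransactionFlags } from '../mempool.interfaces'; // illustrative path

// Compose a classification for a hypothetical RBF-signalling v2 taproot spend.
let flags = 0n;
flags |= TransactionFlags.rbf | TransactionFlags.v2 | TransactionFlags.p2tr;

// A flag is set iff the masked value is non-zero.
const signalsRbf = (flags & TransactionFlags.rbf) > 0n; // true
const isP2pkh = (flags & TransactionFlags.p2pkh) > 0n;  // false

// TransactionClassified.flags is a plain number, so the BigInt is narrowed on
// serialization; the highest flag here (2^44) still fits a float64 exactly.
const serialized = Number(flags);
```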
export interface BlockExtension {
totalFees: number;
medianFee: number; // median fee rate
@ -295,6 +341,7 @@ export interface Statistic {
total_fee: number;
mempool_byte_weight: number;
fee_data: string;
min_fee: number;
vsize_1: number;
vsize_2: number;
@ -341,6 +388,7 @@ export interface OptimizedStatistic {
vbytes_per_second: number;
total_fee: number;
mempool_byte_weight: number;
min_fee: number;
vsizes: number[];
}


@ -116,6 +116,7 @@ class AuditReplication {
freshTxs: auditSummary.freshTxs || [],
sigopTxs: auditSummary.sigopTxs || [],
fullrbfTxs: auditSummary.fullrbfTxs || [],
acceleratedTxs: auditSummary.acceleratedTxs || [],
matchRate: auditSummary.matchRate,
expectedFees: auditSummary.expectedFees,
expectedWeight: auditSummary.expectedWeight,

View File

@ -6,9 +6,9 @@ import { BlockAudit, AuditScore } from '../mempool.interfaces';
class BlocksAuditRepositories {
public async $saveAudit(audit: BlockAudit): Promise<void> {
try {
await DB.query(`INSERT INTO blocks_audits(time, height, hash, missing_txs, added_txs, fresh_txs, sigop_txs, fullrbf_txs, match_rate, expected_fees, expected_weight)
VALUE (FROM_UNIXTIME(?), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`, [audit.time, audit.height, audit.hash, JSON.stringify(audit.missingTxs),
JSON.stringify(audit.addedTxs), JSON.stringify(audit.freshTxs), JSON.stringify(audit.sigopTxs), JSON.stringify(audit.fullrbfTxs), audit.matchRate, audit.expectedFees, audit.expectedWeight]);
await DB.query(`INSERT INTO blocks_audits(time, height, hash, missing_txs, added_txs, fresh_txs, sigop_txs, fullrbf_txs, accelerated_txs, match_rate, expected_fees, expected_weight)
VALUE (FROM_UNIXTIME(?), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`, [audit.time, audit.height, audit.hash, JSON.stringify(audit.missingTxs),
JSON.stringify(audit.addedTxs), JSON.stringify(audit.freshTxs), JSON.stringify(audit.sigopTxs), JSON.stringify(audit.fullrbfTxs), JSON.stringify(audit.acceleratedTxs), audit.matchRate, audit.expectedFees, audit.expectedWeight]);
} catch (e: any) {
if (e.errno === 1062) { // ER_DUP_ENTRY - This scenario is possible upon node backend restart
logger.debug(`Cannot save block audit for block ${audit.hash} because it has already been indexed, ignoring`);
@ -69,6 +69,7 @@ class BlocksAuditRepositories {
fresh_txs as freshTxs,
sigop_txs as sigopTxs,
fullrbf_txs as fullrbfTxs,
accelerated_txs as acceleratedTxs,
match_rate as matchRate,
expected_fees as expectedFees,
expected_weight as expectedWeight
@ -83,6 +84,7 @@ class BlocksAuditRepositories {
rows[0].freshTxs = JSON.parse(rows[0].freshTxs);
rows[0].sigopTxs = JSON.parse(rows[0].sigopTxs);
rows[0].fullrbfTxs = JSON.parse(rows[0].fullrbfTxs);
rows[0].acceleratedTxs = JSON.parse(rows[0].acceleratedTxs);
rows[0].template = JSON.parse(rows[0].template);
return rows[0];


@ -1,3 +1,4 @@
import bitcoinApi from '../api/bitcoin/bitcoin-api-factory';
import { BlockExtended, BlockExtension, BlockPrice, EffectiveFeeStats } from '../mempool.interfaces';
import DB from '../database';
import logger from '../logger';
@ -12,6 +13,7 @@ import config from '../config';
import chainTips from '../api/chain-tips';
import blocks from '../api/blocks';
import BlocksAuditsRepository from './BlocksAuditsRepository';
import transactionUtils from '../api/transaction-utils';
interface DatabaseBlock {
id: string;
@ -539,7 +541,7 @@ class BlocksRepository {
*/
public async $getBlocksDifficulty(): Promise<object[]> {
try {
const [rows]: any[] = await DB.query(`SELECT UNIX_TIMESTAMP(blockTimestamp) as time, height, difficulty FROM blocks`);
const [rows]: any[] = await DB.query(`SELECT UNIX_TIMESTAMP(blockTimestamp) as time, height, difficulty, bits FROM blocks ORDER BY height ASC`);
return rows;
} catch (e) {
logger.err('Cannot get blocks difficulty list from the db. Reason: ' + (e instanceof Error ? e.message : e));
@ -848,7 +850,7 @@ class BlocksRepository {
*/
public async $getOldestConsecutiveBlock(): Promise<any> {
try {
const [rows]: any = await DB.query(`SELECT height, UNIX_TIMESTAMP(blockTimestamp) as timestamp, difficulty FROM blocks ORDER BY height DESC`);
const [rows]: any = await DB.query(`SELECT height, UNIX_TIMESTAMP(blockTimestamp) as timestamp, difficulty, bits FROM blocks ORDER BY height DESC`);
for (let i = 0; i < rows.length - 1; ++i) {
if (rows[i].height - rows[i + 1].height > 1) {
return rows[i];
@ -1036,8 +1038,17 @@ class BlocksRepository {
{
extras.feePercentiles = await BlocksSummariesRepository.$getFeePercentilesByBlockId(dbBlk.id);
if (extras.feePercentiles === null) {
const block = await bitcoinClient.getBlock(dbBlk.id, 2);
const summary = blocks.summarizeBlock(block);
let summary;
if (config.MEMPOOL.BACKEND === 'esplora') {
const txs = (await bitcoinApi.$getTxsForBlock(dbBlk.id)).map(tx => transactionUtils.extendTransaction(tx));
summary = blocks.summarizeBlockTransactions(dbBlk.id, txs);
} else {
// Call Core RPC
const block = await bitcoinClient.getBlock(dbBlk.id, 2);
summary = blocks.summarizeBlock(block);
}
await BlocksSummariesRepository.$saveTransactions(dbBlk.height, dbBlk.id, summary.transactions);
extras.feePercentiles = await BlocksSummariesRepository.$getFeePercentilesByBlockId(dbBlk.id);
}

View File

@ -14,7 +14,7 @@ class NodesSocketsRepository {
await DB.query(`
INSERT INTO nodes_sockets(public_key, socket, type)
VALUE (?, ?, ?)
`, [socket.publicKey, socket.addr, socket.network]);
`, [socket.publicKey, socket.addr, socket.network], 'silent');
} catch (e: any) {
if (e.errno !== 1062) { // ER_DUP_ENTRY - Not an issue, just ignore this
logger.err(`Cannot save node socket (${[socket.publicKey, socket.addr, socket.network]}) into db. Reason: ` + (e instanceof Error ? e.message : e));

View File

@ -40,7 +40,8 @@ class PoolsRepository {
pools.link AS link,
slug,
AVG(blocks_audits.match_rate) AS avgMatchRate,
AVG((CAST(blocks.fees as SIGNED) - CAST(blocks_audits.expected_fees as SIGNED)) / NULLIF(CAST(blocks_audits.expected_fees as SIGNED), 0)) AS avgFeeDelta
AVG((CAST(blocks.fees as SIGNED) - CAST(blocks_audits.expected_fees as SIGNED)) / NULLIF(CAST(blocks_audits.expected_fees as SIGNED), 0)) AS avgFeeDelta,
unique_id as poolUniqueId
FROM blocks
JOIN pools on pools.id = pool_id
LEFT JOIN blocks_audits ON blocks_audits.height = blocks.height

View File

@ -1,5 +1,6 @@
var http = require('http')
var https = require('https')
import { readFileSync } from 'fs';
var JsonRPC = function (opts) {
// @ts-ignore
@ -55,7 +56,13 @@ JsonRPC.prototype.call = function (method, params) {
}
// use HTTP auth if user and password set
if (this.opts.user && this.opts.pass) {
if (this.opts.cookie) {
if (!this.cachedCookie) {
this.cachedCookie = readFileSync(this.opts.cookie).toString();
}
// @ts-ignore
requestOptions.auth = this.cachedCookie;
} else if (this.opts.user && this.opts.pass) {
// @ts-ignore
requestOptions.auth = this.opts.user + ':' + this.opts.pass
}
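
This drops straight into Node's HTTP basic auth because Bitcoin Core writes its cookie file as a ready-made `user:password` pair. A sketch of what the read returns (path and token are illustrative):

```ts
import { readFileSync } from 'fs';

// Typical contents of Core's .cookie file, regenerated on every node start:
//   __cookie__:6ae906ff3746...
// The literal username "__cookie__" plus a random password, already in the
// "user:pass" shape that requestOptions.auth expects, so no parsing is needed.
const cookie = readFileSync('/bitcoin/.cookie').toString();
```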
@ -93,7 +100,7 @@ JsonRPC.prototype.call = function (method, params) {
reject(err)
})
request.on('response', function (response) {
request.on('response', (response) => {
clearTimeout(reqTimeout)
// We need to buffer the response chunks in a nonblocking way.
@ -104,7 +111,7 @@ JsonRPC.prototype.call = function (method, params) {
// When all the responses are finished, we decode the JSON and
// depending on whether it's got a result or an error, we call
// emitSuccess or emitError on the promise.
response.on('end', function () {
response.on('end', () => {
var err
if (cbCalled) return
@ -113,6 +120,14 @@ JsonRPC.prototype.call = function (method, params) {
try {
var decoded = JSON.parse(buffer)
} catch (e) {
// if we authenticated using a cookie and it failed, read the cookie file again
if (
response.statusCode === 401 /* Unauthorized */ &&
this.opts.cookie
) {
this.cachedCookie = undefined;
}
if (response.statusCode !== 200) {
err = new Error('Invalid params, response status code: ' + response.statusCode)
err.code = -32602

View File

@ -15,8 +15,6 @@ class ForensicsService {
txCache: { [txid: string]: IEsploraApi.Transaction } = {};
tempCached: string[] = [];
constructor() {}
public async $startService(): Promise<void> {
logger.info('Starting lightning network forensics service');
@ -66,93 +64,138 @@ class ForensicsService {
*/
public async $runClosedChannelsForensics(onlyNewChannels: boolean = false): Promise<void> {
// Only Esplora backend can retrieve spent transaction outputs
if (config.MEMPOOL.BACKEND !== 'esplora') {
return;
}
let progress = 0;
try {
logger.debug(`Started running closed channel forensics...`);
let channels;
let allChannels;
if (onlyNewChannels) {
channels = await channelsApi.$getClosedChannelsWithoutReason();
allChannels = await channelsApi.$getClosedChannelsWithoutReason();
} else {
channels = await channelsApi.$getUnresolvedClosedChannels();
allChannels = await channelsApi.$getUnresolvedClosedChannels();
}
for (const channel of channels) {
let reason = 0;
let resolvedForceClose = false;
// Only Esplora backend can retrieve spent transaction outputs
const cached: string[] = [];
let progress = 0;
const sliceLength = Math.ceil(config.ESPLORA.BATCH_QUERY_BASE_SIZE / 10);
// process batches of 1000 channels
for (let i = 0; i < Math.ceil(allChannels.length / sliceLength); i++) {
const channels = allChannels.slice(i * sliceLength, (i + 1) * sliceLength);
let allOutspends: IEsploraApi.Outspend[][] = [];
const forceClosedChannels: { channel: any, cachedSpends: string[] }[] = [];
// fetch outspends in bulk
try {
let outspends: IEsploraApi.Outspend[] | undefined;
try {
outspends = await bitcoinApi.$getOutspends(channel.closing_transaction_id);
await Common.sleep$(config.LIGHTNING.FORENSICS_RATE_LIMIT);
} catch (e) {
logger.err(`Failed to call ${config.ESPLORA.REST_API_URL + '/tx/' + channel.closing_transaction_id + '/outspends'}. Reason ${e instanceof Error ? e.message : e}`);
continue;
}
const lightningScriptReasons: number[] = [];
const outspendTxids = channels.map(channel => channel.closing_transaction_id);
allOutspends = await bitcoinApi.$getBatchedOutspendsInternal(outspendTxids);
logger.info(`Fetched outspends for ${allOutspends.length} txs from esplora for LN forensics`);
await Common.sleep$(config.LIGHTNING.FORENSICS_RATE_LIMIT);
} catch (e) {
logger.err(`Failed to call ${config.ESPLORA.REST_API_URL + '/internal/txs/outspends/by-txid'}. Reason ${e instanceof Error ? e.message : e}`);
}
// fetch spending transactions in bulk and load into txCache
const newSpendingTxids: { [txid: string]: boolean } = {};
for (const outspends of allOutspends) {
for (const outspend of outspends) {
if (outspend.spent && outspend.txid) {
let spendingTx = await this.fetchTransaction(outspend.txid);
if (!spendingTx) {
continue;
}
cached.push(spendingTx.txid);
const lightningScript = this.findLightningScript(spendingTx.vin[outspend.vin || 0]);
lightningScriptReasons.push(lightningScript);
newSpendingTxids[outspend.txid] = true;
}
}
const filteredReasons = lightningScriptReasons.filter((r) => r !== 1);
if (filteredReasons.length) {
if (filteredReasons.some((r) => r === 2 || r === 4)) {
reason = 3;
} else {
reason = 2;
resolvedForceClose = true;
}
} else {
/*
We can detect a commitment transaction (force close) by reading Sequence and Locktime
https://github.com/lightning/bolts/blob/master/03-transactions.md#commitment-transaction
*/
let closingTx = await this.fetchTransaction(channel.closing_transaction_id, true);
if (!closingTx) {
}
const allOutspendTxs = await this.fetchTransactions(
allOutspends.flatMap(outspends =>
outspends
.filter(outspend => outspend.spent && outspend.txid)
.map(outspend => outspend.txid)
)
);
logger.info(`Fetched ${allOutspendTxs.length} out-spending txs from esplora for LN forensics`);
// process each outspend
for (const [index, channel] of channels.entries()) {
let reason = 0;
const cached: string[] = [];
try {
const outspends = allOutspends[index];
if (!outspends || !outspends.length) {
// outspends are missing
continue;
}
cached.push(closingTx.txid);
const sequenceHex: string = closingTx.vin[0].sequence.toString(16);
const locktimeHex: string = closingTx.locktime.toString(16);
if (sequenceHex.substring(0, 2) === '80' && locktimeHex.substring(0, 2) === '20') {
reason = 2; // Here we can't be sure if it's a penalty or not
} else {
reason = 1;
const lightningScriptReasons: number[] = [];
for (const outspend of outspends) {
if (outspend.spent && outspend.txid) {
const spendingTx = this.txCache[outspend.txid];
if (!spendingTx) {
continue;
}
cached.push(spendingTx.txid);
const lightningScript = this.findLightningScript(spendingTx.vin[outspend.vin || 0]);
lightningScriptReasons.push(lightningScript);
}
}
}
if (reason) {
logger.debug('Setting closing reason ' + reason + ' for channel: ' + channel.id + '.');
await DB.query(`UPDATE channels SET closing_reason = ? WHERE id = ?`, [reason, channel.id]);
if (reason === 2 && resolvedForceClose) {
await DB.query(`UPDATE channels SET closing_resolved = ? WHERE id = ?`, [true, channel.id]);
}
if (reason !== 2 || resolvedForceClose) {
const filteredReasons = lightningScriptReasons.filter((r) => r !== 1);
if (filteredReasons.length) {
if (filteredReasons.some((r) => r === 2 || r === 4)) {
// Force closed with penalty
reason = 3;
} else {
// Force closed without penalty
reason = 2;
await DB.query(`UPDATE channels SET closing_resolved = ? WHERE id = ?`, [true, channel.id]);
}
await DB.query(`UPDATE channels SET closing_reason = ? WHERE id = ?`, [reason, channel.id]);
// clean up cached transactions
cached.forEach(txid => {
delete this.txCache[txid];
});
} else {
forceClosedChannels.push({ channel, cachedSpends: cached });
}
} catch (e) {
logger.err(`$runClosedChannelsForensics() failed for channel ${channel.short_id}. Reason: ${e instanceof Error ? e.message : e}`);
}
} catch (e) {
logger.err(`$runClosedChannelsForensics() failed for channel ${channel.short_id}. Reason: ${e instanceof Error ? e.message : e}`);
}
++progress;
// fetch force-closing transactions in bulk
const closingTxs = await this.fetchTransactions(forceClosedChannels.map(x => x.channel.closing_transaction_id));
logger.info(`Fetched ${closingTxs.length} closing txs from esplora for LN forensics`);
// process channels with no lightning script reasons
for (const { channel, cachedSpends } of forceClosedChannels) {
const closingTx = this.txCache[channel.closing_transaction_id];
if (!closingTx) {
// no channel close transaction found yet
continue;
}
/*
We can detect a commitment transaction (force close) by reading Sequence and Locktime
https://github.com/lightning/bolts/blob/master/03-transactions.md#commitment-transaction
*/
const sequenceHex: string = closingTx.vin[0].sequence.toString(16);
const locktimeHex: string = closingTx.locktime.toString(16);
let reason;
if (sequenceHex.substring(0, 2) === '80' && locktimeHex.substring(0, 2) === '20') {
// Force closed, but we can't be sure if it's a penalty or not
reason = 2;
} else {
// Mutually closed
reason = 1;
// clean up cached transactions
delete this.txCache[closingTx.txid];
for (const txid of cachedSpends) {
delete this.txCache[txid];
}
}
await DB.query(`UPDATE channels SET closing_reason = ? WHERE id = ?`, [reason, channel.id]);
}
progress += channels.length;
const elapsedSeconds = Math.round((new Date().getTime() / 1000) - this.loggerTimer);
if (elapsedSeconds > 10) {
logger.debug(`Updating channel closed channel forensics ${progress}/${channels.length}`);
logger.debug(`Updating channel closed channel forensics ${progress}/${allChannels.length}`);
this.loggerTimer = new Date().getTime() / 1000;
}
}
@ -220,8 +263,11 @@ class ForensicsService {
logger.debug(`Started running open channel forensics...`);
const channels = await channelsApi.$getChannelsWithoutSourceChecked();
// preload open channel transactions
await this.fetchTransactions(channels.map(channel => channel.transaction_id), true);
for (const openChannel of channels) {
let openTx = await this.fetchTransaction(openChannel.transaction_id, true);
const openTx = this.txCache[openChannel.transaction_id];
if (!openTx) {
continue;
}
@ -276,7 +322,7 @@ class ForensicsService {
// Check if a channel open tx input spends the result of a swept channel close output
private async $attributeSweptChannelCloses(openChannel: ILightningApi.Channel, input: IEsploraApi.Vin): Promise<void> {
let sweepTx = await this.fetchTransaction(input.txid, true);
const sweepTx = await this.fetchTransaction(input.txid, true);
if (!sweepTx) {
logger.err(`couldn't find input transaction for channel forensics ${openChannel.channel_id} ${input.txid}`);
return;
@ -335,7 +381,7 @@ class ForensicsService {
if (matched && !ambiguous) {
// fetch closing channel transaction and perform forensics on the outputs
let prevChannelTx = await this.fetchTransaction(input.txid, true);
const prevChannelTx = await this.fetchTransaction(input.txid, true);
let outspends: IEsploraApi.Outspend[] | undefined;
try {
outspends = await bitcoinApi.$getOutspends(input.txid);
@ -355,17 +401,17 @@ class ForensicsService {
};
});
}
// preload outspend transactions
await this.fetchTransactions(outspends.filter(o => o.spent && o.txid).map(o => o.txid), true);
for (let i = 0; i < outspends?.length; i++) {
const outspend = outspends[i];
const output = prevChannel.outputs[i];
if (outspend.spent && outspend.txid) {
try {
const spendingTx = await this.fetchTransaction(outspend.txid, true);
if (spendingTx) {
output.type = this.findLightningScript(spendingTx.vin[outspend.vin || 0]);
}
} catch (e) {
logger.err(`Failed to call ${config.ESPLORA.REST_API_URL + '/tx/' + outspend.txid}. Reason ${e instanceof Error ? e.message : e}`);
const spendingTx = this.txCache[outspend.txid];
if (spendingTx) {
output.type = this.findLightningScript(spendingTx.vin[outspend.vin || 0]);
}
} else {
output.type = 0;
@ -430,13 +476,36 @@ class ForensicsService {
}
await Common.sleep$(config.LIGHTNING.FORENSICS_RATE_LIMIT);
} catch (e) {
logger.err(`Failed to call ${config.ESPLORA.REST_API_URL + '/tx/' + txid + '/outspends'}. Reason ${e instanceof Error ? e.message : e}`);
logger.err(`Failed to call ${config.ESPLORA.REST_API_URL + '/tx/' + txid}. Reason ${e instanceof Error ? e.message : e}`);
return null;
}
}
return tx;
}
// fetches a batch of transactions and adds them to the txCache
// the returned list of txs does *not* preserve ordering or number
async fetchTransactions(txids, temp: boolean = false): Promise<(IEsploraApi.Transaction | null)[]> {
// deduplicate txids
const uniqueTxids = [...new Set<string>(txids)];
// filter out any transactions we already have in the cache
const needToFetch: string[] = uniqueTxids.filter(txid => !this.txCache[txid]);
try {
const txs = await bitcoinApi.$getRawTransactions(needToFetch);
for (const tx of txs) {
this.txCache[tx.txid] = tx;
if (temp) {
this.tempCached.push(tx.txid);
}
}
await Common.sleep$(config.LIGHTNING.FORENSICS_RATE_LIMIT);
} catch (e) {
logger.err(`Failed to call ${config.ESPLORA.REST_API_URL + '/txs'}. Reason ${e instanceof Error ? e.message : e}`);
return [];
}
return txids.map(txid => this.txCache[txid]);
}
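
Callers therefore treat the return value as best-effort and read results back through the cache, as the forensics loops above do. An illustrative call (the txids and instance name are placeholders):

```ts
// Prefetch in one batched esplora call; duplicate txids are deduped.
await forensicsService.fetchTransactions([txidA, txidB, txidA]);
// Read through the cache; a miss simply means the batch fetch failed.
const tx = forensicsService.txCache[txidA];
if (tx) {
  // safe to inspect tx.vin, tx.locktime, etc.
}
```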
clearTempCache(): void {
for (const txid of this.tempCached) {
delete this.txCache[txid];

View File

@ -288,22 +288,32 @@ class NetworkSyncService {
}
logger.debug(`${log}`, logger.tags.ln);
const channels = await channelsApi.$getChannelsByStatus([0, 1]);
for (const channel of channels) {
const spendingTx = await bitcoinApi.$getOutspend(channel.transaction_id, channel.transaction_vout);
if (spendingTx.spent === true && spendingTx.status?.confirmed === true) {
logger.debug(`Marking channel: ${channel.id} as closed.`, logger.tags.ln);
await DB.query(`UPDATE channels SET status = 2, closing_date = FROM_UNIXTIME(?) WHERE id = ?`,
[spendingTx.status.block_time, channel.id]);
if (spendingTx.txid && !channel.closing_transaction_id) {
await DB.query(`UPDATE channels SET closing_transaction_id = ? WHERE id = ?`, [spendingTx.txid, channel.id]);
const allChannels = await channelsApi.$getChannelsByStatus([0, 1]);
const sliceLength = Math.ceil(config.ESPLORA.BATCH_QUERY_BASE_SIZE / 2);
// process batches of 5000 channels
for (let i = 0; i < Math.ceil(allChannels.length / sliceLength); i++) {
const channels = allChannels.slice(i * sliceLength, (i + 1) * sliceLength);
const outspends = await bitcoinApi.$getOutSpendsByOutpoint(channels.map(channel => {
return { txid: channel.transaction_id, vout: channel.transaction_vout };
}));
for (const [index, channel] of channels.entries()) {
const spendingTx = outspends[index];
if (spendingTx.spent === true && spendingTx.status?.confirmed === true) {
// logger.debug(`Marking channel: ${channel.id} as closed.`, logger.tags.ln);
await DB.query(`UPDATE channels SET status = 2, closing_date = FROM_UNIXTIME(?) WHERE id = ?`,
[spendingTx.status.block_time, channel.id]);
if (spendingTx.txid && !channel.closing_transaction_id) {
await DB.query(`UPDATE channels SET closing_transaction_id = ? WHERE id = ?`, [spendingTx.txid, channel.id]);
}
}
}
++progress;
progress += channels.length;
const elapsedSeconds = Math.round((new Date().getTime() / 1000) - this.loggerTimer);
if (elapsedSeconds > config.LIGHTNING.LOGGER_UPDATE_INTERVAL) {
logger.debug(`Checking if channel has been closed ${progress}/${channels.length}`, logger.tags.ln);
logger.debug(`Checking if channel has been closed ${progress}/${allChannels.length}`, logger.tags.ln);
this.loggerTimer = new Date().getTime() / 1000;
}
}

View File

@ -25,7 +25,10 @@ export interface PriceHistory {
class PriceUpdater {
public historyInserted = false;
private lastRun = 0;
private timeBetweenUpdatesMs = 360_0000 / config.MEMPOOL.PRICE_UPDATES_PER_HOUR;
private cyclePosition = -1;
private firstRun = true;
private lastTime = -1;
private lastHistoricalRun = 0;
private running = false;
private feeds: PriceFeed[] = [];
@ -41,6 +44,8 @@ class PriceUpdater {
this.feeds.push(new CoinbaseApi());
this.feeds.push(new BitfinexApi());
this.feeds.push(new GeminiApi());
this.setCyclePosition();
}
public getLatestPrices(): ApiPrice {
@ -100,22 +105,48 @@ class PriceUpdater {
this.running = false;
}
private getMillisecondsSinceBeginningOfHour(): number {
const now = new Date();
const beginningOfHour = new Date(now);
beginningOfHour.setMinutes(0, 0, 0);
return now.getTime() - beginningOfHour.getTime();
}
private setCyclePosition(): void {
const millisecondsSinceBeginningOfHour = this.getMillisecondsSinceBeginningOfHour();
for (let i = 0; i < config.MEMPOOL.PRICE_UPDATES_PER_HOUR; i++) {
if (this.timeBetweenUpdatesMs * i > millisecondsSinceBeginningOfHour) {
this.cyclePosition = i;
return;
}
}
this.cyclePosition = config.MEMPOOL.PRICE_UPDATES_PER_HOUR;
}
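
A worked example of the cycle math (values assumed, not from the diff): with `PRICE_UPDATES_PER_HOUR: 4`, `timeBetweenUpdatesMs` is 3,600,000 / 4 = 900,000. A process started 20 minutes into the hour sees 1,200,000 ms elapsed, and the loop picks the first slot strictly ahead of that (i = 2, since 1,800,000 > 1,200,000), so `cyclePosition` becomes 2 and the first sample fires at minute 30. The database write still happens only on the `cyclePosition === 0` slot, i.e. once per hour.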
/**
* Fetch last BTC price from exchanges, average them, and save it in the database once every hour
*/
private async $updatePrice(): Promise<void> {
if (this.lastRun === 0 && config.DATABASE.ENABLED === true) {
this.lastRun = await PricesRepository.$getLatestPriceTime();
let forceUpdate = false;
if (this.firstRun === true && config.DATABASE.ENABLED === true) {
const lastUpdate = await PricesRepository.$getLatestPriceTime();
if (new Date().getTime() / 1000 - lastUpdate > this.timeBetweenUpdatesMs / 1000) {
forceUpdate = true;
}
this.firstRun = false;
}
if ((Math.round(new Date().getTime() / 1000) - this.lastRun) < 3600) {
// Refresh only once every hour
const millisecondsSinceBeginningOfHour = this.getMillisecondsSinceBeginningOfHour();
// Reset the cycle on new hour
if (this.lastTime > millisecondsSinceBeginningOfHour) {
this.cyclePosition = 0;
}
this.lastTime = millisecondsSinceBeginningOfHour;
if (millisecondsSinceBeginningOfHour < this.timeBetweenUpdatesMs * this.cyclePosition && !forceUpdate && this.cyclePosition !== 0) {
return;
}
const previousRun = this.lastRun;
this.lastRun = new Date().getTime() / 1000;
for (const currency of this.currencies) {
let prices: number[] = [];
@ -146,26 +177,27 @@ class PriceUpdater {
}
}
logger.info(`Latest BTC fiat averaged price: ${JSON.stringify(this.latestPrices)}`);
if (config.DATABASE.ENABLED === true) {
if (config.DATABASE.ENABLED === true && this.cyclePosition === 0) {
// Save everything in db
try {
const p = 60 * 60 * 1000; // milliseconds in an hour
const nowRounded = new Date(Math.round(new Date().getTime() / p) * p); // https://stackoverflow.com/a/28037042
this.latestPrices.time = nowRounded.getTime() / 1000;
await PricesRepository.$savePrices(nowRounded.getTime() / 1000, this.latestPrices);
} catch (e) {
this.lastRun = previousRun + 5 * 60;
logger.err(`Cannot save latest prices into db. Trying again in 5 minutes. Reason: ${(e instanceof Error ? e.message : e)}`);
}
}
this.latestPrices.time = Math.round(new Date().getTime() / 1000);
logger.info(`Latest BTC fiat averaged price: ${JSON.stringify(this.latestPrices)}`);
if (this.ratesChangedCallback) {
this.ratesChangedCallback(this.latestPrices);
}
this.lastRun = new Date().getTime() / 1000;
if (!forceUpdate) {
this.cyclePosition++;
}
if (this.latestPrices.USD === -1) {
this.latestPrices = await PricesRepository.$getLatestConversionRates();

View File

@ -0,0 +1,179 @@
/*
MIT License
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (https://sindresorhus.com)
Permission is hereby granted, free of charge, to any person obtaining a copy of this
software and associated documentation files (the "Software"), to deal in the Software
without restriction, including without limitation the rights to use, copy, modify,
merge, publish, distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be included in all copies
or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE
OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
/*
How it works:
`this._head` is an instance of `Node` which keeps track of its current value and nests
another instance of `Node` that keeps the value that comes after it. When a value is
provided to `.enqueue()`, the code needs to iterate through `this._head`, going deeper
and deeper to find the last value. However, iterating through every single item is slow.
This problem is solved by saving a reference to the last value as `this._tail` so that
it can reference it to add a new value.
*/
class Node {
value;
next;
constructor(value) {
this.value = value;
}
}
class Queue {
private _head;
private _tail;
private _size;
constructor() {
this.clear();
}
enqueue(value) {
const node = new Node(value);
if (this._head) {
this._tail.next = node;
this._tail = node;
} else {
this._head = node;
this._tail = node;
}
this._size++;
}
dequeue() {
const current = this._head;
if (!current) {
return;
}
this._head = this._head.next;
this._size--;
return current.value;
}
clear() {
this._head = undefined;
this._tail = undefined;
this._size = 0;
}
get size() {
return this._size;
}
*[Symbol.iterator]() {
let current = this._head;
while (current) {
yield current.value;
current = current.next;
}
}
}
interface LimitFunction {
readonly activeCount: number;
readonly pendingCount: number;
clearQueue: () => void;
<Arguments extends unknown[], ReturnType>(
fn: (...args: Arguments) => PromiseLike<ReturnType> | ReturnType,
...args: Arguments
): Promise<ReturnType>;
}
export default function pLimit(concurrency: number): LimitFunction {
if (
!(
(Number.isInteger(concurrency) ||
concurrency === Number.POSITIVE_INFINITY) &&
concurrency > 0
)
) {
throw new TypeError('Expected `concurrency` to be a number from 1 and up');
}
const queue = new Queue();
let activeCount = 0;
const next = () => {
activeCount--;
if (queue.size > 0) {
queue.dequeue()();
}
};
const run = async (fn, resolve, args) => {
activeCount++;
const result = (async () => fn(...args))();
resolve(result);
try {
await result;
} catch {}
next();
};
const enqueue = (fn, resolve, args) => {
queue.enqueue(run.bind(undefined, fn, resolve, args));
(async () => {
// This function needs to wait until the next microtask before comparing
// `activeCount` to `concurrency`, because `activeCount` is updated asynchronously
// when the run function is dequeued and called. The comparison in the if-statement
// needs to happen asynchronously as well to get an up-to-date value for `activeCount`.
await Promise.resolve();
if (activeCount < concurrency && queue.size > 0) {
queue.dequeue()();
}
})();
};
const generator = (fn, ...args) =>
new Promise((resolve) => {
enqueue(fn, resolve, args);
});
Object.defineProperties(generator, {
activeCount: {
get: () => activeCount,
},
pendingCount: {
get: () => queue.size,
},
clearQueue: {
value: () => {
queue.clear();
},
},
});
return generator as any;
}
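
A usage sketch with standard p-limit semantics (the import path and URLs are placeholders):

```ts
import pLimit from './p-limit';

const limit = pLimit(2); // at most two tasks in flight at once

async function fetchAll(urls: string[]): Promise<string[]> {
  // Every call enters the queue immediately; the queue drains two at a time.
  return Promise.all(urls.map(url => limit(async () => {
    const res = await fetch(url);
    return res.text();
  })));
}
```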


@ -0,0 +1,74 @@
function powMod(x: bigint, power: number, modulo: bigint): bigint {
for (let i = 0; i < power; i++) {
x = (x * x) % modulo;
}
return x;
}
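// sqrtMod computes x^((P+1)/4) mod P via the fixed addition chain below.
// secp256k1's P is congruent to 3 (mod 4), so this yields a square root of x whenever x is a quadratic residue.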
function sqrtMod(x: bigint, P: bigint): bigint {
const b2 = (x * x * x) % P;
const b3 = (b2 * b2 * x) % P;
const b6 = (powMod(b3, 3, P) * b3) % P;
const b9 = (powMod(b6, 3, P) * b3) % P;
const b11 = (powMod(b9, 2, P) * b2) % P;
const b22 = (powMod(b11, 11, P) * b11) % P;
const b44 = (powMod(b22, 22, P) * b22) % P;
const b88 = (powMod(b44, 44, P) * b44) % P;
const b176 = (powMod(b88, 88, P) * b88) % P;
const b220 = (powMod(b176, 44, P) * b44) % P;
const b223 = (powMod(b220, 3, P) * b3) % P;
const t1 = (powMod(b223, 23, P) * b22) % P;
const t2 = (powMod(t1, 6, P) * b2) % P;
const root = powMod(t2, 2, P);
return root;
}
const curveP = BigInt(`0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F`);
/**
* This function tells whether the given hex string is a SEC1-encoded point on the secp256k1 curve.
* @param {string} pointHex The point as a hex string (*must not* include a '0x' prefix)
* @returns {boolean} true if the point is on the SECP256K1 curve
*/
export function isPoint(pointHex: string): boolean {
if (
!(
// is uncompressed
(
(pointHex.length === 130 && pointHex.startsWith('04')) ||
// OR is compressed
(pointHex.length === 66 &&
(pointHex.startsWith('02') || pointHex.startsWith('03')))
)
)
) {
return false;
}
// Function modified slightly from noble-curves
// Now we know that pointHex is a 33 or 65 byte hex string.
const isCompressed = pointHex.length === 66;
const x = BigInt(`0x${pointHex.slice(2, 66)}`);
if (x >= curveP) {
return false;
}
if (!isCompressed) {
const y = BigInt(`0x${pointHex.slice(66, 130)}`);
if (y >= curveP) {
return false;
}
// Just check y^2 = x^3 + 7 (secp256k1 curve)
return (y * y) % curveP === (x * x * x + 7n) % curveP;
} else {
// Get unaltered y^2 (no mod p)
const ySquared = (x * x * x + 7n) % curveP;
// Try to sqrt it, it will round down if not perfect root
const y = sqrtMod(ySquared, curveP);
// If we square and it's equal, then it was a perfect root and valid point.
return (y * y) % curveP === ySquared;
}
}
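
A quick sanity check (import path is illustrative): the first value is the compressed secp256k1 generator point, which must validate; the second has a valid shape but x = 0, which is not on the curve:

```ts
import { isPoint } from './secp256k1'; // illustrative path

isPoint('0279be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798'); // true
isPoint('02' + '00'.repeat(32)); // false: y^2 = 7 has no square root mod P
```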


@ -0,0 +1,3 @@
I hereby accept the terms of the Contributor License Agreement in the CONTRIBUTING.md file of the mempool/mempool git repository as of November 17, 2023.
Signed: 0xBEEFCAF3

contributors/Czino.txt

@ -0,0 +1,3 @@
I hereby accept the terms of the Contributor License Agreement in the CONTRIBUTING.md file of the mempool/mempool git repository as of July 29, 2023.
Signed: Czino

contributors/TKone7.txt

@ -0,0 +1,3 @@
I hereby accept the terms of the Contributor License Agreement in the CONTRIBUTING.md file of the mempool/mempool git repository as of October 23, 2023.
Signed: TKone7


@ -0,0 +1,3 @@
I hereby accept the terms of the Contributor License Agreement in the CONTRIBUTING.md file with sha256 hash c80c5ee4c71c5a76a1f6cd35339bd0c45b25b491933ea7b02a66470e9f43a6fd.
Signed: TheBlueMatt


@ -0,0 +1,3 @@
I hereby accept the terms of the Contributor License Agreement in the CONTRIBUTING.md file of the mempool/mempool git repository as of August 2, 2023.
Signed: andrewtoth


@ -0,0 +1,3 @@
I hereby accept the terms of the Contributor License Agreement in the CONTRIBUTING.md file of the mempool/mempool git repository as of January 25, 2022.
Signed: bguillaumat


@ -0,0 +1,3 @@
I hereby accept the terms of the Contributor License Agreement in the CONTRIBUTING.md file of the mempool/mempool git repository as of July 21, 2023.
Signed: devinbileck


@ -0,0 +1,3 @@
I hereby accept the terms of the Contributor License Agreement in the CONTRIBUTING.md file of the mempool/mempool git repository as of October 23, 2023.
Signed: fanquake

contributors/fiatjaf.txt

@ -0,0 +1,5 @@
I hereby accept the terms of the Contributor License Agreement in the CONTRIBUTING.md file of the mempool/mempool git repository as of January 25, 2022.
I also regret having ever contributed to this repository since they keep asking me to sign this legalese timewaste things.
And finally I don't care about licenses and won't sue anyone over intellectual property, which is a fake statist construct invented by evil lobby lawyers.
Signed: fiatjaf

contributors/fubz.txt

@ -0,0 +1,3 @@
I hereby accept the terms of the Contributor License Agreement in the CONTRIBUTING.md file of the mempool/mempool git repository as of January 25, 2022.
Signed: fubz

contributors/ncois.txt

@ -0,0 +1,3 @@
I hereby accept the terms of the Contributor License Agreement in the CONTRIBUTING.md file of the mempool/mempool git repository as of November 16, 2023.
Signed: ncois


@ -0,0 +1,3 @@
I hereby accept the terms of the Contributor License Agreement in the CONTRIBUTING.md file of the mempool/mempool git repository as of September 12, 2023.
Signed: orange surf


@ -0,0 +1,3 @@
I hereby accept the terms of the Contributor License Agreement in the CONTRIBUTING.md file of the mempool/mempool git repository as of July 20, 2023.
Signed: pedromvpg


@ -0,0 +1,3 @@
I hereby accept the terms of the Contributor License Agreement in the CONTRIBUTING.md file of the mempool/mempool git repository as of July 29, 2023.
Signed: rishkwal


@ -0,0 +1,3 @@
I hereby accept the terms of the Contributor License Agreement in the CONTRIBUTING.md file of the mempool/mempool git repository as of November 15, 2023.
Signed: shubhamkmr04

contributors/starius.txt

@ -0,0 +1,3 @@
I hereby accept the terms of the Contributor License Agreement in the CONTRIBUTING.md file of the mempool/mempool git repository as of Oct 13, 2023.
Signed starius


@ -0,0 +1,3 @@
I hereby accept the terms of the Contributor License Agreement in the CONTRIBUTING.md file of the mempool/mempool git repository as of December 13, 2023.
Signed: takuabonn


@ -113,7 +113,8 @@ Below we list all settings from `mempool-config.json` and the corresponding over
"ADVANCED_GBT_MEMPOOL": false,
"CPFP_INDEXING": false,
"MAX_BLOCKS_BULK_QUERY": 0,
"DISK_CACHE_BLOCK_INTERVAL": 6
"DISK_CACHE_BLOCK_INTERVAL": 6,
"PRICE_UPDATES_PER_HOUR": 1
},
```
@ -123,7 +124,7 @@ Corresponding `docker-compose.yml` overrides:
environment:
MEMPOOL_NETWORK: ""
MEMPOOL_BACKEND: ""
MEMPOOL_HTTP_PORT: ""
BACKEND_HTTP_PORT: ""
MEMPOOL_SPAWN_CLUSTER_PROCS: ""
MEMPOOL_API_URL_PREFIX: ""
MEMPOOL_POLL_RATE_MS: ""
@ -146,6 +147,7 @@ Corresponding `docker-compose.yml` overrides:
MEMPOOL_CPFP_INDEXING: ""
MEMPOOL_MAX_BLOCKS_BULK_QUERY: ""
MEMPOOL_DISK_CACHE_BLOCK_INTERVAL: ""
MEMPOOL_PRICE_UPDATES_PER_HOUR: ""
...
```
@ -162,7 +164,9 @@ Corresponding `docker-compose.yml` overrides:
"PORT": 8332,
"USERNAME": "mempool",
"PASSWORD": "mempool",
"TIMEOUT": 60000
"TIMEOUT": 60000,
"COOKIE": false,
"COOKIE_PATH": ""
},
```
@ -175,6 +179,8 @@ Corresponding `docker-compose.yml` overrides:
CORE_RPC_USERNAME: ""
CORE_RPC_PASSWORD: ""
CORE_RPC_TIMEOUT: 60000
CORE_RPC_COOKIE: false
CORE_RPC_COOKIE_PATH: ""
...
```
@ -229,7 +235,9 @@ Corresponding `docker-compose.yml` overrides:
"PORT": 8332,
"USERNAME": "mempool",
"PASSWORD": "mempool",
"TIMEOUT": 60000
"TIMEOUT": 60000,
"COOKIE": false,
"COOKIE_PATH": ""
},
```
@ -242,6 +250,8 @@ Corresponding `docker-compose.yml` overrides:
SECOND_CORE_RPC_USERNAME: ""
SECOND_CORE_RPC_PASSWORD: ""
SECOND_CORE_RPC_TIMEOUT: ""
SECOND_CORE_RPC_COOKIE: false
SECOND_CORE_RPC_COOKIE_PATH: ""
...
```
@ -363,25 +373,6 @@ Corresponding `docker-compose.yml` overrides:
<br/>
`mempool-config.json`:
```json
"PRICE_DATA_SERVER": {
"TOR_URL": "http://wizpriceje6q5tdrxkyiazsgu7irquiqjy2dptezqhrtu7l2qelqktid.onion/getAllMarketPrices",
"CLEARNET_URL": "https://price.bisq.wiz.biz/getAllMarketPrices"
}
```
Corresponding `docker-compose.yml` overrides:
```yaml
api:
environment:
PRICE_DATA_SERVER_TOR_URL: ""
PRICE_DATA_SERVER_CLEARNET_URL: ""
...
```
<br/>
`mempool-config.json`:
```json
"LIGHTNING": {


@ -1,4 +1,4 @@
FROM node:16.16.0-buster-slim AS builder
FROM node:20.8.0-buster-slim AS builder
ARG commitHash
ENV MEMPOOL_COMMIT_HASH=${commitHash}
@ -7,16 +7,17 @@ WORKDIR /build
COPY . .
RUN apt-get update
RUN apt-get install -y build-essential python3 pkg-config curl
RUN apt-get install -y build-essential python3 pkg-config curl ca-certificates
# Install Rust via rustup
RUN CPU_ARCH=$(uname -m); if [ "$CPU_ARCH" = "armv7l" ]; then c_rehash; fi
RUN curl https://sh.rustup.rs -sSf | sh -s -- -y --default-toolchain stable
ENV PATH="/root/.cargo/bin:$PATH"
RUN npm install --omit=dev --omit=optional
RUN npm run package
FROM node:16.16.0-buster-slim
FROM node:20.8.0-buster-slim
WORKDIR /backend


@ -8,6 +8,7 @@
"API_URL_PREFIX": "__MEMPOOL_API_URL_PREFIX__",
"POLL_RATE_MS": __MEMPOOL_POLL_RATE_MS__,
"CACHE_DIR": "__MEMPOOL_CACHE_DIR__",
"CACHE_ENABLED": __MEMPOOL_CACHE_ENABLED__,
"CLEAR_PROTECTION_MINUTES": __MEMPOOL_CLEAR_PROTECTION_MINUTES__,
"RECOMMENDED_FEE_PERCENTILE": __MEMPOOL_RECOMMENDED_FEE_PERCENTILE__,
"BLOCK_WEIGHT_UNITS": __MEMPOOL_BLOCK_WEIGHT_UNITS__,
@ -32,14 +33,17 @@
"MAX_PUSH_TX_SIZE_WEIGHT": __MEMPOOL_MAX_PUSH_TX_SIZE_WEIGHT__,
"ALLOW_UNREACHABLE": __MEMPOOL_ALLOW_UNREACHABLE__,
"POOLS_JSON_TREE_URL": "__MEMPOOL_POOLS_JSON_TREE_URL__",
"POOLS_JSON_URL": "__MEMPOOL_POOLS_JSON_URL__"
"POOLS_JSON_URL": "__MEMPOOL_POOLS_JSON_URL__",
"PRICE_UPDATES_PER_HOUR": __MEMPOOL_PRICE_UPDATES_PER_HOUR__
},
"CORE_RPC": {
"HOST": "__CORE_RPC_HOST__",
"PORT": __CORE_RPC_PORT__,
"USERNAME": "__CORE_RPC_USERNAME__",
"PASSWORD": "__CORE_RPC_PASSWORD__",
"TIMEOUT": __CORE_RPC_TIMEOUT__
"TIMEOUT": __CORE_RPC_TIMEOUT__,
"COOKIE": __CORE_RPC_COOKIE__,
"COOKIE_PATH": "__CORE_RPC_COOKIE_PATH__"
},
"ELECTRUM": {
"HOST": "__ELECTRUM_HOST__",
@ -49,14 +53,20 @@
"ESPLORA": {
"REST_API_URL": "__ESPLORA_REST_API_URL__",
"UNIX_SOCKET_PATH": "__ESPLORA_UNIX_SOCKET_PATH__",
"RETRY_UNIX_SOCKET_AFTER": __ESPLORA_RETRY_UNIX_SOCKET_AFTER__
"BATCH_QUERY_BASE_SIZE": __ESPLORA_BATCH_QUERY_BASE_SIZE__,
"RETRY_UNIX_SOCKET_AFTER": __ESPLORA_RETRY_UNIX_SOCKET_AFTER__,
"REQUEST_TIMEOUT": __ESPLORA_REQUEST_TIMEOUT__,
"FALLBACK_TIMEOUT": __ESPLORA_FALLBACK_TIMEOUT__,
"FALLBACK": __ESPLORA_FALLBACK__
},
"SECOND_CORE_RPC": {
"HOST": "__SECOND_CORE_RPC_HOST__",
"PORT": __SECOND_CORE_RPC_PORT__,
"USERNAME": "__SECOND_CORE_RPC_USERNAME__",
"PASSWORD": "__SECOND_CORE_RPC_PASSWORD__",
"TIMEOUT": __SECOND_CORE_RPC_TIMEOUT__
"TIMEOUT": __SECOND_CORE_RPC_TIMEOUT__,
"COOKIE": __SECOND_CORE_RPC_COOKIE__,
"COOKIE_PATH": "__SECOND_CORE_RPC_COOKIE_PATH__"
},
"DATABASE": {
"ENABLED": __DATABASE_ENABLED__,
@ -66,7 +76,8 @@
"DATABASE": "__DATABASE_DATABASE__",
"USERNAME": "__DATABASE_USERNAME__",
"PASSWORD": "__DATABASE_PASSWORD__",
"TIMEOUT": __DATABASE_TIMEOUT__
"TIMEOUT": __DATABASE_TIMEOUT__,
"PID_DIR": "__DATABASE_PID_DIR__"
},
"SYSLOG": {
"ENABLED": __SYSLOG_ENABLED__,
@ -110,10 +121,6 @@
"USERNAME": "__SOCKS5PROXY_USERNAME__",
"PASSWORD": "__SOCKS5PROXY_PASSWORD__"
},
"PRICE_DATA_SERVER": {
"TOR_URL": "__PRICE_DATA_SERVER_TOR_URL__",
"CLEARNET_URL": "__PRICE_DATA_SERVER_CLEARNET_URL__"
},
"EXTERNAL_DATA_SERVER": {
"MEMPOOL_API": "__EXTERNAL_DATA_SERVER_MEMPOOL_API__",
"MEMPOOL_ONION": "__EXTERNAL_DATA_SERVER_MEMPOOL_ONION__",
@ -133,5 +140,14 @@
"AUDIT": __REPLICATION_AUDIT__,
"AUDIT_START_HEIGHT": __REPLICATION_AUDIT_START_HEIGHT__,
"SERVERS": __REPLICATION_SERVERS__
},
"MEMPOOL_SERVICES": {
"API": "__MEMPOOL_SERVICES_API__",
"ACCELERATIONS": __MEMPOOL_SERVICES_ACCELERATIONS__
},
"REDIS": {
"ENABLED": __REDIS_ENABLED__,
"UNIX_SOCKET_PATH": "__REDIS_UNIX_SOCKET_PATH__",
"BATCH_QUERY_BASE_SIZE": __REDIS_BATCH_QUERY_BASE_SIZE__
}
}
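Note the quoting convention in this template: string placeholders sit inside quotes (e.g. `"__CORE_RPC_HOST__"`), while numeric and boolean placeholders are bare (e.g. `__CORE_RPC_PORT__`, `__CORE_RPC_COOKIE__`), so the entrypoint's `sed` substitutions produce valid JSON without any extra quoting. A one-line demo of the mechanism, with an illustrative value:

```sh
# A bare placeholder replaced by an unquoted value yields a JSON number.
echo '"PORT": __CORE_RPC_PORT__,' | sed 's!__CORE_RPC_PORT__!8332!g'
# Output: "PORT": 8332,
```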
View File
@ -9,6 +9,7 @@ __MEMPOOL_SPAWN_CLUSTER_PROCS__=${MEMPOOL_SPAWN_CLUSTER_PROCS:=0}
__MEMPOOL_API_URL_PREFIX__=${MEMPOOL_API_URL_PREFIX:=/api/v1/}
__MEMPOOL_POLL_RATE_MS__=${MEMPOOL_POLL_RATE_MS:=2000}
__MEMPOOL_CACHE_DIR__=${MEMPOOL_CACHE_DIR:=./cache}
__MEMPOOL_CACHE_ENABLED__=${MEMPOOL_CACHE_ENABLED:=true}
__MEMPOOL_CLEAR_PROTECTION_MINUTES__=${MEMPOOL_CLEAR_PROTECTION_MINUTES:=20}
__MEMPOOL_RECOMMENDED_FEE_PERCENTILE__=${MEMPOOL_RECOMMENDED_FEE_PERCENTILE:=50}
__MEMPOOL_BLOCK_WEIGHT_UNITS__=${MEMPOOL_BLOCK_WEIGHT_UNITS:=4000000}
@ -34,7 +35,7 @@ __MEMPOOL_MAX_BLOCKS_BULK_QUERY__=${MEMPOOL_MAX_BLOCKS_BULK_QUERY:=0}
__MEMPOOL_DISK_CACHE_BLOCK_INTERVAL__=${MEMPOOL_DISK_CACHE_BLOCK_INTERVAL:=6}
__MEMPOOL_MAX_PUSH_TX_SIZE_WEIGHT__=${MEMPOOL_MAX_PUSH_TX_SIZE_WEIGHT:=4000000}
__MEMPOOL_ALLOW_UNREACHABLE__=${MEMPOOL_ALLOW_UNREACHABLE:=true}
__MEMPOOL_PRICE_UPDATES_PER_HOUR__=${MEMPOOL_PRICE_UPDATES_PER_HOUR:=1}
# CORE_RPC
__CORE_RPC_HOST__=${CORE_RPC_HOST:=127.0.0.1}
@ -42,6 +43,8 @@ __CORE_RPC_PORT__=${CORE_RPC_PORT:=8332}
__CORE_RPC_USERNAME__=${CORE_RPC_USERNAME:=mempool}
__CORE_RPC_PASSWORD__=${CORE_RPC_PASSWORD:=mempool}
__CORE_RPC_TIMEOUT__=${CORE_RPC_TIMEOUT:=60000}
__CORE_RPC_COOKIE__=${CORE_RPC_COOKIE:=false}
__CORE_RPC_COOKIE_PATH__=${CORE_RPC_COOKIE_PATH:=""}
# ELECTRUM
__ELECTRUM_HOST__=${ELECTRUM_HOST:=127.0.0.1}
@ -51,7 +54,11 @@ __ELECTRUM_TLS_ENABLED__=${ELECTRUM_TLS_ENABLED:=false}
# ESPLORA
__ESPLORA_REST_API_URL__=${ESPLORA_REST_API_URL:=http://127.0.0.1:3000}
__ESPLORA_UNIX_SOCKET_PATH__=${ESPLORA_UNIX_SOCKET_PATH:="null"}
__ESPLORA_BATCH_QUERY_BASE_SIZE__=${ESPLORA_BATCH_QUERY_BASE_SIZE:=1000}
__ESPLORA_RETRY_UNIX_SOCKET_AFTER__=${ESPLORA_RETRY_UNIX_SOCKET_AFTER:=30000}
__ESPLORA_REQUEST_TIMEOUT__=${ESPLORA_REQUEST_TIMEOUT:=5000}
__ESPLORA_FALLBACK_TIMEOUT__=${ESPLORA_FALLBACK_TIMEOUT:=5000}
__ESPLORA_FALLBACK__=${ESPLORA_FALLBACK:=[]}
# SECOND_CORE_RPC
__SECOND_CORE_RPC_HOST__=${SECOND_CORE_RPC_HOST:=127.0.0.1}
@ -59,6 +66,8 @@ __SECOND_CORE_RPC_PORT__=${SECOND_CORE_RPC_PORT:=8332}
__SECOND_CORE_RPC_USERNAME__=${SECOND_CORE_RPC_USERNAME:=mempool}
__SECOND_CORE_RPC_PASSWORD__=${SECOND_CORE_RPC_PASSWORD:=mempool}
__SECOND_CORE_RPC_TIMEOUT__=${SECOND_CORE_RPC_TIMEOUT:=60000}
__SECOND_CORE_RPC_COOKIE__=${SECOND_CORE_RPC_COOKIE:=false}
__SECOND_CORE_RPC_COOKIE_PATH__=${SECOND_CORE_RPC_COOKIE_PATH:=""}
# DATABASE
__DATABASE_ENABLED__=${DATABASE_ENABLED:=true}
@ -69,6 +78,7 @@ __DATABASE_DATABASE__=${DATABASE_DATABASE:=mempool}
__DATABASE_USERNAME__=${DATABASE_USERNAME:=mempool}
__DATABASE_PASSWORD__=${DATABASE_PASSWORD:=mempool}
__DATABASE_TIMEOUT__=${DATABASE_TIMEOUT:=180000}
__DATABASE_PID_DIR__=${DATABASE_PID_DIR:=""}
# SYSLOG
__SYSLOG_ENABLED__=${SYSLOG_ENABLED:=false}
@ -93,10 +103,6 @@ __SOCKS5PROXY_PORT__=${SOCKS5PROXY_PORT:=9050}
__SOCKS5PROXY_USERNAME__=${SOCKS5PROXY_USERNAME:=""}
__SOCKS5PROXY_PASSWORD__=${SOCKS5PROXY_PASSWORD:=""}
# PRICE_DATA_SERVER
__PRICE_DATA_SERVER_TOR_URL__=${PRICE_DATA_SERVER_TOR_URL:=http://wizpriceje6q5tdrxkyiazsgu7irquiqjy2dptezqhrtu7l2qelqktid.onion/getAllMarketPrices}
__PRICE_DATA_SERVER_CLEARNET_URL__=${PRICE_DATA_SERVER_CLEARNET_URL:=https://price.bisq.wiz.biz/getAllMarketPrices}
# EXTERNAL_DATA_SERVER
__EXTERNAL_DATA_SERVER_MEMPOOL_API__=${EXTERNAL_DATA_SERVER_MEMPOOL_API:=https://mempool.space/api/v1}
__EXTERNAL_DATA_SERVER_MEMPOOL_ONION__=${EXTERNAL_DATA_SERVER_MEMPOOL_ONION:=http://mempoolhqx4isw62xs7abwphsq7ldayuidyx2v2oethdhhj6mlo2r6ad.onion/api/v1}
@ -136,6 +142,14 @@ __REPLICATION_AUDIT__=${REPLICATION_AUDIT:=true}
__REPLICATION_AUDIT_START_HEIGHT__=${REPLICATION_AUDIT_START_HEIGHT:=774000}
__REPLICATION_SERVERS__=${REPLICATION_SERVERS:=[]}
# MEMPOOL_SERVICES
__MEMPOOL_SERVICES_API__=${MEMPOOL_SERVICES_API:=""}
__MEMPOOL_SERVICES_ACCELERATIONS__=${MEMPOOL_SERVICES_ACCELERATIONS:=false}
# REDIS
__REDIS_ENABLED__=${REDIS_ENABLED:=false}
__REDIS_UNIX_SOCKET_PATH__=${REDIS_UNIX_SOCKET_PATH:=true}
__REDIS_BATCH_QUERY_BASE_SIZE__=${REDIS_BATCH_QUERY_BASE_SIZE:=5000}
mkdir -p "${__MEMPOOL_CACHE_DIR__}"
@ -147,6 +161,7 @@ sed -i "s!__MEMPOOL_SPAWN_CLUSTER_PROCS__!${__MEMPOOL_SPAWN_CLUSTER_PROCS__}!g"
sed -i "s!__MEMPOOL_API_URL_PREFIX__!${__MEMPOOL_API_URL_PREFIX__}!g" mempool-config.json
sed -i "s!__MEMPOOL_POLL_RATE_MS__!${__MEMPOOL_POLL_RATE_MS__}!g" mempool-config.json
sed -i "s!__MEMPOOL_CACHE_DIR__!${__MEMPOOL_CACHE_DIR__}!g" mempool-config.json
sed -i "s!__MEMPOOL_CACHE_ENABLED__!${__MEMPOOL_CACHE_ENABLED__}!g" mempool-config.json
sed -i "s!__MEMPOOL_CLEAR_PROTECTION_MINUTES__!${__MEMPOOL_CLEAR_PROTECTION_MINUTES__}!g" mempool-config.json
sed -i "s!__MEMPOOL_RECOMMENDED_FEE_PERCENTILE__!${__MEMPOOL_RECOMMENDED_FEE_PERCENTILE__}!g" mempool-config.json
sed -i "s!__MEMPOOL_BLOCK_WEIGHT_UNITS__!${__MEMPOOL_BLOCK_WEIGHT_UNITS__}!g" mempool-config.json
@ -165,19 +180,22 @@ sed -i "s!__MEMPOOL_POOLS_JSON_URL__!${__MEMPOOL_POOLS_JSON_URL__}!g" mempool-co
sed -i "s!__MEMPOOL_POOLS_JSON_TREE_URL__!${__MEMPOOL_POOLS_JSON_TREE_URL__}!g" mempool-config.json
sed -i "s!__MEMPOOL_AUDIT__!${__MEMPOOL_AUDIT__}!g" mempool-config.json
sed -i "s!__MEMPOOL_ADVANCED_GBT_MEMPOOL__!${__MEMPOOL_ADVANCED_GBT_MEMPOOL__}!g" mempool-config.json
sed -i "s!__MEMPOOL_RUST_GBT__!${__MEMPOOL_GBT__}!g" mempool-config.json
sed -i "s!__MEMPOOL_RUST_GBT__!${__MEMPOOL_RUST_GBT__}!g" mempool-config.json
sed -i "s!__MEMPOOL_ADVANCED_GBT_AUDIT__!${__MEMPOOL_ADVANCED_GBT_AUDIT__}!g" mempool-config.json
sed -i "s!__MEMPOOL_CPFP_INDEXING__!${__MEMPOOL_CPFP_INDEXING__}!g" mempool-config.json
sed -i "s!__MEMPOOL_MAX_BLOCKS_BULK_QUERY__!${__MEMPOOL_MAX_BLOCKS_BULK_QUERY__}!g" mempool-config.json
sed -i "s!__MEMPOOL_DISK_CACHE_BLOCK_INTERVAL__!${__MEMPOOL_DISK_CACHE_BLOCK_INTERVAL__}!g" mempool-config.json
sed -i "s!__MEMPOOL_MAX_PUSH_TX_SIZE_WEIGHT__!${__MEMPOOL_MAX_PUSH_TX_SIZE_WEIGHT__}!g" mempool-config.json
sed -i "s!__MEMPOOL_ALLOW_UNREACHABLE__!${__MEMPOOL_ALLOW_UNREACHABLE__}!g" mempool-config.json
sed -i "s!__MEMPOOL_PRICE_UPDATES_PER_HOUR__!${__MEMPOOL_PRICE_UPDATES_PER_HOUR__}!g" mempool-config.json
sed -i "s!__CORE_RPC_HOST__!${__CORE_RPC_HOST__}!g" mempool-config.json
sed -i "s!__CORE_RPC_PORT__!${__CORE_RPC_PORT__}!g" mempool-config.json
sed -i "s!__CORE_RPC_USERNAME__!${__CORE_RPC_USERNAME__}!g" mempool-config.json
sed -i "s!__CORE_RPC_PASSWORD__!${__CORE_RPC_PASSWORD__}!g" mempool-config.json
sed -i "s!__CORE_RPC_TIMEOUT__!${__CORE_RPC_TIMEOUT__}!g" mempool-config.json
sed -i "s!__CORE_RPC_COOKIE__!${__CORE_RPC_COOKIE__}!g" mempool-config.json
sed -i "s!__CORE_RPC_COOKIE_PATH__!${__CORE_RPC_COOKIE_PATH__}!g" mempool-config.json
sed -i "s!__ELECTRUM_HOST__!${__ELECTRUM_HOST__}!g" mempool-config.json
sed -i "s!__ELECTRUM_PORT__!${__ELECTRUM_PORT__}!g" mempool-config.json
@ -185,13 +203,19 @@ sed -i "s!__ELECTRUM_TLS_ENABLED__!${__ELECTRUM_TLS_ENABLED__}!g" mempool-config
sed -i "s!__ESPLORA_REST_API_URL__!${__ESPLORA_REST_API_URL__}!g" mempool-config.json
sed -i "s!__ESPLORA_UNIX_SOCKET_PATH__!${__ESPLORA_UNIX_SOCKET_PATH__}!g" mempool-config.json
sed -i "s!__ESPLORA_BATCH_QUERY_BASE_SIZE__!${__ESPLORA_BATCH_QUERY_BASE_SIZE__}!g" mempool-config.json
sed -i "s!__ESPLORA_RETRY_UNIX_SOCKET_AFTER__!${__ESPLORA_RETRY_UNIX_SOCKET_AFTER__}!g" mempool-config.json
sed -i "s!__ESPLORA_REQUEST_TIMEOUT__!${__ESPLORA_REQUEST_TIMEOUT__}!g" mempool-config.json
sed -i "s!__ESPLORA_FALLBACK_TIMEOUT__!${__ESPLORA_FALLBACK_TIMEOUT__}!g" mempool-config.json
sed -i "s!__ESPLORA_FALLBACK__!${__ESPLORA_FALLBACK__}!g" mempool-config.json
sed -i "s!__SECOND_CORE_RPC_HOST__!${__SECOND_CORE_RPC_HOST__}!g" mempool-config.json
sed -i "s!__SECOND_CORE_RPC_PORT__!${__SECOND_CORE_RPC_PORT__}!g" mempool-config.json
sed -i "s!__SECOND_CORE_RPC_USERNAME__!${__SECOND_CORE_RPC_USERNAME__}!g" mempool-config.json
sed -i "s!__SECOND_CORE_RPC_PASSWORD__!${__SECOND_CORE_RPC_PASSWORD__}!g" mempool-config.json
sed -i "s!__SECOND_CORE_RPC_TIMEOUT__!${__SECOND_CORE_RPC_TIMEOUT__}!g" mempool-config.json
sed -i "s!__SECOND_CORE_RPC_COOKIE__!${__SECOND_CORE_RPC_COOKIE__}!g" mempool-config.json
sed -i "s!__SECOND_CORE_RPC_COOKIE_PATH__!${__SECOND_CORE_RPC_COOKIE_PATH__}!g" mempool-config.json
sed -i "s!__DATABASE_ENABLED__!${__DATABASE_ENABLED__}!g" mempool-config.json
sed -i "s!__DATABASE_HOST__!${__DATABASE_HOST__}!g" mempool-config.json
@ -201,6 +225,7 @@ sed -i "s!__DATABASE_DATABASE__!${__DATABASE_DATABASE__}!g" mempool-config.json
sed -i "s!__DATABASE_USERNAME__!${__DATABASE_USERNAME__}!g" mempool-config.json
sed -i "s!__DATABASE_PASSWORD__!${__DATABASE_PASSWORD__}!g" mempool-config.json
sed -i "s!__DATABASE_TIMEOUT__!${__DATABASE_TIMEOUT__}!g" mempool-config.json
sed -i "s!__DATABASE_PID_DIR__!${__DATABASE_PID_DIR__}!g" mempool-config.json
sed -i "s!__SYSLOG_ENABLED__!${__SYSLOG_ENABLED__}!g" mempool-config.json
sed -i "s!__SYSLOG_HOST__!${__SYSLOG_HOST__}!g" mempool-config.json
@ -221,9 +246,6 @@ sed -i "s!__SOCKS5PROXY_PORT__!${__SOCKS5PROXY_PORT__}!g" mempool-config.json
sed -i "s!__SOCKS5PROXY_USERNAME__!${__SOCKS5PROXY_USERNAME__}!g" mempool-config.json
sed -i "s!__SOCKS5PROXY_PASSWORD__!${__SOCKS5PROXY_PASSWORD__}!g" mempool-config.json
sed -i "s!__PRICE_DATA_SERVER_TOR_URL__!${__PRICE_DATA_SERVER_TOR_URL__}!g" mempool-config.json
sed -i "s!__PRICE_DATA_SERVER_CLEARNET_URL__!${__PRICE_DATA_SERVER_CLEARNET_URL__}!g" mempool-config.json
sed -i "s!__EXTERNAL_DATA_SERVER_MEMPOOL_API__!${__EXTERNAL_DATA_SERVER_MEMPOOL_API__}!g" mempool-config.json
sed -i "s!__EXTERNAL_DATA_SERVER_MEMPOOL_ONION__!${__EXTERNAL_DATA_SERVER_MEMPOOL_ONION__}!g" mempool-config.json
sed -i "s!__EXTERNAL_DATA_SERVER_LIQUID_API__!${__EXTERNAL_DATA_SERVER_LIQUID_API__}!g" mempool-config.json
@ -262,4 +284,13 @@ sed -i "s!__REPLICATION_AUDIT__!${__REPLICATION_AUDIT__}!g" mempool-config.json
sed -i "s!__REPLICATION_AUDIT_START_HEIGHT__!${__REPLICATION_AUDIT_START_HEIGHT__}!g" mempool-config.json
sed -i "s!__REPLICATION_SERVERS__!${__REPLICATION_SERVERS__}!g" mempool-config.json
# MEMPOOL_SERVICES
sed -i "s!__MEMPOOL_SERVICES_API__!${__MEMPOOL_SERVICES_API__}!g" mempool-config.json
sed -i "s!__MEMPOOL_SERVICES_ACCELERATIONS__!${__MEMPOOL_SERVICES_ACCELERATIONS__}!g" mempool-config.json
# REDIS
sed -i "s!__REDIS_ENABLED__!${__REDIS_ENABLED__}!g" mempool-config.json
sed -i "s!__REDIS_UNIX_SOCKET_PATH__!${__REDIS_UNIX_SOCKET_PATH__}!g" mempool-config.json
sed -i "s!__REDIS_BATCH_QUERY_BASE_SIZE__!${__REDIS_BATCH_QUERY_BASE_SIZE__}!g" mempool-config.json
node /backend/package/index.js
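Two details worth noting in this entrypoint: the `${VAR:=default}` expansions both read and assign, so an unset `CORE_RPC_COOKIE` becomes `false` in the environment and in the rendered config; and every substitution runs before `node` starts, so the backend only ever sees a fully rendered `mempool-config.json`. A hedged end-to-end sketch exercising the new cookie and Redis options; the mount paths and socket location are illustrative:

```sh
# Cookie-file RPC auth plus Redis over a unix socket; the entrypoint
# substitutes these values into mempool-config.json, then starts node.
docker run \
  -v /bitcoin:/bitcoin:ro \
  -e CORE_RPC_COOKIE=true \
  -e CORE_RPC_COOKIE_PATH=/bitcoin/.cookie \
  -e REDIS_ENABLED=true \
  -e REDIS_UNIX_SOCKET_PATH=/redis/redis.sock \
  mempool/backend
```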
Some files were not shown because too many files have changed in this diff.