Compare commits


1 commit

Author: wiz · SHA1: 04eb3f3c22 · Message: [legal] Update trademark policy and guidelines, fix license text · Date: 2022-08-12 13:46:55 +09:00
482 changed files with 29572 additions and 110618 deletions

.github/FUNDING.yml vendored (new file, 12 additions)

@@ -0,0 +1,12 @@
# These are supported funding model platforms
github: ['mempool'] # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2]
patreon: # Replace with a single Patreon username
open_collective: # Replace with a single Open Collective username
ko_fi: # Replace with a single Ko-fi username
tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
liberapay: # Replace with a single Liberapay username
issuehunt: # Replace with a single IssueHunt username
otechie: # Replace with a single Otechie username
custom: ['https://mempool.space/sponsor'] # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']

View File

@@ -1,28 +1,20 @@
version: 2
updates:
- package-ecosystem: npm
directory: "/backend"
schedule:
interval: daily
open-pull-requests-limit: 10
ignore:
- update-types: ["version-update:semver-major"]
- package-ecosystem: npm
directory: "/frontend"
schedule:
interval: daily
open-pull-requests-limit: 10
ignore:
- update-types: ["version-update:semver-major"]
- package-ecosystem: docker
directory: "/docker/backend"
schedule:
interval: daily
ignore:
- update-types: ["version-update:semver-major"]
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: daily
ignore:
- update-types: ["version-update:semver-major"]
- package-ecosystem: npm
directory: "/backend"
schedule:
interval: daily
open-pull-requests-limit: 10
- package-ecosystem: npm
directory: "/frontend"
schedule:
interval: daily
open-pull-requests-limit: 10
- package-ecosystem: docker
directory: "/docker/backend"
schedule:
interval: weekly
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "weekly"

View File

@@ -42,10 +42,8 @@ jobs:
run: npm run lint
working-directory: ${{ matrix.node }}/${{ matrix.flavor }}/backend
- name: Unit Tests
if: ${{ matrix.flavor == 'dev'}}
run: npm run test
working-directory: ${{ matrix.node }}/${{ matrix.flavor }}/backend
# - name: Test
# run: npm run test
- name: Build
run: npm run build

View File

@@ -1,7 +1,7 @@
name: Docker build on tag
env:
DOCKER_CLI_EXPERIMENTAL: enabled
TAG_FMT: "^refs/tags/(((.?[0-9]+){3,4}))$"
TAG_FMT: '^refs/tags/(((.?[0-9]+){3,4}))$'
DOCKER_BUILDKIT: 0
COMPOSE_DOCKER_CLI_BUILD: 0
@@ -21,46 +21,16 @@ jobs:
service:
- frontend
- backend
runs-on: ubuntu-latest
timeout-minutes: 120
runs-on: ubuntu-18.04
name: Build and push to DockerHub
steps:
# Workaround based on JonasAlfredsson/docker-on-tmpfs@v1.0.1
- name: Replace the current swap file
shell: bash
run: |
sudo swapoff /mnt/swapfile
sudo rm -v /mnt/swapfile
sudo fallocate -l 10G /mnt/swapfile
sudo chmod 600 /mnt/swapfile
sudo mkswap /mnt/swapfile
sudo swapon /mnt/swapfile
- name: Show current memory and swap status
shell: bash
run: |
sudo free -h
echo
sudo swapon --show
- name: Mount a tmpfs over /var/lib/docker
shell: bash
run: |
if [ ! -d "/var/lib/docker" ]; then
echo "Directory '/var/lib/docker' not found"
exit 1
fi
sudo mount -t tmpfs -o size=10G tmpfs /var/lib/docker
sudo systemctl restart docker
sudo df -h | grep docker
- name: Set env variables
run: echo "TAG=${GITHUB_REF/refs\/tags\//}" >> $GITHUB_ENV
- name: Show set environment variables
run: |
printf " TAG: %s\n" "$TAG"
- name: Add SHORT_SHA env property with commit short sha
run: echo "SHORT_SHA=`echo ${GITHUB_SHA} | cut -c1-8`" >> $GITHUB_ENV
@@ -68,24 +38,24 @@ jobs:
run: echo "${{ secrets.DOCKER_PASSWORD }}" | docker login -u "${{ secrets.DOCKER_USERNAME }}" --password-stdin
- name: Checkout project
uses: actions/checkout@e2f20e631ae6d7dd3b768f56a5d2af784dd54791 # v2.5.0
uses: actions/checkout@629c2de402a417ea7690ca6ce3f33229e27606a5 # v2
- name: Init repo for Dockerization
run: docker/init.sh "$TAG"
- name: Set up QEMU
uses: docker/setup-qemu-action@e81a89b1732b9c48d79cd809d8d81d79c4647a18 # v2.1.0
uses: docker/setup-qemu-action@27d0a4f181a40b142cce983c5393082c365d1480 # v1
id: qemu
- name: Setup Docker buildx action
uses: docker/setup-buildx-action@8c0edbc76e98fa90f69d9a2c020dcb50019dc325 # v2.2.1
uses: docker/setup-buildx-action@94ab11c41e45d028884a99163086648e898eed25 # v1
id: buildx
- name: Available platforms
run: echo ${{ steps.buildx.outputs.platforms }}
- name: Cache Docker layers
uses: actions/cache@9b0c1fce7a93df8e3bb8926b0d6e9d89e92f20a7 # v3.0.11
uses: actions/cache@661fd3eb7f2f20d8c7c84bc2b0509efd7a826628 # v2
id: cache
with:
path: /tmp/.buildx-cache

.gitignore vendored (2 changes)

@@ -3,5 +3,3 @@ data
docker-compose.yml
backend/mempool-config.json
*.swp
frontend/src/resources/config.template.js
frontend/src/resources/config.js

LICENSE (23 changes)

@@ -1,5 +1,5 @@
The Mempool Open Source Project
Copyright (c) 2019-2022 The Mempool Open Source Project Developers
The Mempool Open Source Project
Copyright (c) 2019-2022 Mempool Space K.K. and other shadowy super-coders
This program is free software; you can redistribute it and/or modify it under
the terms of (at your option) either:
@@ -12,13 +12,18 @@ the terms of (at your option) either:
Foundation, either version 3 of the License or any later version approved by a
proxy statement published on <https://mempool.space/about>.
However, this copyright license does not include an implied right or license to
use our trademarks: The Mempool Open Source Project™, mempool.space™, the
mempool Logo™, the mempool.space Vertical Logo™, the mempool.space Horizontal
Logo™, the mempool Square Logo™, and the mempool Blocks logo™ are registered
trademarks or trademarks of Mempool Space K.K in Japan, the United States,
and/or other countries. See our full Trademark Policy and Guidelines for more
details, published on <https://mempool.space/trademark-policy>.
However, the above copyright licenses do not include an implied right or license
to use any trademarks, service marks, logos, or trade names of Mempool Space K.K.
or any other contributor to The Mempool Open Source Project.
The Mempool Open Source Project™, Mempool Accelerator™, mempool.space™,
the mempool Logo, the mempool Square logo, the mempool Blocks logo,
the mempool.space Vertical Logo, and the mempool.space Horizontal logo
are registered trademarks or trademarks of Mempool Space K.K in Japan,
the United States, and/or other countries.
See our full Trademark Policy and Guidelines for more details, published on
<https://mempool.space/trademark-policy>.
This program is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A

View File

@@ -31,7 +31,6 @@
"prefer-const": 1,
"prefer-rest-params": 1,
"quotes": [1, "single", { "allowTemplateLiterals": true }],
"semi": 1,
"eqeqeq": 1
"semi": 1
}
}

backend/.gitignore vendored (3 changes)

@@ -1,8 +1,7 @@
# See http://help.github.com/ignore-files/ for more about ignoring files.
# production config and external assets
!mempool-config.template.json
!mempool-config.sample.json
mempool-config.json
pools.json
icons.json

View File

@@ -110,11 +110,6 @@ Run the Mempool backend:
```
npm run start
```
You can also set env var `MEMPOOL_CONFIG_FILE` to specify a custom config file location:
```
MEMPOOL_CONFIG_FILE=/path/to/mempool-config.json npm run start
```
When it's running, you should see output like this:

View File

@@ -1,20 +0,0 @@
import type { Config } from "@jest/types"
const config: Config.InitialOptions = {
preset: "ts-jest",
testEnvironment: "node",
verbose: true,
automock: false,
collectCoverage: true,
collectCoverageFrom: ["./src/**/**.ts"],
coverageProvider: "babel",
coverageThreshold: {
global: {
lines: 1
}
},
setupFiles: [
"./testSetup.ts",
],
}
export default config;

View File

@@ -2,7 +2,6 @@
"MEMPOOL": {
"NETWORK": "mainnet",
"BACKEND": "electrum",
"ENABLED": true,
"HTTP_PORT": 8999,
"SPAWN_CLUSTER_PROCS": 0,
"API_URL_PREFIX": "/api/v1/",
@@ -24,10 +23,7 @@
"STDOUT_LOG_MIN_PRIORITY": "debug",
"AUTOMATIC_BLOCK_REINDEXING": false,
"POOLS_JSON_URL": "https://raw.githubusercontent.com/mempool/mining-pools/master/pools.json",
"POOLS_JSON_TREE_URL": "https://api.github.com/repos/mempool/mining-pools/git/trees/master",
"ADVANCED_GBT_AUDIT": false,
"ADVANCED_GBT_MEMPOOL": false,
"TRANSACTION_INDEXING": false
"POOLS_JSON_TREE_URL": "https://api.github.com/repos/mempool/mining-pools/git/trees/master"
},
"CORE_RPC": {
"HOST": "127.0.0.1",
@@ -81,21 +77,13 @@
},
"LIGHTNING": {
"ENABLED": false,
"BACKEND": "lnd",
"STATS_REFRESH_INTERVAL": 600,
"GRAPH_REFRESH_INTERVAL": 600,
"LOGGER_UPDATE_INTERVAL": 30,
"FORENSICS_INTERVAL": 43200,
"FORENSICS_RATE_LIMIT": 20
"BACKEND": "lnd"
},
"LND": {
"TLS_CERT_PATH": "tls.cert",
"MACAROON_PATH": "readonly.macaroon",
"REST_API_URL": "https://localhost:8080"
},
"CLIGHTNING": {
"SOCKET": "lightning-rpc"
},
"SOCKS5PROXY": {
"ENABLED": false,
"USE_ONION": true,

backend/package-lock.json generated (6915 changes)
File diff suppressed because it is too large.

View File

@@ -21,48 +21,39 @@
],
"main": "index.ts",
"scripts": {
"tsc": "./node_modules/typescript/bin/tsc -p tsconfig.build.json",
"build": "npm run tsc && npm run create-resources",
"create-resources": "cp ./src/tasks/price-feeds/mtgox-weekly.json ./dist/tasks && node dist/api/fetch-version.js",
"package": "npm run build && rm -rf package && mv dist package && mv node_modules package && npm run package-rm-build-deps",
"package-rm-build-deps": "(cd package/node_modules; rm -r typescript @typescript-eslint)",
"tsc": "./node_modules/typescript/bin/tsc",
"build": "npm run tsc",
"start": "node --max-old-space-size=2048 dist/index.js",
"start-production": "node --max-old-space-size=4096 dist/index.js",
"test": "./node_modules/.bin/jest --coverage",
"test": "echo \"Error: no test specified\" && exit 1",
"lint": "./node_modules/.bin/eslint . --ext .ts",
"lint:fix": "./node_modules/.bin/eslint . --ext .ts --fix",
"prettier": "./node_modules/.bin/prettier --write \"src/**/*.{js,ts}\""
},
"dependencies": {
"@babel/core": "^7.20.5",
"@mempool/electrum-client": "^1.1.7",
"@types/node": "^16.11.41",
"axios": "~0.27.2",
"bitcoinjs-lib": "~6.0.2",
"crypto-js": "~4.1.1",
"express": "~4.18.2",
"maxmind": "~4.3.8",
"mysql2": "~2.3.3",
"node-worker-threads-pool": "~1.5.1",
"bitcoinjs-lib": "6.0.1",
"crypto-js": "^4.0.0",
"express": "^4.18.0",
"fast-xml-parser": "^4.0.9",
"maxmind": "^4.3.6",
"mysql2": "2.3.3",
"node-worker-threads-pool": "^1.5.1",
"socks-proxy-agent": "~7.0.0",
"typescript": "~4.7.4",
"ws": "~8.11.0"
"ws": "~8.8.0"
},
"devDependencies": {
"@babel/core": "^7.20.5",
"@babel/code-frame": "^7.18.6",
"@types/compression": "^1.7.2",
"@types/crypto-js": "^4.1.1",
"@types/express": "^4.17.14",
"@types/jest": "^29.2.3",
"@types/express": "^4.17.13",
"@types/ws": "~8.5.3",
"@typescript-eslint/eslint-plugin": "^5.45.0",
"@typescript-eslint/parser": "^5.45.0",
"eslint": "^8.28.0",
"@typescript-eslint/eslint-plugin": "^5.30.5",
"@typescript-eslint/parser": "^5.30.5",
"eslint": "^8.19.0",
"eslint-config-prettier": "^8.5.0",
"jest": "^29.3.1",
"prettier": "^2.8.0",
"ts-jest": "^29.0.3",
"ts-node": "^10.9.1"
"prettier": "^2.7.1"
}
}
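
Much of the dependency churn above is a flip between tilde and caret version ranges. Assuming standard npm semver range semantics (the `semver` package used here for illustration is not part of this diff), the difference is:

```typescript
import * as semver from 'semver';

// Standard npm semver semantics (the 'semver' package is not part of this diff):
// '~' permits patch-level updates only; '^' also permits minor updates.
semver.satisfies('4.7.9', '~4.7.4'); // true  -- patch bump stays in range
semver.satisfies('4.8.0', '~4.7.4'); // false -- minor bump falls out of range
semver.satisfies('4.8.0', '^4.7.4'); // true
semver.satisfies('5.0.0', '^4.7.4'); // false -- major bumps never match either range
```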

View File

@@ -1,115 +0,0 @@
{
"MEMPOOL": {
"ENABLED": true,
"NETWORK": "__MEMPOOL_NETWORK__",
"BACKEND": "__MEMPOOL_BACKEND__",
"ENABLED": true,
"BLOCKS_SUMMARIES_INDEXING": true,
"HTTP_PORT": 1,
"SPAWN_CLUSTER_PROCS": 2,
"API_URL_PREFIX": "__MEMPOOL_API_URL_PREFIX__",
"AUTOMATIC_BLOCK_REINDEXING": true,
"POLL_RATE_MS": 3,
"CACHE_DIR": "__MEMPOOL_CACHE_DIR__",
"CLEAR_PROTECTION_MINUTES": 4,
"RECOMMENDED_FEE_PERCENTILE": 5,
"BLOCK_WEIGHT_UNITS": 6,
"INITIAL_BLOCKS_AMOUNT": 7,
"MEMPOOL_BLOCKS_AMOUNT": 8,
"PRICE_FEED_UPDATE_INTERVAL": 9,
"USE_SECOND_NODE_FOR_MINFEE": 10,
"EXTERNAL_ASSETS": 11,
"EXTERNAL_MAX_RETRY": 12,
"EXTERNAL_RETRY_INTERVAL": 13,
"USER_AGENT": "__MEMPOOL_USER_AGENT__",
"STDOUT_LOG_MIN_PRIORITY": "__MEMPOOL_STDOUT_LOG_MIN_PRIORITY__",
"INDEXING_BLOCKS_AMOUNT": 14,
"POOLS_JSON_TREE_URL": "__POOLS_JSON_TREE_URL__",
"POOLS_JSON_URL": "__POOLS_JSON_URL__",
"ADVANCED_GBT_AUDIT": "__ADVANCED_GBT_AUDIT__",
"ADVANCED_GBT_MEMPOOL": "__ADVANCED_GBT_MEMPOOL__",
"TRANSACTION_INDEXING": "__TRANSACTION_INDEXING__"
},
"CORE_RPC": {
"HOST": "__CORE_RPC_HOST__",
"PORT": 15,
"USERNAME": "__CORE_RPC_USERNAME__",
"PASSWORD": "__CORE_RPC_PASSWORD__"
},
"ELECTRUM": {
"HOST": "__ELECTRUM_HOST__",
"PORT": 16,
"TLS_ENABLED": true
},
"ESPLORA": {
"REST_API_URL": "__ESPLORA_REST_API_URL__"
},
"SECOND_CORE_RPC": {
"HOST": "__SECOND_CORE_RPC_HOST__",
"PORT": 17,
"USERNAME": "__SECOND_CORE_RPC_USERNAME__",
"PASSWORD": "__SECOND_CORE_RPC_PASSWORD__"
},
"DATABASE": {
"ENABLED": false,
"HOST": "__DATABASE_HOST__",
"SOCKET": "__DATABASE_SOCKET__",
"PORT": 18,
"DATABASE": "__DATABASE_DATABASE__",
"USERNAME": "__DATABASE_USERNAME__",
"PASSWORD": "__DATABASE_PASSWORD__"
},
"SYSLOG": {
"ENABLED": false,
"HOST": "__SYSLOG_HOST__",
"PORT": 19,
"MIN_PRIORITY": "__SYSLOG_MIN_PRIORITY__",
"FACILITY": "__SYSLOG_FACILITY__"
},
"STATISTICS": {
"ENABLED": false,
"TX_PER_SECOND_SAMPLE_PERIOD": 20
},
"BISQ": {
"ENABLED": true,
"DATA_PATH": "__BISQ_DATA_PATH__"
},
"SOCKS5PROXY": {
"ENABLED": true,
"USE_ONION": true,
"HOST": "__SOCKS5PROXY_HOST__",
"PORT": "__SOCKS5PROXY_PORT__",
"USERNAME": "__SOCKS5PROXY_USERNAME__",
"PASSWORD": "__SOCKS5PROXY_PASSWORD__"
},
"PRICE_DATA_SERVER": {
"TOR_URL": "__PRICE_DATA_SERVER_TOR_URL__",
"CLEARNET_URL": "__PRICE_DATA_SERVER_CLEARNET_URL__"
},
"EXTERNAL_DATA_SERVER": {
"MEMPOOL_API": "__EXTERNAL_DATA_SERVER_MEMPOOL_API__",
"MEMPOOL_ONION": "__EXTERNAL_DATA_SERVER_MEMPOOL_ONION__",
"LIQUID_API": "__EXTERNAL_DATA_SERVER_LIQUID_API__",
"LIQUID_ONION": "__EXTERNAL_DATA_SERVER_LIQUID_ONION__",
"BISQ_URL": "__EXTERNAL_DATA_SERVER_BISQ_URL__",
"BISQ_ONION": "__EXTERNAL_DATA_SERVER_BISQ_ONION__"
},
"LIGHTNING": {
"ENABLED": "__LIGHTNING_ENABLED__",
"BACKEND": "__LIGHTNING_BACKEND__",
"TOPOLOGY_FOLDER": "__LIGHTNING_TOPOLOGY_FOLDER__",
"STATS_REFRESH_INTERVAL": 600,
"GRAPH_REFRESH_INTERVAL": 600,
"LOGGER_UPDATE_INTERVAL": 30,
"FORENSICS_INTERVAL": 43200,
"FORENSICS_RATE_LIMIT": "__FORENSICS_RATE_LIMIT__"
},
"LND": {
"TLS_CERT_PATH": "",
"MACAROON_PATH": "",
"REST_API_URL": "https://localhost:8080"
},
"CLIGHTNING": {
"SOCKET": "__CLIGHTNING_SOCKET__"
}
}

View File

@@ -1,62 +0,0 @@
import { calcDifficultyAdjustment, DifficultyAdjustment } from '../../api/difficulty-adjustment';
describe('Mempool Difficulty Adjustment', () => {
test('should calculate Difficulty Adjustments properly', () => {
const dt = (dtString) => {
return Math.floor(new Date(dtString).getTime() / 1000);
};
const vectors = [
[ // Vector 1
[ // Inputs
dt('2022-08-18T11:07:00.000Z'), // Last DA time (in seconds)
dt('2022-08-19T14:03:53.000Z'), // Current time (now) (in seconds)
750134, // Current block height
0.6280047707459726, // Previous retarget % (Passed through)
'mainnet', // Network (if testnet, next value is non-zero)
0, // If not testnet, not used
],
{ // Expected Result
progressPercent: 9.027777777777777,
difficultyChange: 12.562233927411782,
estimatedRetargetDate: 1661895424692,
remainingBlocks: 1834,
remainingTime: 977591692,
previousRetarget: 0.6280047707459726,
nextRetargetHeight: 751968,
timeAvg: 533038,
timeOffset: 0,
},
],
[ // Vector 2 (testnet)
[ // Inputs
dt('2022-08-18T11:07:00.000Z'), // Last DA time (in seconds)
dt('2022-08-19T14:03:53.000Z'), // Current time (now) (in seconds)
750134, // Current block height
0.6280047707459726, // Previous retarget % (Passed through)
'testnet', // Network
dt('2022-08-19T13:52:46.000Z'), // Latest block timestamp in seconds
],
{ // Expected Result is same other than timeOffset
progressPercent: 9.027777777777777,
difficultyChange: 12.562233927411782,
estimatedRetargetDate: 1661895424692,
remainingBlocks: 1834,
remainingTime: 977591692,
previousRetarget: 0.6280047707459726,
nextRetargetHeight: 751968,
timeAvg: 533038,
timeOffset: -667000, // 11 min 7 seconds since last block (testnet only)
// If we add time avg to abs(timeOffset) it makes exactly 1200000 ms, or 20 minutes
},
],
] as [[number, number, number, number, string, number], DifficultyAdjustment][];
for (const vector of vectors) {
const result = calcDifficultyAdjustment(...vector[0]);
// previousRetarget is passed through untouched
expect(result.previousRetarget).toStrictEqual(vector[0][3]);
expect(result).toStrictEqual(vector[1]);
}
});
});
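
The expected values in Vector 1 can be re-derived by hand, which makes the vectors easier to audit. The sketch below assumes the standard 2016-block retarget epoch and 600-second block target; it mirrors what calcDifficultyAdjustment computes, but is not the implementation itself:

```typescript
// Re-deriving Vector 1's expected values (assumes 2016-block epochs and a
// 600 s block target; the rounding mirrors the expected outputs, not the source).
const EPOCH = 2016;
const lastRetarget = Math.floor(Date.parse('2022-08-18T11:07:00.000Z') / 1000);
const now = Math.floor(Date.parse('2022-08-19T14:03:53.000Z') / 1000);
const height = 750134;

const blocksSinceRetarget = height % EPOCH;                                       // 182
const progressPercent = (blocksSinceRetarget / EPOCH) * 100;                      // 9.0277...
const timeAvg = Math.floor(((now - lastRetarget) * 1000) / blocksSinceRetarget);  // 533038 ms
const difficultyChange = (600_000 / timeAvg - 1) * 100;                           // 12.5622...
const remainingBlocks = EPOCH - blocksSinceRetarget;                              // 1834
const remainingTime = remainingBlocks * timeAvg;                                  // 977591692 ms
const estimatedRetargetDate = now * 1000 + remainingTime;                         // 1661895424692
const nextRetargetHeight = height - blocksSinceRetarget + EPOCH;                  // 751968
```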

View File

@@ -1,143 +0,0 @@
import * as fs from 'fs';
describe('Mempool Backend Config', () => {
beforeEach(() => {
jest.resetAllMocks();
jest.resetModules();
});
test('should return defaults when no file is present', () => {
jest.isolateModules(() => {
jest.mock('../../mempool-config.json', () => ({}), { virtual: true });
const config = jest.requireActual('../config').default;
expect(config.MEMPOOL).toStrictEqual({
ENABLED: true,
NETWORK: 'mainnet',
BACKEND: 'none',
BLOCKS_SUMMARIES_INDEXING: false,
HTTP_PORT: 8999,
SPAWN_CLUSTER_PROCS: 0,
API_URL_PREFIX: '/api/v1/',
AUTOMATIC_BLOCK_REINDEXING: false,
POLL_RATE_MS: 2000,
CACHE_DIR: './cache',
CLEAR_PROTECTION_MINUTES: 20,
RECOMMENDED_FEE_PERCENTILE: 50,
BLOCK_WEIGHT_UNITS: 4000000,
INITIAL_BLOCKS_AMOUNT: 8,
MEMPOOL_BLOCKS_AMOUNT: 8,
INDEXING_BLOCKS_AMOUNT: 11000,
PRICE_FEED_UPDATE_INTERVAL: 600,
USE_SECOND_NODE_FOR_MINFEE: false,
EXTERNAL_ASSETS: [],
EXTERNAL_MAX_RETRY: 1,
EXTERNAL_RETRY_INTERVAL: 0,
USER_AGENT: 'mempool',
STDOUT_LOG_MIN_PRIORITY: 'debug',
POOLS_JSON_TREE_URL: 'https://api.github.com/repos/mempool/mining-pools/git/trees/master',
POOLS_JSON_URL: 'https://raw.githubusercontent.com/mempool/mining-pools/master/pools.json',
ADVANCED_GBT_AUDIT: false,
ADVANCED_GBT_MEMPOOL: false,
TRANSACTION_INDEXING: false,
});
expect(config.ELECTRUM).toStrictEqual({ HOST: '127.0.0.1', PORT: 3306, TLS_ENABLED: true });
expect(config.ESPLORA).toStrictEqual({ REST_API_URL: 'http://127.0.0.1:3000' });
expect(config.CORE_RPC).toStrictEqual({
HOST: '127.0.0.1',
PORT: 8332,
USERNAME: 'mempool',
PASSWORD: 'mempool'
});
expect(config.SECOND_CORE_RPC).toStrictEqual({
HOST: '127.0.0.1',
PORT: 8332,
USERNAME: 'mempool',
PASSWORD: 'mempool'
});
expect(config.DATABASE).toStrictEqual({
ENABLED: true,
HOST: '127.0.0.1',
SOCKET: '',
PORT: 3306,
DATABASE: 'mempool',
USERNAME: 'mempool',
PASSWORD: 'mempool'
});
expect(config.SYSLOG).toStrictEqual({
ENABLED: true,
HOST: '127.0.0.1',
PORT: 514,
MIN_PRIORITY: 'info',
FACILITY: 'local7'
});
expect(config.STATISTICS).toStrictEqual({ ENABLED: true, TX_PER_SECOND_SAMPLE_PERIOD: 150 });
expect(config.BISQ).toStrictEqual({ ENABLED: false, DATA_PATH: '/bisq/statsnode-data/btc_mainnet/db' });
expect(config.SOCKS5PROXY).toStrictEqual({
ENABLED: false,
USE_ONION: true,
HOST: '127.0.0.1',
PORT: 9050,
USERNAME: '',
PASSWORD: ''
});
expect(config.PRICE_DATA_SERVER).toStrictEqual({
TOR_URL: 'http://wizpriceje6q5tdrxkyiazsgu7irquiqjy2dptezqhrtu7l2qelqktid.onion/getAllMarketPrices',
CLEARNET_URL: 'https://price.bisq.wiz.biz/getAllMarketPrices'
});
expect(config.EXTERNAL_DATA_SERVER).toStrictEqual({
MEMPOOL_API: 'https://mempool.space/api/v1',
MEMPOOL_ONION: 'http://mempoolhqx4isw62xs7abwphsq7ldayuidyx2v2oethdhhj6mlo2r6ad.onion/api/v1',
LIQUID_API: 'https://liquid.network/api/v1',
LIQUID_ONION: 'http://liquidmom47f6s3m53ebfxn47p76a6tlnxib3wp6deux7wuzotdr6cyd.onion/api/v1',
BISQ_URL: 'https://bisq.markets/api',
BISQ_ONION: 'http://bisqmktse2cabavbr2xjq7xw3h6g5ottemo5rolfcwt6aly6tp5fdryd.onion/api'
});
});
});
test('should override the default values with the passed values', () => {
jest.isolateModules(() => {
const fixture = JSON.parse(fs.readFileSync(`${__dirname}/../__fixtures__/mempool-config.template.json`, 'utf8'));
jest.mock('../../mempool-config.json', () => (fixture), { virtual: true });
const config = jest.requireActual('../config').default;
expect(config.MEMPOOL).toStrictEqual(fixture.MEMPOOL);
expect(config.ELECTRUM).toStrictEqual(fixture.ELECTRUM);
expect(config.ESPLORA).toStrictEqual(fixture.ESPLORA);
expect(config.CORE_RPC).toStrictEqual(fixture.CORE_RPC);
expect(config.SECOND_CORE_RPC).toStrictEqual(fixture.SECOND_CORE_RPC);
expect(config.DATABASE).toStrictEqual(fixture.DATABASE);
expect(config.SYSLOG).toStrictEqual(fixture.SYSLOG);
expect(config.STATISTICS).toStrictEqual(fixture.STATISTICS);
expect(config.BISQ).toStrictEqual(fixture.BISQ);
expect(config.SOCKS5PROXY).toStrictEqual(fixture.SOCKS5PROXY);
expect(config.PRICE_DATA_SERVER).toStrictEqual(fixture.PRICE_DATA_SERVER);
expect(config.EXTERNAL_DATA_SERVER).toStrictEqual(fixture.EXTERNAL_DATA_SERVER);
});
});
});

View File

@@ -1,138 +0,0 @@
import config from '../config';
import bitcoinApi from './bitcoin/bitcoin-api-factory';
import { Common } from './common';
import { TransactionExtended, MempoolBlockWithTransactions, AuditScore } from '../mempool.interfaces';
import blocksRepository from '../repositories/BlocksRepository';
import blocksAuditsRepository from '../repositories/BlocksAuditsRepository';
import blocks from '../api/blocks';
const PROPAGATION_MARGIN = 180; // in seconds, time since a transaction is first seen after which it is assumed to have propagated to all miners
class Audit {
auditBlock(transactions: TransactionExtended[], projectedBlocks: MempoolBlockWithTransactions[], mempool: { [txId: string]: TransactionExtended })
: { censored: string[], added: string[], fresh: string[], score: number } {
if (!projectedBlocks?.[0]?.transactionIds || !mempool) {
return { censored: [], added: [], fresh: [], score: 0 };
}
const matches: string[] = []; // present in both mined block and template
const added: string[] = []; // present in mined block, not in template
const fresh: string[] = []; // missing, but firstSeen within PROPAGATION_MARGIN
const isCensored = {}; // missing, without excuse
const isDisplaced = {};
let displacedWeight = 0;
const inBlock = {};
const inTemplate = {};
const now = Math.round((Date.now() / 1000));
for (const tx of transactions) {
inBlock[tx.txid] = tx;
}
// coinbase is always expected
if (transactions[0]) {
inTemplate[transactions[0].txid] = true;
}
// look for transactions that were expected in the template, but missing from the mined block
for (const txid of projectedBlocks[0].transactionIds) {
if (!inBlock[txid]) {
// tx is recent, may have reached the miner too late for inclusion
if (mempool[txid]?.firstSeen != null && (now - (mempool[txid]?.firstSeen || 0)) <= PROPAGATION_MARGIN) {
fresh.push(txid);
} else {
isCensored[txid] = true;
}
displacedWeight += mempool[txid].weight;
}
inTemplate[txid] = true;
}
displacedWeight += (4000 - transactions[0].weight);
// we can expect an honest miner to include 'displaced' transactions in place of recent arrivals and censored txs
// these displaced transactions should occupy the first N weight units of the next projected block
let displacedWeightRemaining = displacedWeight;
let index = 0;
let lastFeeRate = Infinity;
let failures = 0;
while (projectedBlocks[1] && index < projectedBlocks[1].transactionIds.length && failures < 500) {
const txid = projectedBlocks[1].transactionIds[index];
const fits = (mempool[txid].weight - displacedWeightRemaining) < 4000;
const feeMatches = mempool[txid].effectiveFeePerVsize >= lastFeeRate;
if (fits || feeMatches) {
isDisplaced[txid] = true;
if (fits) {
lastFeeRate = Math.min(lastFeeRate, mempool[txid].effectiveFeePerVsize);
}
if (mempool[txid].firstSeen == null || (now - (mempool[txid]?.firstSeen || 0)) > PROPAGATION_MARGIN) {
displacedWeightRemaining -= mempool[txid].weight;
}
failures = 0;
} else {
failures++;
}
index++;
}
// mark unexpected transactions in the mined block as 'added'
let overflowWeight = 0;
let totalWeight = 0;
for (const tx of transactions) {
if (inTemplate[tx.txid]) {
matches.push(tx.txid);
} else {
if (!isDisplaced[tx.txid]) {
added.push(tx.txid);
} else {
}
let blockIndex = -1;
let index = -1;
projectedBlocks.forEach((block, bi) => {
const i = block.transactionIds.indexOf(tx.txid);
if (i >= 0) {
blockIndex = bi;
index = i;
}
});
overflowWeight += tx.weight;
}
totalWeight += tx.weight;
}
// transactions missing from near the end of our template are probably not being censored
let overflowWeightRemaining = overflowWeight - (config.MEMPOOL.BLOCK_WEIGHT_UNITS - totalWeight);
let maxOverflowRate = 0;
let rateThreshold = 0;
index = projectedBlocks[0].transactionIds.length - 1;
while (index >= 0) {
const txid = projectedBlocks[0].transactionIds[index];
if (overflowWeightRemaining > 0) {
if (isCensored[txid]) {
delete isCensored[txid];
}
if (mempool[txid].effectiveFeePerVsize > maxOverflowRate) {
maxOverflowRate = mempool[txid].effectiveFeePerVsize;
rateThreshold = (Math.ceil(maxOverflowRate * 100) / 100) + 0.005;
}
} else if (mempool[txid].effectiveFeePerVsize <= rateThreshold) { // tolerance of 0.01 sat/vb + rounding
if (isCensored[txid]) {
delete isCensored[txid];
}
}
overflowWeightRemaining -= (mempool[txid]?.weight || 0);
index--;
}
const numCensored = Object.keys(isCensored).length;
const score = matches.length > 0 ? (matches.length / (matches.length + numCensored)) : 0;
return {
censored: Object.keys(isCensored),
added,
fresh,
score
};
}
}
export default new Audit();
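
The final score reduces to the share of auditable template transactions that actually made it into the block, with 'fresh' and displaced transactions excused. A minimal worked restatement of the formula from auditBlock above (the `auditScore` helper name is ours, introduced for illustration):

```typescript
// Worked restatement of the score formula in auditBlock above (helper name is ours).
function auditScore(matches: number, censored: number): number {
  return matches > 0 ? matches / (matches + censored) : 0;
}

auditScore(95, 5);  // 0.95 -- the block included 95% of the auditable template
auditScore(0, 10);  // 0    -- guard against an empty match set
```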

View File

@@ -1,37 +1,60 @@
import fs from 'fs';
import path from 'path';
import os from 'os';
import * as fs from 'fs';
import * as os from 'os';
import logger from '../logger';
import { IBackendInfo } from '../mempool.interfaces';
const { spawnSync } = require('child_process');
class BackendInfo {
private backendInfo: IBackendInfo;
private gitCommitHash = '';
private hostname = '';
private version = '';
constructor() {
// This file is created by ./fetch-version.ts during building
const versionFile = path.join(__dirname, 'version.json')
var versionInfo;
if (fs.existsSync(versionFile)) {
versionInfo = JSON.parse(fs.readFileSync(versionFile).toString());
} else {
// Use dummy values if `versionFile` doesn't exist (e.g., during testing)
versionInfo = {
version: '?',
gitCommit: '?'
};
}
this.backendInfo = {
hostname: os.hostname(),
version: versionInfo.version,
gitCommit: versionInfo.gitCommit
};
this.setLatestCommitHash();
this.setVersion();
this.hostname = os.hostname();
}
public getBackendInfo(): IBackendInfo {
return this.backendInfo;
return {
hostname: this.hostname,
gitCommit: this.gitCommitHash,
version: this.version,
};
}
public getShortCommitHash() {
return this.backendInfo.gitCommit.slice(0, 7);
return this.gitCommitHash.slice(0, 7);
}
private setLatestCommitHash(): void {
//TODO: share this logic with `generate-config.js`
if (process.env.DOCKER_COMMIT_HASH) {
this.gitCommitHash = process.env.DOCKER_COMMIT_HASH;
} else {
try {
const gitRevParse = spawnSync('git', ['rev-parse', '--short', 'HEAD']);
if (!gitRevParse.error) {
const output = gitRevParse.stdout.toString('utf-8').replace(/[\n\r\s]+$/, '');
this.gitCommitHash = output ? output : '?';
} else if (gitRevParse.error.code === 'ENOENT') {
console.log('git not found, cannot parse git hash');
this.gitCommitHash = '?';
}
} catch (e: any) {
console.log('Could not load git commit info: ' + e.message);
this.gitCommitHash = '?';
}
}
}
private setVersion(): void {
try {
const packageJson = fs.readFileSync('package.json').toString();
this.version = JSON.parse(packageJson).version;
} catch (e) {
throw new Error(e instanceof Error ? e.message : 'Error');
}
}
}

View File

@@ -3,14 +3,13 @@ import { IEsploraApi } from './esplora-api.interface';
export interface AbstractBitcoinApi {
$getRawMempool(): Promise<IEsploraApi.Transaction['txid'][]>;
$getRawTransaction(txId: string, skipConversion?: boolean, addPrevout?: boolean, lazyPrevouts?: boolean): Promise<IEsploraApi.Transaction>;
$getTransactionHex(txId: string): Promise<string>;
$getBlockHeightTip(): Promise<number>;
$getBlockHashTip(): Promise<string>;
$getTxIdsForBlock(hash: string): Promise<string[]>;
$getBlockHash(height: number): Promise<string>;
$getBlockHeader(hash: string): Promise<string>;
$getBlock(hash: string): Promise<IEsploraApi.Block>;
$getRawBlock(hash: string): Promise<Buffer>;
$getRawBlock(hash: string): Promise<string>;
$getAddress(address: string): Promise<IEsploraApi.Address>;
$getAddressTransactions(address: string, lastSeenTxId: string): Promise<IEsploraApi.Transaction[]>;
$getAddressPrefix(prefix: string): string[];

View File

@@ -57,11 +57,6 @@ class BitcoinApi implements AbstractBitcoinApi {
});
}
$getTransactionHex(txId: string): Promise<string> {
return this.$getRawTransaction(txId, true)
.then((tx) => tx.hex || '');
}
$getBlockHeightTip(): Promise<number> {
return this.bitcoindClient.getChainTips()
.then((result: IBitcoinApi.ChainTips[]) => {
@@ -81,7 +76,7 @@ class BitcoinApi implements AbstractBitcoinApi {
.then((rpcBlock: IBitcoinApi.Block) => rpcBlock.tx);
}
$getRawBlock(hash: string): Promise<Buffer> {
$getRawBlock(hash: string): Promise<string> {
return this.bitcoindClient.getBlock(hash, 0)
.then((raw: string) => Buffer.from(raw, "hex"));
}

View File

@@ -1,6 +1,5 @@
import { Application, Request, Response } from 'express';
import axios from 'axios';
import * as bitcoinjs from 'bitcoinjs-lib';
import config from '../../config';
import websocketHandler from '../websocket-handler';
import mempool from '../mempool';
@@ -17,14 +16,13 @@ import logger from '../../logger';
import blocks from '../blocks';
import bitcoinClient from './bitcoin-client';
import difficultyAdjustment from '../difficulty-adjustment';
import transactionRepository from '../../repositories/TransactionRepository';
class BitcoinRoutes {
public initRoutes(app: Application) {
app
.get(config.MEMPOOL.API_URL_PREFIX + 'transaction-times', this.getTransactionTimes)
.get(config.MEMPOOL.API_URL_PREFIX + 'outspends', this.$getBatchedOutspends)
.get(config.MEMPOOL.API_URL_PREFIX + 'cpfp/:txId', this.$getCpfpInfo)
.get(config.MEMPOOL.API_URL_PREFIX + 'cpfp/:txId', this.getCpfpInfo)
.get(config.MEMPOOL.API_URL_PREFIX + 'difficulty-adjustment', this.getDifficultyChange)
.get(config.MEMPOOL.API_URL_PREFIX + 'fees/recommended', this.getRecommendedFees)
.get(config.MEMPOOL.API_URL_PREFIX + 'fees/mempool-blocks', this.getMempoolBlocks)
@@ -89,9 +87,7 @@ class BitcoinRoutes {
.get(config.MEMPOOL.API_URL_PREFIX + 'blocks', this.getBlocks.bind(this))
.get(config.MEMPOOL.API_URL_PREFIX + 'blocks/:height', this.getBlocks.bind(this))
.get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash', this.getBlock)
.get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash/summary', this.getStrippedBlockTransactions)
.get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash/audit-summary', this.getBlockAuditSummary)
.post(config.MEMPOOL.API_URL_PREFIX + 'psbt/addparents', this.postPsbtCompletion)
.get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash/summary', this.getStrippedBlockTransactions);
;
if (config.MEMPOOL.BACKEND !== 'esplora') {
@@ -189,36 +185,29 @@ class BitcoinRoutes {
}
}
private async $getCpfpInfo(req: Request, res: Response) {
private getCpfpInfo(req: Request, res: Response) {
if (!/^[a-fA-F0-9]{64}$/.test(req.params.txId)) {
res.status(501).send(`Invalid transaction ID.`);
return;
}
const tx = mempool.getMempool()[req.params.txId];
if (tx) {
if (tx?.cpfpChecked) {
res.json({
ancestors: tx.ancestors,
bestDescendant: tx.bestDescendant || null,
descendants: tx.descendants || null,
effectiveFeePerVsize: tx.effectiveFeePerVsize || null,
});
return;
}
const cpfpInfo = Common.setRelativesAndGetCpfpInfo(tx, mempool.getMempool());
res.json(cpfpInfo);
if (!tx) {
res.status(404).send(`Transaction doesn't exist in the mempool.`);
return;
} else {
const cpfpInfo = await transactionRepository.$getCpfpInfo(req.params.txId);
if (cpfpInfo) {
res.json(cpfpInfo);
return;
}
}
res.status(404).send(`Transaction has no CPFP info available.`);
if (tx.cpfpChecked) {
res.json({
ancestors: tx.ancestors,
bestDescendant: tx.bestDescendant || null,
});
return;
}
const cpfpInfo = Common.setRelativesAndGetCpfpInfo(tx, mempool.getMempool());
res.json(cpfpInfo);
}
private getBackendInfo(req: Request, res: Response) {
@@ -252,74 +241,6 @@ class BitcoinRoutes {
}
}
/**
* Takes the PSBT as text/plain body, parses it, and adds the full
* parent transaction to each input that doesn't already have it.
* This is used for BTCPayServer / Trezor users which need access to
* the full parent transaction even with segwit inputs.
* It will respond with a text/plain PSBT in the same format (hex|base64).
*/
private async postPsbtCompletion(req: Request, res: Response): Promise<void> {
res.setHeader('content-type', 'text/plain');
const notFoundError = `Couldn't get transaction hex for parent of input`;
try {
let psbt: bitcoinjs.Psbt;
let format: 'hex' | 'base64';
let isModified = false;
try {
psbt = bitcoinjs.Psbt.fromBase64(req.body);
format = 'base64';
} catch (e1) {
try {
psbt = bitcoinjs.Psbt.fromHex(req.body);
format = 'hex';
} catch (e2) {
throw new Error(`Unable to parse PSBT`);
}
}
for (const [index, input] of psbt.data.inputs.entries()) {
if (!input.nonWitnessUtxo) {
// Buffer.from ensures it won't be modified in place by reverse()
const txid = Buffer.from(psbt.txInputs[index].hash)
.reverse()
.toString('hex');
let transactionHex: string;
// If missing transaction, return 404 status error
try {
transactionHex = await bitcoinApi.$getTransactionHex(txid);
if (!transactionHex) {
throw new Error('');
}
} catch (err) {
throw new Error(`${notFoundError} #${index} @ ${txid}`);
}
psbt.updateInput(index, {
nonWitnessUtxo: Buffer.from(transactionHex, 'hex'),
});
if (!isModified) {
isModified = true;
}
}
}
if (isModified) {
res.send(format === 'hex' ? psbt.toHex() : psbt.toBase64());
} else {
// Not modified
// 422 Unprocessable Entity
// https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/422
res.status(422).send(`Psbt had no missing nonWitnessUtxos.`);
}
} catch (e: any) {
if (e instanceof Error && new RegExp(notFoundError).test(e.message)) {
res.status(404).send(e.message);
} else {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
}
private async getTransactionStatus(req: Request, res: Response) {
try {
const transaction = await transactionUtils.$getTransactionExtended(req.params.txId, true);
@@ -333,16 +254,6 @@ class BitcoinRoutes {
}
}
private async getStrippedBlockTransactions(req: Request, res: Response) {
try {
const transactions = await blocks.$getStrippedBlockTransactions(req.params.hash);
res.setHeader('Expires', new Date(Date.now() + 1000 * 3600 * 24 * 30).toUTCString());
res.json(transactions);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async getBlock(req: Request, res: Response) {
try {
const block = await blocks.$getBlock(req.params.hash);
@@ -375,9 +286,9 @@ class BitcoinRoutes {
}
}
private async getBlockAuditSummary(req: Request, res: Response) {
private async getStrippedBlockTransactions(req: Request, res: Response) {
try {
const transactions = await blocks.$getBlockAuditSummary(req.params.hash);
const transactions = await blocks.$getStrippedBlockTransactions(req.params.hash);
res.setHeader('Expires', new Date(Date.now() + 1000 * 3600 * 24 * 30).toUTCString());
res.json(transactions);
} catch (e) {
@@ -599,12 +510,7 @@ class BitcoinRoutes {
private getDifficultyChange(req: Request, res: Response) {
try {
const da = difficultyAdjustment.getDifficultyAdjustment();
if (da) {
res.json(da);
} else {
res.status(503).send(`Service Temporarily Unavailable`);
}
res.json(difficultyAdjustment.getDifficultyAdjustment());
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
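
The postPsbtCompletion handler shown in this diff takes a PSBT as a text/plain body (hex or base64), fills in each missing nonWitnessUtxo, and answers in the same encoding, with 404 when a parent transaction can't be found and 422 when nothing was missing. A hypothetical client call, assuming a deployment at mempool.space and the default `/api/v1/` prefix:

```typescript
import axios from 'axios';

// Hypothetical client for the psbt/addparents endpoint described above.
// The host is an assumption; the path follows the route registration in this
// diff (API_URL_PREFIX + 'psbt/addparents').
async function addParentTransactions(psbt: string): Promise<string> {
  const response = await axios.post<string>(
    'https://mempool.space/api/v1/psbt/addparents',
    psbt, // hex or base64; the response uses the same format
    { headers: { 'Content-Type': 'text/plain' }, responseType: 'text' },
  );
  return response.data; // PSBT with nonWitnessUtxo added to each input
}
```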

View File

@@ -20,11 +20,6 @@ class ElectrsApi implements AbstractBitcoinApi {
.then((response) => response.data);
}
$getTransactionHex(txId: string): Promise<string> {
return axios.get<string>(config.ESPLORA.REST_API_URL + '/tx/' + txId + '/hex', this.axiosConfig)
.then((response) => response.data);
}
$getBlockHeightTip(): Promise<number> {
return axios.get<number>(config.ESPLORA.REST_API_URL + '/blocks/tip/height', this.axiosConfig)
.then((response) => response.data);
@@ -55,9 +50,9 @@ class ElectrsApi implements AbstractBitcoinApi {
.then((response) => response.data);
}
$getRawBlock(hash: string): Promise<Buffer> {
return axios.get<string>(config.ESPLORA.REST_API_URL + '/block/' + hash + "/raw", { ...this.axiosConfig, responseType: 'arraybuffer' })
.then((response) => { return Buffer.from(response.data); });
$getRawBlock(hash: string): Promise<string> {
return axios.get<string>(config.ESPLORA.REST_API_URL + '/block/' + hash + "/raw", this.axiosConfig)
.then((response) => response.data);
}
$getAddress(address: string): Promise<IEsploraApi.Address> {

View File

@@ -20,14 +20,10 @@ import indexer from '../indexer';
import fiatConversion from './fiat-conversion';
import poolsParser from './pools-parser';
import BlocksSummariesRepository from '../repositories/BlocksSummariesRepository';
import BlocksAuditsRepository from '../repositories/BlocksAuditsRepository';
import cpfpRepository from '../repositories/CpfpRepository';
import transactionRepository from '../repositories/TransactionRepository';
import mining from './mining/mining';
import DifficultyAdjustmentsRepository from '../repositories/DifficultyAdjustmentsRepository';
import PricesRepository from '../repositories/PricesRepository';
import priceUpdater from '../tasks/price-updater';
import { Block } from 'bitcoinjs-lib';
class Blocks {
private blocks: BlockExtended[] = [];
@@ -37,7 +33,6 @@ class Blocks {
private lastDifficultyAdjustmentTime = 0;
private previousDifficultyRetarget = 0;
private newBlockCallbacks: ((block: BlockExtended, txIds: string[], transactions: TransactionExtended[]) => void)[] = [];
private newAsyncBlockCallbacks: ((block: BlockExtended, txIds: string[], transactions: TransactionExtended[]) => Promise<void>)[] = [];
constructor() { }
@@ -61,10 +56,6 @@ class Blocks {
this.newBlockCallbacks.push(fn);
}
public setNewAsyncBlockCallback(fn: (block: BlockExtended, txIds: string[], transactions: TransactionExtended[]) => Promise<void>) {
this.newAsyncBlockCallbacks.push(fn);
}
/**
* Return the list of transaction for a block
* @param blockHash
@@ -138,7 +129,7 @@ class Blocks {
const stripped = block.tx.map((tx) => {
return {
txid: tx.txid,
vsize: tx.weight / 4,
vsize: tx.vsize,
fee: tx.fee ? Math.round(tx.fee * 100000000) : 0,
value: Math.round(tx.vout.reduce((acc, vout) => acc + (vout.value ? vout.value : 0), 0) * 100000000)
};
@@ -195,18 +186,14 @@ class Blocks {
if (!pool) { // We should never have this situation in practise
logger.warn(`Cannot assign pool to block ${blockExtended.height} and 'unknown' pool does not exist. ` +
`Check your "pools" table entries`);
} else {
blockExtended.extras.pool = {
id: pool.id,
name: pool.name,
slug: pool.slug,
};
return blockExtended;
}
const auditScore = await BlocksAuditsRepository.$getBlockAuditScore(block.id);
if (auditScore != null) {
blockExtended.extras.matchRate = auditScore.matchRate;
}
blockExtended.extras.pool = {
id: pool.id,
name: pool.name,
slug: pool.slug,
};
}
return blockExtended;
@@ -263,7 +250,7 @@ class Blocks {
/**
* [INDEXING] Index all blocks summaries for the block txs visualization
*/
public async $generateBlocksSummariesDatabase(): Promise<void> {
public async $generateBlocksSummariesDatabase() {
if (Common.blocksSummariesIndexingEnabled() === false) {
return;
}
@@ -319,57 +306,6 @@ class Blocks {
}
}
/**
* [INDEXING] Index transaction CPFP data for all blocks
*/
public async $generateCPFPDatabase(): Promise<void> {
if (Common.cpfpIndexingEnabled() === false) {
return;
}
try {
// Get all indexed block hash
const unindexedBlocks = await blocksRepository.$getCPFPUnindexedBlocks();
if (!unindexedBlocks?.length) {
return;
}
// Logging
let count = 0;
let countThisRun = 0;
let timer = new Date().getTime() / 1000;
const startedAt = new Date().getTime() / 1000;
for (const block of unindexedBlocks) {
// Logging
const elapsedSeconds = Math.max(1, new Date().getTime() / 1000 - timer);
if (elapsedSeconds > 5) {
const runningFor = Math.max(1, Math.round((new Date().getTime() / 1000) - startedAt));
const blockPerSeconds = Math.max(1, countThisRun / elapsedSeconds);
const progress = Math.round(count / unindexedBlocks.length * 10000) / 100;
logger.debug(`Indexing cpfp clusters for #${block.height} | ~${blockPerSeconds.toFixed(2)} blocks/sec | total: ${count}/${unindexedBlocks.length} (${progress}%) | elapsed: ${runningFor} seconds`);
timer = new Date().getTime() / 1000;
countThisRun = 0;
}
await this.$indexCPFP(block.hash, block.height); // Calculate and save CPFP data for transactions in this block
// Logging
count++;
countThisRun++;
}
if (count > 0) {
logger.notice(`CPFP indexing completed: indexed ${count} blocks`);
} else {
logger.debug(`CPFP indexing completed: indexed ${count} blocks`);
}
} catch (e) {
logger.err(`CPFP indexing failed. Trying again in 10 seconds. Reason: ${(e instanceof Error ? e.message : e)}`);
throw e;
}
}
/**
* [INDEXING] Index all blocks metadata for the mining dashboard
*/
@@ -413,7 +349,7 @@ class Blocks {
}
++indexedThisRun;
++totalIndexed;
const elapsedSeconds = Math.max(1, new Date().getTime() / 1000 - timer);
const elapsedSeconds = Math.max(1, Math.round((new Date().getTime() / 1000) - timer));
if (elapsedSeconds > 5 || blockHeight === lastBlockToIndex) {
const runningFor = Math.max(1, Math.round((new Date().getTime() / 1000) - startedAt));
const blockPerSeconds = Math.max(1, indexedThisRun / elapsedSeconds);
@@ -503,9 +439,6 @@ class Blocks {
const blockExtended: BlockExtended = await this.$getBlockExtended(block, transactions);
const blockSummary: BlockSummary = this.summarizeBlock(verboseBlock);
// start async callbacks
const callbackPromises = this.newAsyncBlockCallbacks.map((cb) => cb(blockExtended, txIds, transactions));
if (Common.indexingEnabled()) {
if (!fastForwarded) {
const lastBlock = await blocksRepository.$getBlockByHeight(blockExtended.height - 1);
@@ -515,13 +448,9 @@ class Blocks {
await BlocksRepository.$deleteBlocksFrom(lastBlock['height'] - 10);
await HashratesRepository.$deleteLastEntries();
await BlocksSummariesRepository.$deleteBlocksFrom(lastBlock['height'] - 10);
await cpfpRepository.$deleteClustersFrom(lastBlock['height'] - 10);
for (let i = 10; i >= 0; --i) {
const newBlock = await this.$indexBlock(lastBlock['height'] - i);
await this.$getStrippedBlockTransactions(newBlock.id, true, true);
if (config.MEMPOOL.TRANSACTION_INDEXING) {
await this.$indexCPFP(newBlock.id, lastBlock['height'] - i);
}
}
await mining.$indexDifficultyAdjustments();
await DifficultyAdjustmentsRepository.$deleteLastAdjustment();
@@ -547,9 +476,6 @@ class Blocks {
if (Common.blocksSummariesIndexingEnabled() === true) {
await this.$getStrippedBlockTransactions(blockExtended.id, true);
}
if (config.MEMPOOL.TRANSACTION_INDEXING) {
this.$indexCPFP(blockExtended.id, this.currentBlockHeight);
}
}
}
@@ -583,9 +509,6 @@ class Blocks {
if (!memPool.hasPriority()) {
diskCache.$saveCacheToDisk();
}
// wait for pending async callbacks to finish
await Promise.all(callbackPromises);
}
}
@@ -651,7 +574,7 @@ class Blocks {
if (skipMemoryCache === false) {
// Check the memory cache
const cachedSummary = this.getBlockSummaries().find((b) => b.id === hash);
if (cachedSummary?.transactions?.length) {
if (cachedSummary) {
return cachedSummary.transactions;
}
}
@@ -659,7 +582,7 @@ class Blocks {
// Check if it's indexed in db
if (skipDBLookup === false && Common.blocksSummariesIndexingEnabled() === true) {
const indexedSummary = await BlocksSummariesRepository.$getByBlockId(hash);
if (indexedSummary !== undefined && indexedSummary?.transactions?.length) {
if (indexedSummary !== undefined) {
return indexedSummary.transactions;
}
}
@@ -712,22 +635,6 @@ class Blocks {
return returnBlocks;
}
public async $getBlockAuditSummary(hash: string): Promise<any> {
let summary;
if (['mainnet', 'testnet', 'signet'].includes(config.MEMPOOL.NETWORK)) {
summary = await BlocksAuditsRepository.$getBlockAudit(hash);
}
// fallback to non-audited transaction summary
if (!summary?.transactions?.length) {
const strippedTransactions = await this.$getStrippedBlockTransactions(hash);
summary = {
transactions: strippedTransactions
};
}
return summary;
}
public getLastDifficultyAdjustmentTime(): number {
return this.lastDifficultyAdjustmentTime;
}
@@ -739,62 +646,6 @@ class Blocks {
public getCurrentBlockHeight(): number {
return this.currentBlockHeight;
}
public async $indexCPFP(hash: string, height: number): Promise<void> {
let transactions;
if (false/*Common.blocksSummariesIndexingEnabled()*/) {
transactions = await this.$getStrippedBlockTransactions(hash);
const rawBlock = await bitcoinApi.$getRawBlock(hash);
const block = Block.fromBuffer(rawBlock);
const txMap = {};
for (const tx of block.transactions || []) {
txMap[tx.getId()] = tx;
}
for (const tx of transactions) {
if (txMap[tx.txid]?.ins) {
tx.vin = txMap[tx.txid].ins.map(vin => {
return {
txid: vin.hash
};
});
}
}
} else {
const block = await bitcoinClient.getBlock(hash, 2);
transactions = block.tx.map(tx => {
tx.vsize = tx.weight / 4;
return tx;
});
}
let cluster: TransactionStripped[] = [];
let ancestors: { [txid: string]: boolean } = {};
for (let i = transactions.length - 1; i >= 0; i--) {
const tx = transactions[i];
if (!ancestors[tx.txid]) {
let totalFee = 0;
let totalVSize = 0;
cluster.forEach(tx => {
totalFee += tx?.fee || 0;
totalVSize += tx.vsize;
});
const effectiveFeePerVsize = (totalFee * 100_000_000) / totalVSize;
if (cluster.length > 1) {
await cpfpRepository.$saveCluster(height, cluster.map(tx => { return { txid: tx.txid, weight: tx.vsize * 4, fee: (tx.fee || 0) * 100_000_000 }; }), effectiveFeePerVsize);
for (const tx of cluster) {
await transactionRepository.$setCluster(tx.txid, cluster[0].txid);
}
}
cluster = [];
ancestors = {};
}
cluster.push(tx);
tx.vin.forEach(vin => {
ancestors[vin.txid] = true;
});
}
await blocksRepository.$setCPFPIndexed(hash);
}
}
export default new Blocks();
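
In the CPFP indexing pass above, a whole ancestor cluster is priced by a single effective fee rate: total fees over total vsize. A minimal restatement of that calculation (the `clusterEffectiveFeePerVsize` helper is ours; fees are assumed to be in BTC, as in verbose `getblock` output, hence the 100_000_000 factor):

```typescript
// Minimal sketch of the cluster fee-rate math from $indexCPFP above
// (helper name is ours; fees assumed to be denominated in BTC).
interface StrippedTx { txid: string; fee: number; vsize: number; }

function clusterEffectiveFeePerVsize(cluster: StrippedTx[]): number {
  const totalFee = cluster.reduce((sum, tx) => sum + (tx.fee || 0), 0); // BTC
  const totalVSize = cluster.reduce((sum, tx) => sum + tx.vsize, 0);    // vB
  return (totalFee * 100_000_000) / totalVSize;                         // sat/vB
}

// A zero-fee parent lifted by its child: 0 BTC + 0.0001 BTC over 300 vB
// gives both transactions an effective rate of ~33.3 sat/vB.
clusterEffectiveFeePerVsize([
  { txid: 'parent', fee: 0,      vsize: 150 },
  { txid: 'child',  fee: 0.0001, vsize: 150 },
]);
```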

View File

@@ -1,7 +1,5 @@
import { CpfpInfo, TransactionExtended, TransactionStripped } from '../mempool.interfaces';
import config from '../config';
import { NodeSocket } from '../repositories/NodesSocketsRepository';
import { isIP } from 'net';
export class Common {
static nativeAssetId = config.MEMPOOL.NETWORK === 'liquidtestnet' ?
'144c654344aa716d6f3abcc1ca90e5641e4e2a7f633bc09fe3baf64585819a49'
@@ -187,13 +185,6 @@ export class Common {
);
}
static cpfpIndexingEnabled(): boolean {
return (
Common.indexingEnabled() &&
config.MEMPOOL.TRANSACTION_INDEXING === true
);
}
static setDateMidnight(date: Date): void {
date.setUTCHours(0);
date.setUTCMinutes(0);
@@ -214,10 +205,6 @@ export class Common {
/** Decodes a channel id returned by lnd as uint64 to a short channel id */
static channelIntegerIdToShortId(id: string): string {
if (id.indexOf('/') !== -1) {
id = id.slice(0, -2);
}
if (id.indexOf('x') !== -1) { // Already a short id
return id;
}
@@ -234,76 +221,4 @@ export class Common {
const d = new Date((date || 0) * 1000);
return d.toISOString().split('T')[0] + ' ' + d.toTimeString().split(' ')[0];
}
static findSocketNetwork(addr: string): {network: string | null, url: string} {
let network: string | null = null;
let url = addr.split('://')[1];
if (!url) {
return {
network: null,
url: addr,
};
}
if (addr.indexOf('onion') !== -1) {
if (url.split('.')[0].length >= 56) {
network = 'torv3';
} else {
network = 'torv2';
}
} else if (addr.indexOf('i2p') !== -1) {
network = 'i2p';
} else if (addr.indexOf('ipv4') !== -1) {
const ipv = isIP(url.split(':')[0]);
if (ipv === 4) {
network = 'ipv4';
} else {
return {
network: null,
url: addr,
};
}
} else if (addr.indexOf('ipv6') !== -1) {
url = url.split('[')[1].split(']')[0];
const ipv = isIP(url);
if (ipv === 6) {
const parts = addr.split(':');
network = 'ipv6';
url = `[${url}]:${parts[parts.length - 1]}`;
} else {
return {
network: null,
url: addr,
};
}
} else {
return {
network: null,
url: addr,
};
}
return {
network: network,
url: url,
};
}
static formatSocket(publicKey: string, socket: {network: string, addr: string}): NodeSocket {
if (config.LIGHTNING.BACKEND === 'cln') {
return {
publicKey: publicKey,
network: socket.network,
addr: socket.addr,
};
} else /* if (config.LIGHTNING.BACKEND === 'lnd') */ {
const formatted = this.findSocketNetwork(socket.addr);
return {
publicKey: publicKey,
network: formatted.network,
addr: formatted.url,
};
}
}
}
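
channelIntegerIdToShortId above converts lnd's uint64 channel ids into the `HEIGHTxINDEXxOUTPUT` short form. The packing itself (3 bytes of block height, 3 bytes of transaction index, 2 bytes of output index) is the standard Lightning convention rather than something defined in this diff; a worked decode under that assumption:

```typescript
// Standard Lightning short-channel-id packing (assumed, not from this diff):
// bits 40-63 = block height, bits 16-39 = tx index, bits 0-15 = output index.
function decodeShortChannelId(id: bigint): string {
  const height = id >> 40n;
  const txIndex = (id >> 16n) & 0xffffffn;
  const outputIndex = id & 0xffffn;
  return `${height}x${txIndex}x${outputIndex}`;
}

const packed = (700000n << 40n) | (1000n << 16n) | 1n;
decodeShortChannelId(packed); // '700000x1000x1'
```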

View File

@@ -4,8 +4,8 @@ import logger from '../logger';
import { Common } from './common';
class DatabaseMigration {
private static currentVersion = 49;
private queryTimeout = 3600_000;
private static currentVersion = 36;
private queryTimeout = 120000;
private statisticsAddedIndexed = false;
private uniqueLogs: string[] = [];
@@ -107,22 +107,18 @@ class DatabaseMigration {
await this.$executeQuery(this.getCreateStatisticsQuery(), await this.$checkIfTableExists('statistics'));
if (databaseSchemaVersion < 2 && this.statisticsAddedIndexed === false) {
await this.$executeQuery(`CREATE INDEX added ON statistics (added);`);
await this.updateToSchemaVersion(2);
}
if (databaseSchemaVersion < 3) {
await this.$executeQuery(this.getCreatePoolsTableQuery(), await this.$checkIfTableExists('pools'));
await this.updateToSchemaVersion(3);
}
if (databaseSchemaVersion < 4) {
await this.$executeQuery('DROP table IF EXISTS blocks;');
await this.$executeQuery(this.getCreateBlocksTableQuery(), await this.$checkIfTableExists('blocks'));
await this.updateToSchemaVersion(4);
}
if (databaseSchemaVersion < 5 && isBitcoin === true) {
this.uniqueLog(logger.notice, this.blocksTruncatedMessage);
await this.$executeQuery('TRUNCATE blocks;'); // Need to re-index
await this.$executeQuery('ALTER TABLE blocks ADD `reward` double unsigned NOT NULL DEFAULT "0"');
await this.updateToSchemaVersion(5);
}
if (databaseSchemaVersion < 6 && isBitcoin === true) {
@@ -145,13 +141,11 @@ class DatabaseMigration {
await this.$executeQuery('ALTER TABLE blocks ADD `nonce` bigint unsigned NOT NULL DEFAULT "0"');
await this.$executeQuery('ALTER TABLE blocks ADD `merkle_root` varchar(65) NOT NULL DEFAULT ""');
await this.$executeQuery('ALTER TABLE blocks ADD `previous_block_hash` varchar(65) NULL');
await this.updateToSchemaVersion(6);
}
if (databaseSchemaVersion < 7 && isBitcoin === true) {
await this.$executeQuery('DROP table IF EXISTS hashrates;');
await this.$executeQuery(this.getCreateDailyStatsTableQuery(), await this.$checkIfTableExists('hashrates'));
await this.updateToSchemaVersion(7);
}
if (databaseSchemaVersion < 8 && isBitcoin === true) {
@@ -161,7 +155,6 @@ class DatabaseMigration {
await this.$executeQuery('ALTER TABLE `hashrates` ADD `id` int NOT NULL AUTO_INCREMENT PRIMARY KEY FIRST');
await this.$executeQuery('ALTER TABLE `hashrates` ADD `share` float NOT NULL DEFAULT "0"');
await this.$executeQuery('ALTER TABLE `hashrates` ADD `type` enum("daily", "weekly") DEFAULT "daily"');
await this.updateToSchemaVersion(8);
}
if (databaseSchemaVersion < 9 && isBitcoin === true) {
@@ -169,12 +162,10 @@ class DatabaseMigration {
await this.$executeQuery('TRUNCATE hashrates;'); // Need to re-index
await this.$executeQuery('ALTER TABLE `state` CHANGE `name` `name` varchar(100)');
await this.$executeQuery('ALTER TABLE `hashrates` ADD UNIQUE `hashrate_timestamp_pool_id` (`hashrate_timestamp`, `pool_id`)');
await this.updateToSchemaVersion(9);
}
if (databaseSchemaVersion < 10 && isBitcoin === true) {
await this.$executeQuery('ALTER TABLE `blocks` ADD INDEX `blockTimestamp` (`blockTimestamp`)');
await this.updateToSchemaVersion(10);
}
if (databaseSchemaVersion < 11 && isBitcoin === true) {
@@ -187,13 +178,11 @@ class DatabaseMigration {
await this.$executeQuery('ALTER TABLE blocks MODIFY `reward` BIGINT UNSIGNED NOT NULL DEFAULT "0"');
await this.$executeQuery('ALTER TABLE blocks MODIFY `median_fee` INT UNSIGNED NOT NULL DEFAULT "0"');
await this.$executeQuery('ALTER TABLE blocks MODIFY `fees` INT UNSIGNED NOT NULL DEFAULT "0"');
await this.updateToSchemaVersion(11);
}
if (databaseSchemaVersion < 12 && isBitcoin === true) {
// No need to re-index because the new data type can contain larger values
await this.$executeQuery('ALTER TABLE blocks MODIFY `fees` BIGINT UNSIGNED NOT NULL DEFAULT "0"');
await this.updateToSchemaVersion(12);
}
if (databaseSchemaVersion < 13 && isBitcoin === true) {
@@ -201,7 +190,6 @@ class DatabaseMigration {
await this.$executeQuery('ALTER TABLE blocks MODIFY `median_fee` BIGINT UNSIGNED NOT NULL DEFAULT "0"');
await this.$executeQuery('ALTER TABLE blocks MODIFY `avg_fee` BIGINT UNSIGNED NOT NULL DEFAULT "0"');
await this.$executeQuery('ALTER TABLE blocks MODIFY `avg_fee_rate` BIGINT UNSIGNED NOT NULL DEFAULT "0"');
await this.updateToSchemaVersion(13);
}
if (databaseSchemaVersion < 14 && isBitcoin === true) {
@@ -209,45 +197,37 @@ class DatabaseMigration {
await this.$executeQuery('TRUNCATE hashrates;'); // Need to re-index
await this.$executeQuery('ALTER TABLE `hashrates` DROP FOREIGN KEY `hashrates_ibfk_1`');
await this.$executeQuery('ALTER TABLE `hashrates` MODIFY `pool_id` SMALLINT UNSIGNED NOT NULL DEFAULT "0"');
await this.updateToSchemaVersion(14);
}
if (databaseSchemaVersion < 16 && isBitcoin === true) {
this.uniqueLog(logger.notice, this.hashratesTruncatedMessage);
await this.$executeQuery('TRUNCATE hashrates;'); // Need to re-index because we changed timestamps
await this.updateToSchemaVersion(16);
}
if (databaseSchemaVersion < 17 && isBitcoin === true) {
await this.$executeQuery('ALTER TABLE `pools` ADD `slug` CHAR(50) NULL');
await this.updateToSchemaVersion(17);
}
if (databaseSchemaVersion < 18 && isBitcoin === true) {
await this.$executeQuery('ALTER TABLE `blocks` ADD INDEX `hash` (`hash`);');
await this.updateToSchemaVersion(18);
}
if (databaseSchemaVersion < 19) {
await this.$executeQuery(this.getCreateRatesTableQuery(), await this.$checkIfTableExists('rates'));
await this.updateToSchemaVersion(19);
}
if (databaseSchemaVersion < 20 && isBitcoin === true) {
await this.$executeQuery(this.getCreateBlocksSummariesTableQuery(), await this.$checkIfTableExists('blocks_summaries'));
await this.updateToSchemaVersion(20);
}
if (databaseSchemaVersion < 21) {
await this.$executeQuery('DROP TABLE IF EXISTS `rates`');
await this.$executeQuery(this.getCreatePricesTableQuery(), await this.$checkIfTableExists('prices'));
await this.updateToSchemaVersion(21);
}
if (databaseSchemaVersion < 22 && isBitcoin === true) {
await this.$executeQuery('DROP TABLE IF EXISTS `difficulty_adjustments`');
await this.$executeQuery(this.getCreateDifficultyAdjustmentsTableQuery(), await this.$checkIfTableExists('difficulty_adjustments'));
await this.updateToSchemaVersion(22);
}
if (databaseSchemaVersion < 23) {
@@ -260,21 +240,19 @@ class DatabaseMigration {
await this.$executeQuery('ALTER TABLE `prices` ADD `CHF` float DEFAULT "0"');
await this.$executeQuery('ALTER TABLE `prices` ADD `AUD` float DEFAULT "0"');
await this.$executeQuery('ALTER TABLE `prices` ADD `JPY` float DEFAULT "0"');
await this.updateToSchemaVersion(23);
}
if (databaseSchemaVersion < 24 && isBitcoin == true) {
await this.$executeQuery('DROP TABLE IF EXISTS `blocks_audits`');
await this.$executeQuery(this.getCreateBlocksAuditsTableQuery(), await this.$checkIfTableExists('blocks_audits'));
await this.updateToSchemaVersion(24);
}
if (databaseSchemaVersion < 25 && isBitcoin === true) {
await this.$executeQuery(`INSERT INTO state VALUES('last_node_stats', 0, '1970-01-01');`);
await this.$executeQuery(this.getCreateLightningStatisticsQuery(), await this.$checkIfTableExists('lightning_stats'));
await this.$executeQuery(this.getCreateNodesQuery(), await this.$checkIfTableExists('nodes'));
await this.$executeQuery(this.getCreateChannelsQuery(), await this.$checkIfTableExists('channels'));
await this.$executeQuery(this.getCreateNodesStatsQuery(), await this.$checkIfTableExists('node_stats'));
await this.updateToSchemaVersion(25);
}
if (databaseSchemaVersion < 26 && isBitcoin === true) {
@@ -285,7 +263,6 @@ class DatabaseMigration {
await this.$executeQuery('ALTER TABLE `lightning_stats` ADD tor_nodes int(11) NOT NULL DEFAULT "0"');
await this.$executeQuery('ALTER TABLE `lightning_stats` ADD clearnet_nodes int(11) NOT NULL DEFAULT "0"');
await this.$executeQuery('ALTER TABLE `lightning_stats` ADD unannounced_nodes int(11) NOT NULL DEFAULT "0"');
await this.updateToSchemaVersion(26);
}
if (databaseSchemaVersion < 27 && isBitcoin === true) {
@@ -295,7 +272,6 @@ class DatabaseMigration {
await this.$executeQuery('ALTER TABLE `lightning_stats` ADD med_capacity bigint(20) unsigned NOT NULL DEFAULT "0"');
await this.$executeQuery('ALTER TABLE `lightning_stats` ADD med_fee_rate int(11) unsigned NOT NULL DEFAULT "0"');
await this.$executeQuery('ALTER TABLE `lightning_stats` ADD med_base_fee_mtokens bigint(20) unsigned NOT NULL DEFAULT "0"');
await this.updateToSchemaVersion(27);
}
if (databaseSchemaVersion < 28 && isBitcoin === true) {
@@ -305,7 +281,6 @@ class DatabaseMigration {
await this.$executeQuery(`TRUNCATE lightning_stats`);
await this.$executeQuery(`TRUNCATE node_stats`);
await this.$executeQuery(`ALTER TABLE lightning_stats MODIFY added DATE`);
await this.updateToSchemaVersion(28);
}
if (databaseSchemaVersion < 29 && isBitcoin === true) {
@@ -317,130 +292,37 @@ class DatabaseMigration {
await this.$executeQuery('ALTER TABLE `nodes` ADD subdivision_id int(11) unsigned NULL DEFAULT NULL');
await this.$executeQuery('ALTER TABLE `nodes` ADD longitude double NULL DEFAULT NULL');
await this.$executeQuery('ALTER TABLE `nodes` ADD latitude double NULL DEFAULT NULL');
await this.updateToSchemaVersion(29);
}
if (databaseSchemaVersion < 30 && isBitcoin === true) {
await this.$executeQuery('ALTER TABLE `geo_names` CHANGE `type` `type` enum("city","country","division","continent","as_organization") NOT NULL');
await this.updateToSchemaVersion(30);
}
if (databaseSchemaVersion < 31 && isBitcoin == true) { // Link blocks to prices
await this.$executeQuery('ALTER TABLE `prices` ADD `id` int NULL AUTO_INCREMENT UNIQUE');
await this.$executeQuery('DROP TABLE IF EXISTS `blocks_prices`');
await this.$executeQuery(this.getCreateBlocksPricesTableQuery(), await this.$checkIfTableExists('blocks_prices'));
await this.updateToSchemaVersion(31);
}
if (databaseSchemaVersion < 32 && isBitcoin == true) {
await this.$executeQuery('ALTER TABLE `blocks_summaries` ADD `template` JSON DEFAULT "[]"');
await this.updateToSchemaVersion(32);
}
if (databaseSchemaVersion < 33 && isBitcoin == true) {
await this.$executeQuery('ALTER TABLE `geo_names` CHANGE `type` `type` enum("city","country","division","continent","as_organization", "country_iso_code") NOT NULL');
await this.updateToSchemaVersion(33);
}
if (databaseSchemaVersion < 34 && isBitcoin == true) {
await this.$executeQuery('ALTER TABLE `lightning_stats` ADD clearnet_tor_nodes int(11) NOT NULL DEFAULT "0"');
await this.updateToSchemaVersion(34);
}
if (databaseSchemaVersion < 35 && isBitcoin == true) {
await this.$executeQuery('DELETE from `lightning_stats` WHERE added > "2021-09-19"');
await this.$executeQuery('ALTER TABLE `lightning_stats` ADD CONSTRAINT added_unique UNIQUE (added);');
await this.updateToSchemaVersion(35);
}
if (databaseSchemaVersion < 36 && isBitcoin == true) {
await this.$executeQuery('ALTER TABLE `nodes` ADD status TINYINT NOT NULL DEFAULT "1"');
await this.updateToSchemaVersion(36);
}
if (databaseSchemaVersion < 37 && isBitcoin == true) {
await this.$executeQuery(this.getCreateLNNodesSocketsTableQuery(), await this.$checkIfTableExists('nodes_sockets'));
await this.updateToSchemaVersion(37);
}
if (databaseSchemaVersion < 38 && isBitcoin == true) {
if (config.LIGHTNING.ENABLED) {
this.uniqueLog(logger.notice, `'lightning_stats' and 'node_stats' tables have been truncated.`);
}
await this.$executeQuery(`TRUNCATE lightning_stats`);
await this.$executeQuery(`TRUNCATE node_stats`);
await this.$executeQuery('ALTER TABLE `lightning_stats` CHANGE `added` `added` timestamp NULL');
await this.$executeQuery('ALTER TABLE `node_stats` CHANGE `added` `added` timestamp NULL');
await this.updateToSchemaVersion(38);
}
if (databaseSchemaVersion < 39 && isBitcoin === true) {
await this.$executeQuery('ALTER TABLE `nodes` ADD alias_search TEXT NULL DEFAULT NULL AFTER `alias`');
await this.$executeQuery('ALTER TABLE nodes ADD FULLTEXT(alias_search)');
await this.updateToSchemaVersion(39);
}
if (databaseSchemaVersion < 40 && isBitcoin === true) {
await this.$executeQuery('ALTER TABLE `nodes` ADD capacity bigint(20) unsigned DEFAULT NULL');
await this.$executeQuery('ALTER TABLE `nodes` ADD channels int(11) unsigned DEFAULT NULL');
await this.$executeQuery('ALTER TABLE `nodes` ADD INDEX `capacity` (`capacity`);');
await this.updateToSchemaVersion(40);
}
if (databaseSchemaVersion < 41 && isBitcoin === true) {
await this.$executeQuery('UPDATE channels SET closing_reason = NULL WHERE closing_reason = 1');
await this.updateToSchemaVersion(41);
}
if (databaseSchemaVersion < 42 && isBitcoin === true) {
await this.$executeQuery('ALTER TABLE `channels` ADD closing_resolved tinyint(1) DEFAULT 0');
await this.updateToSchemaVersion(42);
}
if (databaseSchemaVersion < 43 && isBitcoin === true) {
await this.$executeQuery(this.getCreateLNNodeRecordsTableQuery(), await this.$checkIfTableExists('nodes_records'));
await this.updateToSchemaVersion(43);
}
if (databaseSchemaVersion < 44 && isBitcoin === true) {
await this.$executeQuery('UPDATE blocks_summaries SET template = NULL');
await this.updateToSchemaVersion(44);
}
if (databaseSchemaVersion < 45 && isBitcoin === true) {
await this.$executeQuery('ALTER TABLE `blocks_audits` ADD fresh_txs JSON DEFAULT "[]"');
await this.updateToSchemaVersion(45);
}
if (databaseSchemaVersion < 46) {
await this.$executeQuery(`ALTER TABLE blocks MODIFY blockTimestamp timestamp NOT NULL DEFAULT 0`);
await this.updateToSchemaVersion(46);
}
if (databaseSchemaVersion < 47) {
await this.$executeQuery('ALTER TABLE `blocks` ADD cpfp_indexed tinyint(1) DEFAULT 0');
await this.$executeQuery(this.getCreateCPFPTableQuery(), await this.$checkIfTableExists('cpfp_clusters'));
await this.$executeQuery(this.getCreateTransactionsTableQuery(), await this.$checkIfTableExists('transactions'));
await this.updateToSchemaVersion(47);
}
if (databaseSchemaVersion < 48 && isBitcoin === true) {
await this.$executeQuery('ALTER TABLE `channels` ADD source_checked tinyint(1) DEFAULT 0');
await this.$executeQuery('ALTER TABLE `channels` ADD closing_fee bigint(20) unsigned DEFAULT 0');
await this.$executeQuery('ALTER TABLE `channels` ADD node1_funding_balance bigint(20) unsigned DEFAULT 0');
await this.$executeQuery('ALTER TABLE `channels` ADD node2_funding_balance bigint(20) unsigned DEFAULT 0');
await this.$executeQuery('ALTER TABLE `channels` ADD node1_closing_balance bigint(20) unsigned DEFAULT 0');
await this.$executeQuery('ALTER TABLE `channels` ADD node2_closing_balance bigint(20) unsigned DEFAULT 0');
await this.$executeQuery('ALTER TABLE `channels` ADD funding_ratio float unsigned DEFAULT NULL');
await this.$executeQuery('ALTER TABLE `channels` ADD closed_by varchar(66) DEFAULT NULL');
await this.$executeQuery('ALTER TABLE `channels` ADD single_funded tinyint(1) DEFAULT 0');
await this.$executeQuery('ALTER TABLE `channels` ADD outputs JSON DEFAULT "[]"');
await this.updateToSchemaVersion(48);
}
if (databaseSchemaVersion < 49 && isBitcoin === true) {
await this.$executeQuery('TRUNCATE TABLE `blocks_audits`');
await this.updateToSchemaVersion(49);
}
}
@@ -579,10 +461,6 @@ class DatabaseMigration {
return `UPDATE state SET number = ${DatabaseMigration.currentVersion} WHERE name = 'schema_version';`;
}
private async updateToSchemaVersion(version): Promise<void> {
await this.$executeQuery(`UPDATE state SET number = ${version} WHERE name = 'schema_version';`);
}
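Worth noting about the block above: every migration step is guarded by a schema-version check and ends by persisting the new version, so an interrupted or re-run migration resumes at the first step whose guard still passes. A minimal sketch of that pattern (table and helper names are illustrative, not the module's):
async function migrate(getVersion: () => Promise<number>, run: (sql: string) => Promise<void>): Promise<void> {
  const steps: { version: number; sql: string }[] = [
    { version: 1, sql: 'ALTER TABLE example ADD note varchar(100) NULL' },
    { version: 2, sql: 'ALTER TABLE example ADD INDEX note_idx (note)' },
  ];
  let current = await getVersion(); // e.g. SELECT number FROM state WHERE name = 'schema_version'
  for (const step of steps) {
    if (current < step.version) {
      await run(step.sql); // each step must be safe to apply exactly once
      await run(`UPDATE state SET number = ${step.version} WHERE name = 'schema_version'`);
      current = step.version;
    }
  }
}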
/**
* Print current database version
*/
@@ -859,7 +737,7 @@ class DatabaseMigration {
names text DEFAULT NULL,
UNIQUE KEY id (id,type),
KEY id_2 (id)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`
}
private getCreateBlocksPricesTableQuery(): string {
@@ -871,48 +749,6 @@ class DatabaseMigration {
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
}
private getCreateLNNodesSocketsTableQuery(): string {
return `CREATE TABLE IF NOT EXISTS nodes_sockets (
public_key varchar(66) NOT NULL,
socket varchar(100) NOT NULL,
type enum('ipv4', 'ipv6', 'torv2', 'torv3', 'i2p', 'dns', 'websocket') NULL,
UNIQUE KEY public_key_socket (public_key, socket),
INDEX (public_key)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
}
private getCreateLNNodeRecordsTableQuery(): string {
return `CREATE TABLE IF NOT EXISTS nodes_records (
public_key varchar(66) NOT NULL,
type int(10) unsigned NOT NULL,
payload blob NOT NULL,
UNIQUE KEY public_key_type (public_key, type),
INDEX (public_key),
FOREIGN KEY (public_key)
REFERENCES nodes (public_key)
ON DELETE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
}
private getCreateCPFPTableQuery(): string {
return `CREATE TABLE IF NOT EXISTS cpfp_clusters (
root varchar(65) NOT NULL,
height int(10) NOT NULL,
txs JSON DEFAULT NULL,
fee_rate double unsigned NOT NULL,
PRIMARY KEY (root)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
}
private getCreateTransactionsTableQuery(): string {
return `CREATE TABLE IF NOT EXISTS transactions (
txid varchar(65) NOT NULL,
cluster varchar(65) DEFAULT NULL,
PRIMARY KEY (txid),
FOREIGN KEY (cluster) REFERENCES cpfp_clusters (root) ON DELETE SET NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
}
public async $truncateIndexedData(tables: string[]) {
const allowedTables = ['blocks', 'hashrates', 'prices'];


@@ -2,100 +2,65 @@ import config from '../config';
import { IDifficultyAdjustment } from '../mempool.interfaces';
import blocks from './blocks';
export interface DifficultyAdjustment {
progressPercent: number; // Percent: 0 to 100
difficultyChange: number; // Percent: -75 to 300
estimatedRetargetDate: number; // Unix time in ms
remainingBlocks: number; // Block count
remainingTime: number; // Duration of time in ms
previousRetarget: number; // Percent: -75 to 300
nextRetargetHeight: number; // Block Height
timeAvg: number; // Duration of time in ms
timeOffset: number; // (Testnet) Time since last block (cap @ 20min) in ms
}
export function calcDifficultyAdjustment(
DATime: number,
nowSeconds: number,
blockHeight: number,
previousRetarget: number,
network: string,
latestBlockTimestamp: number,
): DifficultyAdjustment {
const ESTIMATE_LAG_BLOCKS = 146; // For first 7.2% of epoch, don't estimate.
const EPOCH_BLOCK_LENGTH = 2016; // Bitcoin mainnet
const BLOCK_SECONDS_TARGET = 600; // Bitcoin mainnet
const TESTNET_MAX_BLOCK_SECONDS = 1200; // Bitcoin testnet
const diffSeconds = nowSeconds - DATime;
const blocksInEpoch = (blockHeight >= 0) ? blockHeight % EPOCH_BLOCK_LENGTH : 0;
const progressPercent = (blockHeight >= 0) ? blocksInEpoch / EPOCH_BLOCK_LENGTH * 100 : 100;
const remainingBlocks = EPOCH_BLOCK_LENGTH - blocksInEpoch;
const nextRetargetHeight = (blockHeight >= 0) ? blockHeight + remainingBlocks : 0;
let difficultyChange = 0;
let timeAvgSecs = BLOCK_SECONDS_TARGET;
// Only calculate the estimate once we have 7.2% of blocks in the current epoch
if (blocksInEpoch >= ESTIMATE_LAG_BLOCKS) {
timeAvgSecs = diffSeconds / blocksInEpoch;
difficultyChange = (BLOCK_SECONDS_TARGET / timeAvgSecs - 1) * 100;
// Max increase is x4 (+300%)
if (difficultyChange > 300) {
difficultyChange = 300;
}
// Max decrease is /4 (-75%)
if (difficultyChange < -75) {
difficultyChange = -75;
}
}
// Testnet difficulty is set to 1 after 20 minutes of no blocks,
// therefore the time between blocks will always be below 20 minutes (1200s).
let timeOffset = 0;
if (network === 'testnet') {
if (timeAvgSecs > TESTNET_MAX_BLOCK_SECONDS) {
timeAvgSecs = TESTNET_MAX_BLOCK_SECONDS;
}
const secondsSinceLastBlock = nowSeconds - latestBlockTimestamp;
if (secondsSinceLastBlock + timeAvgSecs > TESTNET_MAX_BLOCK_SECONDS) {
timeOffset = -Math.min(secondsSinceLastBlock, TESTNET_MAX_BLOCK_SECONDS) * 1000;
}
}
const timeAvg = Math.floor(timeAvgSecs * 1000);
const remainingTime = remainingBlocks * timeAvg;
const estimatedRetargetDate = remainingTime + nowSeconds * 1000;
return {
progressPercent,
difficultyChange,
estimatedRetargetDate,
remainingBlocks,
remainingTime,
previousRetarget,
nextRetargetHeight,
timeAvg,
timeOffset,
};
}
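To make the clamp concrete, here is a worked example with assumed numbers (not taken from live data): 1,008 blocks into an epoch at an average spacing of 560 seconds, the projected change is (600 / 560 − 1) × 100 ≈ +7.14%, comfortably inside the [−75, +300] range.
// Standalone sketch of the estimate computed by calcDifficultyAdjustment
const BLOCK_SECONDS_TARGET = 600;
const timeAvgSecs = 560; // assumed average block interval so far this epoch
const raw = (BLOCK_SECONDS_TARGET / timeAvgSecs - 1) * 100;
const clamped = Math.min(300, Math.max(-75, raw));
console.log(clamped.toFixed(2)); // "7.14"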
class DifficultyAdjustmentApi {
public getDifficultyAdjustment(): IDifficultyAdjustment | null {
constructor() { }
public getDifficultyAdjustment(): IDifficultyAdjustment {
const DATime = blocks.getLastDifficultyAdjustmentTime();
const previousRetarget = blocks.getPreviousDifficultyRetarget();
const blockHeight = blocks.getCurrentBlockHeight();
const blocksCache = blocks.getBlocks();
const latestBlock = blocksCache[blocksCache.length - 1];
if (!latestBlock) {
return null;
}
const nowSeconds = Math.floor(new Date().getTime() / 1000);
return calcDifficultyAdjustment(
DATime, nowSeconds, blockHeight, previousRetarget,
config.MEMPOOL.NETWORK, latestBlock.timestamp
);
const now = new Date().getTime() / 1000;
const diff = now - DATime;
const blocksInEpoch = blockHeight % 2016;
const progressPercent = (blocksInEpoch >= 0) ? blocksInEpoch / 2016 * 100 : 100;
const remainingBlocks = 2016 - blocksInEpoch;
const nextRetargetHeight = blockHeight + remainingBlocks;
let difficultyChange = 0;
if (remainingBlocks < 1870) {
if (blocksInEpoch > 0) {
difficultyChange = (600 / (diff / blocksInEpoch) - 1) * 100;
}
if (difficultyChange > 300) {
difficultyChange = 300;
}
if (difficultyChange < -75) {
difficultyChange = -75;
}
}
let timeAvgMins = blocksInEpoch && blocksInEpoch > 146 ? diff / blocksInEpoch / 60 : 10;
// Testnet difficulty is set to 1 after 20 minutes of no blocks,
// therefore the time between blocks will always be below 20 minutes (1200s).
let timeOffset = 0;
if (config.MEMPOOL.NETWORK === 'testnet') {
if (timeAvgMins > 20) {
timeAvgMins = 20;
}
if (now - latestBlock.timestamp + timeAvgMins * 60 > 1200) {
timeOffset = -Math.min(now - latestBlock.timestamp, 1200) * 1000;
}
}
const timeAvg = timeAvgMins * 60 * 1000;
const remainingTime = (remainingBlocks * timeAvg) + (now * 1000);
const estimatedRetargetDate = remainingTime + now;
return {
progressPercent,
difficultyChange,
estimatedRetargetDate,
remainingBlocks,
remainingTime,
previousRetarget,
nextRetargetHeight,
timeAvg,
timeOffset,
};
}
}
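A practical upside of the extracted pure function visible in this diff: it can be exercised without a blocks cache or database. A sketch of such a call (import path assumed):
import { calcDifficultyAdjustment } from './difficulty-adjustment';

const da = calcDifficultyAdjustment(
  1660000000, // last retarget timestamp (s)
  1660600000, // "now": 600,000 s later
  1001,       // 1001 blocks into the first epoch
  1.2,        // previous retarget, percent
  'mainnet',
  1660599400, // latest block timestamp (s); only used on testnet
);
console.log(da.difficultyChange.toFixed(2)); // "0.10" – blocks arriving just under target pace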


@@ -17,61 +17,32 @@ class ChannelsApi {
}
}
public async $getAllChannelsGeo(publicKey?: string, style?: string): Promise<any[]> {
public async $getAllChannelsGeo(publicKey?: string): Promise<any[]> {
try {
let select: string;
if (style === 'widget') {
select = `
nodes_1.latitude AS node1_latitude, nodes_1.longitude AS node1_longitude,
nodes_2.latitude AS node2_latitude, nodes_2.longitude AS node2_longitude
`;
} else {
select = `
nodes_1.public_key as node1_public_key, nodes_1.alias AS node1_alias,
nodes_1.latitude AS node1_latitude, nodes_1.longitude AS node1_longitude,
nodes_2.public_key as node2_public_key, nodes_2.alias AS node2_alias,
nodes_2.latitude AS node2_latitude, nodes_2.longitude AS node2_longitude
`;
}
const params: string[] = [];
let query = `SELECT ${select}
FROM channels
JOIN nodes AS nodes_1 on nodes_1.public_key = channels.node1_public_key
JOIN nodes AS nodes_2 on nodes_2.public_key = channels.node2_public_key
WHERE channels.status = 1
AND nodes_1.latitude IS NOT NULL AND nodes_1.longitude IS NOT NULL
AND nodes_2.latitude IS NOT NULL AND nodes_2.longitude IS NOT NULL
let query = `SELECT nodes_1.public_key as node1_public_key, nodes_1.alias AS node1_alias,
nodes_1.latitude AS node1_latitude, nodes_1.longitude AS node1_longitude,
nodes_2.public_key as node2_public_key, nodes_2.alias AS node2_alias,
nodes_2.latitude AS node2_latitude, nodes_2.longitude AS node2_longitude,
channels.capacity
FROM channels
JOIN nodes AS nodes_1 on nodes_1.public_key = channels.node1_public_key
JOIN nodes AS nodes_2 on nodes_2.public_key = channels.node2_public_key
WHERE nodes_1.latitude IS NOT NULL AND nodes_1.longitude IS NOT NULL
AND nodes_2.latitude IS NOT NULL AND nodes_2.longitude IS NOT NULL
`;
if (publicKey !== undefined) {
query += ' AND (nodes_1.public_key = ? OR nodes_2.public_key = ?)';
params.push(publicKey);
params.push(publicKey);
} else {
query += ` AND channels.capacity > 1000000
GROUP BY nodes_1.public_key, nodes_2.public_key
ORDER BY channels.capacity DESC
LIMIT 10000
`;
}
const [rows]: any = await DB.query(query, params);
return rows.map((row) => {
if (style === 'widget') {
return [
row.node1_longitude, row.node1_latitude,
row.node2_longitude, row.node2_latitude,
];
} else {
return [
row.node1_public_key, row.node1_alias,
row.node1_longitude, row.node1_latitude,
row.node2_public_key, row.node2_alias,
row.node2_longitude, row.node2_latitude,
];
}
});
return rows.map((row) => [
row.node1_public_key, row.node1_alias, row.node1_longitude, row.node1_latitude,
row.node2_public_key, row.node2_alias, row.node2_longitude, row.node2_latitude,
row.capacity]);
} catch (e) {
logger.err('$getAllChannelsGeo error: ' + (e instanceof Error ? e.message : e));
throw e;
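For reference, the two row shapes produced by the newer mapping above (type names are mine):
// 'widget' style: coordinates only, for the dashboard map
type WidgetGeoRow = [number, number, number, number]; // node1 lon, lat, node2 lon, lat
// default style: adds node identity for the full graph page
type FullGeoRow = [string, string, number, number, string, string, number, number];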
@@ -81,7 +52,7 @@ class ChannelsApi {
public async $searchChannelsById(search: string): Promise<any[]> {
try {
const searchStripped = search.replace('%', '') + '%';
const query = `SELECT id, short_id, capacity, status FROM channels WHERE id LIKE ? OR short_id LIKE ? LIMIT 10`;
const query = `SELECT id, short_id, capacity FROM channels WHERE id LIKE ? OR short_id LIKE ? LIMIT 10`;
const [rows]: any = await DB.query(query, [searchStripped, searchStripped]);
return rows;
} catch (e) {
@@ -90,14 +61,9 @@ class ChannelsApi {
}
}
public async $getChannelsByStatus(status: number | number[]): Promise<any[]> {
public async $getChannelsByStatus(status: number): Promise<any[]> {
try {
let query: string;
if (Array.isArray(status)) {
query = `SELECT * FROM channels WHERE status IN (${status.join(',')})`;
} else {
query = `SELECT * FROM channels WHERE status = ?`;
}
const query = `SELECT * FROM channels WHERE status = ?`;
const [rows]: any = await DB.query(query, [status]);
return rows;
} catch (e) {
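The array branch above splices the status values straight into the SQL, which is acceptable for internal callers passing hard-coded numeric codes. If the inputs ever stop being trusted, a fully parameterized variant is a small change; a sketch:
const statuses = [1, 2];
const placeholders = statuses.map(() => '?').join(',');
const [rows]: any = await DB.query(`SELECT * FROM channels WHERE status IN (${placeholders})`, statuses);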
@@ -117,32 +83,6 @@ class ChannelsApi {
}
}
public async $getUnresolvedClosedChannels(): Promise<any[]> {
try {
const query = `SELECT * FROM channels WHERE status = 2 AND closing_reason = 2 AND closing_resolved = 0 AND closing_transaction_id != ''`;
const [rows]: any = await DB.query(query);
return rows;
} catch (e) {
logger.err('$getUnresolvedClosedChannels error: ' + (e instanceof Error ? e.message : e));
throw e;
}
}
public async $getChannelsWithoutSourceChecked(): Promise<any[]> {
try {
const query = `
SELECT channels.*
FROM channels
WHERE channels.source_checked != 1
`;
const [rows]: any = await DB.query(query);
return rows;
} catch (e) {
logger.err('$getUnresolvedClosedChannels error: ' + (e instanceof Error ? e.message : e));
throw e;
}
}
public async $getChannelsWithoutCreatedDate(): Promise<any[]> {
try {
const query = `SELECT * FROM channels WHERE created IS NULL`;
@@ -256,14 +196,9 @@ class ChannelsApi {
public async $getChannelsByTransactionId(transactionIds: string[]): Promise<any[]> {
try {
const query = `
SELECT n1.alias AS alias_left, n2.alias AS alias_right, channels.*
FROM channels
LEFT JOIN nodes AS n1 ON n1.public_key = channels.node1_public_key
LEFT JOIN nodes AS n2 ON n2.public_key = channels.node2_public_key
WHERE channels.transaction_id IN ? OR channels.closing_transaction_id IN ?
`;
const [rows]: any = await DB.query(query, [[transactionIds], [transactionIds]]);
transactionIds = transactionIds.map((id) => '\'' + id + '\'');
const query = `SELECT n1.alias AS alias_left, n2.alias AS alias_right, channels.* FROM channels LEFT JOIN nodes AS n1 ON n1.public_key = channels.node1_public_key LEFT JOIN nodes AS n2 ON n2.public_key = channels.node2_public_key WHERE channels.transaction_id IN (${transactionIds.join(', ')}) OR channels.closing_transaction_id IN (${transactionIds.join(', ')})`;
const [rows]: any = await DB.query(query);
const channels = rows.map((row) => this.convertChannel(row));
return channels;
} catch (e) {
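Of the two query styles above, the parameterized "IN ?" form relies on the mysql2 driver expanding a bound array into an escaped value list, so transaction ids never need manual quoting. A sketch of the call shape (ids illustrative):
const txIds = ['txid-a', 'txid-b'];
const [rows]: any = await DB.query('SELECT * FROM channels WHERE transaction_id IN (?)', [txIds]);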
@@ -272,197 +207,57 @@ class ChannelsApi {
}
}
public async $getChannelByClosingId(transactionId: string): Promise<any> {
try {
const query = `
SELECT
channels.*
FROM channels
WHERE channels.closing_transaction_id = ?
`;
const [rows]: any = await DB.query(query, [transactionId]);
if (rows.length > 0) {
rows[0].outputs = JSON.parse(rows[0].outputs);
return rows[0];
}
} catch (e) {
logger.err('$getChannelByClosingId error: ' + (e instanceof Error ? e.message : e));
// don't throw - this data isn't essential
}
}
public async $getChannelsByOpeningId(transactionId: string): Promise<any> {
try {
const query = `
SELECT
channels.*
FROM channels
WHERE channels.transaction_id = ?
`;
const [rows]: any = await DB.query(query, [transactionId]);
if (rows.length > 0) {
return rows.map(row => {
row.outputs = JSON.parse(row.outputs);
return row;
});
}
} catch (e) {
logger.err('$getChannelsByOpeningId error: ' + (e instanceof Error ? e.message : e));
// don't throw - this data isn't essential
}
}
public async $updateClosingInfo(channelInfo: { id: string, node1_closing_balance: number, node2_closing_balance: number, closed_by: string | null, closing_fee: number, outputs: ILightningApi.ForensicOutput[]}): Promise<void> {
try {
const query = `
UPDATE channels SET
node1_closing_balance = ?,
node2_closing_balance = ?,
closed_by = ?,
closing_fee = ?,
outputs = ?
WHERE channels.id = ?
`;
await DB.query<ResultSetHeader>(query, [
channelInfo.node1_closing_balance || 0,
channelInfo.node2_closing_balance || 0,
channelInfo.closed_by,
channelInfo.closing_fee || 0,
JSON.stringify(channelInfo.outputs),
channelInfo.id,
]);
} catch (e) {
logger.err('$updateClosingInfo error: ' + (e instanceof Error ? e.message : e));
// don't throw - this data isn't essential
}
}
public async $updateOpeningInfo(channelInfo: { id: string, node1_funding_balance: number, node2_funding_balance: number, funding_ratio: number, single_funded: boolean | void }): Promise<void> {
try {
const query = `
UPDATE channels SET
node1_funding_balance = ?,
node2_funding_balance = ?,
funding_ratio = ?,
single_funded = ?
WHERE channels.id = ?
`;
await DB.query<ResultSetHeader>(query, [
channelInfo.node1_funding_balance || 0,
channelInfo.node2_funding_balance || 0,
channelInfo.funding_ratio,
channelInfo.single_funded ? 1 : 0,
channelInfo.id,
]);
} catch (e) {
logger.err('$updateOpeningInfo error: ' + (e instanceof Error ? e.message : e));
// don't throw - this data isn't essential
}
}
public async $markChannelSourceChecked(id: string): Promise<void> {
try {
const query = `
UPDATE channels
SET source_checked = 1
WHERE id = ?
`;
await DB.query<ResultSetHeader>(query, [id]);
} catch (e) {
logger.err('$markChannelSourceChecked error: ' + (e instanceof Error ? e.message : e));
// don't throw - this data isn't essential
}
}
public async $getChannelsForNode(public_key: string, index: number, length: number, status: string): Promise<any[]> {
try {
let channelStatusFilter;
if (status === 'open') {
channelStatusFilter = '< 2';
} else if (status === 'active') {
channelStatusFilter = '= 1';
} else if (status === 'closed') {
channelStatusFilter = '= 2';
} else {
throw new Error('getChannelsForNode: Invalid status requested');
}
// Channels originating from node
let query = `
SELECT COALESCE(node2.alias, SUBSTRING(node2_public_key, 0, 20)) AS alias, COALESCE(node2.public_key, node2_public_key) AS public_key,
channels.status, channels.node1_fee_rate,
channels.capacity, channels.short_id, channels.id, channels.closing_reason,
UNIX_TIMESTAMP(closing_date) as closing_date, UNIX_TIMESTAMP(channels.updated_at) as updated_at
SELECT node2.alias, node2.public_key, channels.status, channels.node1_fee_rate,
channels.capacity, channels.short_id, channels.id
FROM channels
LEFT JOIN nodes AS node2 ON node2.public_key = channels.node2_public_key
JOIN nodes AS node2 ON node2.public_key = channels.node2_public_key
WHERE node1_public_key = ? AND channels.status ${channelStatusFilter}
`;
const [channelsFromNode]: any = await DB.query(query, [public_key]);
const [channelsFromNode]: any = await DB.query(query, [public_key, index, length]);
// Channels incoming to node
query = `
SELECT COALESCE(node1.alias, SUBSTRING(node1_public_key, 0, 20)) AS alias, COALESCE(node1.public_key, node1_public_key) AS public_key,
channels.status, channels.node2_fee_rate,
channels.capacity, channels.short_id, channels.id, channels.closing_reason,
UNIX_TIMESTAMP(closing_date) as closing_date, UNIX_TIMESTAMP(channels.updated_at) as updated_at
SELECT node1.alias, node1.public_key, channels.status, channels.node2_fee_rate,
channels.capacity, channels.short_id, channels.id
FROM channels
LEFT JOIN nodes AS node1 ON node1.public_key = channels.node1_public_key
JOIN nodes AS node1 ON node1.public_key = channels.node1_public_key
WHERE node2_public_key = ? AND channels.status ${channelStatusFilter}
`;
const [channelsToNode]: any = await DB.query(query, [public_key]);
const [channelsToNode]: any = await DB.query(query, [public_key, index, length]);
let allChannels = channelsFromNode.concat(channelsToNode);
allChannels.sort((a, b) => {
if (status === 'closed') {
if (!b.closing_date && !a.closing_date) {
return (b.updated_at ?? 0) - (a.updated_at ?? 0);
} else {
return (b.closing_date ?? 0) - (a.closing_date ?? 0);
}
} else {
return b.capacity - a.capacity;
}
return b.capacity - a.capacity;
});
if (index >= 0) {
allChannels = allChannels.slice(index, index + length);
} else if (index === -1) { // Node channels tree chart
allChannels = allChannels.slice(0, 1000);
}
allChannels = allChannels.slice(index, index + length);
const channels: any[] = []
for (const row of allChannels) {
let channel;
if (index >= 0) {
const activeChannelsStats: any = await nodesApi.$getActiveChannelsStats(row.public_key);
channel = {
status: row.status,
closing_reason: row.closing_reason,
closing_date: row.closing_date,
capacity: row.capacity ?? 0,
short_id: row.short_id,
id: row.id,
fee_rate: row.node1_fee_rate ?? row.node2_fee_rate ?? 0,
node: {
alias: row.alias.length > 0 ? row.alias : row.public_key.slice(0, 20),
public_key: row.public_key,
channels: activeChannelsStats.active_channel_count ?? 0,
capacity: activeChannelsStats.capacity ?? 0,
}
};
} else if (index === -1) {
channel = {
capacity: row.capacity ?? 0,
short_id: row.short_id,
id: row.id,
node: {
alias: row.alias.length > 0 ? row.alias : row.public_key.slice(0, 20),
public_key: row.public_key,
}
};
}
channels.push(channel);
const activeChannelsStats: any = await nodesApi.$getActiveChannelsStats(row.public_key);
channels.push({
status: row.status,
capacity: row.capacity ?? 0,
short_id: row.short_id,
id: row.id,
fee_rate: row.node1_fee_rate ?? row.node2_fee_rate ?? 0,
node: {
alias: row.alias.length > 0 ? row.alias : row.public_key.slice(0, 20),
public_key: row.public_key,
channels: activeChannelsStats.active_channel_count ?? 0,
capacity: activeChannelsStats.capacity ?? 0,
}
});
}
return channels;
@@ -502,15 +297,10 @@ class ChannelsApi {
'transaction_id': channel.transaction_id,
'transaction_vout': channel.transaction_vout,
'closing_transaction_id': channel.closing_transaction_id,
'closing_fee': channel.closing_fee,
'closing_reason': channel.closing_reason,
'closing_date': channel.closing_date,
'updated_at': channel.updated_at,
'created': channel.created,
'status': channel.status,
'funding_ratio': channel.funding_ratio,
'closed_by': channel.closed_by,
'single_funded': !!channel.single_funded,
'node_left': {
'alias': channel.alias_left,
'public_key': channel.node1_public_key,
@@ -525,9 +315,6 @@ class ChannelsApi {
'updated_at': channel.node1_updated_at,
'longitude': channel.node1_longitude,
'latitude': channel.node1_latitude,
'funding_balance': channel.node1_funding_balance,
'closing_balance': channel.node1_closing_balance,
'initiated_close': channel.closed_by === channel.node1_public_key ? true : undefined,
},
'node_right': {
'alias': channel.alias_right,
@@ -543,9 +330,6 @@ class ChannelsApi {
'updated_at': channel.node2_updated_at,
'longitude': channel.node2_longitude,
'latitude': channel.node2_latitude,
'funding_balance': channel.node2_funding_balance,
'closing_balance': channel.node2_closing_balance,
'initiated_close': channel.closed_by === channel.node2_public_key ? true : undefined,
},
};
}
@@ -553,7 +337,7 @@ class ChannelsApi {
/**
* Save or update a channel present in the graph
*/
public async $saveChannel(channel: ILightningApi.Channel, status = 1): Promise<void> {
public async $saveChannel(channel: ILightningApi.Channel): Promise<void> {
const [ txid, vout ] = channel.chan_point.split(':');
const policy1: Partial<ILightningApi.RoutingPolicy> = channel.node1_policy || {};
@@ -585,11 +369,11 @@ class ChannelsApi {
node2_min_htlc_mtokens,
node2_updated_at
)
VALUES (?, ?, ?, ?, ?, ?, ${status}, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
VALUES (?, ?, ?, ?, ?, ?, 1, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
ON DUPLICATE KEY UPDATE
capacity = ?,
updated_at = ?,
status = ${status},
status = 1,
node1_public_key = ?,
node1_base_fee_mtokens = ?,
node1_cltv_delta = ?,
@@ -678,23 +462,6 @@ class ChannelsApi {
logger.err('$setChannelsInactive() error: ' + (e instanceof Error ? e.message : e));
}
}
public async $getLatestChannelUpdateForNode(publicKey: string): Promise<number> {
try {
const query = `
SELECT MAX(UNIX_TIMESTAMP(updated_at)) as updated_at
FROM channels
WHERE node1_public_key = ?
`;
const [rows]: any[] = await DB.query(query, [publicKey]);
if (rows.length > 0) {
return rows[0].updated_at;
}
} catch (e) {
logger.err(`Can't getLatestChannelUpdateForNode for ${publicKey}. Reason ${e instanceof Error ? e.message : e}`);
}
return 0;
}
}
export default new ChannelsApi();


@@ -47,17 +47,8 @@ class ChannelsRoutes {
res.status(400).send('Missing parameter: public_key');
return;
}
const index = parseInt(typeof req.query.index === 'string' ? req.query.index : '0', 10) || 0;
const status: string = typeof req.query.status === 'string' ? req.query.status : '';
if (index < -1) {
res.status(400).send('Invalid index');
return;
}
if (['open', 'active', 'closed'].includes(status) === false) {
res.status(400).send('Invalid status');
return;
}
const channels = await channelsApi.$getChannelsForNode(req.query.public_key, index, 10, status);
const channelsCount = await channelsApi.$getChannelsCountForNode(req.query.public_key, status);
res.header('Pragma', 'public');
@@ -70,7 +61,7 @@ class ChannelsRoutes {
}
}
private async $getChannelsByTransactionIds(req: Request, res: Response): Promise<void> {
private async $getChannelsByTransactionIds(req: Request, res: Response) {
try {
if (!Array.isArray(req.query.txId)) {
res.status(400).send('Not an array');
@@ -83,26 +74,27 @@ class ChannelsRoutes {
}
}
const channels = await channelsApi.$getChannelsByTransactionId(txIds);
const result: any[] = [];
const inputs: any[] = [];
const outputs: any[] = [];
for (const txid of txIds) {
const inputs: any = {};
const outputs: any = {};
// Assume each transaction contains at most one lightning channel close input. This may not hold in the future.
const foundChannelsFromInput = channels.find((channel) => channel.closing_transaction_id === txid);
if (foundChannelsFromInput) {
inputs[0] = foundChannelsFromInput;
const foundChannelInputs = channels.find((channel) => channel.closing_transaction_id === txid);
if (foundChannelInputs) {
inputs.push(foundChannelInputs);
} else {
inputs.push(null);
}
const foundChannelsFromOutputs = channels.filter((channel) => channel.transaction_id === txid);
for (const output of foundChannelsFromOutputs) {
outputs[output.transaction_vout] = output;
const foundChannelOutputs = channels.find((channel) => channel.transaction_id === txid);
if (foundChannelOutputs) {
outputs.push(foundChannelOutputs);
} else {
outputs.push(null);
}
result.push({
inputs,
outputs,
});
}
res.json(result);
res.json({
inputs: inputs,
outputs: outputs,
});
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
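The response assembled above is one entry per requested txid, with inputs keyed by input index and outputs keyed by vout. An illustrative shape (channel objects elided):
type ChannelsByTxResponse = Array<{
  inputs: { [inputIndex: number]: unknown };  // index 0 only, per the single-close-input assumption
  outputs: { [vout: number]: unknown };       // one entry per channel-opening output
}>;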
@@ -110,11 +102,7 @@ class ChannelsRoutes {
private async $getAllChannelsGeo(req: Request, res: Response) {
try {
const style: string = typeof req.query.style === 'string' ? req.query.style : '';
const channels = await channelsApi.$getAllChannelsGeo(req.params?.publicKey, style);
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
const channels = await channelsApi.$getAllChannelsGeo(req.params?.publicKey);
res.json(channels);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);


@@ -2,61 +2,17 @@ import logger from '../../logger';
import DB from '../../database';
import { ResultSetHeader } from 'mysql2';
import { ILightningApi } from '../lightning/lightning-api.interface';
import { ITopNodesPerCapacity, ITopNodesPerChannels } from '../../mempool.interfaces';
class NodesApi {
public async $getWorldNodes(): Promise<any> {
try {
let query = `
SELECT nodes.public_key as publicKey, IF(nodes.alias = '', SUBSTRING(nodes.public_key, 1, 20), alias) as alias,
CAST(COALESCE(nodes.capacity, 0) as INT) as capacity,
CAST(COALESCE(nodes.channels, 0) as INT) as channels,
nodes.longitude, nodes.latitude,
geo_names_country.names as country, geo_names_iso.names as isoCode
FROM nodes
JOIN geo_names geo_names_country ON geo_names_country.id = nodes.country_id AND geo_names_country.type = 'country'
JOIN geo_names geo_names_iso ON geo_names_iso.id = nodes.country_id AND geo_names_iso.type = 'country_iso_code'
WHERE status = 1 AND nodes.as_number IS NOT NULL
ORDER BY capacity
`;
const [nodes]: any[] = await DB.query(query);
for (let i = 0; i < nodes.length; ++i) {
nodes[i].country = JSON.parse(nodes[i].country);
}
query = `
SELECT MAX(nodes.capacity) as maxLiquidity, MAX(nodes.channels) as maxChannels
FROM nodes
WHERE status = 1 AND nodes.as_number IS NOT NULL
`;
const [maximums]: any[] = await DB.query(query);
return {
maxLiquidity: maximums[0].maxLiquidity,
maxChannels: maximums[0].maxChannels,
nodes: nodes.map(node => [
node.longitude, node.latitude,
node.publicKey, node.alias, node.capacity, node.channels,
node.country, node.isoCode
])
};
} catch (e) {
logger.err(`Can't get world nodes list. Reason: ${e instanceof Error ? e.message : e}`);
}
}
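The world map endpoint above returns scaling maxima plus one compact tuple per node. An illustrative shape, with field order taken from the mapping above and types assumed:
type WorldNodeTuple = [
  number, number,          // longitude, latitude
  string, string,          // public key, alias
  number, number,          // capacity (sats), channel count
  Record<string, string>,  // localized country names (parsed JSON)
  string,                  // ISO country code
];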
public async $getNode(public_key: string): Promise<any> {
try {
// General info
let query = `
SELECT public_key, alias, UNIX_TIMESTAMP(first_seen) AS first_seen,
UNIX_TIMESTAMP(updated_at) AS updated_at, color, sockets as sockets,
as_number, city_id, country_id, subdivision_id, longitude, latitude,
geo_names_iso.names as iso_code, geo_names_as.names as as_organization, geo_names_city.names as city,
geo_names_country.names as country, geo_names_subdivision.names as subdivision
UNIX_TIMESTAMP(updated_at) AS updated_at, color, sockets as sockets,
as_number, city_id, country_id, subdivision_id, longitude, latitude,
geo_names_iso.names as iso_code, geo_names_as.names as as_organization, geo_names_city.names as city,
geo_names_country.names as country, geo_names_subdivision.names as subdivision
FROM nodes
LEFT JOIN geo_names geo_names_as on geo_names_as.id = as_number
LEFT JOIN geo_names geo_names_city on geo_names_city.id = city_id
@@ -105,18 +61,6 @@ class NodesApi {
node.closed_channel_count = rows[0].closed_channel_count;
}
// Custom records
query = `
SELECT type, payload
FROM nodes_records
WHERE public_key = ?
`;
[rows] = await DB.query(query, [public_key]);
node.custom_records = {};
for (const record of rows) {
node.custom_records[record.type] = Buffer.from(record.payload, 'binary').toString('hex');
}
return node;
} catch (e) {
logger.err(`Cannot get node information for ${public_key}. Reason: ${(e instanceof Error ? e.message : e)}`);
@@ -141,56 +85,6 @@ class NodesApi {
}
}
public async $getFeeHistogram(node_public_key: string): Promise<unknown> {
try {
const inQuery = `
SELECT CASE WHEN fee_rate <= 10.0 THEN CEIL(fee_rate)
WHEN (fee_rate > 10.0 and fee_rate <= 100.0) THEN CEIL(fee_rate / 10.0) * 10.0
WHEN (fee_rate > 100.0 and fee_rate <= 1000.0) THEN CEIL(fee_rate / 100.0) * 100.0
WHEN fee_rate > 1000.0 THEN CEIL(fee_rate / 1000.0) * 1000.0
END as bucket,
count(short_id) as count,
sum(capacity) as capacity
FROM (
SELECT CASE WHEN node1_public_key = ? THEN node2_fee_rate WHEN node2_public_key = ? THEN node1_fee_rate END as fee_rate,
short_id as short_id,
capacity as capacity
FROM channels
WHERE status = 1 AND (channels.node1_public_key = ? OR channels.node2_public_key = ?)
) as fee_rate_table
GROUP BY bucket;
`;
const [inRows]: any[] = await DB.query(inQuery, [node_public_key, node_public_key, node_public_key, node_public_key]);
const outQuery = `
SELECT CASE WHEN fee_rate <= 10.0 THEN CEIL(fee_rate)
WHEN (fee_rate > 10.0 and fee_rate <= 100.0) THEN CEIL(fee_rate / 10.0) * 10.0
WHEN (fee_rate > 100.0 and fee_rate <= 1000.0) THEN CEIL(fee_rate / 100.0) * 100.0
WHEN fee_rate > 1000.0 THEN CEIL(fee_rate / 1000.0) * 1000.0
END as bucket,
count(short_id) as count,
sum(capacity) as capacity
FROM (
SELECT CASE WHEN node1_public_key = ? THEN node1_fee_rate WHEN node2_public_key = ? THEN node2_fee_rate END as fee_rate,
short_id as short_id,
capacity as capacity
FROM channels
WHERE status = 1 AND (channels.node1_public_key = ? OR channels.node2_public_key = ?)
) as fee_rate_table
GROUP BY bucket;
`;
const [outRows]: any[] = await DB.query(outQuery, [node_public_key, node_public_key, node_public_key, node_public_key]);
return {
incoming: inRows.length > 0 ? inRows : [],
outgoing: outRows.length > 0 ? outRows : [],
};
} catch (e) {
logger.err(`Cannot get node fee distribution for ${node_public_key}. Reason: ${(e instanceof Error ? e.message : e)}`);
throw e;
}
}
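The CASE expression in both queries is a coarse logarithmic-style bucketing of fee rates: unit steps up to 10, steps of 10 up to 100, of 100 up to 1,000, and of 1,000 beyond. The same rule in TypeScript, as a sketch for sanity-checking expected buckets:
function feeBucket(rate: number): number {
  if (rate <= 10) return Math.ceil(rate);
  if (rate <= 100) return Math.ceil(rate / 10) * 10;
  if (rate <= 1000) return Math.ceil(rate / 100) * 100;
  return Math.ceil(rate / 1000) * 1000;
}
console.log([7, 42, 250, 3500].map(feeBucket)); // [ 7, 50, 300, 4000 ]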
public async $getAllNodes(): Promise<any> {
try {
const query = `SELECT * FROM nodes`;
@@ -218,43 +112,20 @@ class NodesApi {
}
}
public async $getTopCapacityNodes(full: boolean): Promise<ITopNodesPerCapacity[]> {
public async $getTopCapacityNodes(): Promise<any> {
try {
let rows: any;
let query: string;
if (full === false) {
query = `
SELECT nodes.public_key AS publicKey, IF(nodes.alias = '', SUBSTRING(nodes.public_key, 1, 20), alias) as alias,
nodes.capacity
FROM nodes
ORDER BY capacity DESC
LIMIT 100
`;
let [rows]: any[] = await DB.query('SELECT UNIX_TIMESTAMP(MAX(added)) as maxAdded FROM node_stats');
const latestDate = rows[0].maxAdded;
[rows] = await DB.query(query);
} else {
query = `
SELECT nodes.public_key AS publicKey, IF(nodes.alias = '', SUBSTRING(nodes.public_key, 1, 20), alias) as alias,
CAST(COALESCE(nodes.capacity, 0) as INT) as capacity,
CAST(COALESCE(nodes.channels, 0) as INT) as channels,
UNIX_TIMESTAMP(nodes.first_seen) as firstSeen, UNIX_TIMESTAMP(nodes.updated_at) as updatedAt,
geo_names_city.names as city, geo_names_country.names as country,
geo_names_iso.names as iso_code, geo_names_subdivision.names as subdivision
FROM nodes
LEFT JOIN geo_names geo_names_country ON geo_names_country.id = nodes.country_id AND geo_names_country.type = 'country'
LEFT JOIN geo_names geo_names_city ON geo_names_city.id = nodes.city_id AND geo_names_city.type = 'city'
LEFT JOIN geo_names geo_names_iso ON geo_names_iso.id = nodes.country_id AND geo_names_iso.type = 'country_iso_code'
LEFT JOIN geo_names geo_names_subdivision on geo_names_subdivision.id = nodes.subdivision_id AND geo_names_subdivision.type = 'division'
ORDER BY capacity DESC
LIMIT 100
`;
[rows] = await DB.query(query);
for (let i = 0; i < rows.length; ++i) {
rows[i].country = JSON.parse(rows[i].country);
rows[i].city = JSON.parse(rows[i].city);
}
}
const query = `
SELECT nodes.public_key, IF(nodes.alias = '', SUBSTRING(nodes.public_key, 1, 20), alias) as alias, node_stats.capacity, node_stats.channels
FROM node_stats
JOIN nodes ON nodes.public_key = node_stats.public_key
WHERE added = FROM_UNIXTIME(${latestDate})
ORDER BY capacity DESC
LIMIT 10;
`;
[rows] = await DB.query(query);
return rows;
} catch (e) {
@@ -263,94 +134,20 @@ class NodesApi {
}
}
public async $getTopChannelsNodes(full: boolean): Promise<ITopNodesPerChannels[]> {
try {
let rows: any;
let query: string;
if (full === false) {
query = `
SELECT nodes.public_key as publicKey, IF(nodes.alias = '', SUBSTRING(nodes.public_key, 1, 20), alias) as alias,
nodes.channels
FROM nodes
ORDER BY channels DESC
LIMIT 100;
`;
[rows] = await DB.query(query);
} else {
query = `
SELECT nodes.public_key AS publicKey, IF(nodes.alias = '', SUBSTRING(nodes.public_key, 1, 20), alias) as alias,
CAST(COALESCE(nodes.channels, 0) as INT) as channels,
CAST(COALESCE(nodes.capacity, 0) as INT) as capacity,
UNIX_TIMESTAMP(nodes.first_seen) as firstSeen, UNIX_TIMESTAMP(nodes.updated_at) as updatedAt,
geo_names_city.names as city, geo_names_country.names as country,
geo_names_iso.names as iso_code, geo_names_subdivision.names as subdivision
FROM nodes
LEFT JOIN geo_names geo_names_country ON geo_names_country.id = nodes.country_id AND geo_names_country.type = 'country'
LEFT JOIN geo_names geo_names_city ON geo_names_city.id = nodes.city_id AND geo_names_city.type = 'city'
LEFT JOIN geo_names geo_names_iso ON geo_names_iso.id = nodes.country_id AND geo_names_iso.type = 'country_iso_code'
LEFT JOIN geo_names geo_names_subdivision on geo_names_subdivision.id = nodes.subdivision_id AND geo_names_subdivision.type = 'division'
ORDER BY channels DESC
LIMIT 100
`;
[rows] = await DB.query(query);
for (let i = 0; i < rows.length; ++i) {
rows[i].country = JSON.parse(rows[i].country);
rows[i].city = JSON.parse(rows[i].city);
}
}
return rows;
} catch (e) {
logger.err('$getTopChannelsNodes error: ' + (e instanceof Error ? e.message : e));
throw e;
}
}
public async $getOldestNodes(full: boolean): Promise<ITopNodesPerChannels[]> {
public async $getTopChannelsNodes(): Promise<any> {
try {
let [rows]: any[] = await DB.query('SELECT UNIX_TIMESTAMP(MAX(added)) as maxAdded FROM node_stats');
const latestDate = rows[0].maxAdded;
let query: string;
if (full === false) {
query = `
SELECT nodes.public_key, IF(nodes.alias = '', SUBSTRING(nodes.public_key, 1, 20), alias) as alias,
node_stats.channels
FROM node_stats
JOIN nodes ON nodes.public_key = node_stats.public_key
WHERE added = FROM_UNIXTIME(${latestDate})
ORDER BY first_seen
LIMIT 100;
`;
[rows] = await DB.query(query);
} else {
query = `
SELECT node_stats.public_key AS publicKey, IF(nodes.alias = '', SUBSTRING(node_stats.public_key, 1, 20), alias) as alias,
CAST(COALESCE(node_stats.channels, 0) as INT) as channels,
CAST(COALESCE(node_stats.capacity, 0) as INT) as capacity,
UNIX_TIMESTAMP(nodes.first_seen) as firstSeen, UNIX_TIMESTAMP(nodes.updated_at) as updatedAt,
geo_names_city.names as city, geo_names_country.names as country,
geo_names_iso.names as iso_code, geo_names_subdivision.names as subdivision
FROM node_stats
RIGHT JOIN nodes ON nodes.public_key = node_stats.public_key
LEFT JOIN geo_names geo_names_country ON geo_names_country.id = nodes.country_id AND geo_names_country.type = 'country'
LEFT JOIN geo_names geo_names_city ON geo_names_city.id = nodes.city_id AND geo_names_city.type = 'city'
LEFT JOIN geo_names geo_names_iso ON geo_names_iso.id = nodes.country_id AND geo_names_iso.type = 'country_iso_code'
LEFT JOIN geo_names geo_names_subdivision on geo_names_subdivision.id = nodes.subdivision_id AND geo_names_subdivision.type = 'division'
WHERE added = FROM_UNIXTIME(${latestDate})
ORDER BY first_seen
LIMIT 100
`;
[rows] = await DB.query(query);
for (let i = 0; i < rows.length; ++i) {
rows[i].country = JSON.parse(rows[i].country);
rows[i].city = JSON.parse(rows[i].city);
}
}
const query = `
SELECT nodes.public_key, IF(nodes.alias = '', SUBSTRING(nodes.public_key, 1, 20), alias) as alias, node_stats.capacity, node_stats.channels
FROM node_stats
JOIN nodes ON nodes.public_key = node_stats.public_key
WHERE added = FROM_UNIXTIME(${latestDate})
ORDER BY channels DESC
LIMIT 10;
`;
[rows] = await DB.query(query);
return rows;
} catch (e) {
@@ -361,10 +158,9 @@ class NodesApi {
public async $searchNodeByPublicKeyOrAlias(search: string) {
try {
const publicKeySearch = search.replace('%', '') + '%';
const aliasSearch = search.replace(/[-_.]/g, ' ').replace(/[^a-zA-Z0-9 ]/g, '').split(' ').map((search) => '+' + search + '*').join(' ');
const query = `SELECT public_key, alias, capacity, channels, status FROM nodes WHERE public_key LIKE ? OR MATCH alias_search AGAINST (? IN BOOLEAN MODE) ORDER BY capacity DESC LIMIT 10`;
const [rows]: any = await DB.query(query, [publicKeySearch, aliasSearch]);
const searchStripped = search.replace('%', '') + '%';
const query = `SELECT nodes.public_key, nodes.alias, node_stats.capacity FROM nodes LEFT JOIN node_stats ON node_stats.public_key = nodes.public_key WHERE nodes.public_key LIKE ? OR nodes.alias LIKE ? GROUP BY nodes.public_key ORDER BY node_stats.capacity DESC LIMIT 10`;
const [rows]: any = await DB.query(query, [searchStripped, searchStripped]);
return rows;
} catch (e) {
logger.err('$searchNodeByPublicKeyOrAlias error: ' + (e instanceof Error ? e.message : e));
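// Illustration of the BOOLEAN MODE term built above: a search for 'lnd-node'
// normalizes to '+lnd* +node*', i.e. every word must match as a prefix
// against the FULLTEXT alias_search index.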
@@ -372,120 +168,64 @@ class NodesApi {
}
}
public async $getNodesISPRanking() {
public async $getNodesISPRanking(groupBy: string, showTor: boolean) {
try {
let query = '';
const orderBy = groupBy === 'capacity' ? `CAST(SUM(capacity) as INT)` : `COUNT(DISTINCT nodes.public_key)`;
// Clearnet
let query = `SELECT GROUP_CONCAT(DISTINCT(nodes.as_number)) as ispId, geo_names.names as names,
COUNT(DISTINCT nodes.public_key) as nodesCount, CAST(SUM(capacity) as INT) as capacity
FROM nodes
JOIN geo_names ON geo_names.id = nodes.as_number
JOIN channels ON channels.node1_public_key = nodes.public_key OR channels.node2_public_key = nodes.public_key
GROUP BY geo_names.names
ORDER BY ${orderBy} DESC
`;
const [nodesCountPerAS]: any = await DB.query(query);
// List all channels and the two ISPs they link
query = `
SELECT short_id, channels.capacity,
channels.node1_public_key AS node1PublicKey, isp1.names AS isp1, isp1.id as isp1ID,
channels.node2_public_key AS node2PublicKey, isp2.names AS isp2, isp2.id as isp2ID
FROM channels
JOIN nodes node1 ON node1.public_key = channels.node1_public_key
JOIN nodes node2 ON node2.public_key = channels.node2_public_key
JOIN geo_names isp1 ON isp1.id = node1.as_number
JOIN geo_names isp2 ON isp2.id = node2.as_number
WHERE channels.status = 1
ORDER BY short_id DESC
`;
const [channelsIsp]: any = await DB.query(query);
let total = 0;
const nodesPerAs: any[] = [];
// Sum channel capacity and node count per ISP
const ispList = {};
for (const channel of channelsIsp) {
const isp1 = JSON.parse(channel.isp1);
const isp2 = JSON.parse(channel.isp2);
if (!ispList[isp1]) {
ispList[isp1] = {
id: channel.isp1ID.toString(),
capacity: 0,
channels: 0,
nodes: {},
};
} else if (ispList[isp1].id.indexOf(channel.isp1ID) === -1) {
ispList[isp1].id += ',' + channel.isp1ID.toString();
for (const asGroup of nodesCountPerAS) {
if (groupBy === 'capacity') {
total += asGroup.capacity;
} else {
total += asGroup.nodesCount;
}
if (!ispList[isp2]) {
ispList[isp2] = {
id: channel.isp2ID.toString(),
capacity: 0,
channels: 0,
nodes: {},
};
} else if (ispList[isp2].id.indexOf(channel.isp2ID) === -1) {
ispList[isp2].id += ',' + channel.isp2ID.toString();
}
ispList[isp1].capacity += channel.capacity;
ispList[isp1].channels += 1;
ispList[isp1].nodes[channel.node1PublicKey] = true;
ispList[isp2].capacity += channel.capacity;
ispList[isp2].channels += 1;
ispList[isp2].nodes[channel.node2PublicKey] = true;
}
const ispRanking: any[] = [];
for (const isp of Object.keys(ispList)) {
ispRanking.push([
ispList[isp].id,
isp,
ispList[isp].capacity,
ispList[isp].channels,
Object.keys(ispList[isp].nodes).length,
]);
// Tor
if (showTor) {
query = `SELECT COUNT(DISTINCT nodes.public_key) as nodesCount, CAST(SUM(capacity) as INT) as capacity
FROM nodes
JOIN channels ON channels.node1_public_key = nodes.public_key OR channels.node2_public_key = nodes.public_key
ORDER BY ${orderBy} DESC
`;
const [nodesCountTor]: any = await DB.query(query);
total += groupBy === 'capacity' ? nodesCountTor[0].capacity : nodesCountTor[0].nodesCount;
nodesPerAs.push({
ispId: null,
name: 'Tor',
count: nodesCountTor[0].nodesCount,
share: Math.floor((groupBy === 'capacity' ? nodesCountTor[0].capacity : nodesCountTor[0].nodesCount) / total * 10000) / 100,
capacity: nodesCountTor[0].capacity,
});
}
// Total active channels capacity
query = `SELECT SUM(capacity) AS capacity FROM channels WHERE status = 1`;
const [totalCapacity]: any = await DB.query(query);
for (const as of nodesCountPerAS) {
nodesPerAs.push({
ispId: as.ispId,
name: JSON.parse(as.names),
count: as.nodesCount,
share: Math.floor((groupBy === 'capacity' ? as.capacity : as.nodesCount) / total * 10000) / 100,
capacity: as.capacity,
});
}
// Get the total capacity of all channels which have at least one node on clearnet
query = `
SELECT SUM(capacity) as capacity
FROM (
SELECT capacity, GROUP_CONCAT(socket1.type, socket2.type) as networks
FROM channels
JOIN nodes_sockets socket1 ON node1_public_key = socket1.public_key
JOIN nodes_sockets socket2 ON node2_public_key = socket2.public_key
AND channels.status = 1
GROUP BY short_id
) channels_tmp
WHERE channels_tmp.networks LIKE '%ipv%'
`;
const [clearnetCapacity]: any = await DB.query(query);
// Get the total capacity of all channels which have both nodes on Tor
query = `
SELECT SUM(capacity) as capacity
FROM (
SELECT capacity, GROUP_CONCAT(socket1.type, socket2.type) as networks
FROM channels
JOIN nodes_sockets socket1 ON node1_public_key = socket1.public_key
JOIN nodes_sockets socket2 ON node2_public_key = socket2.public_key
AND channels.status = 1
GROUP BY short_id
) channels_tmp
WHERE channels_tmp.networks NOT LIKE '%ipv%' AND
channels_tmp.networks NOT LIKE '%dns%' AND
channels_tmp.networks NOT LIKE '%websocket%'
`;
const [torCapacity]: any = await DB.query(query);
const clearnetCapacityValue = parseInt(clearnetCapacity[0].capacity, 10);
const torCapacityValue = parseInt(torCapacity[0].capacity, 10);
const unknownCapacityValue = parseInt(totalCapacity[0].capacity) - clearnetCapacityValue - torCapacityValue;
return {
clearnetCapacity: clearnetCapacityValue,
torCapacity: torCapacityValue,
unknownCapacity: unknownCapacityValue,
ispRanking: ispRanking,
};
return nodesPerAs;
} catch (e) {
logger.err(`Cannot get LN ISP ranking. Reason: ${e instanceof Error ? e.message : e}`);
logger.err(`Cannot get nodes grouped by AS. Reason: ${e instanceof Error ? e.message : e}`);
throw e;
}
}
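Condensed, the fold over channelsIsp above credits each channel's capacity and one channel count to the ISP of each endpoint, while node counts are deduplicated by public key. A standalone sketch of that aggregation (names mine):
interface IspAgg { ids: Set<string>; capacity: number; channels: number; nodes: Set<string>; }

function credit(byIsp: Map<string, IspAgg>, ispName: string, ispId: string, pubkey: string, capacity: number): void {
  const agg = byIsp.get(ispName) ?? { ids: new Set(), capacity: 0, channels: 0, nodes: new Set() };
  agg.ids.add(ispId);       // one ISP name can span several AS numbers
  agg.capacity += capacity; // both endpoints are credited per channel
  agg.channels += 1;
  agg.nodes.add(pubkey);    // deduplicates nodes seen on many channels
  byIsp.set(ispName, agg);
}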
@@ -493,27 +233,25 @@ class NodesApi {
public async $getNodesPerCountry(countryId: string) {
try {
const query = `
SELECT nodes.public_key, CAST(COALESCE(nodes.capacity, 0) as INT) as capacity, CAST(COALESCE(nodes.channels, 0) as INT) as channels,
nodes.alias, UNIX_TIMESTAMP(nodes.first_seen) as first_seen, UNIX_TIMESTAMP(nodes.updated_at) as updated_at,
geo_names_city.names as city, geo_names_country.names as country,
geo_names_iso.names as iso_code, geo_names_subdivision.names as subdivision,
nodes.longitude, nodes.latitude, nodes.as_number, geo_names_isp.names as isp
FROM nodes
LEFT JOIN geo_names geo_names_country ON geo_names_country.id = nodes.country_id AND geo_names_country.type = 'country'
SELECT nodes.public_key, CAST(COALESCE(node_stats.capacity, 0) as INT) as capacity, CAST(COALESCE(node_stats.channels, 0) as INT) as channels,
nodes.alias, UNIX_TIMESTAMP(nodes.first_seen) as first_seen, UNIX_TIMESTAMP(nodes.updated_at) as updated_at,
geo_names_city.names as city
FROM node_stats
JOIN (
SELECT public_key, MAX(added) as last_added
FROM node_stats
GROUP BY public_key
) as b ON b.public_key = node_stats.public_key AND b.last_added = node_stats.added
RIGHT JOIN nodes ON nodes.public_key = node_stats.public_key
JOIN geo_names geo_names_country ON geo_names_country.id = nodes.country_id AND geo_names_country.type = 'country'
LEFT JOIN geo_names geo_names_city ON geo_names_city.id = nodes.city_id AND geo_names_city.type = 'city'
LEFT JOIN geo_names geo_names_iso ON geo_names_iso.id = nodes.country_id AND geo_names_iso.type = 'country_iso_code'
LEFT JOIN geo_names geo_names_subdivision on geo_names_subdivision.id = nodes.subdivision_id AND geo_names_subdivision.type = 'division'
LEFT JOIN geo_names geo_names_isp on geo_names_isp.id = nodes.as_number AND geo_names_isp.type = 'as_organization'
WHERE geo_names_country.id = ?
ORDER BY capacity DESC
`;
const [rows]: any = await DB.query(query, [countryId]);
for (let i = 0; i < rows.length; ++i) {
rows[i].country = JSON.parse(rows[i].country);
rows[i].city = JSON.parse(rows[i].city);
rows[i].subdivision = JSON.parse(rows[i].subdivision);
rows[i].isp = JSON.parse(rows[i].isp);
}
return rows;
} catch (e) {
@@ -524,63 +262,29 @@ class NodesApi {
public async $getNodesPerISP(ISPId: string) {
try {
let query = `
SELECT channels.node1_public_key AS node1PublicKey, isp1.id as isp1ID,
channels.node2_public_key AS node2PublicKey, isp2.id as isp2ID
FROM channels
JOIN nodes node1 ON node1.public_key = channels.node1_public_key
JOIN nodes node2 ON node2.public_key = channels.node2_public_key
JOIN geo_names isp1 ON isp1.id = node1.as_number
JOIN geo_names isp2 ON isp2.id = node2.as_number
WHERE channels.status = 1 AND (node1.as_number IN (?) OR node2.as_number IN (?))
ORDER BY short_id DESC
`;
const ISPIds = ISPId.split(',');
const [rows]: any = await DB.query(query, [ISPIds, ISPIds]);
if (!rows || rows.length === 0) {
return [];
}
const nodes = {};
const intISPIds: number[] = [];
for (const ispId of ISPIds) {
intISPIds.push(parseInt(ispId, 10));
}
for (const channel of rows) {
if (intISPIds.includes(channel.isp1ID)) {
nodes[channel.node1PublicKey] = true;
}
if (intISPIds.includes(channel.isp2ID)) {
nodes[channel.node2PublicKey] = true;
}
}
query = `
SELECT nodes.public_key, CAST(COALESCE(nodes.capacity, 0) as INT) as capacity, CAST(COALESCE(nodes.channels, 0) as INT) as channels,
const query = `
SELECT nodes.public_key, CAST(COALESCE(node_stats.capacity, 0) as INT) as capacity, CAST(COALESCE(node_stats.channels, 0) as INT) as channels,
nodes.alias, UNIX_TIMESTAMP(nodes.first_seen) as first_seen, UNIX_TIMESTAMP(nodes.updated_at) as updated_at,
geo_names_city.names as city, geo_names_country.names as country,
geo_names_iso.names as iso_code, geo_names_subdivision.names as subdivision,
nodes.longitude, nodes.latitude
FROM nodes
LEFT JOIN geo_names geo_names_country ON geo_names_country.id = nodes.country_id AND geo_names_country.type = 'country'
geo_names_city.names as city, geo_names_country.names as country
FROM node_stats
JOIN (
SELECT public_key, MAX(added) as last_added
FROM node_stats
GROUP BY public_key
) as b ON b.public_key = node_stats.public_key AND b.last_added = node_stats.added
RIGHT JOIN nodes ON nodes.public_key = node_stats.public_key
JOIN geo_names geo_names_country ON geo_names_country.id = nodes.country_id AND geo_names_country.type = 'country'
LEFT JOIN geo_names geo_names_city ON geo_names_city.id = nodes.city_id AND geo_names_city.type = 'city'
LEFT JOIN geo_names geo_names_iso ON geo_names_iso.id = nodes.country_id AND geo_names_iso.type = 'country_iso_code'
LEFT JOIN geo_names geo_names_subdivision on geo_names_subdivision.id = nodes.subdivision_id AND geo_names_subdivision.type = 'division'
WHERE nodes.public_key IN (?)
WHERE nodes.as_number IN (?)
ORDER BY capacity DESC
`;
const [rows2]: any = await DB.query(query, [Object.keys(nodes)]);
for (let i = 0; i < rows2.length; ++i) {
rows2[i].country = JSON.parse(rows2[i].country);
rows2[i].city = JSON.parse(rows2[i].city);
rows2[i].subdivision = JSON.parse(rows2[i].subdivision);
const [rows]: any = await DB.query(query, [ISPId.split(',')]);
for (let i = 0; i < rows.length; ++i) {
rows[i].country = JSON.parse(rows[i].country);
rows[i].city = JSON.parse(rows[i].city);
}
return rows2;
return rows;
} catch (e) {
logger.err(`Cannot get nodes for ISP id ${ISPId}. Reason: ${e instanceof Error ? e.message : e}`);
throw e;
@@ -593,6 +297,7 @@ class NodesApi {
FROM nodes
JOIN geo_names ON geo_names.id = nodes.country_id AND geo_names.type = 'country'
JOIN geo_names geo_names_iso ON geo_names_iso.id = nodes.country_id AND geo_names_iso.type = 'country_iso_code'
JOIN channels ON channels.node1_public_key = nodes.public_key OR channels.node2_public_key = nodes.public_key
GROUP BY country_id
ORDER BY COUNT(DISTINCT nodes.public_key) DESC
`;
@@ -630,24 +335,21 @@ class NodesApi {
first_seen,
updated_at,
alias,
alias_search,
color,
sockets,
status
)
VALUES (?, NOW(), FROM_UNIXTIME(?), ?, ?, ?, ?, 1)
ON DUPLICATE KEY UPDATE updated_at = FROM_UNIXTIME(?), alias = ?, alias_search = ?, color = ?, sockets = ?, status = 1`;
VALUES (?, NOW(), FROM_UNIXTIME(?), ?, ?, ?, 1)
ON DUPLICATE KEY UPDATE updated_at = FROM_UNIXTIME(?), alias = ?, color = ?, sockets = ?, status = 1`;
await DB.query(query, [
node.pub_key,
node.last_update,
node.alias,
this.aliasToSearchText(node.alias),
node.color,
sockets,
node.last_update,
node.alias,
this.aliasToSearchText(node.alias),
node.color,
sockets,
]);
@@ -656,18 +358,6 @@ class NodesApi {
}
}
/**
* Update node sockets
*/
public async $updateNodeSockets(publicKey: string, sockets: {network: string; addr: string}[]): Promise<void> {
const formattedSockets = (sockets.map(a => a.addr).join(',')) ?? '';
try {
await DB.query(`UPDATE nodes SET sockets = ? WHERE public_key = ?`, [formattedSockets, publicKey]);
} catch (e) {
logger.err(`Cannot update node sockets for ${publicKey}. Reason: ${e instanceof Error ? e.message : e}`);
}
}
/**
* Set all nodes not in `nodesPubkeys` as inactive (status = 0)
*/
@@ -693,10 +383,6 @@ class NodesApi {
logger.err('$setNodesInactive() error: ' + (e instanceof Error ? e.message : e));
}
}
private aliasToSearchText(str: string): string {
return str.replace(/[-_.]/g, ' ').replace(/[^a-zA-Z0-9 ]/g, '');
}
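// Example of the normalization above (illustrative): separators map to spaces
// and other punctuation is dropped, e.g. 'LN-node_01!' -> 'LN node 01',
// which is what the FULLTEXT(alias_search) index from migration 39 consumes.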
}
export default new NodesApi();


@@ -2,27 +2,20 @@ import config from '../../config';
import { Application, Request, Response } from 'express';
import nodesApi from './nodes.api';
import DB from '../../database';
import { INodesRanking } from '../../mempool.interfaces';
class NodesRoutes {
constructor() { }
public initRoutes(app: Application) {
app
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/world', this.$getWorldNodes)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/country/:country', this.$getNodesPerCountry)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/search/:search', this.$searchNode)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/top', this.$getTopNodes)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/isp-ranking', this.$getISPRanking)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/isp/:isp', this.$getNodesPerISP)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/countries', this.$getNodesCountries)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/rankings', this.$getNodesRanking)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/rankings/liquidity', this.$getTopNodesByCapacity)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/rankings/connectivity', this.$getTopNodesByChannels)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/rankings/age', this.$getOldestNodes)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/:public_key/statistics', this.$getHistoricalNodeStats)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/:public_key/fees/histogram', this.$getFeeHistogram)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/:public_key', this.$getNode)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/group/:name', this.$getNodeGroup)
;
}
@@ -35,39 +28,6 @@ class NodesRoutes {
}
}
private async $getNodeGroup(req: Request, res: Response) {
try {
let nodesList;
let nodes: any[] = [];
switch (config.MEMPOOL.NETWORK) {
case 'testnet':
nodesList = ['032c7c7819276c4f706a04df1a0f1e10a5495994a7be4c1d3d28ca766e5a2b957b', '025a7e38c2834dd843591a4d23d5f09cdeb77ddca85f673c2d944a14220ff14cf7', '0395e2731a1673ef21d7a16a727c4fc4d4c35a861c428ce2c819c53d2b81c8bd55', '032ab2028c0b614c6d87824e2373529652fd7e4221b4c70cc4da7c7005c49afcf0', '029001b22fe70b48bee12d014df91982eb85ff1bd404ec772d5c83c4ee3e88d2c3', '0212e2848d79f928411da5f2ff0a8c95ec6ccb5a09d2031b6f71e91309dcde63af', '03e871a2229523d34f76e6311ff197cfe7f26c2fbec13554b93a46f4e710c47dab', '032202ec98d976b0e928bd1d91924e8bd3eab07231fc39feb3737b010071073df8', '02fa7c5a948d03d563a9f36940c2205a814e594d17c0042ced242c71a857d72605', '039c14fdec2d958e3d14cebf657451bbd9e039196615785e82c917f274e3fb2205', '033589bbcb233ffc416cefd5437c7f37e9d7cb7942d405e39e72c4c846d9b37f18', '029293110441c6e2eacb57e1255bf6ef05c41a6a676fe474922d33c19f98a7d584'];
break;
case 'signet':
nodesList = ['03ddab321b760433cbf561b615ef62ac7d318630c5f51d523aaf5395b90b751956', '033d92c7bfd213ef1b34c90e985fb5dc77f9ec2409d391492484e57a44c4aca1de', '02ad010dda54253c1eb9efe38b0760657a3b43ecad62198c359c051c9d99d45781', '025196512905b8a3f1597428b867bec63ec9a95e5089eb7dc7e63e2d2691669029', '027c625aa1fbe3768db68ebcb05b53b6dc0ce68b7b54b8900d326d167363e684fe', '03f1629af3101fcc56b7aac2667016be84e3defbf3d0c8719f836c9b41c9a57a43', '02dfb81e2f7a3c4c9e8a51b70ef82b4a24549cc2fab1f5b2fd636501774a918991', '02d01ccf832944c68f10d39006093769c5b8bda886d561b128534e313d729fdb34', '02499ed23027d4698a6904ff4ec1b6085a61f10b9a6937f90438f9947e38e8ea86', '038310e3a786340f2bd7770704c7ccfe560fd163d9a1c99d67894597419d12cbf7', '03e5e9d879b72c7d67ecd483bae023bd33e695bb32b981a4021260f7b9d62bc761', '028d16e1a0ace4c0c0a421536d8d32ce484dfe6e2f726b7b0e7c30f12a195f8cc7'];
break;
default:
nodesList = ['03fbc17549ec667bccf397ababbcb4cdc0e3394345e4773079ab2774612ec9be61', '03da9a8623241ccf95f19cd645c6cecd4019ac91570e976eb0a128bebbc4d8a437', '03ca5340cf85cb2e7cf076e489f785410838de174e40be62723e8a60972ad75144', '0238bd27f02d67d6c51e269692bc8c9a32357a00e7777cba7f4f1f18a2a700b108', '03f983dcabed6baa1eab5b56c8b2e8fdc846ab3fd931155377897335e85a9fa57c', '03e399589533581e48796e29a825839a010036a61b20744fda929d6709fcbffcc5', '021f5288b5f72c42cd0d8801086af7ce09a816d8ee9a4c47a4b436399b26cb601a', '032b01b7585f781420cd4148841a82831ba37fa952342052cec16750852d4f2dd9', '02848036488d4b8fb1f1c4064261ec36151f43b085f0b51bd239ade3ddfc940c34', '02b6b1640fe029e304c216951af9fbefdb23b0bdc9baaf327540d31b6107841fdf', '03694289827203a5b3156d753071ddd5bf92e371f5a462943f9555eef6d2d6606c', '0283d850db7c3e8ea7cc9c4abc7afaab12bbdf72b677dcba1d608350d2537d7d43'];
}
for (let pubKey of nodesList) {
try {
const node = await nodesApi.$getNode(pubKey);
if (node) {
nodes.push(node);
}
} catch (e) {}
}
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
res.json(nodes);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
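
The same three cache headers recur in almost every handler in this file; a small helper along these lines could factor them out (the helper name is ours, not the repo's):

import { Response } from 'express';

// hypothetical helper: mark a response publicly cacheable until now + windowMs
function setPublicCache(res: Response, windowMs: number): void {
  res.header('Pragma', 'public');
  res.header('Cache-control', 'public');
  res.setHeader('Expires', new Date(Date.now() + windowMs).toUTCString());
}
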
private async $getNode(req: Request, res: Response) {
try {
const node = await nodesApi.$getNode(req.params.public_key);
@@ -96,30 +56,11 @@ class NodesRoutes {
}
}
private async $getFeeHistogram(req: Request, res: Response) {
private async $getTopNodes(req: Request, res: Response) {
try {
const node = await nodesApi.$getFeeHistogram(req.params.public_key);
if (!node) {
res.status(404).send('Node not found');
return;
}
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
res.json(node);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async $getNodesRanking(req: Request, res: Response): Promise<void> {
try {
const topCapacityNodes = await nodesApi.$getTopCapacityNodes(false);
const topChannelsNodes = await nodesApi.$getTopChannelsNodes(false);
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
res.json(<INodesRanking>{
const topCapacityNodes = await nodesApi.$getTopCapacityNodes();
const topChannelsNodes = await nodesApi.$getTopChannelsNodes();
res.json({
topByCapacity: topCapacityNodes,
topByChannels: topChannelsNodes,
});
@@ -128,45 +69,18 @@ class NodesRoutes {
}
}
private async $getTopNodesByCapacity(req: Request, res: Response): Promise<void> {
try {
const topCapacityNodes = await nodesApi.$getTopCapacityNodes(true);
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
res.json(topCapacityNodes);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async $getTopNodesByChannels(req: Request, res: Response): Promise<void> {
try {
const topCapacityNodes = await nodesApi.$getTopChannelsNodes(true);
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
res.json(topCapacityNodes);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async $getOldestNodes(req: Request, res: Response): Promise<void> {
try {
const topCapacityNodes = await nodesApi.$getOldestNodes(true);
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
res.json(topCapacityNodes);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async $getISPRanking(req: Request, res: Response): Promise<void> {
try {
const nodesPerAs = await nodesApi.$getNodesISPRanking();
const groupBy = req.query.groupBy as string;
const showTor = req.query.showTor as string === 'true' ? true : false;
if (!['capacity', 'node-count'].includes(groupBy)) {
res.status(400).send(`groupBy must be one of 'capacity' or 'node-count'`);
return;
}
const nodesPerAs = await nodesApi.$getNodesISPRanking(groupBy, showTor);
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 1000 * 600).toUTCString());
@@ -176,18 +90,6 @@ class NodesRoutes {
}
}
private async $getWorldNodes(req: Request, res: Response) {
try {
const worldNodes = await nodesApi.$getWorldNodes();
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 1000 * 600).toUTCString());
res.json(worldNodes);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async $getNodesPerCountry(req: Request, res: Response) {
try {
const [country]: any[] = await DB.query(

View File

@@ -6,8 +6,7 @@ class StatisticsApi {
public async $getStatistics(interval: string | null = null): Promise<any> {
interval = Common.getSqlInterval(interval);
let query = `SELECT UNIX_TIMESTAMP(added) AS added, channel_count, total_capacity,
tor_nodes, clearnet_nodes, unannounced_nodes, clearnet_tor_nodes
let query = `SELECT UNIX_TIMESTAMP(added) AS added, channel_count, total_capacity, tor_nodes, clearnet_nodes, unannounced_nodes
FROM lightning_stats`;
if (interval) {
@@ -28,7 +27,7 @@ class StatisticsApi {
public async $getLatestStatistics(): Promise<any> {
try {
const [rows]: any = await DB.query(`SELECT * FROM lightning_stats ORDER BY added DESC LIMIT 1`);
const [rows2]: any = await DB.query(`SELECT * FROM lightning_stats WHERE DATE(added) = DATE(NOW() - INTERVAL 7 DAY)`);
const [rows2]: any = await DB.query(`SELECT * FROM lightning_stats ORDER BY added DESC LIMIT 1 OFFSET 7`);
return {
latest: rows[0],
previous: rows2[0],
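
The two queries above select the "previous" row differently: the date-based form returns nothing unless a row exists exactly seven calendar days back, while the OFFSET form grabs the 8th-newest row, which is seven days old only if stats are inserted once per day. A sketch of the contrast (table as above, insert schedule assumed):

// date-based: empty result set if no row landed exactly 7 days ago
const byDate = `SELECT * FROM lightning_stats WHERE DATE(added) = DATE(NOW() - INTERVAL 7 DAY)`;
// offset-based: always returns a row once 8 exist, but assumes one insert per day
const byOffset = `SELECT * FROM lightning_stats ORDER BY added DESC LIMIT 1 OFFSET 7`;
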

View File

@@ -1,37 +0,0 @@
import fs from 'fs';
import path from "path";
const { spawnSync } = require('child_process');
function getVersion(): string {
const packageJson = fs.readFileSync('package.json').toString();
return JSON.parse(packageJson).version;
}
function getGitCommit(): string {
if (process.env.MEMPOOL_COMMIT_HASH) {
return process.env.MEMPOOL_COMMIT_HASH;
} else {
const gitRevParse = spawnSync('git', ['rev-parse', '--short', 'HEAD']);
if (!gitRevParse.error) {
const output = gitRevParse.stdout.toString('utf-8').replace(/[\n\r\s]+$/, '');
if (output) {
return output;
} else {
console.log('Could not fetch git commit: No repo available');
}
} else if (gitRevParse.error.code === 'ENOENT') {
console.log('Could not fetch git commit: Command `git` is unavailable');
}
}
return '?';
}
const versionInfo = {
version: getVersion(),
gitCommit: getGitCommit()
}
fs.writeFileSync(
path.join(__dirname, 'version.json'),
JSON.stringify(versionInfo, null, 2) + "\n"
);
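
A short sketch of consuming the version.json emitted above, assuming it sits next to the compiled entry point:

import fs from 'fs';
import path from 'path';

// read the file written by the script above
const { version, gitCommit } = JSON.parse(
  fs.readFileSync(path.join(__dirname, 'version.json'), 'utf8')
);
console.log(`mempool backend v${version} (${gitCommit})`);
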

View File

@@ -7,32 +7,18 @@ import { Common } from '../../common';
* Convert a clightning "listnode" entry to a lnd node entry
*/
export function convertNode(clNode: any): ILightningApi.Node {
let custom_records: { [type: number]: string } | undefined = undefined;
if (clNode.option_will_fund) {
try {
custom_records = { '1': Buffer.from(clNode.option_will_fund.compact_lease || '', 'hex').toString('base64') };
} catch (e) {
logger.err(`Cannot decode option_will_fund compact_lease for ${clNode.nodeid}). Reason: ` + (e instanceof Error ? e.message : e));
custom_records = undefined;
}
}
return {
alias: clNode.alias ?? '',
color: `#${clNode.color ?? ''}`,
features: [], // TODO parse and return clNode.feature
pub_key: clNode.nodeid,
addresses: clNode.addresses?.map((addr) => {
let address = addr.address;
if (addr.type === 'ipv6') {
address = `[${address}]`;
}
return {
network: addr.type,
addr: `${address}:${addr.port}`
addr: `${addr.address}:${addr.port}`
};
}) ?? [],
}),
last_update: clNode?.last_timestamp ?? 0,
custom_records
};
}
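
A minimal sketch of the address handling in convertNode: IPv6 hosts are bracketed before the port is appended so the colons stay unambiguous (sample values hypothetical):

function toSocketAddr(addr: { type: string; address: string; port: number }): string {
  // wrap IPv6 literals in brackets; IPv4 and onion hosts pass through unchanged
  const host = addr.type === 'ipv6' ? `[${addr.address}]` : addr.address;
  return `${host}:${addr.port}`;
}
console.log(toSocketAddr({ type: 'ipv6', address: '2001:db8::1', port: 9735 })); // "[2001:db8::1]:9735"
console.log(toSocketAddr({ type: 'ipv4', address: '203.0.113.7', port: 9735 })); // "203.0.113.7:9735"
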
@@ -80,8 +66,6 @@ export async function convertAndmergeBidirectionalChannels(clChannels: any[]): P
logger.info(`Building partial channels from clightning output. Channels processed: ${channelProcessed + 1} of ${keys.length}`);
loggerTimer = new Date().getTime() / 1000;
}
channelProcessed++;
}
return consolidatedChannelList;
@@ -136,7 +120,7 @@ async function buildIncompleteChannel(clChannel: any): Promise<ILightningApi.Cha
*/
function convertPolicy(clChannel: any): ILightningApi.RoutingPolicy {
return {
time_lock_delta: clChannel.delay,
time_lock_delta: 0, // TODO
min_htlc: clChannel.htlc_minimum_msat.slice(0, -4),
max_htlc_msat: clChannel.htlc_maximum_msat.slice(0, -4),
fee_base_msat: clChannel.base_fee_millisatoshi,

View File

@@ -49,7 +49,6 @@ export namespace ILightningApi {
}[];
color: string;
features: { [key: number]: Feature };
custom_records?: { [type: number]: string };
}
export interface Info {
@@ -83,10 +82,4 @@ export namespace ILightningApi {
is_required: boolean;
is_known: boolean;
}
export interface ForensicOutput {
node?: 1 | 2;
type: number;
value: number;
}
}
}

View File

@@ -2,16 +2,10 @@ import logger from '../logger';
import { MempoolBlock, TransactionExtended, TransactionStripped, MempoolBlockWithTransactions, MempoolBlockDelta } from '../mempool.interfaces';
import { Common } from './common';
import config from '../config';
import { StaticPool } from 'node-worker-threads-pool';
import path from 'path';
class MempoolBlocks {
private mempoolBlocks: MempoolBlockWithTransactions[] = [];
private mempoolBlockDeltas: MempoolBlockDelta[] = [];
private makeTemplatesPool = new StaticPool({
size: 1,
task: path.resolve(__dirname, './tx-selection-worker.js'),
});
constructor() {}
@@ -77,15 +71,15 @@ class MempoolBlocks {
const time = end - start;
logger.debug('Mempool blocks calculated in ' + time / 1000 + ' seconds');
const blocks = this.calculateMempoolBlocks(memPoolArray, this.mempoolBlocks);
const deltas = this.calculateMempoolDeltas(this.mempoolBlocks, blocks);
const { blocks, deltas } = this.calculateMempoolBlocks(memPoolArray, this.mempoolBlocks);
this.mempoolBlocks = blocks;
this.mempoolBlockDeltas = deltas;
}
private calculateMempoolBlocks(transactionsSorted: TransactionExtended[], prevBlocks: MempoolBlockWithTransactions[]): MempoolBlockWithTransactions[] {
private calculateMempoolBlocks(transactionsSorted: TransactionExtended[], prevBlocks: MempoolBlockWithTransactions[]):
{ blocks: MempoolBlockWithTransactions[], deltas: MempoolBlockDelta[] } {
const mempoolBlocks: MempoolBlockWithTransactions[] = [];
const mempoolBlockDeltas: MempoolBlockDelta[] = [];
let blockWeight = 0;
let blockSize = 0;
let transactions: TransactionExtended[] = [];
@@ -105,12 +99,7 @@ class MempoolBlocks {
if (transactions.length) {
mempoolBlocks.push(this.dataToMempoolBlocks(transactions, blockSize, blockWeight, mempoolBlocks.length));
}
return mempoolBlocks;
}
private calculateMempoolDeltas(prevBlocks: MempoolBlockWithTransactions[], mempoolBlocks: MempoolBlockWithTransactions[]): MempoolBlockDelta[] {
const mempoolBlockDeltas: MempoolBlockDelta[] = [];
// Calculate change from previous block states
for (let i = 0; i < Math.max(mempoolBlocks.length, prevBlocks.length); i++) {
let added: TransactionStripped[] = [];
let removed: string[] = [];
@@ -143,26 +132,10 @@ class MempoolBlocks {
removed
});
}
return mempoolBlockDeltas;
}
public async makeBlockTemplates(newMempool: { [txid: string]: TransactionExtended }, blockLimit: number, weightLimit: number | null = null, condenseRest = false): Promise<void> {
const { mempool, blocks } = await this.makeTemplatesPool.exec({ mempool: newMempool, blockLimit, weightLimit, condenseRest });
const deltas = this.calculateMempoolDeltas(this.mempoolBlocks, blocks);
// copy CPFP info across to main thread's mempool
Object.keys(newMempool).forEach((txid) => {
if (newMempool[txid] && mempool[txid]) {
newMempool[txid].effectiveFeePerVsize = mempool[txid].effectiveFeePerVsize;
newMempool[txid].ancestors = mempool[txid].ancestors;
newMempool[txid].descendants = mempool[txid].descendants;
newMempool[txid].bestDescendant = mempool[txid].bestDescendant;
newMempool[txid].cpfpChecked = mempool[txid].cpfpChecked;
}
});
this.mempoolBlocks = blocks;
this.mempoolBlockDeltas = deltas;
return {
blocks: mempoolBlocks,
deltas: mempoolBlockDeltas
};
}
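
For reference, a condensed sketch of the worker-pool pattern makeBlockTemplates relies on, assuming the node-worker-threads-pool package and the compiled worker path from this diff:

import { StaticPool } from 'node-worker-threads-pool';
import path from 'path';

const pool = new StaticPool({
  size: 1, // template building is CPU-bound; one worker keeps runs sequential
  task: path.resolve(__dirname, './tx-selection-worker.js'),
});

async function buildTemplates(mempool: object): Promise<void> {
  // exec() posts the params to the worker and resolves with its postMessage payload
  const { blocks } = await pool.exec({ mempool, blockLimit: 8, weightLimit: null, condenseRest: true });
  console.log(`worker returned ${blocks.length} projected blocks`);
}
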
private dataToMempoolBlocks(transactions: TransactionExtended[],

View File

@@ -20,8 +20,6 @@ class Mempool {
maxmempool: 300000000, mempoolminfee: 0.00001000, minrelaytxfee: 0.00001000 };
private mempoolChangedCallback: ((newMempool: {[txId: string]: TransactionExtended; }, newTransactions: TransactionExtended[],
deletedTransactions: TransactionExtended[]) => void) | undefined;
private asyncMempoolChangedCallback: ((newMempool: {[txId: string]: TransactionExtended; }, newTransactions: TransactionExtended[],
deletedTransactions: TransactionExtended[]) => void) | undefined;
private txPerSecondArray: number[] = [];
private txPerSecond: number = 0;
@@ -65,11 +63,6 @@ class Mempool {
this.mempoolChangedCallback = fn;
}
public setAsyncMempoolChangedCallback(fn: (newMempool: { [txId: string]: TransactionExtended; },
newTransactions: TransactionExtended[], deletedTransactions: TransactionExtended[]) => Promise<void>) {
this.asyncMempoolChangedCallback = fn;
}
public getMempool(): { [txid: string]: TransactionExtended } {
return this.mempoolCache;
}
@@ -79,9 +72,6 @@ class Mempool {
if (this.mempoolChangedCallback) {
this.mempoolChangedCallback(this.mempoolCache, [], []);
}
if (this.asyncMempoolChangedCallback) {
this.asyncMempoolChangedCallback(this.mempoolCache, [], []);
}
}
public async $updateMemPoolInfo() {
@@ -113,11 +103,12 @@ class Mempool {
return txTimes;
}
public async $updateMempool(): Promise<void> {
logger.debug(`Updating mempool...`);
public async $updateMempool() {
logger.debug('Updating mempool');
const start = new Date().getTime();
let hasChange: boolean = false;
const currentMempoolSize = Object.keys(this.mempoolCache).length;
let txCount = 0;
const transactions = await bitcoinApi.$getRawMempool();
const diff = transactions.length - currentMempoolSize;
const newTransactions: TransactionExtended[] = [];
@@ -133,6 +124,7 @@ class Mempool {
try {
const transaction = await transactionUtils.$getTransactionExtended(txid);
this.mempoolCache[txid] = transaction;
txCount++;
if (this.inSync) {
this.txPerSecondArray.push(new Date().getTime());
this.vBytesPerSecondArray.push({
@@ -141,9 +133,14 @@ class Mempool {
});
}
hasChange = true;
if (diff > 0) {
logger.debug('Fetched transaction ' + txCount + ' / ' + diff);
} else {
logger.debug('Fetched transaction ' + txCount);
}
newTransactions.push(transaction);
} catch (e) {
logger.debug(`Error finding transaction '${txid}' in the mempool: ` + (e instanceof Error ? e.message : e));
logger.debug('Error finding transaction in mempool: ' + (e instanceof Error ? e.message : e));
}
}
@@ -197,13 +194,11 @@ class Mempool {
if (this.mempoolChangedCallback && (hasChange || deletedTransactions.length)) {
this.mempoolChangedCallback(this.mempoolCache, newTransactions, deletedTransactions);
}
if (this.asyncMempoolChangedCallback && (hasChange || deletedTransactions.length)) {
await this.asyncMempoolChangedCallback(this.mempoolCache, newTransactions, deletedTransactions);
}
const end = new Date().getTime();
const time = end - start;
logger.debug(`Mempool updated in ${time / 1000} seconds. New size: ${Object.keys(this.mempoolCache).length} (${diff > 0 ? '+' + diff : diff})`);
logger.debug(`New mempool size: ${Object.keys(this.mempoolCache).length} Change: ${diff}`);
logger.debug('Mempool updated in ' + time / 1000 + ' seconds');
}
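
The sync/async callback split above matters because the async variant is awaited inside $updateMempool, so expensive downstream work such as template building completes before the next poll starts; stripped to its shape:

type AsyncMempoolCb = (mempool: Record<string, unknown>) => Promise<void>;

class PollLoop {
  private asyncCb: AsyncMempoolCb | undefined;
  public setAsyncCallback(fn: AsyncMempoolCb): void { this.asyncCb = fn; }
  public async poll(mempool: Record<string, unknown>): Promise<void> {
    if (this.asyncCb) {
      await this.asyncCb(mempool); // the next poll cannot begin until this resolves
    }
  }
}
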
public handleRbfTransactions(rbfTransactions: { [txid: string]: TransactionExtended; }) {

View File

@@ -1,7 +1,6 @@
import { Application, Request, Response } from 'express';
import config from "../../config";
import logger from '../../logger';
import audits from '../audit';
import BlocksAuditsRepository from '../../repositories/BlocksAuditsRepository';
import BlocksRepository from '../../repositories/BlocksRepository';
import DifficultyAdjustmentsRepository from '../../repositories/DifficultyAdjustmentsRepository';
@@ -27,11 +26,7 @@ class MiningRoutes {
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/sizes-weights/:interval', this.$getHistoricalBlockSizeAndWeight)
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/difficulty-adjustments/:interval', this.$getDifficultyAdjustments)
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/predictions/:interval', this.$getHistoricalBlockPrediction)
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/audit/scores', this.$getBlockAuditScores)
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/audit/scores/:height', this.$getBlockAuditScores)
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/audit/score/:hash', this.$getBlockAuditScore)
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/audit/:hash', this.$getBlockAudit)
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/timestamp/:timestamp', this.$getHeightFromTimestamp)
;
}
@@ -243,12 +238,6 @@ class MiningRoutes {
public async $getBlockAudit(req: Request, res: Response) {
try {
const audit = await BlocksAuditsRepository.$getBlockAudit(req.params.hash);
if (!audit) {
res.status(404).send(`This block has not been audited.`);
return;
}
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 1000 * 3600 * 24).toUTCString());
@@ -257,55 +246,6 @@ class MiningRoutes {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async $getHeightFromTimestamp(req: Request, res: Response) {
try {
const timestamp = parseInt(req.params.timestamp, 10);
// This will prevent people from entering milliseconds etc.
// Block timestamps are allowed to be up to 2 hours off, so 24 hours
// will never put the maximum value before the most recent block
const nowPlus1day = Math.floor(Date.now() / 1000) + 60 * 60 * 24;
// Reject anything that is not a positive integer number of seconds
if (!/^[1-9][0-9]*$/.test(req.params.timestamp) || timestamp > nowPlus1day) {
throw new Error(`Invalid timestamp, value must be Unix seconds`);
}
const result = await BlocksRepository.$getBlockHeightFromTimestamp(
timestamp,
);
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 1000 * 300).toUTCString());
res.json(result);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async $getBlockAuditScores(req: Request, res: Response) {
try {
let height = req.params.height === undefined ? undefined : parseInt(req.params.height, 10);
if (height == null) {
height = await BlocksRepository.$mostRecentBlockHeight();
}
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
res.json(await BlocksAuditsRepository.$getBlockAuditScores(height, height - 15));
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
public async $getBlockAuditScore(req: Request, res: Response) {
try {
const audit = await BlocksAuditsRepository.$getBlockAuditScore(req.params.hash);
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 1000 * 3600 * 24).toUTCString());
res.json(audit || 'null');
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
}
export default new MiningRoutes();
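
The timestamp guard in $getHeightFromTimestamp boils down to two checks; as a standalone sketch:

// accept only positive integer strings whose value could plausibly be Unix seconds
function isValidUnixSeconds(raw: string): boolean {
  const nowPlus1day = Math.floor(Date.now() / 1000) + 60 * 60 * 24; // block timestamps may drift up to 2h
  return /^[1-9][0-9]*$/.test(raw) && parseInt(raw, 10) <= nowPlus1day;
}
console.log(isValidUnixSeconds('1660000000'));    // true: plausible seconds value
console.log(isValidUnixSeconds('1660000000000')); // false: milliseconds, far beyond now + 1 day
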

View File

@@ -14,10 +14,10 @@ interface Pool {
class PoolsParser {
miningPools: any[] = [];
unknownPool: any = {
'name': 'Unknown',
'link': 'https://learnmeabitcoin.com/technical/coinbase-transaction',
'regexes': '[]',
'addresses': '[]',
'name': "Unknown",
'link': "https://learnmeabitcoin.com/technical/coinbase-transaction",
'regexes': "[]",
'addresses': "[]",
'slug': 'unknown'
};
slugWarnFlag = false;
@@ -25,7 +25,7 @@ class PoolsParser {
/**
* Parse the pools.json file, consolidate the data and dump it into the database
*/
public async migratePoolsJson(poolsJson: object): Promise<void> {
public async migratePoolsJson(poolsJson: object) {
if (['mainnet', 'testnet', 'signet'].includes(config.MEMPOOL.NETWORK) === false) {
return;
}
@@ -81,7 +81,6 @@ class PoolsParser {
// Finally, we generate the final consolidated pools data
const finalPoolDataAdd: Pool[] = [];
const finalPoolDataUpdate: Pool[] = [];
const finalPoolDataRename: Pool[] = [];
for (let i = 0; i < poolNames.length; ++i) {
let allAddresses: string[] = [];
let allRegexes: string[] = [];
@@ -128,26 +127,8 @@ class PoolsParser {
finalPoolDataUpdate.push(poolObj);
}
} else {
// Double check that if we're not just renaming a pool (same address same regex)
const [poolToRename]: any[] = await DB.query(`
SELECT * FROM pools
WHERE addresses = ? OR regexes = ?`,
[JSON.stringify(poolObj.addresses), JSON.stringify(poolObj.regexes)]
);
if (poolToRename && poolToRename.length > 0) {
// We're actually renaming an existing pool
finalPoolDataRename.push({
'name': poolObj.name,
'link': poolObj.link,
'regexes': allRegexes,
'addresses': allAddresses,
'slug': slug
});
logger.debug(`Rename '${poolToRename[0].name}' mining pool to ${poolObj.name}`);
} else {
logger.debug(`Add '${finalPoolName}' mining pool`);
finalPoolDataAdd.push(poolObj);
}
logger.debug(`Add '${finalPoolName}' mining pool`);
finalPoolDataAdd.push(poolObj);
}
this.miningPools.push({
@@ -164,9 +145,7 @@ class PoolsParser {
return;
}
if (finalPoolDataAdd.length > 0 || finalPoolDataUpdate.length > 0 ||
finalPoolDataRename.length > 0
) {
if (finalPoolDataAdd.length > 0 || finalPoolDataUpdate.length > 0) {
logger.debug(`Update pools table now`);
// Add new mining pools into the database
@@ -190,22 +169,8 @@ class PoolsParser {
;`);
}
// Rename mining pools
const renameQueries: string[] = [];
for (let i = 0; i < finalPoolDataRename.length; ++i) {
renameQueries.push(`
UPDATE pools
SET name='${finalPoolDataRename[i].name}', link='${finalPoolDataRename[i].link}',
slug='${finalPoolDataRename[i].slug}'
WHERE regexes='${JSON.stringify(finalPoolDataRename[i].regexes)}'
AND addresses='${JSON.stringify(finalPoolDataRename[i].addresses)}'
;`);
}
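
The rename queries above interpolate names and JSON straight into the SQL string; a hedged alternative sketch using bound parameters (same columns and behavior, placeholder style as used elsewhere in this file):

// equivalent UPDATE with placeholders instead of string interpolation
const renameSql = `
  UPDATE pools
  SET name = ?, link = ?, slug = ?
  WHERE regexes = ? AND addresses = ?`;
// per pool to rename (sketch):
// await DB.query({ sql: renameSql, timeout: 120000 }, [
//   pool.name, pool.link, pool.slug,
//   JSON.stringify(pool.regexes), JSON.stringify(pool.addresses),
// ]);
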
try {
if (finalPoolDataAdd.length > 0 || updateQueries.length > 0) {
await this.$deleteBlocskToReindex(finalPoolDataUpdate);
}
await this.$deleteBlocskToReindex(finalPoolDataUpdate);
if (finalPoolDataAdd.length > 0) {
await DB.query({ sql: queryAdd, timeout: 120000 });
@@ -213,9 +178,6 @@ class PoolsParser {
for (const query of updateQueries) {
await DB.query({ sql: query, timeout: 120000 });
}
for (const query of renameQueries) {
await DB.query({ sql: query, timeout: 120000 });
}
await this.insertUnknownPool();
logger.info('Mining pools.json import completed');
} catch (e) {

View File

@@ -1,338 +0,0 @@
import config from '../config';
import logger from '../logger';
import { TransactionExtended, MempoolBlockWithTransactions, AuditTransaction } from '../mempool.interfaces';
import { PairingHeap } from '../utils/pairing-heap';
import { Common } from './common';
import { parentPort } from 'worker_threads';
if (parentPort) {
parentPort.on('message', (params: { mempool: { [txid: string]: TransactionExtended }, blockLimit: number, weightLimit: number | null, condenseRest: boolean}) => {
const { mempool, blocks } = makeBlockTemplates(params);
// return the result to the main thread.
if (parentPort) {
parentPort.postMessage({ mempool, blocks });
}
});
}
/*
* Build projected mempool blocks using an approximation of the transaction selection algorithm from Bitcoin Core
* (see BlockAssembler in https://github.com/bitcoin/bitcoin/blob/master/src/node/miner.cpp)
*
* blockLimit: number of blocks to build in total.
* weightLimit: maximum weight of transactions to consider using the selection algorithm.
* if weightLimit is significantly lower than the mempool size, results may start to diverge from getBlockTemplate
* condenseRest: whether to ignore excess transactions or append them to the final block.
*/
function makeBlockTemplates({ mempool, blockLimit, weightLimit, condenseRest }: { mempool: { [txid: string]: TransactionExtended }, blockLimit: number, weightLimit?: number | null, condenseRest?: boolean | null })
: { mempool: { [txid: string]: TransactionExtended }, blocks: MempoolBlockWithTransactions[] } {
const start = Date.now();
const auditPool: { [txid: string]: AuditTransaction } = {};
const mempoolArray: AuditTransaction[] = [];
const restOfArray: TransactionExtended[] = [];
let weight = 0;
const maxWeight = weightLimit ? Math.max(4_000_000 * blockLimit, weightLimit) : Infinity;
// grab the top feerate txs up to maxWeight
Object.values(mempool).sort((a, b) => b.feePerVsize - a.feePerVsize).forEach(tx => {
weight += tx.weight;
if (weight >= maxWeight) {
restOfArray.push(tx);
return;
}
// initializing everything up front helps V8 optimize property access later
auditPool[tx.txid] = {
txid: tx.txid,
fee: tx.fee,
size: tx.size,
weight: tx.weight,
feePerVsize: tx.feePerVsize,
vin: tx.vin,
relativesSet: false,
ancestorMap: new Map<string, AuditTransaction>(),
children: new Set<AuditTransaction>(),
ancestorFee: 0,
ancestorWeight: 0,
score: 0,
used: false,
modified: false,
modifiedNode: null,
};
mempoolArray.push(auditPool[tx.txid]);
});
// Build relatives graph & calculate ancestor scores
for (const tx of mempoolArray) {
if (!tx.relativesSet) {
setRelatives(tx, auditPool);
}
}
// Sort by descending ancestor score
mempoolArray.sort((a, b) => (b.score || 0) - (a.score || 0));
// Build blocks by greedily choosing the highest feerate package
// (i.e. the package rooted in the transaction with the best ancestor score)
const blocks: MempoolBlockWithTransactions[] = [];
let blockWeight = 4000;
let blockSize = 0;
let transactions: AuditTransaction[] = [];
const modified: PairingHeap<AuditTransaction> = new PairingHeap((a, b): boolean => (a.score || 0) > (b.score || 0));
let overflow: AuditTransaction[] = [];
let failures = 0;
let top = 0;
while ((top < mempoolArray.length || !modified.isEmpty()) && (condenseRest || blocks.length < blockLimit)) {
// skip invalid transactions
while (top < mempoolArray.length && (mempoolArray[top].used || mempoolArray[top].modified)) {
top++;
}
// Select best next package
let nextTx;
const nextPoolTx = mempoolArray[top];
const nextModifiedTx = modified.peek();
if (nextPoolTx && (!nextModifiedTx || (nextPoolTx.score || 0) > (nextModifiedTx.score || 0))) {
nextTx = nextPoolTx;
top++;
} else {
modified.pop();
if (nextModifiedTx) {
nextTx = nextModifiedTx;
nextTx.modifiedNode = undefined;
}
}
if (nextTx && !nextTx?.used) {
// Check if the package fits into this block
if (blockWeight + nextTx.ancestorWeight < config.MEMPOOL.BLOCK_WEIGHT_UNITS) {
const ancestors: AuditTransaction[] = Array.from(nextTx.ancestorMap.values());
const descendants: AuditTransaction[] = [];
// sort ancestors by dependency graph (equivalent to sorting by ascending ancestor count)
const sortedTxSet = [...ancestors.sort((a, b) => { return (a.ancestorMap.size || 0) - (b.ancestorMap.size || 0); }), nextTx];
const effectiveFeeRate = nextTx.ancestorFee / (nextTx.ancestorWeight / 4);
const used: AuditTransaction[] = [];
while (sortedTxSet.length) {
const ancestor = sortedTxSet.pop();
const mempoolTx = mempool[ancestor.txid];
ancestor.used = true;
ancestor.usedBy = nextTx.txid;
// update original copy of this tx with effective fee rate & relatives data
mempoolTx.effectiveFeePerVsize = effectiveFeeRate;
mempoolTx.ancestors = sortedTxSet.map((a) => {
return {
txid: a.txid,
fee: a.fee,
weight: a.weight,
};
}).reverse();
mempoolTx.descendants = descendants.map((a) => {
return {
txid: a.txid,
fee: a.fee,
weight: a.weight,
};
});
descendants.push(ancestor);
mempoolTx.cpfpChecked = true;
transactions.push(ancestor);
blockSize += ancestor.size;
blockWeight += ancestor.weight;
used.push(ancestor);
}
// remove these as valid package ancestors for any descendants remaining in the mempool
if (used.length) {
used.forEach(tx => {
updateDescendants(tx, auditPool, modified);
});
}
failures = 0;
} else {
// hold this package in an overflow list while we check for smaller options
overflow.push(nextTx);
failures++;
}
}
// this block is full
const exceededPackageTries = failures > 1000 && blockWeight > (config.MEMPOOL.BLOCK_WEIGHT_UNITS - 4000);
const queueEmpty = top >= mempoolArray.length && modified.isEmpty();
if ((exceededPackageTries || queueEmpty) && (!condenseRest || blocks.length < blockLimit - 1)) {
// construct this block
if (transactions.length) {
blocks.push(dataToMempoolBlocks(transactions.map(t => mempool[t.txid]), blockSize, blockWeight, blocks.length));
}
// reset for the next block
transactions = [];
blockSize = 0;
blockWeight = 4000;
// 'overflow' packages didn't fit in this block, but are valid candidates for the next
for (const overflowTx of overflow.reverse()) {
if (overflowTx.modified) {
overflowTx.modifiedNode = modified.add(overflowTx);
} else {
top--;
mempoolArray[top] = overflowTx;
}
}
overflow = [];
}
}
if (condenseRest) {
// pack any leftover transactions into the last block
for (const tx of overflow) {
if (!tx || tx?.used) {
continue;
}
blockWeight += tx.weight;
blockSize += tx.size;
const mempoolTx = mempool[tx.txid];
// update original copy of this tx with effective fee rate & relatives data
mempoolTx.effectiveFeePerVsize = tx.score;
mempoolTx.ancestors = (Array.from(tx.ancestorMap?.values()) as AuditTransaction[]).map((a) => {
return {
txid: a.txid,
fee: a.fee,
weight: a.weight,
};
});
mempoolTx.bestDescendant = null;
mempoolTx.cpfpChecked = true;
transactions.push(tx);
tx.used = true;
}
const blockTransactions = transactions.map(t => mempool[t.txid]);
restOfArray.forEach(tx => {
blockWeight += tx.weight;
blockSize += tx.size;
tx.effectiveFeePerVsize = tx.feePerVsize;
tx.cpfpChecked = false;
tx.ancestors = [];
tx.bestDescendant = null;
blockTransactions.push(tx);
});
if (blockTransactions.length) {
blocks.push(dataToMempoolBlocks(blockTransactions, blockSize, blockWeight, blocks.length));
}
transactions = [];
} else if (transactions.length) {
blocks.push(dataToMempoolBlocks(transactions.map(t => mempool[t.txid]), blockSize, blockWeight, blocks.length));
}
const end = Date.now();
const time = end - start;
logger.debug('Mempool templates calculated in ' + time / 1000 + ' seconds');
return {
mempool,
blocks
};
}
// traverse in-mempool ancestors
// recursion unavoidable, but should be limited to depth < 25 by mempool policy
function setRelatives(
tx: AuditTransaction,
mempool: { [txid: string]: AuditTransaction },
): void {
for (const parent of tx.vin) {
const parentTx = mempool[parent.txid];
if (parentTx && !tx.ancestorMap?.has(parent.txid)) {
tx.ancestorMap.set(parent.txid, parentTx);
parentTx.children.add(tx);
// visit each node only once
if (!parentTx.relativesSet) {
setRelatives(parentTx, mempool);
}
parentTx.ancestorMap.forEach((ancestor) => {
tx.ancestorMap.set(ancestor.txid, ancestor);
});
}
};
tx.ancestorFee = tx.fee || 0;
tx.ancestorWeight = tx.weight || 0;
tx.ancestorMap.forEach((ancestor) => {
tx.ancestorFee += ancestor.fee;
tx.ancestorWeight += ancestor.weight;
});
tx.score = tx.ancestorFee / ((tx.ancestorWeight / 4) || 1);
tx.relativesSet = true;
}
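
A worked example of the ancestor score computed above, with hypothetical numbers: a low-fee parent being pulled into a package by its child (CPFP):

// parent alone pays 1 sat/vB; the child bids 20 sat/vB to drag it in
const parent = { fee: 200, weight: 800 };  // 800 WU = 200 vB
const child = { fee: 2000, weight: 400 };  // 400 WU = 100 vB
const ancestorFee = child.fee + parent.fee;          // 2200 sats
const ancestorWeight = child.weight + parent.weight; // 1200 WU = 300 vB
const score = ancestorFee / (ancestorWeight / 4);    // 2200 / 300 ≈ 7.33 sat/vB
console.log(score.toFixed(2)); // "7.33": the package rate the heap sorts on
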
// iterate over remaining descendants, removing the root as a valid ancestor & updating the ancestor score
// avoids recursion to limit call stack depth
function updateDescendants(
rootTx: AuditTransaction,
mempool: { [txid: string]: AuditTransaction },
modified: PairingHeap<AuditTransaction>,
): void {
const descendantSet: Set<AuditTransaction> = new Set();
// stack of nodes left to visit
const descendants: AuditTransaction[] = [];
let descendantTx;
let tmpScore;
rootTx.children.forEach(childTx => {
if (!descendantSet.has(childTx)) {
descendants.push(childTx);
descendantSet.add(childTx);
}
});
while (descendants.length) {
descendantTx = descendants.pop();
if (descendantTx && descendantTx.ancestorMap && descendantTx.ancestorMap.has(rootTx.txid)) {
// remove tx as ancestor
descendantTx.ancestorMap.delete(rootTx.txid);
descendantTx.ancestorFee -= rootTx.fee;
descendantTx.ancestorWeight -= rootTx.weight;
tmpScore = descendantTx.score;
descendantTx.score = descendantTx.ancestorFee / (descendantTx.ancestorWeight / 4);
if (!descendantTx.modifiedNode) {
descendantTx.modified = true;
descendantTx.modifiedNode = modified.add(descendantTx);
} else {
// rebalance modified heap if score has changed
if (descendantTx.score < tmpScore) {
modified.decreasePriority(descendantTx.modifiedNode);
} else if (descendantTx.score > tmpScore) {
modified.increasePriority(descendantTx.modifiedNode);
}
}
// add this node's children to the stack
descendantTx.children.forEach(childTx => {
// visit each node only once
if (!descendantSet.has(childTx)) {
descendants.push(childTx);
descendantSet.add(childTx);
}
});
}
}
}
function dataToMempoolBlocks(transactions: TransactionExtended[],
blockSize: number, blockWeight: number, blocksIndex: number): MempoolBlockWithTransactions {
let rangeLength = 4;
if (blocksIndex === 0) {
rangeLength = 8;
}
if (transactions.length > 4000) {
rangeLength = 6;
} else if (transactions.length > 10000) {
rangeLength = 8;
}
return {
blockSize: blockSize,
blockVSize: blockWeight / 4,
nTx: transactions.length,
totalFees: transactions.reduce((acc, cur) => acc + cur.fee, 0),
medianFee: Common.percentile(transactions.map((tx) => tx.effectiveFeePerVsize), config.MEMPOOL.RECOMMENDED_FEE_PERCENTILE),
feeRange: Common.getFeesInRange(transactions, rangeLength),
transactionIds: transactions.map((tx) => tx.txid),
transactions: transactions.map((tx) => Common.stripTransaction(tx)),
};
}

View File

@@ -18,7 +18,6 @@ import difficultyAdjustment from './difficulty-adjustment';
import feeApi from './fee-api';
import BlocksAuditsRepository from '../repositories/BlocksAuditsRepository';
import BlocksSummariesRepository from '../repositories/BlocksSummariesRepository';
import Audit from './audit';
class WebsocketHandler {
private wss: WebSocket.Server | undefined;
@@ -244,18 +243,13 @@ class WebsocketHandler {
});
}
async handleMempoolChange(newMempool: { [txid: string]: TransactionExtended },
newTransactions: TransactionExtended[], deletedTransactions: TransactionExtended[]): Promise<void> {
handleMempoolChange(newMempool: { [txid: string]: TransactionExtended },
newTransactions: TransactionExtended[], deletedTransactions: TransactionExtended[]) {
if (!this.wss) {
throw new Error('WebSocket.Server is not set');
}
if (config.MEMPOOL.ADVANCED_GBT_MEMPOOL) {
await mempoolBlocks.makeBlockTemplates(newMempool, 8, null, true);
} else {
mempoolBlocks.updateMempoolBlocks(newMempool);
}
mempoolBlocks.updateMempoolBlocks(newMempool);
const mBlocks = mempoolBlocks.getMempoolBlocks();
const mBlockDeltas = mempoolBlocks.getMempoolBlockDeltas();
const mempoolInfo = memPool.getMempoolInfo();
@@ -410,71 +404,76 @@ class WebsocketHandler {
}
});
}
async handleNewBlock(block: BlockExtended, txIds: string[], transactions: TransactionExtended[]): Promise<void> {
handleNewBlock(block: BlockExtended, txIds: string[], transactions: TransactionExtended[]) {
if (!this.wss) {
throw new Error('WebSocket.Server is not set');
}
let mBlocks: undefined | MempoolBlock[];
let mBlockDeltas: undefined | MempoolBlockDelta[];
let matchRate = 0;
const _memPool = memPool.getMempool();
const _mempoolBlocks = mempoolBlocks.getMempoolBlocksWithTransactions();
if (config.MEMPOOL.ADVANCED_GBT_AUDIT) {
await mempoolBlocks.makeBlockTemplates(_memPool, 2);
} else {
mempoolBlocks.updateMempoolBlocks(_memPool);
}
if (_mempoolBlocks[0]) {
const matches: string[] = [];
const added: string[] = [];
const missing: string[] = [];
if (Common.indexingEnabled() && memPool.isInSync()) {
const projectedBlocks = mempoolBlocks.getMempoolBlocksWithTransactions();
const { censored, added, fresh, score } = Audit.auditBlock(transactions, projectedBlocks, _memPool);
const matchRate = Math.round(score * 100 * 100) / 100;
const stripped = projectedBlocks[0]?.transactions ? projectedBlocks[0].transactions.map((tx) => {
return {
txid: tx.txid,
vsize: tx.vsize,
fee: tx.fee ? Math.round(tx.fee) : 0,
value: tx.value,
};
}) : [];
BlocksSummariesRepository.$saveTemplate({
height: block.height,
template: {
id: block.id,
transactions: stripped
for (const txId of txIds) {
if (_mempoolBlocks[0].transactionIds.indexOf(txId) > -1) {
matches.push(txId);
} else {
added.push(txId);
}
});
delete _memPool[txId];
}
BlocksAuditsRepository.$saveAudit({
time: block.timestamp,
height: block.height,
hash: block.id,
addedTxs: added,
missingTxs: censored,
freshTxs: fresh,
matchRate: matchRate,
});
for (const txId of _mempoolBlocks[0].transactionIds) {
if (matches.includes(txId) || added.includes(txId)) {
continue;
}
missing.push(txId);
}
if (block.extras) {
block.extras.matchRate = matchRate;
matchRate = Math.round((Math.max(0, matches.length - missing.length - added.length) / txIds.length * 100) * 100) / 100;
mempoolBlocks.updateMempoolBlocks(_memPool);
mBlocks = mempoolBlocks.getMempoolBlocks();
mBlockDeltas = mempoolBlocks.getMempoolBlockDeltas();
if (Common.indexingEnabled()) {
const stripped = _mempoolBlocks[0].transactions.map((tx) => {
return {
txid: tx.txid,
vsize: tx.vsize,
fee: tx.fee ? Math.round(tx.fee) : 0,
value: tx.value,
};
});
BlocksSummariesRepository.$saveSummary({
height: block.height,
template: {
id: block.id,
transactions: stripped
}
});
BlocksAuditsRepository.$saveAudit({
time: block.timestamp,
height: block.height,
hash: block.id,
addedTxs: added,
missingTxs: missing,
matchRate: matchRate,
});
}
}
// Update mempool to remove transactions included in the new block
for (const txId of txIds) {
delete _memPool[txId];
if (block.extras) {
block.extras.matchRate = matchRate;
}
if (config.MEMPOOL.ADVANCED_GBT_MEMPOOL) {
await mempoolBlocks.makeBlockTemplates(_memPool, 8, null, true);
} else {
mempoolBlocks.updateMempoolBlocks(_memPool);
}
const mBlocks = mempoolBlocks.getMempoolBlocks();
const mBlockDeltas = mempoolBlocks.getMempoolBlockDeltas();
const da = difficultyAdjustment.getDifficultyAdjustment();
const fees = feeApi.getRecommendedFee();
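
For context, the restored match-rate formula above penalizes both missing and added transactions; a worked example with hypothetical counts:

// a 2000-tx block: 1900 matched the projection, 100 were added, 50 projected txs went missing
const matches = 1900, missing = 50, added = 100;
const txCount = 2000;
const matchRate = Math.round((Math.max(0, matches - missing - added) / txCount * 100) * 100) / 100;
console.log(matchRate); // 87.5
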

View File

@@ -1,10 +1,7 @@
const configFromFile = require(
process.env.MEMPOOL_CONFIG_FILE ? process.env.MEMPOOL_CONFIG_FILE : '../mempool-config.json'
);
const configFile = require('../mempool-config.json');
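
A short usage note on the MEMPOOL_CONFIG_FILE override added above (the path below is hypothetical):

// launch with an alternate config without touching the source tree:
//   MEMPOOL_CONFIG_FILE=/etc/mempool/mempool-config.json npm run start
// when the variable is unset, ../mempool-config.json is used as before
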
interface IConfig {
MEMPOOL: {
ENABLED: boolean;
NETWORK: 'mainnet' | 'testnet' | 'signet' | 'liquid' | 'liquidtestnet';
BACKEND: 'esplora' | 'electrum' | 'none';
HTTP_PORT: number;
@@ -29,9 +26,6 @@ interface IConfig {
AUTOMATIC_BLOCK_REINDEXING: boolean;
POOLS_JSON_URL: string,
POOLS_JSON_TREE_URL: string,
ADVANCED_GBT_AUDIT: boolean;
ADVANCED_GBT_MEMPOOL: boolean;
TRANSACTION_INDEXING: boolean;
};
ESPLORA: {
REST_API_URL: string;
@@ -42,9 +36,6 @@ interface IConfig {
TOPOLOGY_FOLDER: string;
STATS_REFRESH_INTERVAL: number;
GRAPH_REFRESH_INTERVAL: number;
LOGGER_UPDATE_INTERVAL: number;
FORENSICS_INTERVAL: number;
FORENSICS_RATE_LIMIT: number;
};
LND: {
TLS_CERT_PATH: string;
@@ -125,7 +116,6 @@ interface IConfig {
const defaults: IConfig = {
'MEMPOOL': {
'ENABLED': true,
'NETWORK': 'mainnet',
'BACKEND': 'none',
'HTTP_PORT': 8999,
@@ -150,9 +140,6 @@ const defaults: IConfig = {
'AUTOMATIC_BLOCK_REINDEXING': false,
'POOLS_JSON_URL': 'https://raw.githubusercontent.com/mempool/mining-pools/master/pools.json',
'POOLS_JSON_TREE_URL': 'https://api.github.com/repos/mempool/mining-pools/git/trees/master',
'ADVANCED_GBT_AUDIT': false,
'ADVANCED_GBT_MEMPOOL': false,
'TRANSACTION_INDEXING': false,
},
'ESPLORA': {
'REST_API_URL': 'http://127.0.0.1:3000',
@@ -204,9 +191,6 @@ const defaults: IConfig = {
'TOPOLOGY_FOLDER': '',
'STATS_REFRESH_INTERVAL': 600,
'GRAPH_REFRESH_INTERVAL': 600,
'LOGGER_UPDATE_INTERVAL': 30,
'FORENSICS_INTERVAL': 43200,
'FORENSICS_RATE_LIMIT': 20,
},
'LND': {
'TLS_CERT_PATH': '',
@@ -236,11 +220,11 @@ const defaults: IConfig = {
'BISQ_URL': 'https://bisq.markets/api',
'BISQ_ONION': 'http://bisqmktse2cabavbr2xjq7xw3h6g5ottemo5rolfcwt6aly6tp5fdryd.onion/api'
},
'MAXMIND': {
"MAXMIND": {
'ENABLED': false,
'GEOLITE2_CITY': '/usr/local/share/GeoIP/GeoLite2-City.mmdb',
'GEOLITE2_ASN': '/usr/local/share/GeoIP/GeoLite2-ASN.mmdb',
'GEOIP2_ISP': '/usr/local/share/GeoIP/GeoIP2-ISP.mmdb'
"GEOLITE2_CITY": "/usr/local/share/GeoIP/GeoLite2-City.mmdb",
"GEOLITE2_ASN": "/usr/local/share/GeoIP/GeoLite2-ASN.mmdb",
"GEOIP2_ISP": "/usr/local/share/GeoIP/GeoIP2-ISP.mmdb"
},
};
@@ -263,7 +247,7 @@ class Config implements IConfig {
MAXMIND: IConfig['MAXMIND'];
constructor() {
const configs = this.merge(configFromFile, defaults);
const configs = this.merge(configFile, defaults);
this.MEMPOOL = configs.MEMPOOL;
this.ESPLORA = configs.ESPLORA;
this.ELECTRUM = configs.ELECTRUM;

View File

@@ -1,4 +1,4 @@
import express from 'express';
import express from "express";
import { Application, Request, Response, NextFunction } from 'express';
import * as http from 'http';
import * as WebSocket from 'ws';
@@ -34,8 +34,7 @@ import miningRoutes from './api/mining/mining-routes';
import bisqRoutes from './api/bisq/bisq.routes';
import liquidRoutes from './api/liquid/liquid.routes';
import bitcoinRoutes from './api/bitcoin/bitcoin.routes';
import fundingTxFetcher from './tasks/lightning/sync-tasks/funding-tx-fetcher';
import forensicsService from './tasks/lightning/forensics.service';
import fundingTxFetcher from "./tasks/lightning/sync-tasks/funding-tx-fetcher";
class Server {
private wss: WebSocket.Server | undefined;
@@ -75,7 +74,7 @@ class Server {
}
}
async startServer(worker = false): Promise<void> {
async startServer(worker = false) {
logger.notice(`Starting Mempool Server${worker ? ' (worker)' : ''}... (${backendInfo.getShortCommitHash()})`);
this.app
@@ -84,7 +83,7 @@ class Server {
next();
})
.use(express.urlencoded({ extended: true }))
.use(express.text({ type: ['text/plain', 'application/base64'] }))
.use(express.text())
;
this.server = http.createServer(this.app);
@@ -93,9 +92,7 @@ class Server {
this.setUpWebsocketHandling();
await syncAssets.syncAssets$();
if (config.MEMPOOL.ENABLED) {
diskCache.loadMempoolCache();
}
diskCache.loadMempoolCache();
if (config.DATABASE.ENABLED) {
await DB.checkDbConnection();
@@ -130,10 +127,7 @@ class Server {
fiatConversion.startService();
this.setUpHttpApiRoutes();
if (config.MEMPOOL.ENABLED) {
this.runMainUpdateLoop();
}
this.runMainUpdateLoop();
if (config.BISQ.ENABLED) {
bisq.startBisqService();
@@ -155,7 +149,7 @@ class Server {
});
}
async runMainUpdateLoop(): Promise<void> {
async runMainUpdateLoop() {
try {
try {
await memPool.$updateMemPoolInfo();
@@ -189,20 +183,19 @@ class Server {
}
}
async $runLightningBackend(): Promise<void> {
async $runLightningBackend() {
try {
await fundingTxFetcher.$init();
await networkSyncService.$startService();
await forensicsService.$startService();
await lightningStatsUpdater.$startService();
} catch(e) {
logger.err(`Nodejs lightning backend crashed. Restarting in 1 minute. Reason: ${(e instanceof Error ? e.message : e)}`);
logger.err(`Lightning backend crashed. Restarting in 1 minute. Reason: ${(e instanceof Error ? e.message : e)}`);
await Common.sleep$(1000 * 60);
this.$runLightningBackend();
};
}
setUpWebsocketHandling(): void {
setUpWebsocketHandling() {
if (this.wss) {
websocketHandler.setWebsocketServer(this.wss);
}
@@ -216,21 +209,19 @@ class Server {
});
}
websocketHandler.setupConnectionHandling();
if (config.MEMPOOL.ENABLED) {
statistics.setNewStatisticsEntryCallback(websocketHandler.handleNewStatistic.bind(websocketHandler));
memPool.setAsyncMempoolChangedCallback(websocketHandler.handleMempoolChange.bind(websocketHandler));
blocks.setNewAsyncBlockCallback(websocketHandler.handleNewBlock.bind(websocketHandler));
}
statistics.setNewStatisticsEntryCallback(websocketHandler.handleNewStatistic.bind(websocketHandler));
blocks.setNewBlockCallback(websocketHandler.handleNewBlock.bind(websocketHandler));
memPool.setMempoolChangedCallback(websocketHandler.handleMempoolChange.bind(websocketHandler));
fiatConversion.setProgressChangedCallback(websocketHandler.handleNewConversionRates.bind(websocketHandler));
loadingIndicators.setProgressChangedCallback(websocketHandler.handleLoadingChanged.bind(websocketHandler));
}
setUpHttpApiRoutes(): void {
setUpHttpApiRoutes() {
bitcoinRoutes.initRoutes(this.app);
if (config.STATISTICS.ENABLED && config.DATABASE.ENABLED && config.MEMPOOL.ENABLED) {
if (config.STATISTICS.ENABLED && config.DATABASE.ENABLED) {
statisticsRoutes.initRoutes(this.app);
}
if (Common.indexingEnabled() && config.MEMPOOL.ENABLED) {
if (Common.indexingEnabled()) {
miningRoutes.initRoutes(this.app);
}
if (config.BISQ.ENABLED) {
@@ -247,4 +238,4 @@ class Server {
}
}
((): Server => new Server())();
const server = new Server();
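
A self-contained sketch of the express.text() middleware configured above; the /tx route is illustrative, not the repo's actual route table:

import express from 'express';

const app = express();
// parse text/plain and application/base64 request bodies into req.body as strings
app.use(express.text({ type: ['text/plain', 'application/base64'] }));
app.post('/tx', (req, res) => {
  res.send(`received ${req.body.length} characters`);
});
app.listen(8999); // the HTTP_PORT default from the config above
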

View File

@@ -77,7 +77,6 @@ class Indexer {
await mining.$generateNetworkHashrateHistory();
await mining.$generatePoolHashrateHistory();
await blocks.$generateBlocksSummariesDatabase();
await blocks.$generateCPFPDatabase();
} catch (e) {
this.indexerRunning = false;
logger.err(`Indexer failed, trying again in 10 seconds. Reason: ` + (e instanceof Error ? e.message : e));

View File

@@ -74,7 +74,7 @@ class Logger {
private getNetwork(): string {
if (config.LIGHTNING.ENABLED) {
return config.MEMPOOL.NETWORK === 'mainnet' ? 'lightning' : `${config.MEMPOOL.NETWORK}-lightning`;
return 'lightning';
}
if (config.BISQ.ENABLED) {
return 'bisq';

View File

@@ -1,5 +1,4 @@
import { IEsploraApi } from './api/bitcoin/esplora-api.interface';
import { HeapNode } from "./utils/pairing-heap";
export interface PoolTag {
id: number; // mysql row id
@@ -28,16 +27,10 @@ export interface BlockAudit {
height: number,
hash: string,
missingTxs: string[],
freshTxs: string[],
addedTxs: string[],
matchRate: number,
}
export interface AuditScore {
hash: string,
matchRate?: number,
}
export interface MempoolBlock {
blockSize: number;
blockVSize: number;
@@ -72,46 +65,17 @@ export interface TransactionExtended extends IEsploraApi.Transaction {
firstSeen?: number;
effectiveFeePerVsize: number;
ancestors?: Ancestor[];
descendants?: Ancestor[];
bestDescendant?: BestDescendant | null;
cpfpChecked?: boolean;
deleteAfter?: number;
}
export interface AuditTransaction {
txid: string;
fee: number;
size: number;
weight: number;
feePerVsize: number;
vin: IEsploraApi.Vin[];
relativesSet: boolean;
ancestorMap: Map<string, AuditTransaction>;
children: Set<AuditTransaction>;
ancestorFee: number;
ancestorWeight: number;
score: number;
used: boolean;
modified: boolean;
modifiedNode: HeapNode<AuditTransaction>;
}
export interface Ancestor {
interface Ancestor {
txid: string;
weight: number;
fee: number;
}
export interface TransactionSet {
fee: number;
weight: number;
score: number;
children?: Set<string>;
available?: boolean;
modified?: boolean;
modifiedNode?: HeapNode<string>;
}
interface BestDescendant {
txid: string;
weight: number;
@@ -120,9 +84,7 @@ interface BestDescendant {
export interface CpfpInfo {
ancestors: Ancestor[];
bestDescendant?: BestDescendant | null;
descendants?: Ancestor[];
effectiveFeePerVsize?: number;
bestDescendant: BestDescendant | null;
}
export interface TransactionStripped {
@@ -289,41 +251,3 @@ export interface RewardStats {
totalFee: number;
totalTx: number;
}
export interface ITopNodesPerChannels {
publicKey: string,
alias: string,
channels?: number,
capacity: number,
firstSeen?: number,
updatedAt?: number,
city?: any,
country?: any,
}
export interface ITopNodesPerCapacity {
publicKey: string,
alias: string,
capacity: number,
channels?: number,
firstSeen?: number,
updatedAt?: number,
city?: any,
country?: any,
}
export interface INodesRanking {
topByCapacity: ITopNodesPerCapacity[];
topByChannels: ITopNodesPerChannels[];
}
export interface IOldestNodes {
publicKey: string,
alias: string,
firstSeen: number,
channels?: number,
capacity: number,
updatedAt?: number,
city?: any,
country?: any,
}
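
As a consumer-side sketch, the ranking interfaces removed above correspond to the REST endpoints registered in nodes.routes.ts; the base URL is an assumption:

// fetch the liquidity ranking and type the response with the interface above
async function topByCapacity(): Promise<ITopNodesPerCapacity[]> {
  const res = await fetch('https://mempool.space/api/v1/lightning/nodes/rankings/liquidity');
  return await res.json() as ITopNodesPerCapacity[];
}
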

View File

@@ -1,14 +1,14 @@
import blocks from '../api/blocks';
import transactionUtils from '../api/transaction-utils';
import DB from '../database';
import logger from '../logger';
import { BlockAudit, AuditScore } from '../mempool.interfaces';
import { BlockAudit } from '../mempool.interfaces';
class BlocksAuditRepositories {
public async $saveAudit(audit: BlockAudit): Promise<void> {
try {
await DB.query(`INSERT INTO blocks_audits(time, height, hash, missing_txs, added_txs, fresh_txs, match_rate)
VALUE (FROM_UNIXTIME(?), ?, ?, ?, ?, ?, ?)`, [audit.time, audit.height, audit.hash, JSON.stringify(audit.missingTxs),
JSON.stringify(audit.addedTxs), JSON.stringify(audit.freshTxs), audit.matchRate]);
await DB.query(`INSERT INTO blocks_audits(time, height, hash, missing_txs, added_txs, match_rate)
VALUE (FROM_UNIXTIME(?), ?, ?, ?, ?, ?)`, [audit.time, audit.height, audit.hash, JSON.stringify(audit.missingTxs),
JSON.stringify(audit.addedTxs), audit.matchRate]);
} catch (e: any) {
if (e.errno === 1062) { // ER_DUP_ENTRY - This scenario is possible upon node backend restart
logger.debug(`Cannot save block audit for block ${audit.hash} because it has already been indexed, ignoring`);
@@ -52,58 +52,24 @@ class BlocksAuditRepositories {
const [rows]: any[] = await DB.query(
`SELECT blocks.height, blocks.hash as id, UNIX_TIMESTAMP(blocks.blockTimestamp) as timestamp, blocks.size,
blocks.weight, blocks.tx_count,
transactions, template, missing_txs as missingTxs, added_txs as addedTxs, fresh_txs as freshTxs, match_rate as matchRate
transactions, template, missing_txs as missingTxs, added_txs as addedTxs, match_rate as matchRate
FROM blocks_audits
JOIN blocks ON blocks.hash = blocks_audits.hash
JOIN blocks_summaries ON blocks_summaries.id = blocks_audits.hash
WHERE blocks_audits.hash = "${hash}"
`);
if (rows.length) {
rows[0].missingTxs = JSON.parse(rows[0].missingTxs);
rows[0].addedTxs = JSON.parse(rows[0].addedTxs);
rows[0].freshTxs = JSON.parse(rows[0].freshTxs);
rows[0].transactions = JSON.parse(rows[0].transactions);
rows[0].template = JSON.parse(rows[0].template);
if (rows[0].transactions.length) {
return rows[0];
}
}
return null;
} catch (e: any) {
logger.err(`Cannot fetch block audit from db. Reason: ` + (e instanceof Error ? e.message : e));
throw e;
}
}
public async $getBlockAuditScore(hash: string): Promise<AuditScore> {
try {
const [rows]: any[] = await DB.query(
`SELECT hash, match_rate as matchRate
FROM blocks_audits
WHERE blocks_audits.hash = "${hash}"
`);
rows[0].missingTxs = JSON.parse(rows[0].missingTxs);
rows[0].addedTxs = JSON.parse(rows[0].addedTxs);
rows[0].transactions = JSON.parse(rows[0].transactions);
rows[0].template = JSON.parse(rows[0].template);
return rows[0];
} catch (e: any) {
logger.err(`Cannot fetch block audit from db. Reason: ` + (e instanceof Error ? e.message : e));
throw e;
}
}
public async $getBlockAuditScores(maxHeight: number, minHeight: number): Promise<AuditScore[]> {
try {
const [rows]: any[] = await DB.query(
`SELECT hash, match_rate as matchRate
FROM blocks_audits
WHERE blocks_audits.height BETWEEN ? AND ?
`, [minHeight, maxHeight]);
return rows;
} catch (e: any) {
logger.err(`Cannot fetch block audit from db. Reason: ` + (e instanceof Error ? e.message : e));
throw e;
}
}
}
export default new BlocksAuditRepositories();
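
The errno 1062 branch above is the standard MySQL duplicate-key guard; distilled into a reusable sketch (the helper name is ours):

import DB from '../database';
import logger from '../logger';

async function insertIgnoringDuplicates(sql: string, params: unknown[]): Promise<void> {
  try {
    await DB.query(sql, params);
  } catch (e: any) {
    if (e.errno === 1062) { // ER_DUP_ENTRY: the row was already indexed, safe to skip
      logger.debug('Duplicate entry, ignoring');
    } else {
      throw e; // anything else is a real failure
    }
  }
}
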

View File

@@ -392,36 +392,6 @@ class BlocksRepository {
}
}
/**
* Get the first block at or directly after a given timestamp
* @param timestamp number unix time in seconds
* @returns The height and timestamp of a block (timestamp might vary from given timestamp)
*/
public async $getBlockHeightFromTimestamp(
timestamp: number,
): Promise<{ height: number; hash: string; timestamp: number }> {
try {
// Get first block at or after the given timestamp
const query = `SELECT height, hash, blockTimestamp as timestamp FROM blocks
WHERE blockTimestamp <= FROM_UNIXTIME(?)
ORDER BY blockTimestamp DESC
LIMIT 1`;
const params = [timestamp];
const [rows]: any[][] = await DB.query(query, params);
if (rows.length === 0) {
throw new Error(`No block was found before timestamp ${timestamp}`);
}
return rows[0];
} catch (e) {
logger.err(
'Cannot get block height from timestamp from the db. Reason: ' +
(e instanceof Error ? e.message : e),
);
throw e;
}
}
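
Note that the query above matches blockTimestamp <= the requested time and sorts descending, so it returns the closest block at or before the timestamp, even though the doc comment says "at or directly after". A hypothetical walk-through:

// hypothetical chain: three blocks at timestamps 1000, 1600, 2200
const chain = [
  { height: 100, timestamp: 1000 },
  { height: 101, timestamp: 1600 },
  { height: 102, timestamp: 2200 },
];
// mimic of the SQL: newest block with blockTimestamp <= requested
function heightFromTimestamp(ts: number) {
  return chain.filter(b => b.timestamp <= ts).sort((a, b) => b.timestamp - a.timestamp)[0];
}
console.log(heightFromTimestamp(1700)); // { height: 101, timestamp: 1600 }
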
/**
* Return blocks height
*/
@@ -662,23 +632,6 @@ class BlocksRepository {
}
}
/**
* Get a list of blocks that have not had CPFP data indexed
*/
public async $getCPFPUnindexedBlocks(): Promise<any[]> {
try {
const [rows]: any = await DB.query(`SELECT height, hash FROM blocks WHERE cpfp_indexed = 0 ORDER BY height DESC`);
return rows;
} catch (e) {
logger.err('Cannot fetch CPFP unindexed blocks. Reason: ' + (e instanceof Error ? e.message : e));
throw e;
}
}
public async $setCPFPIndexed(hash: string): Promise<void> {
await DB.query(`UPDATE blocks SET cpfp_indexed = 1 WHERE hash = ?`, [hash]);
}
/**
* Return the oldest block from a consecutive chain of block from the most recent one
*/

View File

@@ -17,16 +17,19 @@ class BlocksSummariesRepository {
return undefined;
}
public async $saveSummary(params: { height: number, mined?: BlockSummary}) {
const blockId = params.mined?.id;
public async $saveSummary(params: { height: number, mined?: BlockSummary, template?: BlockSummary}) {
const blockId = params.mined?.id ?? params.template?.id;
try {
const transactions = JSON.stringify(params.mined?.transactions || []);
await DB.query(`
INSERT INTO blocks_summaries (height, id, transactions, template)
VALUE (?, ?, ?, ?)
ON DUPLICATE KEY UPDATE
transactions = ?
`, [params.height, blockId, transactions, '[]', transactions]);
const [dbSummary]: any[] = await DB.query(`SELECT * FROM blocks_summaries WHERE id = "${blockId}"`);
if (dbSummary.length === 0) { // First insertion
await DB.query(`INSERT INTO blocks_summaries VALUE (?, ?, ?, ?)`, [
params.height, blockId, JSON.stringify(params.mined?.transactions ?? []), JSON.stringify(params.template?.transactions ?? [])
]);
} else if (params.mined !== undefined) { // Update mined block summary
await DB.query(`UPDATE blocks_summaries SET transactions = ? WHERE id = ?`, [JSON.stringify(params.mined.transactions), params.mined.id]);
} else if (params.template !== undefined) { // Update template block summary
await DB.query(`UPDATE blocks_summaries SET template = ? WHERE id = ?`, [JSON.stringify(params.template.transactions), params.template.id]);
}
} catch (e: any) {
if (e.errno === 1062) { // ER_DUP_ENTRY - This scenario is possible upon node backend restart
logger.debug(`Cannot save block summary for ${blockId} because it has already been indexed, ignoring`);
@@ -37,26 +40,6 @@ class BlocksSummariesRepository {
}
}
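The reworked $saveSummary above accepts either side of a block; a hedged call-site sketch (the repository instance and the two summary objects are placeholders):

// Persist the mined summary, then attach the projected template for the same height
await blocksSummariesRepository.$saveSummary({ height: 800000, mined: minedSummary });
await blocksSummariesRepository.$saveSummary({ height: 800000, template: templateSummary });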
public async $saveTemplate(params: { height: number, template: BlockSummary}) {
const blockId = params.template?.id;
try {
const transactions = JSON.stringify(params.template?.transactions || []);
await DB.query(`
INSERT INTO blocks_summaries (height, id, transactions, template)
VALUE (?, ?, ?, ?)
ON DUPLICATE KEY UPDATE
template = ?
`, [params.height, blockId, '[]', transactions, transactions]);
} catch (e: any) {
if (e.errno === 1062) { // ER_DUP_ENTRY - This scenario is possible upon node backend restart
logger.debug(`Cannot save block template for ${blockId} because it has already been indexed, ignoring`);
} else {
logger.debug(`Cannot save block template for ${blockId}. Reason: ${e instanceof Error ? e.message : e}`);
throw e;
}
}
}
public async $getIndexedSummariesId(): Promise<string[]> {
try {
const [rows]: any[] = await DB.query(`SELECT id from blocks_summaries`);

View File

@@ -1,43 +0,0 @@
import DB from '../database';
import logger from '../logger';
import { Ancestor } from '../mempool.interfaces';
class CpfpRepository {
public async $saveCluster(height: number, txs: Ancestor[], effectiveFeePerVsize: number): Promise<void> {
try {
const txsJson = JSON.stringify(txs);
await DB.query(
`
INSERT INTO cpfp_clusters(root, height, txs, fee_rate)
VALUE (?, ?, ?, ?)
ON DUPLICATE KEY UPDATE
height = ?,
txs = ?,
fee_rate = ?
`,
[txs[0].txid, height, txsJson, effectiveFeePerVsize, height, txsJson, effectiveFeePerVsize]
);
} catch (e: any) {
logger.err(`Cannot save cpfp cluster into db. Reason: ` + (e instanceof Error ? e.message : e));
throw e;
}
}
public async $deleteClustersFrom(height: number): Promise<void> {
logger.info(`Delete newer cpfp clusters from height ${height} from the database`);
try {
await DB.query(
`
DELETE from cpfp_clusters
WHERE height >= ?
`,
[height]
);
} catch (e: any) {
logger.err(`Cannot delete cpfp clusters from db. Reason: ` + (e instanceof Error ? e.message : e));
throw e;
}
}
}
export default new CpfpRepository();
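A minimal sketch of persisting a cluster with the repository above; the Ancestor field names are assumptions, and the first entry's txid doubles as the cluster's root key:

import cpfpRepository from './repositories/CpfpRepository'; // path assumed

async function saveExampleCluster(): Promise<void> {
  const txs = [
    { txid: '00'.repeat(32), fee: 1200, weight: 600 }, // placeholder parent
    { txid: '11'.repeat(32), fee: 4800, weight: 800 }, // placeholder child paying for it
  ] as any[]; // cast because the exact Ancestor shape is assumed here
  await cpfpRepository.$saveCluster(800000, txs, 2.0); // 2.0 sat/vB effective rate, illustrative
}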

View File

@@ -1,67 +0,0 @@
import { ResultSetHeader, RowDataPacket } from 'mysql2';
import DB from '../database';
import logger from '../logger';
export interface NodeRecord {
publicKey: string; // node public key
type: number; // TLV extension record type
payload: string; // base64 record payload
}
class NodesRecordsRepository {
public async $saveRecord(record: NodeRecord): Promise<void> {
try {
const payloadBytes = Buffer.from(record.payload, 'base64');
await DB.query(`
INSERT INTO nodes_records(public_key, type, payload)
VALUE (?, ?, ?)
ON DUPLICATE KEY UPDATE
payload = ?
`, [record.publicKey, record.type, payloadBytes, payloadBytes]);
} catch (e: any) {
if (e.errno !== 1062) { // ER_DUP_ENTRY - Not an issue, just ignore this
logger.err(`Cannot save node record (${[record.publicKey, record.type, record.payload]}) into db. Reason: ` + (e instanceof Error ? e.message : e));
// We don't throw, not a critical issue if we miss some node records
}
}
}
public async $getRecordTypes(publicKey: string): Promise<any> {
try {
const query = `
SELECT type FROM nodes_records
WHERE public_key = ?
`;
const [rows] = await DB.query<RowDataPacket[][]>(query, [publicKey]);
return rows.map(row => row['type']);
} catch (e) {
logger.err(`Cannot retrieve custom records for ${publicKey} from db. Reason: ` + (e instanceof Error ? e.message : e));
return [];
}
}
public async $deleteUnusedRecords(publicKey: string, recordTypes: number[]): Promise<number> {
try {
let query;
if (recordTypes.length) {
query = `
DELETE FROM nodes_records
WHERE public_key = ?
AND type NOT IN (${recordTypes.map(type => `${type}`).join(',')})
`;
} else {
query = `
DELETE FROM nodes_records
WHERE public_key = ?
`;
}
const [result] = await DB.query<ResultSetHeader>(query, [publicKey]);
return result.affectedRows;
} catch (e) {
logger.err(`Cannot delete unused custom records for ${publicKey} from db. Reason: ` + (e instanceof Error ? e.message : e));
return 0;
}
}
}
export default new NodesRecordsRepository();
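A hedged usage sketch for the records repository above; the public key and TLV type are placeholders, and the payload is base64-encoded as documented on NodeRecord:

await NodesRecordsRepository.$saveRecord({
  publicKey: '02' + 'ab'.repeat(32),                  // placeholder 66-char hex node id
  type: 34349334,                                     // illustrative TLV record type
  payload: Buffer.from('example').toString('base64'), // stored as raw bytes, passed as base64
});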

View File

@@ -1,45 +0,0 @@
import { ResultSetHeader } from 'mysql2';
import DB from '../database';
import logger from '../logger';
export interface NodeSocket {
publicKey: string;
network: string | null;
addr: string;
}
class NodesSocketsRepository {
public async $saveSocket(socket: NodeSocket): Promise<void> {
try {
await DB.query(`
INSERT INTO nodes_sockets(public_key, socket, type)
VALUE (?, ?, ?)
`, [socket.publicKey, socket.addr, socket.network]);
} catch (e: any) {
if (e.errno !== 1062) { // ER_DUP_ENTRY - Not an issue, just ignore this
logger.err(`Cannot save node socket (${[socket.publicKey, socket.addr, socket.network]}) into db. Reason: ` + (e instanceof Error ? e.message : e));
// We don't throw, not a critical issue if we miss some node sockets
}
}
}
public async $deleteUnusedSockets(publicKey: string, addresses: string[]): Promise<number> {
if (addresses.length === 0) {
return 0;
}
try {
const query = `
DELETE FROM nodes_sockets
WHERE public_key = ?
AND socket NOT IN (${addresses.map(() => '?').join(',')})
`;
const [result] = await DB.query<ResultSetHeader>(query, [publicKey, ...addresses]);
return result.affectedRows;
} catch (e) {
logger.err(`Cannot delete unused sockets for ${publicKey} from db. Reason: ` + (e instanceof Error ? e.message : e));
return 0;
}
}
}
export default new NodesSocketsRepository();
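These two methods are meant as a save-then-prune pair; the network sync service further down in this diff uses them exactly like this (node is a graph node object, deletedSockets a running counter):

const addresses: string[] = [];
for (const socket of node.addresses) {
  await NodesSocketsRepository.$saveSocket(Common.formatSocket(node.pub_key, socket));
  addresses.push(socket.addr);
}
// Drop any socket rows the graph no longer announces for this node
deletedSockets += await NodesSocketsRepository.$deleteUnusedSockets(node.pub_key, addresses);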

View File

@@ -1,77 +0,0 @@
import DB from '../database';
import logger from '../logger';
import { Ancestor, CpfpInfo } from '../mempool.interfaces';
interface CpfpSummary {
txid: string;
cluster: string;
root: string;
txs: Ancestor[];
height: number;
fee_rate: number;
}
class TransactionRepository {
public async $setCluster(txid: string, cluster: string): Promise<void> {
try {
await DB.query(
`
INSERT INTO transactions
(
txid,
cluster
)
VALUE (?, ?)
ON DUPLICATE KEY UPDATE
cluster = ?
;`,
[txid, cluster, cluster]
);
} catch (e: any) {
logger.err(`Cannot save transaction cpfp cluster into db. Reason: ` + (e instanceof Error ? e.message : e));
throw e;
}
}
public async $getCpfpInfo(txid: string): Promise<CpfpInfo | void> {
try {
let query = `
SELECT *
FROM transactions
LEFT JOIN cpfp_clusters AS cluster ON cluster.root = transactions.cluster
WHERE transactions.txid = ?
`;
const [rows]: any = await DB.query(query, [txid]);
if (rows.length) {
rows[0].txs = JSON.parse(rows[0].txs) as Ancestor[];
return this.convertCpfp(rows[0]);
}
} catch (e) {
logger.err('Cannot get transaction cpfp info from db. Reason: ' + (e instanceof Error ? e.message : e));
throw e;
}
}
private convertCpfp(cpfp: CpfpSummary): CpfpInfo {
const descendants: Ancestor[] = [];
const ancestors: Ancestor[] = [];
let matched = false;
for (const tx of cpfp.txs) {
if (tx.txid === cpfp.txid) {
matched = true;
} else if (!matched) {
descendants.push(tx);
} else {
ancestors.push(tx);
}
}
return {
descendants,
ancestors,
effectiveFeePerVsize: cpfp.fee_rate
};
}
}
export default new TransactionRepository();
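A worked example of the ordering convention convertCpfp relies on: within the stored cluster, transactions listed before the target txid are treated as its descendants and those after it as its ancestors:

// cluster.txs = [childB, childA, target, parent]
// convertCpfp({ txid: 'target', txs, fee_rate, ... }) yields:
//   descendants          = [childB, childA]
//   ancestors            = [parent]
//   effectiveFeePerVsize = cluster.fee_rate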

View File

@@ -1,457 +0,0 @@
import DB from '../../database';
import logger from '../../logger';
import channelsApi from '../../api/explorer/channels.api';
import bitcoinApi from '../../api/bitcoin/bitcoin-api-factory';
import config from '../../config';
import { IEsploraApi } from '../../api/bitcoin/esplora-api.interface';
import { Common } from '../../api/common';
import { ILightningApi } from '../../api/lightning/lightning-api.interface';
const tempCacheSize = 10000;
class ForensicsService {
loggerTimer = 0;
closedChannelsScanBlock = 0;
txCache: { [txid: string]: IEsploraApi.Transaction } = {};
tempCached: string[] = [];
constructor() {}
public async $startService(): Promise<void> {
logger.info('Starting lightning network forensics service');
this.loggerTimer = new Date().getTime() / 1000;
await this.$runTasks();
}
private async $runTasks(): Promise<void> {
try {
logger.info(`Running forensics scans`);
if (config.MEMPOOL.BACKEND === 'esplora') {
await this.$runClosedChannelsForensics(false);
await this.$runOpenedChannelsForensics();
}
} catch (e) {
logger.err('ForensicsService.$runTasks() error: ' + (e instanceof Error ? e.message : e));
}
setTimeout(() => { this.$runTasks(); }, 1000 * config.LIGHTNING.FORENSICS_INTERVAL);
}
/*
1. Mutually closed
2. Forced closed
3. Forced closed with penalty
┌────────────────────────────────────┐ ┌────────────────────────────┐
│ outputs contain revocation script? ├──yes──► force close w/ penalty = 3 │
└──────────────┬─────────────────────┘ └────────────────────────────┘
no
┌──────────────▼──────────────────────────┐
│ outputs contain other lightning script? ├──┐
└──────────────┬──────────────────────────┘ │
no yes
┌──────────────▼─────────────┐ │
│ sequence starts with 0x80 │ ┌────────▼────────┐
│ and ├──────► force close = 2 │
│ locktime starts with 0x20? │ └─────────────────┘
└──────────────┬─────────────┘
no
┌─────────▼────────┐
│ mutual close = 1 │
└──────────────────┘
*/
public async $runClosedChannelsForensics(onlyNewChannels: boolean = false): Promise<void> {
if (config.MEMPOOL.BACKEND !== 'esplora') {
return;
}
let progress = 0;
try {
logger.info(`Started running closed channel forensics...`);
let channels;
if (onlyNewChannels) {
channels = await channelsApi.$getClosedChannelsWithoutReason();
} else {
channels = await channelsApi.$getUnresolvedClosedChannels();
}
for (const channel of channels) {
let reason = 0;
let resolvedForceClose = false;
// Only Esplora backend can retrieve spent transaction outputs
const cached: string[] = [];
try {
let outspends: IEsploraApi.Outspend[] | undefined;
try {
outspends = await bitcoinApi.$getOutspends(channel.closing_transaction_id);
await Common.sleep$(config.LIGHTNING.FORENSICS_RATE_LIMIT);
} catch (e) {
logger.err(`Failed to call ${config.ESPLORA.REST_API_URL + '/tx/' + channel.closing_transaction_id + '/outspends'}. Reason ${e instanceof Error ? e.message : e}`);
continue;
}
const lightningScriptReasons: number[] = [];
for (const outspend of outspends) {
if (outspend.spent && outspend.txid) {
let spendingTx = await this.fetchTransaction(outspend.txid);
if (!spendingTx) {
continue;
}
cached.push(spendingTx.txid);
const lightningScript = this.findLightningScript(spendingTx.vin[outspend.vin || 0]);
lightningScriptReasons.push(lightningScript);
}
}
const filteredReasons = lightningScriptReasons.filter((r) => r !== 1);
if (filteredReasons.length) {
if (filteredReasons.some((r) => r === 2 || r === 4)) {
reason = 3;
} else {
reason = 2;
resolvedForceClose = true;
}
} else {
/*
We can detect a commitment transaction (force close) by reading Sequence and Locktime
https://github.com/lightning/bolts/blob/master/03-transactions.md#commitment-transaction
*/
let closingTx = await this.fetchTransaction(channel.closing_transaction_id, true);
if (!closingTx) {
continue;
}
cached.push(closingTx.txid);
const sequenceHex: string = closingTx.vin[0].sequence.toString(16);
const locktimeHex: string = closingTx.locktime.toString(16);
if (sequenceHex.substring(0, 2) === '80' && locktimeHex.substring(0, 2) === '20') {
reason = 2; // Here we can't be sure if it's a penalty or not
} else {
reason = 1;
}
}
if (reason) {
logger.debug('Setting closing reason ' + reason + ' for channel: ' + channel.id + '.');
await DB.query(`UPDATE channels SET closing_reason = ? WHERE id = ?`, [reason, channel.id]);
if (reason === 2 && resolvedForceClose) {
await DB.query(`UPDATE channels SET closing_resolved = ? WHERE id = ?`, [true, channel.id]);
}
if (reason !== 2 || resolvedForceClose) {
cached.forEach(txid => {
delete this.txCache[txid];
});
}
}
} catch (e) {
logger.err(`$runClosedChannelsForensics() failed for channel ${channel.short_id}. Reason: ${e instanceof Error ? e.message : e}`);
}
++progress;
const elapsedSeconds = Math.round((new Date().getTime() / 1000) - this.loggerTimer);
if (elapsedSeconds > 10) {
logger.info(`Updating closed channel forensics ${progress}/${channels.length}`);
this.loggerTimer = new Date().getTime() / 1000;
}
}
logger.info(`Closed channels forensics scan complete.`);
} catch (e) {
logger.err('$runClosedChannelsForensics() error: ' + (e instanceof Error ? e.message : e));
}
}
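The sequence/locktime heuristic above can be read in isolation; a minimal sketch, assuming 32-bit values as returned by the Esplora API (per BOLT #3, commitment transactions encode the obscured commitment number into these fields, so the sequence's upper byte is 0x80 and the locktime's upper byte is 0x20):

function looksLikeCommitmentTx(sequence: number, locktime: number): boolean {
  // Compare the upper 8 bits; toString(16) drops leading zeros, so this only
  // matches full 32-bit values, which both fields are in a commitment tx
  return sequence.toString(16).substring(0, 2) === '80'
    && locktime.toString(16).substring(0, 2) === '20';
}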
private findLightningScript(vin: IEsploraApi.Vin): number {
const topElement = vin.witness?.length > 2 ? vin.witness[vin.witness.length - 2] : null;
if (/^OP_IF OP_PUSHBYTES_33 \w{66} OP_ELSE OP_PUSH(NUM_\d+|BYTES_(1 \w{2}|2 \w{4})) OP_CSV OP_DROP OP_PUSHBYTES_33 \w{66} OP_ENDIF OP_CHECKSIG$/.test(vin.inner_witnessscript_asm)) {
// https://github.com/lightning/bolts/blob/master/03-transactions.md#commitment-transaction-outputs
if (topElement === '01') {
// top element is '01' to get in the revocation path
// 'Revoked Lightning Force Close';
// Penalty force closed
return 2;
} else {
// top element is '', this is a delayed to_local output
// 'Lightning Force Close';
return 3;
}
} else if (
/^OP_DUP OP_HASH160 OP_PUSHBYTES_20 \w{40} OP_EQUAL OP_IF OP_CHECKSIG OP_ELSE OP_PUSHBYTES_33 \w{66} OP_SWAP OP_SIZE OP_PUSHBYTES_1 20 OP_EQUAL OP_NOTIF OP_DROP OP_PUSHNUM_2 OP_SWAP OP_PUSHBYTES_33 \w{66} OP_PUSHNUM_2 OP_CHECKMULTISIG OP_ELSE OP_HASH160 OP_PUSHBYTES_20 \w{40} OP_EQUALVERIFY OP_CHECKSIG OP_ENDIF (OP_PUSHNUM_1 OP_CSV OP_DROP |)OP_ENDIF$/.test(vin.inner_witnessscript_asm) ||
/^OP_DUP OP_HASH160 OP_PUSHBYTES_20 \w{40} OP_EQUAL OP_IF OP_CHECKSIG OP_ELSE OP_PUSHBYTES_33 \w{66} OP_SWAP OP_SIZE OP_PUSHBYTES_1 20 OP_EQUAL OP_IF OP_HASH160 OP_PUSHBYTES_20 \w{40} OP_EQUALVERIFY OP_PUSHNUM_2 OP_SWAP OP_PUSHBYTES_33 \w{66} OP_PUSHNUM_2 OP_CHECKMULTISIG OP_ELSE OP_DROP OP_PUSHBYTES_3 \w{6} OP_CLTV OP_DROP OP_CHECKSIG OP_ENDIF (OP_PUSHNUM_1 OP_CSV OP_DROP |)OP_ENDIF$/.test(vin.inner_witnessscript_asm)
) {
// https://github.com/lightning/bolts/blob/master/03-transactions.md#offered-htlc-outputs
// https://github.com/lightning/bolts/blob/master/03-transactions.md#received-htlc-outputs
if (topElement?.length === 66) {
// top element is a public key
// 'Revoked Lightning HTLC'; Penalty force closed
return 4;
} else if (topElement) {
// top element is a preimage
// 'Lightning HTLC';
return 5;
} else {
// top element is '' to get in the expiry of the script
// 'Expired Lightning HTLC';
return 6;
}
} else if (/^OP_PUSHBYTES_33 \w{66} OP_CHECKSIG OP_IFDUP OP_NOTIF OP_PUSHNUM_16 OP_CSV OP_ENDIF$/.test(vin.inner_witnessscript_asm)) {
// https://github.com/lightning/bolts/blob/master/03-transactions.md#to_local_anchor-and-to_remote_anchor-output-option_anchors
if (topElement) {
// top element is a signature
// 'Lightning Anchor';
return 7;
} else {
// top element is '', it has been swept after 16 blocks
// 'Swept Lightning Anchor';
return 8;
}
}
return 1;
}
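A legend for the numeric codes returned by findLightningScript, collected from the comments above (labels are paraphrases, not identifiers from this codebase):

const lightningScriptTypes: Record<number, string> = {
  1: 'Not a recognized lightning script (e.g. a mutual close spend)',
  2: 'Revoked force close, spent via the revocation (penalty) path',
  3: 'Force close, delayed to_local output',
  4: 'Revoked lightning HTLC (penalty)',
  5: 'Lightning HTLC claimed with a preimage',
  6: 'Expired lightning HTLC',
  7: 'Lightning anchor',
  8: 'Swept lightning anchor',
};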
// If a channel open tx spends funds from another channel transaction,
// we can attribute that output to a specific counterparty
private async $runOpenedChannelsForensics(): Promise<void> {
const runTimer = Date.now();
let progress = 0;
try {
logger.info(`Started running open channel forensics...`);
const channels = await channelsApi.$getChannelsWithoutSourceChecked();
for (const openChannel of channels) {
let openTx = await this.fetchTransaction(openChannel.transaction_id, true);
if (!openTx) {
continue;
}
for (const input of openTx.vin) {
const closeChannel = await channelsApi.$getChannelByClosingId(input.txid);
if (closeChannel) {
// this input directly spends a channel close output
await this.$attributeChannelBalances(closeChannel, openChannel, input);
} else {
const prevOpenChannels = await channelsApi.$getChannelsByOpeningId(input.txid);
if (prevOpenChannels?.length) {
// this input spends a channel open change output
for (const prevOpenChannel of prevOpenChannels) {
await this.$attributeChannelBalances(prevOpenChannel, openChannel, input, null, null, true);
}
} else {
// check if this input spends any swept channel close outputs
await this.$attributeSweptChannelCloses(openChannel, input);
}
}
}
// calculate how much of the total input value is attributable to the channel open output
openChannel.funding_ratio = openTx.vout[openChannel.transaction_vout].value / ((openTx.vout.reduce((sum, v) => sum + v.value, 0) || 1) + openTx.fee);
// save changes to the opening channel, and mark it as checked
if (openTx?.vin?.length === 1) {
openChannel.single_funded = true;
}
if (openChannel.node1_funding_balance || openChannel.node2_funding_balance || openChannel.node1_closing_balance || openChannel.node2_closing_balance || openChannel.closed_by) {
await channelsApi.$updateOpeningInfo(openChannel);
}
await channelsApi.$markChannelSourceChecked(openChannel.id);
++progress;
const elapsedSeconds = Math.round((new Date().getTime() / 1000) - this.loggerTimer);
if (elapsedSeconds > 10) {
logger.info(`Updating opened channel forensics ${progress}/${channels?.length}`);
this.loggerTimer = new Date().getTime() / 1000;
this.truncateTempCache();
}
if (Date.now() - runTimer > (config.LIGHTNING.FORENSICS_INTERVAL * 1000)) {
break;
}
}
logger.info(`Open channels forensics scan complete.`);
} catch (e) {
logger.err('$runOpenedChannelsForensics() error: ' + (e instanceof Error ? e.message : e));
} finally {
this.clearTempCache();
}
}
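A worked example of the funding_ratio computed above, assuming Esplora-style satoshi values: a funding tx paying 50,000,000 sats to the channel output and 49,990,000 sats to change, with a 10,000 sat fee, spent 100,000,000 sats in total, so exactly half went into the channel:

// Same arithmetic as the assignment above, with illustrative values
const vout = [{ value: 50_000_000 }, { value: 49_990_000 }]; // channel output at index 0
const fee = 10_000;
const fundingRatio = vout[0].value / ((vout.reduce((sum, v) => sum + v.value, 0) || 1) + fee);
// fundingRatio === 0.5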
// Check if a channel open tx input spends the result of a swept channel close output
private async $attributeSweptChannelCloses(openChannel: ILightningApi.Channel, input: IEsploraApi.Vin): Promise<void> {
let sweepTx = await this.fetchTransaction(input.txid, true);
if (!sweepTx) {
logger.err(`couldn't find input transaction for channel forensics ${openChannel.channel_id} ${input.txid}`);
return;
}
const openContribution = sweepTx.vout[input.vout].value;
for (const sweepInput of sweepTx.vin) {
const lnScriptType = this.findLightningScript(sweepInput);
if (lnScriptType > 1) {
const closeChannel = await channelsApi.$getChannelByClosingId(sweepInput.txid);
if (closeChannel) {
const initiator = (lnScriptType === 2 || lnScriptType === 4) ? 'remote' : (lnScriptType === 3 ? 'local' : null);
await this.$attributeChannelBalances(closeChannel, openChannel, sweepInput, openContribution, initiator);
}
}
}
}
private async $attributeChannelBalances(
prevChannel, openChannel, input: IEsploraApi.Vin, openContribution: number | null = null,
initiator: 'remote' | 'local' | null = null, linkedOpenings: boolean = false
): Promise<void> {
// figure out which node controls the input/output
let openSide;
let prevLocal;
let prevRemote;
let matched = false;
let ambiguous = false; // if counterparties are the same in both channels, we can't tell them apart
if (openChannel.node1_public_key === prevChannel.node1_public_key) {
openSide = 1;
prevLocal = 1;
prevRemote = 2;
matched = true;
} else if (openChannel.node1_public_key === prevChannel.node2_public_key) {
openSide = 1;
prevLocal = 2;
prevRemote = 1;
matched = true;
}
if (openChannel.node2_public_key === prevChannel.node1_public_key) {
openSide = 2;
prevLocal = 1;
prevRemote = 2;
if (matched) {
ambiguous = true;
}
matched = true;
} else if (openChannel.node2_public_key === prevChannel.node2_public_key) {
openSide = 2;
prevLocal = 2;
prevRemote = 1;
if (matched) {
ambiguous = true;
}
matched = true;
}
if (matched && !ambiguous) {
// fetch closing channel transaction and perform forensics on the outputs
let prevChannelTx = await this.fetchTransaction(input.txid, true);
let outspends: IEsploraApi.Outspend[] | undefined;
try {
outspends = await bitcoinApi.$getOutspends(input.txid);
await Common.sleep$(config.LIGHTNING.FORENSICS_RATE_LIMIT);
} catch (e) {
logger.err(`Failed to call ${config.ESPLORA.REST_API_URL + '/tx/' + input.txid + '/outspends'}. Reason ${e instanceof Error ? e.message : e}`);
}
if (!outspends || !prevChannelTx) {
return;
}
if (!linkedOpenings) {
if (!prevChannel.outputs || !prevChannel.outputs.length) {
prevChannel.outputs = prevChannelTx.vout.map(vout => {
return {
type: 0,
value: vout.value,
};
});
}
for (let i = 0; i < outspends?.length; i++) {
const outspend = outspends[i];
const output = prevChannel.outputs[i];
if (outspend.spent && outspend.txid) {
try {
const spendingTx = await this.fetchTransaction(outspend.txid, true);
if (spendingTx) {
output.type = this.findLightningScript(spendingTx.vin[outspend.vin || 0]);
}
} catch (e) {
logger.err(`Failed to call ${config.ESPLORA.REST_API_URL + '/tx/' + outspend.txid}. Reason ${e instanceof Error ? e.message : e}`);
}
} else {
output.type = 0;
}
}
// attribute outputs to each counterparty, and sum up total known balances
prevChannel.outputs[input.vout].node = prevLocal;
const isPenalty = prevChannel.outputs.filter((out) => out.type === 2 || out.type === 4)?.length > 0;
const normalOutput = [1,3].includes(prevChannel.outputs[input.vout].type);
const mutualClose = ((prevChannel.status === 2 || prevChannel.status === 'closed') && prevChannel.closing_reason === 1);
let localClosingBalance = 0;
let remoteClosingBalance = 0;
for (const output of prevChannel.outputs) {
if (isPenalty) {
// penalty close, so local node takes everything
localClosingBalance += output.value;
} else if (output.node) {
// this output is deterministically linked to one of the counterparties
if (output.node === prevLocal) {
localClosingBalance += output.value;
} else {
remoteClosingBalance += output.value;
}
} else if (normalOutput && (output.type === 1 || output.type === 3 || (mutualClose && prevChannel.outputs.length === 2))) {
// local node had one main output, therefore remote node takes the other
remoteClosingBalance += output.value;
}
}
prevChannel[`node${prevLocal}_closing_balance`] = localClosingBalance;
prevChannel[`node${prevRemote}_closing_balance`] = remoteClosingBalance;
prevChannel.closing_fee = prevChannelTx.fee;
if (initiator && !linkedOpenings) {
const initiatorSide = initiator === 'remote' ? prevRemote : prevLocal;
prevChannel.closed_by = prevChannel[`node${initiatorSide}_public_key`];
}
// save changes to the closing channel
await channelsApi.$updateClosingInfo(prevChannel);
} else {
if (prevChannelTx.vin.length <= 1) {
prevChannel[`node${prevLocal}_funding_balance`] = prevChannel.capacity;
prevChannel.single_funded = true;
prevChannel.funding_ratio = 1;
// save changes to the closing channel
await channelsApi.$updateOpeningInfo(prevChannel);
}
}
openChannel[`node${openSide}_funding_balance`] = openChannel[`node${openSide}_funding_balance`] + (openContribution || prevChannelTx?.vout[input.vout]?.value || 0);
}
}
async fetchTransaction(txid: string, temp: boolean = false): Promise<IEsploraApi.Transaction | null> {
let tx = this.txCache[txid];
if (!tx) {
try {
tx = await bitcoinApi.$getRawTransaction(txid);
this.txCache[txid] = tx;
if (temp) {
this.tempCached.push(txid);
}
await Common.sleep$(config.LIGHTNING.FORENSICS_RATE_LIMIT);
} catch (e) {
logger.err(`Failed to call ${config.ESPLORA.REST_API_URL + '/tx/' + txid}. Reason ${e instanceof Error ? e.message : e}`);
return null;
}
}
return tx;
}
clearTempCache(): void {
for (const txid of this.tempCached) {
delete this.txCache[txid];
}
this.tempCached = [];
}
truncateTempCache(): void {
if (this.tempCached.length > tempCacheSize) {
const removed = this.tempCached.splice(0, this.tempCached.length - tempCacheSize);
for (const txid of removed) {
delete this.txCache[txid];
}
}
}
}
export default new ForensicsService();

View File

@@ -10,15 +10,9 @@ import lightningApi from '../../api/lightning/lightning-api-factory';
import nodesApi from '../../api/explorer/nodes.api';
import { ResultSetHeader } from 'mysql2';
import fundingTxFetcher from './sync-tasks/funding-tx-fetcher';
import NodesSocketsRepository from '../../repositories/NodesSocketsRepository';
import { Common } from '../../api/common';
import blocks from '../../api/blocks';
import NodeRecordsRepository from '../../repositories/NodeRecordsRepository';
import forensicsService from './forensics.service';
class NetworkSyncService {
loggerTimer = 0;
closedChannelsScanBlock = 0;
constructor() {}
@@ -31,7 +25,6 @@ class NetworkSyncService {
}
private async $runTasks(): Promise<void> {
const taskStartTime = Date.now();
try {
logger.info(`Updating nodes and channels`);
@@ -48,17 +41,15 @@ class NetworkSyncService {
await this.$lookUpCreationDateFromChain();
await this.$updateNodeFirstSeen();
await this.$scanForClosedChannels();
if (config.MEMPOOL.BACKEND === 'esplora') {
// run forensics on new channels only
await forensicsService.$runClosedChannelsForensics(true);
await this.$runClosedChannelsForensics();
}
} catch (e) {
logger.err('$runTasks() error: ' + (e instanceof Error ? e.message : e));
}
setTimeout(() => { this.$runTasks(); }, Math.max(1, (1000 * config.LIGHTNING.GRAPH_REFRESH_INTERVAL) - (Date.now() - taskStartTime)));
setTimeout(() => { this.$runTasks(); }, 1000 * config.LIGHTNING.GRAPH_REFRESH_INTERVAL);
}
/**
@@ -67,13 +58,8 @@ class NetworkSyncService {
private async $updateNodesList(nodes: ILightningApi.Node[]): Promise<void> {
let progress = 0;
let deletedSockets = 0;
let deletedRecords = 0;
const graphNodesPubkeys: string[] = [];
for (const node of nodes) {
const latestUpdated = await channelsApi.$getLatestChannelUpdateForNode(node.pub_key);
node.last_update = Math.max(node.last_update, latestUpdated);
await nodesApi.$saveNode(node);
graphNodesPubkeys.push(node.pub_key);
++progress;
@@ -83,33 +69,11 @@ class NetworkSyncService {
logger.info(`Updating node ${progress}/${nodes.length}`);
this.loggerTimer = new Date().getTime() / 1000;
}
const addresses: string[] = [];
for (const socket of node.addresses) {
await NodesSocketsRepository.$saveSocket(Common.formatSocket(node.pub_key, socket));
addresses.push(socket.addr);
}
deletedSockets += await NodesSocketsRepository.$deleteUnusedSockets(node.pub_key, addresses);
const oldRecordTypes = await NodeRecordsRepository.$getRecordTypes(node.pub_key);
const customRecordTypes: number[] = [];
for (const [type, payload] of Object.entries(node.custom_records || {})) {
const numericalType = parseInt(type);
await NodeRecordsRepository.$saveRecord({
publicKey: node.pub_key,
type: numericalType,
payload,
});
customRecordTypes.push(numericalType);
}
if (oldRecordTypes.reduce((changed, type) => changed || customRecordTypes.indexOf(type) === -1, false)) {
deletedRecords += await NodeRecordsRepository.$deleteUnusedRecords(node.pub_key, customRecordTypes);
}
}
logger.info(`${progress} nodes updated. ${deletedSockets} sockets deleted. ${deletedRecords} custom records deleted.`);
logger.info(`${progress} nodes updated`);
// If a node is not present in the graph, mark it as inactive
await nodesApi.$setNodesInactive(graphNodesPubkeys);
nodesApi.$setNodesInactive(graphNodesPubkeys);
if (config.MAXMIND.ENABLED) {
$lookupNodeLocation();
@@ -121,19 +85,11 @@ class NetworkSyncService {
*/
private async $updateChannelsList(channels: ILightningApi.Channel[]): Promise<void> {
try {
const [closedChannelsRaw]: any[] = await DB.query(`SELECT id FROM channels WHERE status = 2`);
const closedChannels = {};
for (const closedChannel of closedChannelsRaw) {
closedChannels[closedChannel.id] = true;
}
let progress = 0;
const graphChannelsIds: string[] = [];
for (const channel of channels) {
if (!closedChannels[channel.channel_id]) {
await channelsApi.$saveChannel(channel);
}
await channelsApi.$saveChannel(channel);
graphChannelsIds.push(channel.channel_id);
++progress;
@@ -147,7 +103,7 @@ class NetworkSyncService {
logger.info(`${progress} channels updated`);
// If a channel is not present in the graph, mark it as inactive
await channelsApi.$setChannelsInactive(graphChannelsIds);
channelsApi.$setChannelsInactive(graphChannelsIds);
} catch (e) {
logger.err(`Cannot update channel list. Reason: ${(e instanceof Error ? e.message : e)}`);
}
@@ -263,23 +219,11 @@ class NetworkSyncService {
}
private async $scanForClosedChannels(): Promise<void> {
if (this.closedChannelsScanBlock === blocks.getCurrentBlockHeight()) {
logger.debug(`We've already scanned closed channels for this block, skipping.`);
return;
}
let progress = 0;
try {
let log = `Starting closed channels scan`;
if (this.closedChannelsScanBlock > 0) {
log += `. Last scan was at block ${this.closedChannelsScanBlock}`;
} else {
log += ` for the first time`;
}
logger.info(log);
const channels = await channelsApi.$getChannelsByStatus([0, 1]);
logger.info(`Starting closed channels scan...`);
const channels = await channelsApi.$getChannelsByStatus(0);
for (const channel of channels) {
const spendingTx = await bitcoinApi.$getOutspend(channel.transaction_id, channel.transaction_vout);
if (spendingTx.spent === true && spendingTx.status?.confirmed === true) {
@@ -298,13 +242,131 @@ class NetworkSyncService {
this.loggerTimer = new Date().getTime() / 1000;
}
}
this.closedChannelsScanBlock = blocks.getCurrentBlockHeight();
logger.info(`Closed channels scan completed at block ${this.closedChannelsScanBlock}`);
logger.info(`Closed channels scan complete.`);
} catch (e) {
logger.err('$scanForClosedChannels() error: ' + (e instanceof Error ? e.message : e));
}
}
/*
1. Mutually closed
2. Forced closed
3. Forced closed with penalty
*/
private async $runClosedChannelsForensics(): Promise<void> {
if (!config.ESPLORA.REST_API_URL) {
return;
}
let progress = 0;
try {
logger.info(`Started running closed channel forensics...`);
const channels = await channelsApi.$getClosedChannelsWithoutReason();
for (const channel of channels) {
let reason = 0;
// Only Esplora backend can retrieve spent transaction outputs
const outspends = await bitcoinApi.$getOutspends(channel.closing_transaction_id);
const lightningScriptReasons: number[] = [];
for (const outspend of outspends) {
if (outspend.spent && outspend.txid) {
const spendingTx = await bitcoinApi.$getRawTransaction(outspend.txid);
const lightningScript = this.findLightningScript(spendingTx.vin[outspend.vin || 0]);
lightningScriptReasons.push(lightningScript);
}
}
if (lightningScriptReasons.length === outspends.length
&& lightningScriptReasons.filter((r) => r === 1).length === outspends.length) {
reason = 1;
} else {
const filteredReasons = lightningScriptReasons.filter((r) => r !== 1);
if (filteredReasons.length) {
if (filteredReasons.some((r) => r === 2 || r === 4)) {
reason = 3;
} else {
reason = 2;
}
} else {
/*
We can detect a commitment transaction (force close) by reading Sequence and Locktime
https://github.com/lightning/bolts/blob/master/03-transactions.md#commitment-transaction
*/
const closingTx = await bitcoinApi.$getRawTransaction(channel.closing_transaction_id);
const sequenceHex: string = closingTx.vin[0].sequence.toString(16);
const locktimeHex: string = closingTx.locktime.toString(16);
if (sequenceHex.substring(0, 2) === '80' && locktimeHex.substring(0, 2) === '20') {
reason = 2; // Here we can't be sure if it's a penalty or not
} else {
reason = 1;
}
}
}
if (reason) {
logger.debug('Setting closing reason ' + reason + ' for channel: ' + channel.id + '.');
await DB.query(`UPDATE channels SET closing_reason = ? WHERE id = ?`, [reason, channel.id]);
}
++progress;
const elapsedSeconds = Math.round((new Date().getTime() / 1000) - this.loggerTimer);
if (elapsedSeconds > 10) {
logger.info(`Updating closed channel forensics ${progress}/${channels.length}`);
this.loggerTimer = new Date().getTime() / 1000;
}
}
logger.info(`Closed channels forensics scan complete.`);
} catch (e) {
logger.err('$runClosedChannelsForensics() error: ' + (e instanceof Error ? e.message : e));
}
}
private findLightningScript(vin: IEsploraApi.Vin): number {
const topElement = vin.witness[vin.witness.length - 2];
if (/^OP_IF OP_PUSHBYTES_33 \w{66} OP_ELSE OP_PUSH(NUM_\d+|BYTES_(1 \w{2}|2 \w{4})) OP_CSV OP_DROP OP_PUSHBYTES_33 \w{66} OP_ENDIF OP_CHECKSIG$/.test(vin.inner_witnessscript_asm)) {
// https://github.com/lightning/bolts/blob/master/03-transactions.md#commitment-transaction-outputs
if (topElement === '01') {
// top element is '01' to get in the revocation path
// 'Revoked Lightning Force Close';
// Penalty force closed
return 2;
} else {
// top element is '', this is a delayed to_local output
// 'Lightning Force Close';
return 3;
}
} else if (
/^OP_DUP OP_HASH160 OP_PUSHBYTES_20 \w{40} OP_EQUAL OP_IF OP_CHECKSIG OP_ELSE OP_PUSHBYTES_33 \w{66} OP_SWAP OP_SIZE OP_PUSHBYTES_1 20 OP_EQUAL OP_NOTIF OP_DROP OP_PUSHNUM_2 OP_SWAP OP_PUSHBYTES_33 \w{66} OP_PUSHNUM_2 OP_CHECKMULTISIG OP_ELSE OP_HASH160 OP_PUSHBYTES_20 \w{40} OP_EQUALVERIFY OP_CHECKSIG OP_ENDIF (OP_PUSHNUM_1 OP_CSV OP_DROP |)OP_ENDIF$/.test(vin.inner_witnessscript_asm) ||
/^OP_DUP OP_HASH160 OP_PUSHBYTES_20 \w{40} OP_EQUAL OP_IF OP_CHECKSIG OP_ELSE OP_PUSHBYTES_33 \w{66} OP_SWAP OP_SIZE OP_PUSHBYTES_1 20 OP_EQUAL OP_IF OP_HASH160 OP_PUSHBYTES_20 \w{40} OP_EQUALVERIFY OP_PUSHNUM_2 OP_SWAP OP_PUSHBYTES_33 \w{66} OP_PUSHNUM_2 OP_CHECKMULTISIG OP_ELSE OP_DROP OP_PUSHBYTES_3 \w{6} OP_CLTV OP_DROP OP_CHECKSIG OP_ENDIF (OP_PUSHNUM_1 OP_CSV OP_DROP |)OP_ENDIF$/.test(vin.inner_witnessscript_asm)
) {
// https://github.com/lightning/bolts/blob/master/03-transactions.md#offered-htlc-outputs
// https://github.com/lightning/bolts/blob/master/03-transactions.md#received-htlc-outputs
if (topElement.length === 66) {
// top element is a public key
// 'Revoked Lightning HTLC'; Penalty force closed
return 4;
} else if (topElement) {
// top element is a preimage
// 'Lightning HTLC';
return 5;
} else {
// top element is '' to get in the expiry of the script
// 'Expired Lightning HTLC';
return 6;
}
} else if (/^OP_PUSHBYTES_33 \w{66} OP_CHECKSIG OP_IFDUP OP_NOTIF OP_PUSHNUM_16 OP_CSV OP_ENDIF$/.test(vin.inner_witnessscript_asm)) {
// https://github.com/lightning/bolts/blob/master/03-transactions.md#to_local_anchor-and-to_remote_anchor-output-option_anchors
if (topElement) {
// top element is a signature
// 'Lightning Anchor';
return 7;
} else {
// top element is '', it has been swept after 16 blocks
// 'Swept Lightning Anchor';
return 8;
}
}
return 1;
}
}
export default new NetworkSyncService();

View File

@@ -71,7 +71,9 @@ class FundingTxFetcher {
}
public async $fetchChannelOpenTx(channelId: string): Promise<{timestamp: number, txid: string, value: number}> {
channelId = Common.channelIntegerIdToShortId(channelId);
if (channelId.indexOf('x') === -1) {
channelId = Common.channelIntegerIdToShortId(channelId);
}
if (this.fundingTxCache[channelId]) {
return this.fundingTxCache[channelId];

View File

@@ -4,25 +4,17 @@ import nodesApi from '../../../api/explorer/nodes.api';
import config from '../../../config';
import DB from '../../../database';
import logger from '../../../logger';
import { ResultSetHeader } from 'mysql2';
import * as IPCheck from '../../../utils/ipcheck.js';
import { Reader } from 'mmdb-lib';
export async function $lookupNodeLocation(): Promise<void> {
let loggerTimer = new Date().getTime() / 1000;
let progress = 0;
let nodesUpdated = 0;
let geoNamesInserted = 0;
logger.info(`Running node location updater using Maxmind`);
try {
const nodes = await nodesApi.$getAllNodes();
const lookupCity = await maxmind.open<CityResponse>(config.MAXMIND.GEOLITE2_CITY);
const lookupAsn = await maxmind.open<AsnResponse>(config.MAXMIND.GEOLITE2_ASN);
let lookupIsp: Reader<IspResponse> | null = null;
try {
lookupIsp = await maxmind.open<IspResponse>(config.MAXMIND.GEOIP2_ISP);
} catch (e) { }
const lookupIsp = await maxmind.open<IspResponse>(config.MAXMIND.GEOIP2_ISP);
for (const node of nodes) {
const sockets: string[] = node.sockets.split(',');
@@ -33,30 +25,7 @@ export async function $lookupNodeLocation(): Promise<void> {
if (hasClearnet && ip !== '127.0.1.1' && ip !== '127.0.0.1') {
const city = lookupCity.get(ip);
const asn = lookupAsn.get(ip);
let isp: IspResponse | null = null;
if (lookupIsp) {
isp = lookupIsp.get(ip);
}
let asOverwrite: any | undefined;
if (asn && (IPCheck.match(ip, '170.75.160.0/20') || IPCheck.match(ip, '172.81.176.0/21'))) {
asOverwrite = {
asn: 394745,
name: 'Lunanode',
};
}
else if (asn && (IPCheck.match(ip, '50.7.0.0/16') || IPCheck.match(ip, '66.90.64.0/18'))) {
asOverwrite = {
asn: 30058,
name: 'FDCservers.net',
};
}
else if (asn && asn.autonomous_system_number === 174) {
asOverwrite = {
asn: 174,
name: 'Cogent Communications',
};
}
const isp = lookupIsp.get(ip);
if (city && (asn || isp)) {
const query = `
@@ -72,7 +41,7 @@ export async function $lookupNodeLocation(): Promise<void> {
`;
const params = [
asOverwrite?.asn ?? isp?.autonomous_system_number ?? asn?.autonomous_system_number,
isp?.autonomous_system_number ?? asn?.autonomous_system_number,
city.city?.geoname_id,
city.country?.geoname_id,
city.subdivisions ? city.subdivisions[0].geoname_id : null,
@@ -81,72 +50,48 @@ export async function $lookupNodeLocation(): Promise<void> {
city.location?.accuracy_radius,
node.public_key
];
let result = await DB.query<ResultSetHeader>(query, params);
if ((result[0].changedRows ?? 0) > 0) {
++nodesUpdated;
}
await DB.query(query, params);
// Store Continent
if (city.continent?.geoname_id) {
result = await DB.query<ResultSetHeader>(
await DB.query(
`INSERT IGNORE INTO geo_names (id, type, names) VALUES (?, 'continent', ?)`,
[city.continent?.geoname_id, JSON.stringify(city.continent?.names)]);
if ((result[0].changedRows ?? 0) > 0) {
++geoNamesInserted;
}
}
// Store Country
if (city.country?.geoname_id) {
result = await DB.query<ResultSetHeader>(
await DB.query(
`INSERT IGNORE INTO geo_names (id, type, names) VALUES (?, 'country', ?)`,
[city.country?.geoname_id, JSON.stringify(city.country?.names)]);
if ((result[0].changedRows ?? 0) > 0) {
++geoNamesInserted;
}
}
// Store Country ISO code
if (city.country?.iso_code) {
result = await DB.query<ResultSetHeader>(
await DB.query(
`INSERT IGNORE INTO geo_names (id, type, names) VALUES (?, 'country_iso_code', ?)`,
[city.country?.geoname_id, city.country?.iso_code]);
if ((result[0].changedRows ?? 0) > 0) {
++geoNamesInserted;
}
}
// Store Division
if (city.subdivisions && city.subdivisions[0]) {
result = await DB.query<ResultSetHeader>(
await DB.query(
`INSERT IGNORE INTO geo_names (id, type, names) VALUES (?, 'division', ?)`,
[city.subdivisions[0].geoname_id, JSON.stringify(city.subdivisions[0]?.names)]);
if ((result[0].changedRows ?? 0) > 0) {
++geoNamesInserted;
}
}
// Store City
if (city.city?.geoname_id) {
result = await DB.query<ResultSetHeader>(
await DB.query(
`INSERT IGNORE INTO geo_names (id, type, names) VALUES (?, 'city', ?)`,
[city.city?.geoname_id, JSON.stringify(city.city?.names)]);
if ((result[0].changedRows ?? 0) > 0) {
++geoNamesInserted;
}
}
// Store AS name
if (isp?.autonomous_system_organization ?? asn?.autonomous_system_organization) {
result = await DB.query<ResultSetHeader>(
await DB.query(
`INSERT IGNORE INTO geo_names (id, type, names) VALUES (?, 'as_organization', ?)`,
[
asOverwrite?.asn ?? isp?.autonomous_system_number ?? asn?.autonomous_system_number,
JSON.stringify(asOverwrite?.name ?? isp?.isp ?? asn?.autonomous_system_organization)
]);
if ((result[0].changedRows ?? 0) > 0) {
++geoNamesInserted;
}
[isp?.autonomous_system_number ?? asn?.autonomous_system_number, JSON.stringify(isp?.isp ?? asn?.autonomous_system_organization)]);
}
}
@@ -159,12 +104,7 @@ export async function $lookupNodeLocation(): Promise<void> {
}
}
}
if (nodesUpdated > 0) {
logger.info(`${nodesUpdated} nodes maxmind data updated, ${geoNamesInserted} geo names inserted`);
} else {
logger.debug(`${nodesUpdated} nodes maxmind data updated, ${geoNamesInserted} geo names inserted`);
}
logger.info(`${progress} nodes location data updated`);
} catch (e) {
logger.err('$lookupNodeLocation() error: ' + (e instanceof Error ? e.message : e));
}

View File

@@ -1,76 +1,68 @@
import DB from '../../../database';
import { promises } from 'fs';
import { XMLParser } from 'fast-xml-parser';
import logger from '../../../logger';
import fundingTxFetcher from './funding-tx-fetcher';
import config from '../../../config';
import { ILightningApi } from '../../../api/lightning/lightning-api.interface';
import { isIP } from 'net';
import { Common } from '../../../api/common';
import channelsApi from '../../../api/explorer/channels.api';
import nodesApi from '../../../api/explorer/nodes.api';
import { ResultSetHeader } from 'mysql2';
const fsPromises = promises;
interface Node {
id: string;
timestamp: number;
features: string;
rgb_color: string;
alias: string;
addresses: unknown[];
out_degree: number;
in_degree: number;
}
interface Channel {
channel_id: string;
node1_pub: string;
node2_pub: string;
timestamp: number;
features: string;
fee_base_msat: number;
fee_rate_milli_msat: number;
htlc_minimim_msat: number;
cltv_expiry_delta: number;
htlc_maximum_msat: number;
}
class LightningStatsImporter {
topologiesFolder = config.LIGHTNING.TOPOLOGY_FOLDER;
parser = new XMLParser();
async $run(): Promise<void> {
logger.info(`Importing historical lightning stats`);
const [channels]: any[] = await DB.query('SELECT short_id from channels;');
logger.info('Caching funding txs for currently existing channels');
await fundingTxFetcher.$fetchChannelsFundingTxs(channels.map(channel => channel.short_id));
if (config.MEMPOOL.NETWORK !== 'mainnet' || config.DATABASE.ENABLED === false) {
return;
}
await this.$importHistoricalLightningStats();
await this.$cleanupIncorrectSnapshot();
}
/**
* Generate LN network stats for one day
*/
public async computeNetworkStats(timestamp: number,
networkGraph: ILightningApi.NetworkGraph, isHistorical: boolean = false): Promise<unknown> {
public async computeNetworkStats(timestamp: number, networkGraph): Promise<unknown> {
// Node counts and network shares
let clearnetNodes = 0;
let torNodes = 0;
let clearnetTorNodes = 0;
let unannouncedNodes = 0;
const [nodesInDbRaw]: any[] = await DB.query(`SELECT public_key FROM nodes`);
const nodesInDb = {};
for (const node of nodesInDbRaw) {
nodesInDb[node.public_key] = node;
}
for (const node of networkGraph.nodes) {
// If we don't know about this node, insert it in db
if (isHistorical === true && !nodesInDb[node.pub_key]) {
await nodesApi.$saveNode({
last_update: node.last_update,
pub_key: node.pub_key,
alias: node.alias,
addresses: node.addresses,
color: node.color,
features: node.features,
});
nodesInDb[node.pub_key] = node;
} else {
await nodesApi.$updateNodeSockets(node.pub_key, node.addresses);
}
let hasOnion = false;
let hasClearnet = false;
let isUnnanounced = true;
for (const socket of (node.addresses ?? [])) {
if (!socket.network?.length && !socket.addr?.length) {
continue;
}
hasOnion = hasOnion || ['torv2', 'torv3'].includes(socket.network) || socket.addr.indexOf('onion') !== -1 || socket.addr.indexOf('torv2') !== -1 || socket.addr.indexOf('torv3') !== -1;
hasClearnet = hasClearnet || ['ipv4', 'ipv6'].includes(socket.network) || [4, 6].includes(isIP(socket.addr.split(':')[0])) || socket.addr.indexOf('ipv4') !== -1 || socket.addr.indexOf('ipv6') !== -1;
hasOnion = hasOnion || ['torv2', 'torv3'].includes(socket.network);
hasClearnet = hasClearnet || ['ipv4', 'ipv6'].includes(socket.network);
}
if (hasOnion && hasClearnet) {
clearnetTorNodes++;
@@ -97,14 +89,11 @@ class LightningStatsImporter {
const baseFees: number[] = [];
const alreadyCountedChannels = {};
const [channelsInDbRaw]: any[] = await DB.query(`SELECT short_id FROM channels`);
const channelsInDb = {};
for (const channel of channelsInDbRaw) {
channelsInDb[channel.short_id] = channel;
}
for (const channel of networkGraph.edges) {
const short_id = Common.channelIntegerIdToShortId(channel.channel_id);
let short_id = channel.channel_id;
if (short_id.indexOf('/') !== -1) {
short_id = short_id.slice(0, -2);
}
const tx = await fundingTxFetcher.$fetchChannelOpenTx(short_id);
if (!tx) {
@@ -112,21 +101,6 @@ class LightningStatsImporter {
continue;
}
// If we don't know about this channel, insert it in db
if (isHistorical === true && !channelsInDb[short_id]) {
await channelsApi.$saveChannel({
channel_id: short_id,
chan_point: `${tx.txid}:${short_id.split('x')[2]}`,
last_update: channel.last_update,
node1_pub: channel.node1_pub,
node2_pub: channel.node2_pub,
capacity: (tx.value * 100000000).toString(),
node1_policy: null,
node2_policy: null,
}, 0);
channelsInDb[channel.channel_id] = channel;
}
if (!nodeStats[channel.node1_pub]) {
nodeStats[channel.node1_pub] = {
capacity: 0,
@@ -151,53 +125,35 @@ class LightningStatsImporter {
nodeStats[channel.node2_pub].channels++;
}
if (isHistorical === false) { // Coming from the node
if (channel.node1_policy !== undefined) { // Coming from the node
for (const policy of [channel.node1_policy, channel.node2_policy]) {
if (policy && parseInt(policy.fee_rate_milli_msat, 10) < 5000) {
if (policy && policy.fee_rate_milli_msat < 5000) {
avgFeeRate += parseInt(policy.fee_rate_milli_msat, 10);
feeRates.push(parseInt(policy.fee_rate_milli_msat, 10));
}
if (policy && parseInt(policy.fee_base_msat, 10) < 5000) {
if (policy && policy.fee_base_msat < 5000) {
avgBaseFee += parseInt(policy.fee_base_msat, 10);
baseFees.push(parseInt(policy.fee_base_msat, 10));
}
}
} else {
// @ts-ignore
if (channel.node1_policy.fee_rate_milli_msat < 5000) {
// @ts-ignore
avgFeeRate += parseInt(channel.node1_policy.fee_rate_milli_msat, 10);
// @ts-ignore
feeRates.push(parseInt(channel.node1_policy.fee_rate_milli_msat, 10));
}
// @ts-ignore
if (channel.node1_policy.fee_base_msat < 5000) {
// @ts-ignore
avgBaseFee += parseInt(channel.node1_policy.fee_base_msat, 10);
// @ts-ignore
baseFees.push(parseInt(channel.node1_policy.fee_base_msat, 10));
} else { // Coming from the historical import
if (channel.fee_rate_milli_msat < 5000) {
avgFeeRate += parseInt(channel.fee_rate_milli_msat, 10);
feeRates.push(parseInt(channel.fee_rate_milli_msat, 10));
}
if (channel.fee_base_msat < 5000) {
avgBaseFee += parseInt(channel.fee_base_msat, 10);
baseFees.push(parseInt(channel.fee_base_msat, 10));
}
}
}
let medCapacity = 0;
let medFeeRate = 0;
let medBaseFee = 0;
let avgCapacity = 0;
avgFeeRate /= Math.max(networkGraph.edges.length, 1);
avgBaseFee /= Math.max(networkGraph.edges.length, 1);
if (capacities.length > 0) {
medCapacity = capacities.sort((a, b) => b - a)[Math.round(capacities.length / 2 - 1)];
avgCapacity = Math.round(capacity / Math.max(capacities.length, 1));
}
if (feeRates.length > 0) {
medFeeRate = feeRates.sort((a, b) => b - a)[Math.round(feeRates.length / 2 - 1)];
}
if (baseFees.length > 0) {
medBaseFee = baseFees.sort((a, b) => b - a)[Math.round(baseFees.length / 2 - 1)];
}
const medCapacity = capacities.sort((a, b) => b - a)[Math.round(capacities.length / 2 - 1)];
const medFeeRate = feeRates.sort((a, b) => b - a)[Math.round(feeRates.length / 2 - 1)];
const medBaseFee = baseFees.sort((a, b) => b - a)[Math.round(baseFees.length / 2 - 1)];
const avgCapacity = Math.round(capacity / Math.max(capacities.length, 1));
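The median convention above, worked through (the new guarded form and the old unguarded lines index the same way):

// capacities = [1, 2, 3, 4] → sorted descending [4, 3, 2, 1]
// index = Math.round(4 / 2 - 1) = 1 → medCapacity = 3 (the upper of the two middle values)
// a single element gives Math.round(1 / 2 - 1) = Math.round(-0.5) = 0, i.e. that element itself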
let query = `INSERT INTO lightning_stats(
added,
@@ -287,17 +243,6 @@ class LightningStatsImporter {
nodeStats[public_key].capacity,
nodeStats[public_key].channels,
]);
if (!isHistorical) {
await DB.query(
`UPDATE nodes SET capacity = ?, channels = ? WHERE public_key = ?`,
[
nodeStats[public_key].capacity,
nodeStats[public_key].channels,
public_key,
]
);
}
}
return {
@@ -310,232 +255,156 @@ class LightningStatsImporter {
* Import topology files LN historical data into the database
*/
async $importHistoricalLightningStats(): Promise<void> {
logger.debug('Run the historical importer');
try {
let fileList: string[] = [];
try {
fileList = await fsPromises.readdir(this.topologiesFolder);
} catch (e) {
logger.err(`Unable to open topology folder at ${this.topologiesFolder}`);
throw e;
}
// Insert history from the most recent to the oldest
// This also puts the .json cached files first
fileList.sort().reverse();
let latestNodeCount = 1;
const [rows]: any[] = await DB.query(`
SELECT UNIX_TIMESTAMP(added) AS added
FROM lightning_stats
ORDER BY added DESC
`);
const existingStatsTimestamps = {};
for (const row of rows) {
existingStatsTimestamps[row.added] = row;
const fileList = await fsPromises.readdir(this.topologiesFolder);
// Insert history from the most recent to the oldest
// This also puts the .json cached files first
fileList.sort().reverse();
const [rows]: any[] = await DB.query(`
SELECT UNIX_TIMESTAMP(added) AS added, node_count
FROM lightning_stats
ORDER BY added DESC
`);
const existingStatsTimestamps = {};
for (const row of rows) {
existingStatsTimestamps[row.added] = row;
}
// For logging purpose
let processed = 10;
let totalProcessed = -1;
for (const filename of fileList) {
processed++;
totalProcessed++;
const timestamp = parseInt(filename.split('_')[1], 10);
// Stats exist already, don't calculate/insert them
if (existingStatsTimestamps[timestamp] !== undefined) {
latestNodeCount = existingStatsTimestamps[timestamp].node_count;
continue;
}
// For logging purpose
let processed = 10;
let totalProcessed = 0;
let logStarted = false;
logger.debug(`Reading ${this.topologiesFolder}/${filename}`);
const fileContent = await fsPromises.readFile(`${this.topologiesFolder}/${filename}`, 'utf8');
for (const filename of fileList) {
processed++;
const timestamp = parseInt(filename.split('_')[1], 10);
// Stats exist already, don't calculate/insert them
if (existingStatsTimestamps[timestamp] !== undefined) {
totalProcessed++;
continue;
}
if (filename.indexOf('topology_') === -1) {
totalProcessed++;
continue;
}
logger.debug(`Reading ${this.topologiesFolder}/${filename}`);
let fileContent = '';
try {
fileContent = await fsPromises.readFile(`${this.topologiesFolder}/${filename}`, 'utf8');
} catch (e: any) {
if (e.errno === -1) { // EISDIR - Ignore directories
totalProcessed++;
continue;
}
logger.err(`Unable to open ${this.topologiesFolder}/${filename}`);
totalProcessed++;
continue;
}
let graph;
let graph;
if (filename.indexOf('.json') !== -1) {
try {
graph = JSON.parse(fileContent);
graph = await this.cleanupTopology(graph);
} catch (e) {
logger.debug(`Invalid topology file ${this.topologiesFolder}/${filename}, cannot parse the content. Reason: ${e instanceof Error ? e.message : e}`);
totalProcessed++;
logger.debug(`Invalid topology file ${this.topologiesFolder}/${filename}, cannot parse the content`);
continue;
}
if (this.isIncorrectSnapshot(timestamp, graph)) {
logger.debug(`Ignoring ${this.topologiesFolder}/${filename}, because we defined it as an incorrect snapshot`);
++totalProcessed;
} else {
graph = this.parseFile(fileContent);
if (!graph) {
logger.debug(`Invalid topology file ${this.topologiesFolder}/${filename}, cannot parse the content`);
continue;
}
if (!logStarted) {
logger.info(`Found a topology file that we have not imported yet. Importing historical lightning stats now.`);
logStarted = true;
}
const datestr = `${new Date(timestamp * 1000).toUTCString()} (${timestamp})`;
logger.debug(`${datestr}: Found ${graph.nodes.length} nodes and ${graph.edges.length} channels`);
totalProcessed++;
if (processed > 10) {
logger.info(`Generating LN network stats for ${datestr}. Processed ${totalProcessed}/${fileList.length} files`);
processed = 0;
} else {
logger.debug(`Generating LN network stats for ${datestr}. Processed ${totalProcessed}/${fileList.length} files`);
}
await fundingTxFetcher.$fetchChannelsFundingTxs(graph.edges.map(channel => channel.channel_id.slice(0, -2)));
const stat = await this.computeNetworkStats(timestamp, graph, true);
existingStatsTimestamps[timestamp] = stat;
await fsPromises.writeFile(`${this.topologiesFolder}/${filename}.json`, JSON.stringify(graph));
}
if (totalProcessed > 0) {
logger.info(`Lightning network stats historical import completed`);
if (timestamp > 1556316000) {
// "No, the reason most likely is just that I started collection in 2019,
// so what I had before that is just the survivors from before, which weren't that many"
const diffRatio = graph.nodes.length / latestNodeCount;
if (diffRatio < 0.9) {
// Ignore a drop of more than 10% in the node count, as it's probably a missing data point
logger.debug(`Node count diff ratio threshold reached, ignoring the data for this day: ${graph.nodes.length} nodes vs ${latestNodeCount}`);
continue;
}
}
} catch (e) {
logger.err(`Lightning network stats historical import failed. Reason: ${e instanceof Error ? e.message : e}`);
latestNodeCount = graph.nodes.length;
const datestr = `${new Date(timestamp * 1000).toUTCString()} (${timestamp})`;
logger.debug(`${datestr}: Found ${graph.nodes.length} nodes and ${graph.edges.length} channels`);
if (processed > 10) {
logger.info(`Generating LN network stats for ${datestr}. Processed ${totalProcessed}/${fileList.length} files`);
processed = 0;
} else {
logger.debug(`Generating LN network stats for ${datestr}. Processed ${totalProcessed}/${fileList.length} files`);
}
await fundingTxFetcher.$fetchChannelsFundingTxs(graph.edges.map(channel => channel.channel_id.slice(0, -2)));
const stat = await this.computeNetworkStats(timestamp, graph);
existingStatsTimestamps[timestamp] = stat;
}
logger.info(`Lightning network stats historical import completed`);
}
cleanupTopology(graph): ILightningApi.NetworkGraph {
const newGraph = {
nodes: <ILightningApi.Node[]>[],
edges: <ILightningApi.Channel[]>[],
};
/**
* Parse the file content into XML, and return a list of nodes and channels
*/
private parseFile(fileContent): any {
const graph = this.parser.parse(fileContent);
if (Object.keys(graph).length === 0) {
return null;
}
for (const node of graph.nodes) {
const addressesParts = (node.addresses ?? '').split(',');
const addresses: any[] = [];
for (const address of addressesParts) {
const formatted = Common.findSocketNetwork(address);
const nodes: Node[] = [];
const channels: Channel[] = [];
// If there is only one entry, the parser does not return an array, so we normalize it to an array here
if (!Array.isArray(graph.graphml.graph.node)) {
graph.graphml.graph.node = [graph.graphml.graph.node];
}
if (!Array.isArray(graph.graphml.graph.edge)) {
graph.graphml.graph.edge = [graph.graphml.graph.edge];
}
for (const node of graph.graphml.graph.node) {
if (!node.data) {
continue;
}
const addresses: unknown[] = [];
const sockets = node.data[5].split(',');
for (const socket of sockets) {
const parts = socket.split('://');
addresses.push({
network: formatted.network,
addr: formatted.url
network: parts[0],
addr: parts[1],
});
}
let rgb = node.rgb_color ?? '#000000';
if (rgb.indexOf('#') === -1) {
rgb = `#${rgb}`;
}
newGraph.nodes.push({
last_update: node.timestamp ?? 0,
pub_key: node.id ?? null,
alias: node.alias ?? node.id.slice(0, 20),
nodes.push({
id: node.data[0],
timestamp: node.data[1],
features: node.data[2],
rgb_color: node.data[3],
alias: node.data[4],
addresses: addresses,
color: rgb,
features: {},
out_degree: node.data[6],
in_degree: node.data[7],
});
}
for (const adjacency of graph.adjacency) {
if (adjacency.length === 0) {
for (const channel of graph.graphml.graph.edge) {
if (!channel.data) {
continue;
} else {
for (const edge of adjacency) {
newGraph.edges.push({
channel_id: edge.scid,
chan_point: '',
last_update: edge.timestamp,
node1_pub: edge.source ?? null,
node2_pub: edge.destination ?? null,
capacity: '0', // Will be fetched later
node1_policy: {
time_lock_delta: edge.cltv_expiry_delta,
min_htlc: edge.htlc_minimim_msat,
fee_base_msat: edge.fee_base_msat,
fee_rate_milli_msat: edge.fee_proportional_millionths,
max_htlc_msat: edge.htlc_maximum_msat,
last_update: edge.timestamp,
disabled: false,
},
node2_policy: null,
});
}
}
channels.push({
channel_id: channel.data[0],
node1_pub: channel.data[1],
node2_pub: channel.data[2],
timestamp: channel.data[3],
features: channel.data[4],
fee_base_msat: channel.data[5],
fee_rate_milli_msat: channel.data[6],
htlc_minimim_msat: channel.data[7],
cltv_expiry_delta: channel.data[8],
htlc_maximum_msat: channel.data[9],
});
}
return newGraph;
}
private isIncorrectSnapshot(timestamp, graph): boolean {
if (timestamp >= 1549065600 /* 2019-02-02 */ && timestamp <= 1550620800 /* 2019-02-20 */ && graph.nodes.length < 2600) {
return true;
}
if (timestamp >= 1552953600 /* 2019-03-19 */ && timestamp <= 1556323200 /* 2019-05-27 */ && graph.nodes.length < 4000) {
return true;
}
if (timestamp >= 1557446400 /* 2019-05-10 */ && timestamp <= 1560470400 /* 2019-06-14 */ && graph.nodes.length < 4000) {
return true;
}
if (timestamp >= 1561680000 /* 2019-06-28 */ && timestamp <= 1563148800 /* 2019-07-15 */ && graph.nodes.length < 4000) {
return true;
}
if (timestamp >= 1571270400 /* 2019-11-17 */ && timestamp <= 1580601600 /* 2020-02-02 */ && graph.nodes.length < 4500) {
return true;
}
if (timestamp >= 1591142400 /* 2020-06-03 */ && timestamp <= 1592006400 /* 2020-06-13 */ && graph.nodes.length < 5500) {
return true;
}
if (timestamp >= 1632787200 /* 2021-09-28 */ && timestamp <= 1633564800 /* 2021-10-07 */ && graph.nodes.length < 13000) {
return true;
}
if (timestamp >= 1634256000 /* 2021-10-15 */ && timestamp <= 1645401600 /* 2022-02-21 */ && graph.nodes.length < 17000) {
return true;
}
if (timestamp >= 1654992000 /* 2022-06-12 */ && timestamp <= 1661472000 /* 2022-08-26 */ && graph.nodes.length < 14000) {
return true;
}
return false;
}
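As a worked example of the thresholds above: a snapshot taken 2019-03-25 (timestamp 1553472000) containing 3,500 nodes falls inside the second window (1552953600–1556323200) and is under its 4,000-node floor, so `isIncorrectSnapshot` returns true.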
private async $cleanupIncorrectSnapshot(): Promise<void> {
// We do not run this automatically because those stats are not supposed to be inserted in the
// first place; the queries below are kept as a reminder that they must be run manually
// DELETE FROM lightning_stats
// WHERE (
// UNIX_TIMESTAMP(added) >= 1549065600 AND UNIX_TIMESTAMP(added) <= 1550620800 AND node_count < 2600 OR
// UNIX_TIMESTAMP(added) >= 1552953600 AND UNIX_TIMESTAMP(added) <= 1556323200 AND node_count < 4000 OR
// UNIX_TIMESTAMP(added) >= 1557446400 AND UNIX_TIMESTAMP(added) <= 1560470400 AND node_count < 4000 OR
// UNIX_TIMESTAMP(added) >= 1561680000 AND UNIX_TIMESTAMP(added) <= 1563148800 AND node_count < 4000 OR
// UNIX_TIMESTAMP(added) >= 1571270400 AND UNIX_TIMESTAMP(added) <= 1580601600 AND node_count < 4500 OR
// UNIX_TIMESTAMP(added) >= 1591142400 AND UNIX_TIMESTAMP(added) <= 1592006400 AND node_count < 5500 OR
// UNIX_TIMESTAMP(added) >= 1632787200 AND UNIX_TIMESTAMP(added) <= 1633564800 AND node_count < 13000 OR
// UNIX_TIMESTAMP(added) >= 1634256000 AND UNIX_TIMESTAMP(added) <= 1645401600 AND node_count < 17000 OR
// UNIX_TIMESTAMP(added) >= 1654992000 AND UNIX_TIMESTAMP(added) <= 1661472000 AND node_count < 14000
// )
// DELETE FROM node_stats
// WHERE (
// UNIX_TIMESTAMP(added) >= 1549065600 AND UNIX_TIMESTAMP(added) <= 1550620800 OR
// UNIX_TIMESTAMP(added) >= 1552953600 AND UNIX_TIMESTAMP(added) <= 1556323200 OR
// UNIX_TIMESTAMP(added) >= 1557446400 AND UNIX_TIMESTAMP(added) <= 1560470400 OR
// UNIX_TIMESTAMP(added) >= 1561680000 AND UNIX_TIMESTAMP(added) <= 1563148800 OR
// UNIX_TIMESTAMP(added) >= 1571270400 AND UNIX_TIMESTAMP(added) <= 1580601600 OR
// UNIX_TIMESTAMP(added) >= 1591142400 AND UNIX_TIMESTAMP(added) <= 1592006400 OR
// UNIX_TIMESTAMP(added) >= 1632787200 AND UNIX_TIMESTAMP(added) <= 1633564800 OR
// UNIX_TIMESTAMP(added) >= 1634256000 AND UNIX_TIMESTAMP(added) <= 1645401600 OR
// UNIX_TIMESTAMP(added) >= 1654992000 AND UNIX_TIMESTAMP(added) <= 1661472000
// )
}
}
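For reference, a rough sketch of the parsed snapshot shape the new `parseFile` consumes — the field order is inferred from the `data` indices above, and the concrete values and `ipv4`/`torv3` prefixes are purely illustrative:

```ts
// Hypothetical parsed GraphML node, matching the index layout used above:
const sampleNode = {
  data: [
    '02abc…',                                      // [0] id (node public key)
    1661472000,                                    // [1] timestamp of last update
    '0882',                                        // [2] features
    '000000',                                      // [3] rgb_color (no leading '#')
    'ACINQ',                                       // [4] alias
    'ipv4://1.2.3.4:9735,torv3://abc….onion:9735', // [5] comma-separated socket URIs
    12,                                            // [6] out_degree
    10,                                            // [7] in_degree
  ],
};
// parseFile splits data[5] on ',' and each socket on '://',
// yielding { network: 'ipv4', addr: '1.2.3.4:9735' } entries.
```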

View File

@@ -0,0 +1,43 @@
import { query } from '../../utils/axios-query';
import priceUpdater, { PriceFeed, PriceHistory } from '../price-updater';
class FtxApi implements PriceFeed {
public name: string = 'FTX';
public currencies: string[] = ['USD', 'BRZ', 'EUR', 'JPY', 'AUD'];
public url: string = 'https://ftx.com/api/markets/BTC/';
public urlHist: string = 'https://ftx.com/api/markets/BTC/{CURRENCY}/candles?resolution={GRANULARITY}';
constructor() {
}
public async $fetchPrice(currency): Promise<number> {
const response = await query(this.url + currency);
return response ? parseInt(response['result']['last'], 10) : -1;
}
public async $fetchRecentPrice(currencies: string[], type: 'hour' | 'day'): Promise<PriceHistory> {
const priceHistory: PriceHistory = {};
for (const currency of currencies) {
if (this.currencies.includes(currency) === false) {
continue;
}
const response = await query(this.urlHist.replace('{GRANULARITY}', type === 'hour' ? '3600' : '86400').replace('{CURRENCY}', currency));
const pricesRaw = response ? response['result'] : [];
for (const price of pricesRaw as any[]) {
const time = Math.round(price['time'] / 1000);
if (priceHistory[time] === undefined) {
priceHistory[time] = priceUpdater.getEmptyPricesObj();
}
priceHistory[time][currency] = price['close'];
}
}
return priceHistory;
}
}
export default FtxApi;
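A quick usage sketch of the `PriceFeed` contract as implemented by this class (assuming the FTX endpoints respond; failures surface as the `-1` sentinel, per `$fetchPrice` above):

```ts
// Hypothetical standalone usage, outside the PriceUpdater wiring:
const ftx = new FtxApi();
const usd = await ftx.$fetchPrice('USD');                           // last trade price, or -1
const hourly = await ftx.$fetchRecentPrice(['USD', 'EUR'], 'hour');
// `hourly` maps unix timestamps (seconds) to price objects,
// e.g. hourly[1660000000]?.USD — one entry per returned candle.
```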

View File

@@ -1,11 +1,12 @@
import * as fs from 'fs';
import path from "path";
import { Common } from '../api/common';
import config from '../config';
import logger from '../logger';
import PricesRepository from '../repositories/PricesRepository';
import BitfinexApi from './price-feeds/bitfinex-api';
import BitflyerApi from './price-feeds/bitflyer-api';
import CoinbaseApi from './price-feeds/coinbase-api';
import FtxApi from './price-feeds/ftx-api';
import GeminiApi from './price-feeds/gemini-api';
import KrakenApi from './price-feeds/kraken-api';
@@ -46,6 +47,7 @@ class PriceUpdater {
this.latestPrices = this.getEmptyPricesObj();
this.feeds.push(new BitflyerApi()); // Does not have historical endpoint
this.feeds.push(new FtxApi());
this.feeds.push(new KrakenApi());
this.feeds.push(new CoinbaseApi());
this.feeds.push(new BitfinexApi());
@@ -157,7 +159,7 @@ class PriceUpdater {
const existingPriceTimes = await PricesRepository.$getPricesTimes();
// Insert MtGox weekly prices
const pricesJson: any[] = JSON.parse(fs.readFileSync(path.join(__dirname, 'mtgox-weekly.json')).toString());
const pricesJson: any[] = JSON.parse(fs.readFileSync('./src/tasks/price-feeds/mtgox-weekly.json').toString());
const prices = this.getEmptyPricesObj();
let insertedCount: number = 0;
for (const price of pricesJson) {

View File

@@ -1,119 +0,0 @@
var net = require('net');
var IPCheck = module.exports = function(input) {
var self = this;
if (!(self instanceof IPCheck)) {
return new IPCheck(input);
}
self.input = input;
self.parse();
};
IPCheck.prototype.parse = function() {
var self = this;
if (!self.input || typeof self.input !== 'string') return self.valid = false;
var ip;
var pos = self.input.lastIndexOf('/');
if (pos !== -1) {
ip = self.input.substring(0, pos);
self.mask = +self.input.substring(pos + 1);
} else {
ip = self.input;
self.mask = null;
}
self.ipv = net.isIP(ip);
self.valid = !!self.ipv && !isNaN(self.mask);
if (!self.valid) return;
// default mask = 32 for ipv4 and 128 for ipv6
if (self.mask === null) self.mask = self.ipv === 4 ? 32 : 128;
if (self.ipv === 4) {
// difference between ipv4 and ipv6 masks
self.mask += 96;
}
if (self.mask < 0 || self.mask > 128) {
self.valid = false;
return;
}
self.address = [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ];
if(self.ipv === 4){
self.parseIPv4(ip);
}else{
self.parseIPv6(ip);
}
};
IPCheck.prototype.parseIPv4 = function(ip) {
var self = this;
// ipv4 addresses live under ::ffff:0:0
self.address[10] = self.address[11] = 0xff;
var octets = ip.split('.');
for (var i = 0; i < 4; i++) {
self.address[i + 12] = parseInt(octets[i], 10);
}
};
var V6_TRANSITIONAL = /:(\d+\.\d+\.\d+\.\d+)$/;
IPCheck.prototype.parseIPv6 = function(ip) {
var self = this;
var transitionalMatch = V6_TRANSITIONAL.exec(ip);
if(transitionalMatch){
self.parseIPv4(transitionalMatch[1]);
return;
}
var bits = ip.split(':');
if (bits.length < 8) {
ip = ip.replace('::', Array(11 - bits.length).join(':'));
bits = ip.split(':');
}
var j = 0;
for (var i = 0; i < bits.length; i += 1) {
var x = bits[i] ? parseInt(bits[i], 16) : 0;
self.address[j++] = x >> 8;
self.address[j++] = x & 0xff;
}
};
IPCheck.prototype.match = function(cidr) {
var self = this;
if (!(cidr instanceof IPCheck)) cidr = new IPCheck(cidr);
if (!self.valid || !cidr.valid) return false;
var mask = cidr.mask;
var i = 0;
while (mask >= 8) {
if (self.address[i] !== cidr.address[i]) return false;
i++;
mask -= 8;
}
var shift = 8 - mask;
return (self.address[i] >>> shift) === (cidr.address[i] >>> shift);
};
IPCheck.match = function(ip, cidr) {
ip = ip instanceof IPCheck ? ip : new IPCheck(ip);
return ip.match(cidr);
};
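For reference, usage of this (now removed) CIDR matcher — `IPCheck.match` accepts raw strings or pre-parsed instances:

```ts
const IPCheck = require('./ipcheck');

IPCheck.match('10.8.0.1', '10.8.0.0/16');       // true
IPCheck.match('2001:db8::1', '2001:db8::/32');  // true (IPv6)
IPCheck.match('8.8.8.8', '10.0.0.0/8');         // false
IPCheck.match('not-an-ip', '10.0.0.0/8');       // false (invalid input)
```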

View File

@@ -1,174 +0,0 @@
export type HeapNode<T> = {
element: T
child?: HeapNode<T>
next?: HeapNode<T>
prev?: HeapNode<T>
} | null | undefined;
// minimal pairing heap priority queue implementation
export class PairingHeap<T> {
private root: HeapNode<T> = null;
private comparator: (a: T, b: T) => boolean;
// comparator function should return 'true' if a is higher priority than b
constructor(comparator: (a: T, b: T) => boolean) {
this.comparator = comparator;
}
isEmpty(): boolean {
return !this.root;
}
add(element: T): HeapNode<T> {
const node: HeapNode<T> = {
element
};
this.root = this.meld(this.root, node);
return node;
}
// returns the top priority element without modifying the queue
peek(): T | void {
return this.root?.element;
}
// removes and returns the top priority element
pop(): T | void {
let element;
if (this.root) {
const node = this.root;
element = node.element;
this.root = this.mergePairs(node.child);
}
return element;
}
deleteNode(node: HeapNode<T>): void {
if (!node) {
return;
}
if (node === this.root) {
this.root = this.mergePairs(node.child);
}
else {
if (node.prev) {
if (node.prev.child === node) {
node.prev.child = node.next;
}
else {
node.prev.next = node.next;
}
}
if (node.next) {
node.next.prev = node.prev;
}
this.root = this.meld(this.root, this.mergePairs(node.child));
}
node.child = null;
node.prev = null;
node.next = null;
}
// fix the heap after increasing the priority of a given node
increasePriority(node: HeapNode<T>): void {
// already the top priority element
if (!node || node === this.root) {
return;
}
// extract from siblings
if (node.prev) {
if (node.prev?.child === node) {
if (this.comparator(node.prev.element, node.element)) {
// already in a valid position
return;
}
node.prev.child = node.next;
}
else {
node.prev.next = node.next;
}
}
if (node.next) {
node.next.prev = node.prev;
}
this.root = this.meld(this.root, node);
}
decreasePriority(node: HeapNode<T>): void {
this.deleteNode(node);
this.root = this.meld(this.root, node);
}
meld(a: HeapNode<T>, b: HeapNode<T>): HeapNode<T> {
if (!a) {
return b;
}
if (!b || a === b) {
return a;
}
let parent: HeapNode<T> = b;
let child: HeapNode<T> = a;
if (this.comparator(a.element, b.element)) {
parent = a;
child = b;
}
child.next = parent.child;
if (parent.child) {
parent.child.prev = child;
}
child.prev = parent;
parent.child = child;
parent.next = null;
parent.prev = null;
return parent;
}
mergePairs(node: HeapNode<T>): HeapNode<T> {
if (!node) {
return null;
}
let current: HeapNode<T> = node;
let next: HeapNode<T>;
let nextCurrent: HeapNode<T>;
let pairs: HeapNode<T>;
let melded: HeapNode<T>;
while (current) {
next = current.next;
if (next) {
nextCurrent = next.next;
melded = this.meld(current, next);
if (melded) {
melded.prev = pairs;
}
pairs = melded;
}
else {
nextCurrent = null;
current.prev = pairs;
pairs = current;
break;
}
current = nextCurrent;
}
melded = null;
let prev: HeapNode<T>;
while (pairs) {
prev = pairs.prev;
melded = this.meld(melded, pairs);
pairs = prev;
}
return melded;
}
}
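A minimal usage sketch: the comparator returns true when its first argument has the higher priority, so `(a, b) => a < b` yields a min-heap:

```ts
const heap = new PairingHeap<number>((a, b) => a < b);
heap.add(5);
const nine = heap.add(9);  // keep the node handle for targeted deletion
heap.add(2);

heap.peek();           // 2
heap.pop();            // 2
heap.deleteNode(nine); // remove 9 without popping it
heap.pop();            // 5
heap.isEmpty();        // true
```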

View File

@@ -1,5 +0,0 @@
jest.mock('./mempool-config.json', () => ({}), { virtual: true });
jest.mock('./src/logger.ts', () => ({}), { virtual: true });
jest.mock('./src/api/rbf-cache.ts', () => ({}), { virtual: true });
jest.mock('./src/api/mempool.ts', () => ({}), { virtual: true });
jest.mock('./src/api/memory-cache.ts', () => ({}), { virtual: true });

View File

@@ -1,8 +0,0 @@
{
"extends": "./tsconfig",
"exclude": ["**/*.test.*", "**/__mocks__/*", "**/__tests__/*"],
"compilerOptions": {
"types": ["node"]
},
}

View File

@@ -1,8 +1,8 @@
{
"compilerOptions": {
"types": ["node"],
"module": "commonjs",
"target": "esnext",
"types": ["node", "jest"],
"lib": ["es2019", "dom"],
"strict": true,
"noImplicitAny": false,
@@ -13,8 +13,7 @@
"node_modules/@types"
],
"allowSyntheticDefaultImports": true,
"esModuleInterop": true,
"allowJs": true,
"esModuleInterop": true
},
"include": [
"src/**/*.ts"
@@ -22,4 +21,4 @@
"exclude": [
"dist/**"
]
}
}

View File

@@ -1,3 +0,0 @@
I hereby accept the terms of the Contributor License Agreement in the CONTRIBUTING.md file of the mempool/mempool git repository as of September 1, 2022.
Signed: WesVleuten

View File

@@ -1,3 +0,0 @@
I hereby accept the terms of the Contributor License Agreement in the CONTRIBUTING.md file of the mempool/mempool git repository as of August 19, 2022.
Signed: junderw

View File

@@ -89,7 +89,6 @@ Below we list all settings from `mempool-config.json` and the corresponding over
"MEMPOOL": {
"NETWORK": "mainnet",
"BACKEND": "electrum",
"ENABLED": true,
"HTTP_PORT": 8999,
"SPAWN_CLUSTER_PROCS": 0,
"API_URL_PREFIX": "/api/v1/",
@@ -351,68 +350,3 @@ Corresponding `docker-compose.yml` overrides:
PRICE_DATA_SERVER_CLEARNET_URL: ""
...
```
<br/>
`mempool-config.json`:
```
"LIGHTNING": {
"ENABLED": false
"BACKEND": "lnd"
"TOPOLOGY_FOLDER": ""
"STATS_REFRESH_INTERVAL": 600
"GRAPH_REFRESH_INTERVAL": 600
"LOGGER_UPDATE_INTERVAL": 30
}
```
Corresponding `docker-compose.yml` overrides:
```
api:
environment:
LIGHTNING_ENABLED: false
LIGHTNING_BACKEND: "lnd"
LIGHTNING_TOPOLOGY_FOLDER: ""
LIGHTNING_STATS_REFRESH_INTERVAL: 600
LIGHTNING_GRAPH_REFRESH_INTERVAL: 600
LIGHTNING_LOGGER_UPDATE_INTERVAL: 30
...
```
<br/>
`mempool-config.json`:
```
"LND": {
"TLS_CERT_PATH": ""
"MACAROON_PATH": ""
"REST_API_URL": "https://localhost:8080"
}
```
Corresponding `docker-compose.yml` overrides:
```
api:
environment:
LND_TLS_CERT_PATH: ""
LND_MACAROON_PATH: ""
LND_REST_API_URL: "https://localhost:8080"
...
```
<br/>
`mempool-config.json`:
```
"CLIGHTNING": {
"SOCKET": ""
}
```
Corresponding `docker-compose.yml` overrides:
```
api:
environment:
CLIGHTNING_SOCKET: ""
...
```

View File

@@ -1,7 +1,7 @@
FROM node:16.16.0-buster-slim AS builder
ARG commitHash
ENV MEMPOOL_COMMIT_HASH=${commitHash}
ENV DOCKER_COMMIT_HASH=${commitHash}
WORKDIR /build
COPY . .
@@ -9,15 +9,18 @@ COPY . .
RUN apt-get update
RUN apt-get install -y build-essential python3 pkg-config
RUN npm install --omit=dev --omit=optional
RUN npm run package
RUN npm run build
FROM node:16.16.0-buster-slim
WORKDIR /backend
RUN chown 1000:1000 ./
COPY --from=builder --chown=1000:1000 /build/package ./package/
COPY --from=builder --chown=1000:1000 /build/mempool-config.json /build/start.sh /build/wait-for-it.sh ./
COPY --from=builder /build/ .
RUN chmod +x /backend/start.sh
RUN chmod +x /backend/wait-for-it.sh
RUN chown -R 1000:1000 /backend && chmod -R 755 /backend
USER 1000

View File

@@ -2,7 +2,6 @@
"MEMPOOL": {
"NETWORK": "__MEMPOOL_NETWORK__",
"BACKEND": "__MEMPOOL_BACKEND__",
"ENABLED": __MEMPOOL_ENABLED__,
"HTTP_PORT": __MEMPOOL_HTTP_PORT__,
"SPAWN_CLUSTER_PROCS": __MEMPOOL_SPAWN_CLUSTER_PROCS__,
"API_URL_PREFIX": "__MEMPOOL_API_URL_PREFIX__",
@@ -68,22 +67,6 @@
"ENABLED": __BISQ_ENABLED__,
"DATA_PATH": "__BISQ_DATA_PATH__"
},
"LIGHTNING": {
"ENABLED": __LIGHTNING_ENABLED__,
"BACKEND": "__LIGHTNING_BACKEND__",
"STATS_REFRESH_INTERVAL": __LIGHTNING_STATS_REFRESH_INTERVAL__,
"GRAPH_REFRESH_INTERVAL": __LIGHTNING_GRAPH_REFRESH_INTERVAL__,
"LOGGER_UPDATE_INTERVAL": __LIGHTNING_LOGGER_UPDATE_INTERVAL__,
"TOPOLOGY_FOLDER": "__LIGHTNING_TOPOLOGY_FOLDER__"
},
"LND": {
"TLS_CERT_PATH": "__LND_TLS_CERT_PATH__",
"MACAROON_PATH": "__LND_MACAROON_PATH__",
"REST_API_URL": "__LND_REST_API_URL__"
},
"CLIGHTNING": {
"SOCKET": "__CLIGHTNING_SOCKET__"
},
"SOCKS5PROXY": {
"ENABLED": __SOCKS5PROXY_ENABLED__,
"USE_ONION": __SOCKS5PROXY_USE_ONION__,

44
docker/backend/start.sh Executable file → Normal file
View File

@@ -3,7 +3,6 @@
# MEMPOOL
__MEMPOOL_NETWORK__=${MEMPOOL_NETWORK:=mainnet}
__MEMPOOL_BACKEND__=${MEMPOOL_BACKEND:=electrum}
__MEMPOOL_ENABLED__=${MEMPOOL_ENABLED:=true}
__MEMPOOL_HTTP_PORT__=${BACKEND_HTTP_PORT:=8999}
__MEMPOOL_SPAWN_CLUSTER_PROCS__=${MEMPOOL_SPAWN_CLUSTER_PROCS:=0}
__MEMPOOL_API_URL_PREFIX__=${MEMPOOL_API_URL_PREFIX:=/api/v1/}
@@ -25,8 +24,8 @@ __MEMPOOL_USER_AGENT__=${MEMPOOL_USER_AGENT:=mempool}
__MEMPOOL_STDOUT_LOG_MIN_PRIORITY__=${MEMPOOL_STDOUT_LOG_MIN_PRIORITY:=info}
__MEMPOOL_INDEXING_BLOCKS_AMOUNT__=${MEMPOOL_INDEXING_BLOCKS_AMOUNT:=false}
__MEMPOOL_AUTOMATIC_BLOCK_REINDEXING__=${MEMPOOL_AUTOMATIC_BLOCK_REINDEXING:=false}
__MEMPOOL_POOLS_JSON_URL__=${MEMPOOL_POOLS_JSON_URL:=https://raw.githubusercontent.com/mempool/mining-pools/master/pools.json}
__MEMPOOL_POOLS_JSON_TREE_URL__=${MEMPOOL_POOLS_JSON_TREE_URL:=https://api.github.com/repos/mempool/mining-pools/git/trees/master}
__MEMPOOL_POOLS_JSON_URL__=${MEMPOOL_POOLS_JSON_URL:=false}
__MEMPOOL_POOLS_JSON_TREE_URL__=${MEMPOOL_POOLS_JSON_TREE_URL:=false}
# CORE_RPC
__CORE_RPC_HOST__=${CORE_RPC_HOST:=127.0.0.1}
@@ -92,27 +91,10 @@ __EXTERNAL_DATA_SERVER_LIQUID_ONION__=${EXTERNAL_DATA_SERVER_LIQUID_ONION:=http:
__EXTERNAL_DATA_SERVER_BISQ_URL__=${EXTERNAL_DATA_SERVER_BISQ_URL:=https://bisq.markets/api}
__EXTERNAL_DATA_SERVER_BISQ_ONION__=${EXTERNAL_DATA_SERVER_BISQ_ONION:=http://bisqmktse2cabavbr2xjq7xw3h6g5ottemo5rolfcwt6aly6tp5fdryd.onion/api}
# LIGHTNING
__LIGHTNING_ENABLED__=${LIGHTNING_ENABLED:=false}
__LIGHTNING_BACKEND__=${LIGHTNING_BACKEND:="lnd"}
__LIGHTNING_TOPOLOGY_FOLDER__=${LIGHTNING_TOPOLOGY_FOLDER:=""}
__LIGHTNING_STATS_REFRESH_INTERVAL__=${LIGHTNING_STATS_REFRESH_INTERVAL:=600}
__LIGHTNING_GRAPH_REFRESH_INTERVAL__=${LIGHTNING_GRAPH_REFRESH_INTERVAL:=600}
__LIGHTNING_LOGGER_UPDATE_INTERVAL__=${LIGHTNING_LOGGER_UPDATE_INTERVAL:=30}
# LND
__LND_TLS_CERT_PATH__=${LND_TLS_CERT_PATH:=""}
__LND_MACAROON_PATH__=${LND_MACAROON_PATH:=""}
__LND_REST_API_URL__=${LND_REST_API_URL:="https://localhost:8080"}
# CLN
__CLIGHTNING_SOCKET__=${CLIGHTNING_SOCKET:=""}
mkdir -p "${__MEMPOOL_CACHE_DIR__}"
sed -i "s/__MEMPOOL_NETWORK__/${__MEMPOOL_NETWORK__}/g" mempool-config.json
sed -i "s/__MEMPOOL_BACKEND__/${__MEMPOOL_BACKEND__}/g" mempool-config.json
sed -i "s/__MEMPOOL_ENABLED__/${__MEMPOOL_ENABLED__}/g" mempool-config.json
sed -i "s/__MEMPOOL_HTTP_PORT__/${__MEMPOOL_HTTP_PORT__}/g" mempool-config.json
sed -i "s/__MEMPOOL_SPAWN_CLUSTER_PROCS__/${__MEMPOOL_SPAWN_CLUSTER_PROCS__}/g" mempool-config.json
sed -i "s!__MEMPOOL_API_URL_PREFIX__!${__MEMPOOL_API_URL_PREFIX__}!g" mempool-config.json
@@ -134,8 +116,8 @@ sed -i "s!__MEMPOOL_USER_AGENT__!${__MEMPOOL_USER_AGENT__}!g" mempool-config.jso
sed -i "s/__MEMPOOL_STDOUT_LOG_MIN_PRIORITY__/${__MEMPOOL_STDOUT_LOG_MIN_PRIORITY__}/g" mempool-config.json
sed -i "s/__MEMPOOL_INDEXING_BLOCKS_AMOUNT__/${__MEMPOOL_INDEXING_BLOCKS_AMOUNT__}/g" mempool-config.json
sed -i "s/__MEMPOOL_AUTOMATIC_BLOCK_REINDEXING__/${__MEMPOOL_AUTOMATIC_BLOCK_REINDEXING__}/g" mempool-config.json
sed -i "s!__MEMPOOL_POOLS_JSON_URL__!${__MEMPOOL_POOLS_JSON_URL__}!g" mempool-config.json
sed -i "s!__MEMPOOL_POOLS_JSON_TREE_URL__!${__MEMPOOL_POOLS_JSON_TREE_URL__}!g" mempool-config.json
sed -i "s/__MEMPOOL_POOLS_JSON_URL__/${__MEMPOOL_POOLS_JSON_URL__}/g" mempool-config.json
sed -i "s/__MEMPOOL_POOLS_JSON_TREE_URL__/${__MEMPOOL_POOLS_JSON_TREE_URL__}/g" mempool-config.json
sed -i "s/__CORE_RPC_HOST__/${__CORE_RPC_HOST__}/g" mempool-config.json
sed -i "s/__CORE_RPC_PORT__/${__CORE_RPC_PORT__}/g" mempool-config.json
@@ -191,20 +173,4 @@ sed -i "s!__EXTERNAL_DATA_SERVER_LIQUID_ONION__!${__EXTERNAL_DATA_SERVER_LIQUID_
sed -i "s!__EXTERNAL_DATA_SERVER_BISQ_URL__!${__EXTERNAL_DATA_SERVER_BISQ_URL__}!g" mempool-config.json
sed -i "s!__EXTERNAL_DATA_SERVER_BISQ_ONION__!${__EXTERNAL_DATA_SERVER_BISQ_ONION__}!g" mempool-config.json
# LIGHTNING
sed -i "s!__LIGHTNING_ENABLED__!${__LIGHTNING_ENABLED__}!g" mempool-config.json
sed -i "s!__LIGHTNING_BACKEND__!${__LIGHTNING_BACKEND__}!g" mempool-config.json
sed -i "s!__LIGHTNING_TOPOLOGY_FOLDER__!${__LIGHTNING_TOPOLOGY_FOLDER__}!g" mempool-config.json
sed -i "s!__LIGHTNING_STATS_REFRESH_INTERVAL__!${__LIGHTNING_STATS_REFRESH_INTERVAL__}!g" mempool-config.json
sed -i "s!__LIGHTNING_GRAPH_REFRESH_INTERVAL__!${__LIGHTNING_GRAPH_REFRESH_INTERVAL__}!g" mempool-config.json
sed -i "s!__LIGHTNING_LOGGER_UPDATE_INTERVAL__!${__LIGHTNING_LOGGER_UPDATE_INTERVAL__}!g" mempool-config.json
# LND
sed -i "s!__LND_TLS_CERT_PATH__!${__LND_TLS_CERT_PATH__}!g" mempool-config.json
sed -i "s!__LND_MACAROON_PATH__!${__LND_MACAROON_PATH__}!g" mempool-config.json
sed -i "s!__LND_REST_API_URL__!${__LND_REST_API_URL__}!g" mempool-config.json
# CLN
sed -i "s!__CLIGHTNING_SOCKET__!${__CLIGHTNING_SOCKET__}!g" mempool-config.json
node /backend/package/index.js
node /backend/dist/index.js

0
docker/backend/wait-for-it.sh Executable file → Normal file
View File

View File

@@ -8,9 +8,7 @@ WORKDIR /build
COPY . .
RUN apt-get update
RUN apt-get install -y build-essential rsync
RUN cp mempool-frontend-config.sample.json mempool-frontend-config.json
RUN npm install --omit=dev --omit=optional
RUN npm run build
FROM nginx:1.17.8-alpine
@@ -30,9 +28,7 @@ RUN chown -R 1000:1000 /patch && chmod -R 755 /patch && \
chown -R 1000:1000 /var/cache/nginx && \
chown -R 1000:1000 /var/log/nginx && \
chown -R 1000:1000 /etc/nginx/nginx.conf && \
chown -R 1000:1000 /etc/nginx/conf.d && \
chown -R 1000:1000 /var/www/mempool
chown -R 1000:1000 /etc/nginx/conf.d
RUN touch /var/run/nginx.pid && \
chown -R 1000:1000 /var/run/nginx.pid

View File

@@ -10,51 +10,4 @@ cp /etc/nginx/nginx.conf /patch/nginx.conf
sed -i "s/__MEMPOOL_FRONTEND_HTTP_PORT__/${__MEMPOOL_FRONTEND_HTTP_PORT__}/g" /patch/nginx.conf
cat /patch/nginx.conf > /etc/nginx/nginx.conf
# Runtime overrides - read env vars defined in docker compose
__TESTNET_ENABLED__=${TESTNET_ENABLED:=false}
__SIGNET_ENABLED__=${SIGNET_ENABLED:=false}
__LIQUID_ENABLED__=${LIQUID_ENABLED:=false}
__LIQUID_TESTNET_ENABLED__=${LIQUID_TESTNET_ENABLED:=false}
__BISQ_ENABLED__=${BISQ_ENABLED:=false}
__BISQ_SEPARATE_BACKEND__=${BISQ_SEPARATE_BACKEND:=false}
__ITEMS_PER_PAGE__=${ITEMS_PER_PAGE:=10}
__KEEP_BLOCKS_AMOUNT__=${KEEP_BLOCKS_AMOUNT:=8}
__NGINX_PROTOCOL__=${NGINX_PROTOCOL:=http}
__NGINX_HOSTNAME__=${NGINX_HOSTNAME:=localhost}
__NGINX_PORT__=${NGINX_PORT:=8999}
__BLOCK_WEIGHT_UNITS__=${BLOCK_WEIGHT_UNITS:=4000000}
__MEMPOOL_BLOCKS_AMOUNT__=${MEMPOOL_BLOCKS_AMOUNT:=8}
__BASE_MODULE__=${BASE_MODULE:=mempool}
__MEMPOOL_WEBSITE_URL__=${MEMPOOL_WEBSITE_URL:=https://mempool.space}
__LIQUID_WEBSITE_URL__=${LIQUID_WEBSITE_URL:=https://liquid.network}
__BISQ_WEBSITE_URL__=${BISQ_WEBSITE_URL:=https://bisq.markets}
__MINING_DASHBOARD__=${MINING_DASHBOARD:=true}
__LIGHTNING__=${LIGHTNING:=false}
# Export as environment variables to be used by envsubst
export __TESTNET_ENABLED__
export __SIGNET_ENABLED__
export __LIQUID_ENABLED__
export __LIQUID_TESTNET_ENABLED__
export __BISQ_ENABLED__
export __BISQ_SEPARATE_BACKEND__
export __ITEMS_PER_PAGE__
export __KEEP_BLOCKS_AMOUNT__
export __NGINX_PROTOCOL__
export __NGINX_HOSTNAME__
export __NGINX_PORT__
export __BLOCK_WEIGHT_UNITS__
export __MEMPOOL_BLOCKS_AMOUNT__
export __BASE_MODULE__
export __MEMPOOL_WEBSITE_URL__
export __LIQUID_WEBSITE_URL__
export __BISQ_WEBSITE_URL__
export __MINING_DASHBOARD__
export __LIGHTNING__
folder=$(find /var/www/mempool -name "config.js" | xargs dirname)
echo ${folder}
envsubst < ${folder}/config.template.js > ${folder}/config.js
exec "$@"

View File

@@ -1,7 +1,10 @@
#!/bin/sh
#backend
gitMaster="\.\.\/\.git\/refs\/heads\/master"
git ls-remote https://github.com/mempool/mempool.git "$1^{}" | awk '{ print $1}' > ./backend/master
cp ./docker/backend/* ./backend/
sed -i "s/${gitMaster}/master/g" ./backend/src/api/backend-info.ts
#frontend
localhostIP="127.0.0.1"

View File

@@ -32,7 +32,6 @@
"prefer-const": 1,
"prefer-rest-params": 1,
"quotes": [1, "single", { "allowTemplateLiterals": true }],
"semi": 1,
"eqeqeq": 1
"semi": 1
}
}

View File

@@ -113,7 +113,7 @@ https://www.transifex.com/mempool/mempool/dashboard/
* French @Bayernatoor
* Korean @kcalvinalvinn
* Italian @HodlBits
* Hebrew @rapidlab309
* Hebrew @Sh0ham
* Georgian @wyd_idk
* Hungarian @btcdragonlord
* Dutch @m__btc

View File

@@ -152,14 +152,15 @@
"assets": [
"src/favicon.ico",
"src/resources",
"src/robots.txt",
"src/config.js",
"src/config.template.js"
"src/robots.txt"
],
"styles": [
"src/styles.scss",
"node_modules/@fortawesome/fontawesome-svg-core/styles.css"
],
"scripts": [
"generated-config.js"
],
"vendorChunk": true,
"extractLicenses": false,
"buildOptimizer": false,
@@ -169,10 +170,6 @@
},
"configurations": {
"production": {
"assets": [
"src/favicon.ico",
"src/robots.txt"
],
"fileReplacements": [
{
"replace": "src/environments/environment.ts",
@@ -221,10 +218,6 @@
"proxyConfig": "proxy.conf.local.js",
"verbose": true
},
"local-esplora": {
"proxyConfig": "proxy.conf.local-esplora.js",
"verbose": true
},
"mixed": {
"proxyConfig": "proxy.conf.mixed.js",
"verbose": true
@@ -268,6 +261,57 @@
}
}
},
"server": {
"builder": "@angular-devkit/build-angular:server",
"options": {
"outputPath": "dist/mempool/server",
"main": "server.ts",
"tsConfig": "tsconfig.server.json",
"sourceMap": true,
"optimization": false
},
"configurations": {
"production": {
"outputHashing": "media",
"fileReplacements": [
{
"replace": "src/environments/environment.ts",
"with": "src/environments/environment.prod.ts"
}
],
"sourceMap": false,
"localize": true,
"optimization": true
}
},
"defaultConfiguration": ""
},
"serve-ssr": {
"builder": "@nguniversal/builders:ssr-dev-server",
"options": {
"browserTarget": "mempool:build",
"serverTarget": "mempool:server"
},
"configurations": {
"production": {
"browserTarget": "mempool:build:production",
"serverTarget": "mempool:server:production"
}
}
},
"prerender": {
"builder": "@nguniversal/builders:prerender",
"options": {
"browserTarget": "mempool:build:production",
"serverTarget": "mempool:server:production",
"routes": [
"/"
]
},
"configurations": {
"production": {}
}
},
"cypress-run": {
"builder": "@cypress/schematic:cypress",
"options": {
@@ -288,5 +332,6 @@
}
}
}
}
},
"defaultProject": "mempool"
}

View File

@@ -2,8 +2,7 @@ var fs = require('fs');
const { spawnSync } = require('child_process');
const CONFIG_FILE_NAME = 'mempool-frontend-config.json';
const GENERATED_CONFIG_FILE_NAME = 'src/resources/config.js';
const GENERATED_TEMPLATE_CONFIG_FILE_NAME = 'src/resources/config.template.js';
const GENERATED_CONFIG_FILE_NAME = 'generated-config.js';
let settings = [];
let configContent = {};
@@ -68,17 +67,10 @@ if (process.env.DOCKER_COMMIT_HASH) {
const newConfig = `(function (window) {
window.__env = window.__env || {};${settings.reduce((str, obj) => `${str}
window.__env.${obj.key} = ${typeof obj.value === 'string' ? `'${obj.value}'` : obj.value};`, '')}
window.__env.${obj.key} = ${ typeof obj.value === 'string' ? `'${obj.value}'` : obj.value };`, '')}
window.__env.GIT_COMMIT_HASH = '${gitCommitHash}';
window.__env.PACKAGE_JSON_VERSION = '${packetJsonVersion}';
}(this));`;
const newConfigTemplate = `(function (window) {
window.__env = window.__env || {};${settings.reduce((str, obj) => `${str}
window.__env.${obj.key} = ${typeof obj.value === 'string' ? `'\${__${obj.key}__}'` : `\${__${obj.key}__}`};`, '')}
window.__env.GIT_COMMIT_HASH = '${gitCommitHash}';
window.__env.PACKAGE_JSON_VERSION = '${packetJsonVersion}';
}(this));`;
}(global || this));`;
function readConfig(path) {
try {
@@ -97,16 +89,6 @@ function writeConfig(path, config) {
}
}
function writeConfigTemplate(path, config) {
try {
fs.writeFileSync(path, config, 'utf8');
} catch (e) {
throw new Error(e);
}
}
writeConfigTemplate(GENERATED_TEMPLATE_CONFIG_FILE_NAME, newConfigTemplate);
const currentConfig = readConfig(GENERATED_CONFIG_FILE_NAME);
if (currentConfig && currentConfig === newConfig) {
@@ -124,4 +106,4 @@ if (currentConfig && currentConfig === newConfig) {
console.log('NEW CONFIG: ', newConfig);
writeConfig(GENERATED_CONFIG_FILE_NAME, newConfig);
console.log(`${GENERATED_CONFIG_FILE_NAME} file updated`);
}
};
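For illustration, the emitted `generated-config.js` ends up looking roughly like this — the keys come from the settings list, the values here are made up, and the closing uses the `global || this` variant from the diff above:

```ts
(function (window) {
  window.__env = window.__env || {};
  window.__env.TESTNET_ENABLED = false;
  window.__env.BASE_MODULE = 'mempool';
  window.__env.GIT_COMMIT_HASH = 'abc1234';
  window.__env.PACKAGE_JSON_VERSION = '2.4.0';
}(global || this));
```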

View File

@@ -17,8 +17,5 @@
"LIQUID_WEBSITE_URL": "https://liquid.network",
"BISQ_WEBSITE_URL": "https://bisq.markets",
"MINING_DASHBOARD": true,
"MAINNET_BLOCK_AUDIT_START_HEIGHT": 0,
"TESTNET_BLOCK_AUDIT_START_HEIGHT": 0,
"SIGNET_BLOCK_AUDIT_START_HEIGHT": 0,
"LIGHTNING": false
}

14576
frontend/package-lock.json generated

File diff suppressed because it is too large

View File

@@ -22,21 +22,20 @@
"scripts": {
"ng": "./node_modules/@angular/cli/bin/ng.js",
"tsc": "./node_modules/typescript/bin/tsc",
"i18n-extract-from-source": "npm run ng -- extract-i18n --out-file ./src/locale/messages.xlf",
"i18n-extract-from-source": "./node_modules/@angular/cli/bin/ng extract-i18n --out-file ./src/locale/messages.xlf",
"i18n-pull-from-transifex": "tx pull -a --parallel --minimum-perc 1 --force",
"serve": "npm run generate-config && npm run ng -- serve -c local",
"serve:stg": "npm run generate-config && npm run ng -- serve -c staging",
"serve:local-prod": "npm run generate-config && npm run ng -- serve -c local-prod",
"serve:local-staging": "npm run generate-config && npm run ng -- serve -c local-staging",
"start": "npm run generate-config && npm run sync-assets-dev && npm run ng -- serve -c local",
"start:local-esplora": "npm run generate-config && npm run sync-assets-dev && npm run ng -- serve -c local-esplora",
"start:stg": "npm run generate-config && npm run sync-assets-dev && npm run ng -- serve -c staging",
"start:local-prod": "npm run generate-config && npm run sync-assets-dev && npm run ng -- serve -c local-prod",
"start:local-staging": "npm run generate-config && npm run sync-assets-dev && npm run ng -- serve -c local-staging",
"start:mixed": "npm run generate-config && npm run sync-assets-dev && npm run ng -- serve -c mixed",
"build": "npm run generate-config && npm run ng -- build --configuration production --localize && npm run sync-assets && npm run build-mempool.js",
"sync-assets": "rsync -av ./src/resources ./dist/mempool/browser && node sync-assets.js 'dist/mempool/browser/resources/'",
"sync-assets-dev": "node sync-assets.js 'src/resources/'",
"sync-assets": "node sync-assets.js && rsync -av ./dist/mempool/browser/en-US/resources ./dist/mempool/browser/resources",
"sync-assets-dev": "node sync-assets.js dev",
"generate-config": "node generate-config.js",
"build-mempool.js": "npm run build-mempool-js && npm run build-mempool-liquid-js && npm run build-mempool-bisq-js",
"build-mempool-js": "browserify -p tinyify ./node_modules/@mempool/mempool.js/lib/index.js --standalone mempoolJS > ./dist/mempool/browser/en-US/mempool.js",
@@ -51,6 +50,9 @@
"config:defaults:mempool": "node update-config.js TESTNET_ENABLED=true SIGNET_ENABLED=true LIQUID_ENABLED=true LIQUID_TESTNET_ENABLED=true BISQ_ENABLED=true ITEMS_PER_PAGE=25 BASE_MODULE=mempool BLOCK_WEIGHT_UNITS=4000000 && npm run generate-config",
"config:defaults:liquid": "node update-config.js TESTNET_ENABLED=true SIGNET_ENABLED=true LIQUID_ENABLED=true LIQUID_TESTNET_ENABLED=true BISQ_ENABLED=true ITEMS_PER_PAGE=25 BASE_MODULE=liquid BLOCK_WEIGHT_UNITS=300000 && npm run generate-config",
"config:defaults:bisq": "node update-config.js TESTNET_ENABLED=true SIGNET_ENABLED=true LIQUID_ENABLED=true BISQ_ENABLED=true ITEMS_PER_PAGE=25 BASE_MODULE=bisq BLOCK_WEIGHT_UNITS=4000000 && npm run generate-config",
"dev:ssr": "npm run generate-config && npm run ng -- run mempool:serve-ssr",
"serve:ssr": "node server.run.js",
"build:ssr": "npm run build && npm run ng -- run mempool:server:production && npm run tsc -- server.run.ts",
"prerender": "npm run ng -- run mempool:prerender",
"cypress:open": "cypress open",
"cypress:run": "cypress run",
@@ -61,59 +63,63 @@
"cypress:run:ci:staging": "node update-config.js TESTNET_ENABLED=true SIGNET_ENABLED=true LIQUID_ENABLED=true BISQ_ENABLED=true ITEMS_PER_PAGE=25 && npm run generate-config && start-server-and-test serve:local-staging 4200 cypress:run:record"
},
"dependencies": {
"@angular-devkit/build-angular": "^14.2.10",
"@angular/animations": "^14.2.12",
"@angular/cli": "^14.2.10",
"@angular/common": "^14.2.12",
"@angular/compiler": "^14.2.12",
"@angular/core": "^14.2.12",
"@angular/forms": "^14.2.12",
"@angular/localize": "^14.2.12",
"@angular/platform-browser": "^14.2.12",
"@angular/platform-browser-dynamic": "^14.2.12",
"@angular/platform-server": "^14.2.12",
"@angular/router": "^14.2.12",
"@fortawesome/angular-fontawesome": "~0.11.1",
"@fortawesome/fontawesome-common-types": "~6.2.1",
"@fortawesome/fontawesome-svg-core": "~6.2.1",
"@fortawesome/free-solid-svg-icons": "~6.2.1",
"@angular-devkit/build-angular": "~13.3.7",
"@angular/animations": "~13.3.10",
"@angular/cli": "~13.3.7",
"@angular/common": "~13.3.10",
"@angular/compiler": "~13.3.10",
"@angular/core": "~13.3.10",
"@angular/forms": "~13.3.10",
"@angular/localize": "~13.3.10",
"@angular/platform-browser": "~13.3.10",
"@angular/platform-browser-dynamic": "~13.3.10",
"@angular/platform-server": "~13.3.10",
"@angular/router": "~13.3.10",
"@fortawesome/angular-fontawesome": "~0.10.2",
"@fortawesome/fontawesome-common-types": "~6.1.1",
"@fortawesome/fontawesome-svg-core": "~6.1.1",
"@fortawesome/free-solid-svg-icons": "~6.1.1",
"@mempool/mempool.js": "2.3.0",
"@ng-bootstrap/ng-bootstrap": "^13.1.1",
"@types/qrcode": "~1.5.0",
"bootstrap": "~4.6.1",
"@ng-bootstrap/ng-bootstrap": "^11.0.0",
"@nguniversal/express-engine": "~13.1.1",
"@types/qrcode": "~1.4.2",
"bootstrap": "~4.5.0",
"browserify": "^17.0.0",
"clipboard": "^2.0.11",
"clipboard": "^2.0.10",
"domino": "^2.1.6",
"echarts": "~5.4.0",
"echarts": "~5.3.2",
"echarts-gl": "^2.0.9",
"express": "^4.17.1",
"lightweight-charts": "~3.8.0",
"ngx-echarts": "~14.0.0",
"ngx-infinite-scroll": "^14.0.1",
"qrcode": "1.5.1",
"rxjs": "~7.5.7",
"tinyify": "^3.1.0",
"ngx-echarts": "8.0.1",
"ngx-infinite-scroll": "^10.0.1",
"qrcode": "1.5.0",
"rxjs": "~7.5.5",
"tinyify": "^3.0.0",
"tlite": "^0.1.9",
"tslib": "~2.4.1",
"zone.js": "~0.12.0"
"tslib": "~2.4.0",
"zone.js": "~0.11.5"
},
"devDependencies": {
"@angular/compiler-cli": "^14.2.12",
"@angular/language-service": "^14.2.12",
"@types/node": "^18.11.9",
"@typescript-eslint/eslint-plugin": "^5.45.0",
"@typescript-eslint/parser": "^5.45.0",
"eslint": "^8.28.0",
"@angular/compiler-cli": "~13.3.10",
"@angular/language-service": "~13.3.10",
"@nguniversal/builders": "~13.1.1",
"@types/express": "^4.17.0",
"@types/node": "^12.11.1",
"@typescript-eslint/eslint-plugin": "^5.30.5",
"@typescript-eslint/parser": "^5.30.5",
"eslint": "^8.19.0",
"http-proxy-middleware": "~2.0.6",
"prettier": "^2.8.0",
"ts-node": "~10.9.1",
"prettier": "^2.7.1",
"ts-node": "~10.8.1",
"typescript": "~4.6.4"
},
"optionalDependencies": {
"@cypress/schematic": "~2.3.0",
"cypress": "^11.2.0",
"cypress-fail-on-console-error": "~4.0.2",
"@cypress/schematic": "~2.0.0",
"cypress": "^10.3.0",
"cypress-fail-on-console-error": "~3.0.0",
"cypress-wait-until": "^1.7.2",
"mock-socket": "~9.1.5",
"mock-socket": "~9.1.4",
"start-server-and-test": "~1.14.0"
},
"scarfSettings": {

View File

@@ -1,137 +0,0 @@
const fs = require('fs');
const FRONTEND_CONFIG_FILE_NAME = 'mempool-frontend-config.json';
let configContent;
// Read frontend config
try {
const rawConfig = fs.readFileSync(FRONTEND_CONFIG_FILE_NAME);
configContent = JSON.parse(rawConfig);
console.log(`${FRONTEND_CONFIG_FILE_NAME} file found, using provided config`);
} catch (e) {
console.log(e);
if (e.code !== 'ENOENT') {
throw new Error(e);
} else {
console.log(`${FRONTEND_CONFIG_FILE_NAME} file not found, using default config`);
}
}
let PROXY_CONFIG = [];
if (configContent && configContent.BASE_MODULE === 'liquid') {
PROXY_CONFIG.push(...[
{
context: ['/liquid/api/v1/**'],
target: `http://127.0.0.1:8999`,
secure: false,
ws: true,
changeOrigin: true,
proxyTimeout: 30000,
pathRewrite: {
"^/liquid": ""
},
},
{
context: ['/liquid/api/**'],
target: `http://127.0.0.1:3000`,
secure: false,
changeOrigin: true,
proxyTimeout: 30000,
pathRewrite: {
"^/liquid/api/": ""
},
},
{
context: ['/liquidtestnet/api/v1/**'],
target: `http://127.0.0.1:8999`,
secure: false,
ws: true,
changeOrigin: true,
proxyTimeout: 30000,
pathRewrite: {
"^/liquidtestnet": ""
},
},
{
context: ['/liquidtestnet/api/**'],
target: `http://127.0.0.1:3000`,
secure: false,
changeOrigin: true,
proxyTimeout: 30000,
pathRewrite: {
"^/liquidtestnet/api/": "/"
},
},
]);
}
if (configContent && configContent.BASE_MODULE === 'bisq') {
PROXY_CONFIG.push(...[
{
context: ['/bisq/api/v1/ws'],
target: `http://127.0.0.1:8999`,
secure: false,
ws: true,
changeOrigin: true,
proxyTimeout: 30000,
pathRewrite: {
"^/bisq": ""
},
},
{
context: ['/bisq/api/v1/**'],
target: `http://127.0.0.1:8999`,
secure: false,
changeOrigin: true,
proxyTimeout: 30000,
},
{
context: ['/bisq/api/**'],
target: `http://127.0.0.1:8999`,
secure: false,
changeOrigin: true,
proxyTimeout: 30000,
pathRewrite: {
"^/bisq/api/": "/api/v1/bisq/"
},
}
]);
}
PROXY_CONFIG.push(...[
{
context: ['/testnet/api/v1/lightning/**'],
target: `http://127.0.0.1:8999`,
secure: false,
changeOrigin: true,
proxyTimeout: 30000,
pathRewrite: {
"^/testnet": ""
},
},
{
context: ['/api/v1/**'],
target: `http://127.0.0.1:8999`,
secure: false,
ws: true,
changeOrigin: true,
proxyTimeout: 30000,
},
{
context: ['/api/**'],
target: `http://127.0.0.1:3000`,
secure: false,
changeOrigin: true,
proxyTimeout: 30000,
pathRewrite: {
"^/api": ""
},
}
]);
console.log(PROXY_CONFIG);
module.exports = PROXY_CONFIG;

View File

@@ -3,9 +3,9 @@ const fs = require('fs');
let PROXY_CONFIG = require('./proxy.conf');
PROXY_CONFIG.forEach(entry => {
entry.target = entry.target.replace("mempool.space", "mempool-staging.tk7.mempool.space");
entry.target = entry.target.replace("liquid.network", "liquid-staging.tk7.mempool.space");
entry.target = entry.target.replace("bisq.markets", "bisq-staging.fra.mempool.space");
entry.target = entry.target.replace("mempool.space", "mempool.ninja");
entry.target = entry.target.replace("liquid.network", "liquid.place");
entry.target = entry.target.replace("bisq.markets", "bisq.ninja");
});
module.exports = PROXY_CONFIG;

96
frontend/server.run.ts Normal file
View File

@@ -0,0 +1,96 @@
import 'zone.js/node';
import './generated-config';
import * as domino from 'domino';
import * as express from 'express';
import * as fs from 'fs';
import * as path from 'path';
const {readFileSync, existsSync} = require('fs');
const {createProxyMiddleware} = require('http-proxy-middleware');
const template = fs.readFileSync(path.join(process.cwd(), 'dist/mempool/browser/en-US/', 'index.html')).toString();
const win = domino.createWindow(template);
// @ts-ignore
win.__env = global.__env;
// @ts-ignore
win.matchMedia = () => {
return {
matches: true
};
};
// @ts-ignore
win.setTimeout = (fn) => { fn(); };
win.document.body.scrollTo = (() => {});
// @ts-ignore
global['window'] = win;
global['document'] = win.document;
// @ts-ignore
global['history'] = { state: { } };
global['localStorage'] = {
getItem: () => '',
setItem: () => {},
removeItem: () => {},
clear: () => {},
length: 0,
key: () => '',
};
/**
* Return the list of supported and actually active locales
*/
function getActiveLocales() {
const angularConfig = JSON.parse(readFileSync('angular.json', 'utf8'));
const supportedLocales = [
angularConfig.projects.mempool.i18n.sourceLocale,
...Object.keys(angularConfig.projects.mempool.i18n.locales),
];
return supportedLocales.filter(locale => existsSync(`./dist/mempool/server/${locale}`));
}
function app() {
const server = express();
// proxy API to nginx
server.get('/api/**', createProxyMiddleware({
// @ts-ignore
target: win.__env.NGINX_PROTOCOL + '://' + win.__env.NGINX_HOSTNAME + ':' + win.__env.NGINX_PORT,
changeOrigin: true,
}));
// map / and /en to en-US
const defaultLocale = 'en-US';
console.log(`serving default locale: ${defaultLocale}`);
const appServerModule = require(`./dist/mempool/server/${defaultLocale}/main.js`);
server.use('/', appServerModule.app(defaultLocale));
server.use('/en', appServerModule.app(defaultLocale));
// map each locale to its localized main.js
getActiveLocales().forEach(locale => {
console.log('serving locale:', locale);
const appServerModule = require(`./dist/mempool/server/${locale}/main.js`);
// map everything to itself
server.use(`/${locale}`, appServerModule.app(locale));
});
return server;
}
function run() {
const port = process.env.PORT || 4000;
// Start up the Node server
app().listen(port, () => {
console.log(`Node Express server listening on port ${port}`);
});
}
run();
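Per the `build:ssr` and `serve:ssr` scripts in the package.json diff above, this entrypoint is compiled with `npm run tsc -- server.run.ts` and started via `node server.run.js`; it listens on `PORT` (default 4000), proxies `/api/**` to the configured nginx host, and mounts one localized Express app per built locale.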

160
frontend/server.ts Normal file
View File

@@ -0,0 +1,160 @@
import 'zone.js/node';
import './generated-config';
import { ngExpressEngine } from '@nguniversal/express-engine';
import * as express from 'express';
import * as fs from 'fs';
import * as path from 'path';
import * as domino from 'domino';
import { join } from 'path';
import { AppServerModule } from './src/main.server';
import { APP_BASE_HREF } from '@angular/common';
import { existsSync } from 'fs';
const template = fs.readFileSync(path.join(process.cwd(), 'dist/mempool/browser/en-US/', 'index.html')).toString();
const win = domino.createWindow(template);
// @ts-ignore
win.__env = global.__env;
// @ts-ignore
win.matchMedia = () => {
return {
matches: true
};
};
// @ts-ignore
win.setTimeout = (fn) => { fn(); };
win.document.body.scrollTo = (() => {});
// @ts-ignore
global['window'] = win;
global['document'] = win.document;
// @ts-ignore
global['history'] = { state: { } };
global['localStorage'] = {
getItem: () => '',
setItem: () => {},
removeItem: () => {},
clear: () => {},
length: 0,
key: () => '',
};
// The Express app is exported so that it can be used by serverless Functions.
export function app(locale: string): express.Express {
const server = express();
const distFolder = join(process.cwd(), `dist/mempool/browser/${locale}`);
const indexHtml = existsSync(join(distFolder, 'index.original.html')) ? 'index.original.html' : 'index';
// Our Universal express-engine (found @ https://github.com/angular/universal/tree/master/modules/express-engine)
server.engine('html', ngExpressEngine({
bootstrap: AppServerModule,
}));
server.set('view engine', 'html');
server.set('views', distFolder);
// only handle URLs that actually exist
//server.get(locale, getLocalizedSSR(indexHtml));
server.get('/', getLocalizedSSR(indexHtml));
server.get('/tx/*', getLocalizedSSR(indexHtml));
server.get('/block/*', getLocalizedSSR(indexHtml));
server.get('/mempool-block/*', getLocalizedSSR(indexHtml));
server.get('/address/*', getLocalizedSSR(indexHtml));
server.get('/blocks', getLocalizedSSR(indexHtml));
server.get('/mining/pools', getLocalizedSSR(indexHtml));
server.get('/mining/pool/*', getLocalizedSSR(indexHtml));
server.get('/graphs', getLocalizedSSR(indexHtml));
server.get('/liquid', getLocalizedSSR(indexHtml));
server.get('/liquid/tx/*', getLocalizedSSR(indexHtml));
server.get('/liquid/block/*', getLocalizedSSR(indexHtml));
server.get('/liquid/mempool-block/*', getLocalizedSSR(indexHtml));
server.get('/liquid/address/*', getLocalizedSSR(indexHtml));
server.get('/liquid/asset/*', getLocalizedSSR(indexHtml));
server.get('/liquid/blocks', getLocalizedSSR(indexHtml));
server.get('/liquid/graphs', getLocalizedSSR(indexHtml));
server.get('/liquid/assets', getLocalizedSSR(indexHtml));
server.get('/liquid/api', getLocalizedSSR(indexHtml));
server.get('/liquid/tv', getLocalizedSSR(indexHtml));
server.get('/liquid/status', getLocalizedSSR(indexHtml));
server.get('/liquid/about', getLocalizedSSR(indexHtml));
server.get('/testnet', getLocalizedSSR(indexHtml));
server.get('/testnet/tx/*', getLocalizedSSR(indexHtml));
server.get('/testnet/block/*', getLocalizedSSR(indexHtml));
server.get('/testnet/mempool-block/*', getLocalizedSSR(indexHtml));
server.get('/testnet/address/*', getLocalizedSSR(indexHtml));
server.get('/testnet/blocks', getLocalizedSSR(indexHtml));
server.get('/testnet/mining/pools', getLocalizedSSR(indexHtml));
server.get('/testnet/graphs', getLocalizedSSR(indexHtml));
server.get('/testnet/api', getLocalizedSSR(indexHtml));
server.get('/testnet/tv', getLocalizedSSR(indexHtml));
server.get('/testnet/status', getLocalizedSSR(indexHtml));
server.get('/testnet/about', getLocalizedSSR(indexHtml));
server.get('/signet', getLocalizedSSR(indexHtml));
server.get('/signet/tx/*', getLocalizedSSR(indexHtml));
server.get('/signet/block/*', getLocalizedSSR(indexHtml));
server.get('/signet/mempool-block/*', getLocalizedSSR(indexHtml));
server.get('/signet/address/*', getLocalizedSSR(indexHtml));
server.get('/signet/blocks', getLocalizedSSR(indexHtml));
server.get('/signet/mining/pools', getLocalizedSSR(indexHtml));
server.get('/signet/graphs', getLocalizedSSR(indexHtml));
server.get('/signet/api', getLocalizedSSR(indexHtml));
server.get('/signet/tv', getLocalizedSSR(indexHtml));
server.get('/signet/status', getLocalizedSSR(indexHtml));
server.get('/signet/about', getLocalizedSSR(indexHtml));
server.get('/bisq', getLocalizedSSR(indexHtml));
server.get('/bisq/tx/*', getLocalizedSSR(indexHtml));
server.get('/bisq/blocks', getLocalizedSSR(indexHtml));
server.get('/bisq/block/*', getLocalizedSSR(indexHtml));
server.get('/bisq/address/*', getLocalizedSSR(indexHtml));
server.get('/bisq/stats', getLocalizedSSR(indexHtml));
server.get('/bisq/about', getLocalizedSSR(indexHtml));
server.get('/bisq/api', getLocalizedSSR(indexHtml));
server.get('/about', getLocalizedSSR(indexHtml));
server.get('/api', getLocalizedSSR(indexHtml));
server.get('/tv', getLocalizedSSR(indexHtml));
server.get('/status', getLocalizedSSR(indexHtml));
server.get('/terms-of-service', getLocalizedSSR(indexHtml));
// fallback to static file handler so we send HTTP 404 to nginx
server.get('/**', express.static(distFolder, { maxAge: '1y' }));
return server;
}
function getLocalizedSSR(indexHtml) {
return (req, res) => {
res.render(indexHtml, {
req,
providers: [
{ provide: APP_BASE_HREF, useValue: req.baseUrl }
]
});
}
}
// only used for development mode
function run(): void {
const port = process.env.PORT || 4000;
// Start up the Node server
const server = app('en-US');
server.listen(port, () => {
console.log(`Node Express server listening on port ${port}`);
});
}
// Webpack will replace 'require' with '__webpack_require__'
// '__non_webpack_require__' is a proxy to Node 'require'
// The below code is to ensure that the server is run only when not requiring the bundle.
declare const __non_webpack_require__: NodeRequire;
const mainModule = __non_webpack_require__.main;
const moduleFilename = mainModule && mainModule.filename || '';
if (moduleFilename === __filename || moduleFilename.includes('iisnode')) {
run();
}
export * from './src/main.server';

View File

@@ -1,17 +1,21 @@
import { NgModule } from '@angular/core';
import { Routes, RouterModule } from '@angular/router';
import { AppPreloadingStrategy } from './app.preloading-strategy'
import { Routes, RouterModule, PreloadAllModules } from '@angular/router';
import { StartComponent } from './components/start/start.component';
import { TransactionComponent } from './components/transaction/transaction.component';
import { BlockComponent } from './components/block/block.component';
import { BlockAuditComponent } from './components/block-audit/block-audit.component';
import { BlockPreviewComponent } from './components/block/block-preview.component';
import { AddressComponent } from './components/address/address.component';
import { AddressPreviewComponent } from './components/address/address-preview.component';
import { MasterPageComponent } from './components/master-page/master-page.component';
import { MasterPagePreviewComponent } from './components/master-page-preview/master-page-preview.component';
import { AboutComponent } from './components/about/about.component';
import { StatusViewComponent } from './components/status-view/status-view.component';
import { TermsOfServiceComponent } from './components/terms-of-service/terms-of-service.component';
import { PrivacyPolicyComponent } from './components/privacy-policy/privacy-policy.component';
import { TrademarkPolicyComponent } from './components/trademark-policy/trademark-policy.component';
import { BisqMasterPageComponent } from './components/bisq-master-page/bisq-master-page.component';
import { SponsorComponent } from './components/sponsor/sponsor.component';
import { PushTransactionComponent } from './components/push-transaction/push-transaction.component';
import { BlocksList } from './components/blocks-list/blocks-list.component';
import { LiquidMasterPageComponent } from './components/liquid-master-page/liquid-master-page.component';
@@ -21,10 +25,6 @@ import { AssetsComponent } from './components/assets/assets.component';
import { AssetComponent } from './components/asset/asset.component';
import { AssetsNavComponent } from './components/assets/assets-nav/assets-nav.component';
const browserWindow = window || {};
// @ts-ignore
const browserWindowEnv = browserWindow.__env || {};
let routes: Routes = [
{
path: 'testnet',
@@ -32,8 +32,7 @@ let routes: Routes = [
{
path: '',
pathMatch: 'full',
loadChildren: () => import('./graphs/graphs.module').then(m => m.GraphsModule),
data: { preload: true },
loadChildren: () => import('./graphs/graphs.module').then(m => m.GraphsModule)
},
{
path: '',
@@ -73,14 +72,12 @@ let routes: Routes = [
children: [],
component: AddressComponent,
data: {
ogImage: true,
networkSpecific: true,
ogImage: true
}
},
{
path: 'tx',
component: StartComponent,
data: { networkSpecific: true },
children: [
{
path: ':id',
@@ -91,7 +88,6 @@ let routes: Routes = [
{
path: 'block',
component: StartComponent,
data: { networkSpecific: true },
children: [
{
path: ':id',
@@ -102,10 +98,18 @@ let routes: Routes = [
},
],
},
{
path: 'block-audit',
children: [
{
path: ':id',
component: BlockAuditComponent,
},
],
},
{
path: 'docs',
loadChildren: () => import('./docs/docs.module').then(m => m.DocsModule),
data: { preload: true },
loadChildren: () => import('./docs/docs.module').then(m => m.DocsModule)
},
{
path: 'api',
@@ -113,14 +117,12 @@ let routes: Routes = [
},
{
path: 'lightning',
loadChildren: () => import('./lightning/lightning.module').then(m => m.LightningModule),
data: { preload: browserWindowEnv && browserWindowEnv.LIGHTNING === true, networks: ['bitcoin'] },
loadChildren: () => import('./lightning/lightning.module').then(m => m.LightningModule)
},
],
},
{
path: 'status',
data: { networks: ['bitcoin', 'liquid'] },
component: StatusViewComponent
},
{
@@ -179,13 +181,11 @@ let routes: Routes = [
children: [],
component: AddressComponent,
data: {
ogImage: true,
networkSpecific: true,
ogImage: true
}
},
{
path: 'tx',
data: { networkSpecific: true },
component: StartComponent,
children: [
{
@@ -196,7 +196,6 @@ let routes: Routes = [
},
{
path: 'block',
data: { networkSpecific: true },
component: StartComponent,
children: [
{
@@ -208,6 +207,15 @@ let routes: Routes = [
},
],
},
{
path: 'block-audit',
children: [
{
path: ':id',
component: BlockAuditComponent,
},
],
},
{
path: 'docs',
loadChildren: () => import('./docs/docs.module').then(m => m.DocsModule)
@@ -218,14 +226,12 @@ let routes: Routes = [
},
{
path: 'lightning',
data: { networks: ['bitcoin'] },
loadChildren: () => import('./lightning/lightning.module').then(m => m.LightningModule)
},
],
},
{
path: 'status',
data: { networks: ['bitcoin', 'liquid'] },
component: StatusViewComponent
},
{
@@ -281,13 +287,11 @@ let routes: Routes = [
children: [],
component: AddressComponent,
data: {
ogImage: true,
networkSpecific: true,
ogImage: true
}
},
{
path: 'tx',
data: { networkSpecific: true },
component: StartComponent,
children: [
{
@@ -298,7 +302,6 @@ let routes: Routes = [
},
{
path: 'block',
data: { networkSpecific: true },
component: StartComponent,
children: [
{
@@ -310,6 +313,15 @@ let routes: Routes = [
},
],
},
{
path: 'block-audit',
children: [
{
path: ':id',
component: BlockAuditComponent
},
],
},
{
path: 'docs',
loadChildren: () => import('./docs/docs.module').then(m => m.DocsModule)
@@ -320,33 +332,51 @@ let routes: Routes = [
},
{
path: 'lightning',
data: { networks: ['bitcoin'] },
loadChildren: () => import('./lightning/lightning.module').then(m => m.LightningModule)
},
],
},
{
path: 'preview',
component: MasterPagePreviewComponent,
children: [
{
path: '',
loadChildren: () => import('./previews.module').then(m => m.PreviewsModule)
path: 'block/:id',
component: BlockPreviewComponent
},
{
path: 'testnet',
loadChildren: () => import('./previews.module').then(m => m.PreviewsModule)
path: 'testnet/block/:id',
component: BlockPreviewComponent
},
{
path: 'signet',
loadChildren: () => import('./previews.module').then(m => m.PreviewsModule)
path: 'signet/block/:id',
component: BlockPreviewComponent
},
{
path: 'address/:id',
children: [],
component: AddressPreviewComponent
},
{
path: 'testnet/address/:id',
children: [],
component: AddressPreviewComponent
},
{
path: 'signet/address/:id',
children: [],
component: AddressPreviewComponent
},
],
},
{
path: 'status',
data: { networks: ['bitcoin', 'liquid'] },
component: StatusViewComponent
},
{
path: 'sponsor',
component: SponsorComponent,
},
{
path: '',
loadChildren: () => import('./graphs/graphs.module').then(m => m.GraphsModule)
@@ -357,6 +387,10 @@ let routes: Routes = [
},
];
const browserWindow = window || {};
// @ts-ignore
const browserWindowEnv = browserWindow.__env || {};
if (browserWindowEnv && browserWindowEnv.BASE_MODULE === 'bisq') {
routes = [{
path: '',
@@ -408,13 +442,11 @@ if (browserWindowEnv && browserWindowEnv.BASE_MODULE === 'liquid') {
children: [],
component: AddressComponent,
data: {
ogImage: true,
networkSpecific: true,
ogImage: true
}
},
{
path: 'tx',
data: { networkSpecific: true },
component: StartComponent,
children: [
{
@@ -425,7 +457,6 @@ if (browserWindowEnv && browserWindowEnv.BASE_MODULE === 'liquid') {
},
{
path: 'block',
data: { networkSpecific: true },
component: StartComponent,
children: [
{
@@ -439,22 +470,18 @@ if (browserWindowEnv && browserWindowEnv.BASE_MODULE === 'liquid') {
},
{
path: 'assets',
data: { networks: ['liquid'] },
component: AssetsNavComponent,
children: [
{
path: 'all',
data: { networks: ['liquid'] },
component: AssetsComponent,
},
{
path: 'asset/:id',
data: { networkSpecific: true },
component: AssetComponent
},
{
path: 'group/:id',
data: { networkSpecific: true },
component: AssetGroupComponent
},
{
@@ -475,7 +502,6 @@ if (browserWindowEnv && browserWindowEnv.BASE_MODULE === 'liquid') {
},
{
path: 'status',
data: { networks: ['bitcoin', 'liquid'] },
component: StatusViewComponent
},
{
@@ -526,13 +552,11 @@ if (browserWindowEnv && browserWindowEnv.BASE_MODULE === 'liquid') {
children: [],
component: AddressComponent,
data: {
ogImage: true,
networkSpecific: true,
ogImage: true
}
},
{
path: 'tx',
data: { networkSpecific: true },
component: StartComponent,
children: [
{
@@ -543,7 +567,6 @@ if (browserWindowEnv && browserWindowEnv.BASE_MODULE === 'liquid') {
},
{
path: 'block',
data: { networkSpecific: true },
component: StartComponent,
children: [
{
@@ -557,27 +580,22 @@ if (browserWindowEnv && browserWindowEnv.BASE_MODULE === 'liquid') {
},
{
path: 'assets',
data: { networks: ['liquid'] },
component: AssetsNavComponent,
children: [
{
path: 'featured',
data: { networkSpecific: true },
component: AssetsFeaturedComponent,
},
{
path: 'all',
data: { networks: ['liquid'] },
component: AssetsComponent,
},
{
path: 'asset/:id',
data: { networkSpecific: true },
component: AssetComponent
},
{
path: 'group/:id',
data: { networkSpecific: true },
component: AssetGroupComponent
},
{
@@ -598,22 +616,36 @@ if (browserWindowEnv && browserWindowEnv.BASE_MODULE === 'liquid') {
},
{
path: 'preview',
component: MasterPagePreviewComponent,
children: [
{
path: '',
loadChildren: () => import('./previews.module').then(m => m.PreviewsModule)
path: 'block/:id',
component: BlockPreviewComponent
},
{
path: 'testnet',
loadChildren: () => import('./previews.module').then(m => m.PreviewsModule)
path: 'testnet/block/:id',
component: BlockPreviewComponent
},
{
path: 'address/:id',
children: [],
component: AddressPreviewComponent
},
{
path: 'testnet/address/:id',
children: [],
component: AddressPreviewComponent
},
],
},
{
path: 'status',
data: { networks: ['bitcoin', 'liquid']},
component: StatusViewComponent
},
{
path: 'sponsor',
component: SponsorComponent,
},
{
path: '',
loadChildren: () => import('./graphs/graphs.module').then(m => m.GraphsModule)
@@ -627,10 +659,10 @@ if (browserWindowEnv && browserWindowEnv.BASE_MODULE === 'liquid') {
@NgModule({
imports: [RouterModule.forRoot(routes, {
initialNavigation: 'enabledBlocking',
initialNavigation: 'enabled',
scrollPositionRestoration: 'enabled',
anchorScrolling: 'enabled',
preloadingStrategy: AppPreloadingStrategy
preloadingStrategy: PreloadAllModules
})],
})
export class AppRoutingModule { }

View File

@@ -79,7 +79,7 @@ export const poolsColor = {
'binancepool': '#1E88E5',
'viabtc': '#039BE5',
'btccom': '#00897B',
'braiinspool': '#00ACC1',
'slushpool': '#00ACC1',
'sbicrypto': '#43A047',
'marapool': '#7CB342',
'luxor': '#C0CA33',

View File

@@ -1,5 +1,5 @@
import { BrowserModule, BrowserTransferStateModule } from '@angular/platform-browser';
import { ModuleWithProviders, NgModule } from '@angular/core';
import { NgModule } from '@angular/core';
import { HttpClientModule, HTTP_INTERCEPTORS } from '@angular/common/http';
import { BrowserAnimationsModule } from '@angular/platform-browser/animations';
import { AppRoutingModule } from './app-routing.module';
@@ -18,24 +18,6 @@ import { LanguageService } from './services/language.service';
import { FiatShortenerPipe } from './shared/pipes/fiat-shortener.pipe';
import { ShortenStringPipe } from './shared/pipes/shorten-string-pipe/shorten-string.pipe';
import { CapAddressPipe } from './shared/pipes/cap-address-pipe/cap-address-pipe';
import { AppPreloadingStrategy } from './app.preloading-strategy';
const providers = [
ElectrsApiService,
StateService,
WebsocketService,
AudioService,
SeoService,
OpenGraphService,
StorageService,
EnterpriseService,
LanguageService,
ShortenStringPipe,
FiatShortenerPipe,
CapAddressPipe,
AppPreloadingStrategy,
{ provide: HTTP_INTERCEPTORS, useClass: HttpCacheInterceptor, multi: true }
];
@NgModule({
declarations: [
@@ -49,17 +31,21 @@ const providers = [
BrowserAnimationsModule,
SharedModule,
],
providers: providers,
providers: [
ElectrsApiService,
StateService,
WebsocketService,
AudioService,
SeoService,
OpenGraphService,
StorageService,
EnterpriseService,
LanguageService,
ShortenStringPipe,
FiatShortenerPipe,
CapAddressPipe,
{ provide: HTTP_INTERCEPTORS, useClass: HttpCacheInterceptor, multi: true }
],
bootstrap: [AppComponent]
})
export class AppModule { }
@NgModule({})
export class MempoolSharedModule{
static forRoot(): ModuleWithProviders<MempoolSharedModule> {
return {
ngModule: AppModule,
providers: providers
};
}
}
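
One side of this hunk lists the providers inline in the `@NgModule` metadata; the other hoists them into a shared `providers` constant and adds a `MempoolSharedModule` whose static `forRoot()` hands the same provider set to another root injector (the standard `ModuleWithProviders` pattern). A compilable sketch of that pattern, with a placeholder service standing in for the real list:

```typescript
// Sketch of the forRoot() pattern from this hunk; ExampleService is a
// placeholder for StateService, SeoService, and the other real providers.
import { Injectable, ModuleWithProviders, NgModule } from '@angular/core';

@Injectable()
export class ExampleService { }

const providers = [ExampleService];

@NgModule({ providers })
export class AppModule { }

@NgModule({})
export class MempoolSharedModule {
  // Importers call MempoolSharedModule.forRoot() to receive one shared set
  // of provider instances; mirroring the diff, it points at AppModule.
  static forRoot(): ModuleWithProviders<MempoolSharedModule> {
    return { ngModule: AppModule, providers };
  }
}
```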

View File

@@ -1,10 +0,0 @@
import { PreloadingStrategy, Route } from '@angular/router';
import { Observable, timer, mergeMap, of } from 'rxjs';
export class AppPreloadingStrategy implements PreloadingStrategy {
preload(route: Route, load: Function): Observable<any> {
return route.data && route.data.preload
? timer(1500).pipe(mergeMap(() => load()))
: of(null);
}
}
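
The hunk header `@@ -1,10 +0,0 @@` means this preloading-strategy file exists on only one side of the compare. The strategy preloads a lazy route 1.5 seconds after bootstrap, and only when the route asks for it. A hypothetical route showing the opt-in flag the strategy reads (`data.preload` comes from the code above; the path and module are made up for illustration):

```typescript
// Hypothetical usage; 'example' and ExampleModule are placeholders.
import { Route } from '@angular/router';

const exampleRoute: Route = {
  path: 'example',
  loadChildren: () => import('./example/example.module').then(m => m.ExampleModule),
  // AppPreloadingStrategy waits 1500 ms, then calls load(); routes without
  // data.preload fall through to of(null) and are never preloaded.
  data: { preload: true },
};
```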

View File

@@ -1,11 +1,11 @@
import { Component, OnInit, OnDestroy } from '@angular/core';
import { SeoService } from '../../services/seo.service';
import { SeoService } from 'src/app/services/seo.service';
import { switchMap, filter, catchError } from 'rxjs/operators';
import { ParamMap, ActivatedRoute } from '@angular/router';
import { Subscription, of } from 'rxjs';
import { BisqTransaction } from '../bisq.interfaces';
import { BisqApiService } from '../bisq-api.service';
import { WebsocketService } from '../../services/websocket.service';
import { WebsocketService } from 'src/app/services/websocket.service';
@Component({
selector: 'app-bisq-address',
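
This file, like most of the Bisq component hunks that follow, changes nothing but the import style: one side uses specifiers relative to the importing file, the other absolute `'src/app/...'` paths. Both resolve to the same modules; which one works depends on the TypeScript configuration. Illustrative only:

```typescript
// Alternative A -- relative: resolved against this file's own directory.
import { SeoService } from '../../services/seo.service';

// Alternative B -- absolute: resolved from the compiler's baseUrl (or an
// equivalent path mapping), so 'src/app/...' must be resolvable from root.
// import { SeoService } from 'src/app/services/seo.service';
```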

View File

@@ -1,14 +1,14 @@
import { Component, OnInit, OnDestroy } from '@angular/core';
import { BisqBlock } from '../../bisq/bisq.interfaces';
import { BisqBlock } from 'src/app/bisq/bisq.interfaces';
import { Location } from '@angular/common';
import { BisqApiService } from '../bisq-api.service';
import { ActivatedRoute, ParamMap, Router } from '@angular/router';
import { Subscription, of } from 'rxjs';
import { switchMap, catchError } from 'rxjs/operators';
import { SeoService } from '../../services/seo.service';
import { ElectrsApiService } from '../../services/electrs-api.service';
import { SeoService } from 'src/app/services/seo.service';
import { ElectrsApiService } from 'src/app/services/electrs-api.service';
import { HttpErrorResponse } from '@angular/common/http';
import { WebsocketService } from '../../services/websocket.service';
import { WebsocketService } from 'src/app/services/websocket.service';
@Component({
selector: 'app-bisq-block',

View File

@@ -3,9 +3,9 @@ import { BisqApiService } from '../bisq-api.service';
import { switchMap, map, take, mergeMap, tap } from 'rxjs/operators';
import { Observable } from 'rxjs';
import { BisqBlock, BisqOutput, BisqTransaction } from '../bisq.interfaces';
import { SeoService } from '../../services/seo.service';
import { SeoService } from 'src/app/services/seo.service';
import { ActivatedRoute, Router } from '@angular/router';
import { WebsocketService } from '../../services/websocket.service';
import { WebsocketService } from 'src/app/services/websocket.service';
@Component({
selector: 'app-bisq-blocks',

View File

@@ -1,9 +1,9 @@
import { ChangeDetectionStrategy, Component, OnInit } from '@angular/core';
import { Observable, combineLatest, BehaviorSubject, of } from 'rxjs';
import { map, share, switchMap } from 'rxjs/operators';
import { SeoService } from '../../services/seo.service';
import { StateService } from '../../services/state.service';
import { WebsocketService } from '../../services/websocket.service';
import { SeoService } from 'src/app/services/seo.service';
import { StateService } from 'src/app/services/state.service';
import { WebsocketService } from 'src/app/services/websocket.service';
import { BisqApiService } from '../bisq-api.service';
import { Trade } from '../bisq.interfaces';

View File

@@ -1,9 +1,9 @@
import { ChangeDetectionStrategy, Component, OnInit } from '@angular/core';
import { Observable, combineLatest, BehaviorSubject, of } from 'rxjs';
import { map, share, switchMap } from 'rxjs/operators';
import { SeoService } from '../../services/seo.service';
import { StateService } from '../../services/state.service';
import { WebsocketService } from '../../services/websocket.service';
import { SeoService } from 'src/app/services/seo.service';
import { StateService } from 'src/app/services/state.service';
import { WebsocketService } from 'src/app/services/websocket.service';
import { BisqApiService } from '../bisq-api.service';
import { Trade } from '../bisq.interfaces';

View File

@@ -10,27 +10,27 @@
</div>
<form [formGroup]="radioGroupForm" class="mb-3 radio-form">
<div class="btn-group btn-group-toggle" name="radioBasic">
<label class="btn btn-primary btn-sm" [class.active]="radioGroupForm.get('interval').value === 'half_hour'">
<input type="radio" [value]="'half_hour'" (click)="setFragment('half_hour')" formControlName="interval"> 30M
<div class="btn-group btn-group-toggle" ngbRadioGroup name="radioBasic" formControlName="interval">
<label ngbButtonLabel class="btn-primary btn-sm">
<input ngbButton type="radio" [value]="'half_hour'" (click)="setFragment('half_hour')"> 30M
</label>
<label class="btn btn-primary btn-sm" [class.active]="radioGroupForm.get('interval').value === 'hour'">
<input type="radio" [value]="'hour'" (click)="setFragment('hour')" formControlName="interval"> 1H
<label ngbButtonLabel class="btn-primary btn-sm">
<input ngbButton type="radio" [value]="'hour'" (click)="setFragment('hour')"> 1H
</label>
<label class="btn btn-primary btn-sm" [class.active]="radioGroupForm.get('interval').value === 'half_day'">
<input type="radio" [value]="'half_day'" (click)="setFragment('half_day')" formControlName="interval"> 12H
<label ngbButtonLabel class="btn-primary btn-sm">
<input ngbButton type="radio" [value]="'half_day'" (click)="setFragment('half_day')"> 12H
</label>
<label class="btn btn-primary btn-sm" [class.active]="radioGroupForm.get('interval').value === 'day'">
<input type="radio" [value]="'day'" (click)="setFragment('day')" formControlName="interval"> 1D
<label ngbButtonLabel class="btn-primary btn-sm">
<input ngbButton type="radio" [value]="'day'" (click)="setFragment('day')"> 1D
</label>
<label class="btn btn-primary btn-sm" [class.active]="radioGroupForm.get('interval').value === 'week'">
<input type="radio" [value]="'week'" (click)="setFragment('week')" formControlName="interval"> 1W
<label ngbButtonLabel class="btn-primary btn-sm">
<input ngbButton type="radio" [value]="'week'" (click)="setFragment('week')"> 1W
</label>
<label class="btn btn-primary btn-sm" [class.active]="radioGroupForm.get('interval').value === 'month'">
<input type="radio" [value]="'month'" (click)="setFragment('month')" formControlName="interval"> 1M
<label ngbButtonLabel class="btn-primary btn-sm">
<input ngbButton type="radio" [value]="'month'" (click)="setFragment('month')"> 1M
</label>
<label class="btn btn-primary btn-sm" [class.active]="radioGroupForm.get('interval').value === 'year'">
<input type="radio" [value]="'year'" (click)="setFragment('year')" formControlName="interval"> 1Y
<label ngbButtonLabel class="btn-primary btn-sm">
<input ngbButton type="radio" [value]="'year'" (click)="setFragment('year')"> 1Y
</label>
</div>
</form>
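
The template hunk above swaps between ng-bootstrap's `ngbRadioGroup`/`ngbButtonLabel`/`ngbButton` directives and plain Bootstrap button markup, where `formControlName` sits on each `<input>` and the active state is computed manually via `[class.active]`. Recent ng-bootstrap releases removed those button directives, which is presumably what this migration tracks. A minimal sketch of the component side that either template variant binds to (only `radioGroupForm` and the `interval` control come from the diff; everything else is assumed):

```typescript
// Minimal sketch; selector and template are placeholders.
import { Component } from '@angular/core';
import { UntypedFormBuilder, UntypedFormGroup } from '@angular/forms';

@Component({ selector: 'app-interval-buttons', template: '' })
export class IntervalButtonsComponent {
  radioGroupForm: UntypedFormGroup;

  constructor(formBuilder: UntypedFormBuilder) {
    // 'interval' backs the radio group: formControlName="interval" binds the
    // inputs, and [class.active] compares each value against this control.
    this.radioGroupForm = formBuilder.group({ interval: 'day' });
  }
}
```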

View File

@@ -1,10 +1,10 @@
import { ChangeDetectionStrategy, Component, OnDestroy, OnInit } from '@angular/core';
import { UntypedFormBuilder, UntypedFormGroup } from '@angular/forms';
import { FormBuilder, FormGroup } from '@angular/forms';
import { ActivatedRoute, Router } from '@angular/router';
import { combineLatest, merge, Observable, of } from 'rxjs';
import { map, switchMap } from 'rxjs/operators';
import { SeoService } from '../../services/seo.service';
import { WebsocketService } from '../../services/websocket.service';
import { SeoService } from 'src/app/services/seo.service';
import { WebsocketService } from 'src/app/services/websocket.service';
import { BisqApiService } from '../bisq-api.service';
import { OffersMarket, Trade } from '../bisq.interfaces';
@@ -19,7 +19,7 @@ export class BisqMarketComponent implements OnInit, OnDestroy {
currency$: Observable<any>;
offers$: Observable<OffersMarket>;
trades$: Observable<Trade[]>;
radioGroupForm: UntypedFormGroup;
radioGroupForm: FormGroup;
defaultInterval = 'day';
isLoadingGraph = false;
@@ -28,7 +28,7 @@ export class BisqMarketComponent implements OnInit, OnDestroy {
private websocketService: WebsocketService,
private route: ActivatedRoute,
private bisqApiService: BisqApiService,
private formBuilder: UntypedFormBuilder,
private formBuilder: FormBuilder,
private seoService: SeoService,
private router: Router,
) { }
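
The `FormGroup`/`FormBuilder` versus `UntypedFormGroup`/`UntypedFormBuilder` swap here (and again in the Bisq transactions component further down) is the standard Angular 14 typed-forms migration: v14 made `FormGroup` generic over its controls, and the `Untyped*` aliases preserve the old dynamically-typed behaviour for forms built at runtime. Roughly, assuming nothing beyond `@angular/forms`:

```typescript
// Angular 14+ opt-out of strict control typing; behaves exactly like the
// pre-14 FormBuilder.group(...) call on the other side of this hunk.
import { UntypedFormBuilder, UntypedFormGroup } from '@angular/forms';

function buildIntervalForm(fb: UntypedFormBuilder): UntypedFormGroup {
  return fb.group({ interval: 'day' });
}
```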

View File

@@ -1,9 +1,9 @@
import { Component, OnInit } from '@angular/core';
import { BisqApiService } from '../bisq-api.service';
import { BisqStats } from '../bisq.interfaces';
import { SeoService } from '../../services/seo.service';
import { StateService } from '../../services/state.service';
import { WebsocketService } from '../../services/websocket.service';
import { SeoService } from 'src/app/services/seo.service';
import { StateService } from 'src/app/services/state.service';
import { WebsocketService } from 'src/app/services/websocket.service';
@Component({
selector: 'app-bisq-stats',

View File

@@ -1,5 +1,5 @@
import { Component, ChangeDetectionStrategy, Input, OnChanges } from '@angular/core';
import { BisqTransaction } from '../../bisq/bisq.interfaces';
import { BisqTransaction } from 'src/app/bisq/bisq.interfaces';
@Component({
selector: 'app-bisq-transaction-details',

View File

@@ -1,15 +1,15 @@
import { Component, OnInit, OnDestroy } from '@angular/core';
import { ActivatedRoute, ParamMap, Router } from '@angular/router';
import { BisqTransaction } from '../../bisq/bisq.interfaces';
import { BisqTransaction } from 'src/app/bisq/bisq.interfaces';
import { switchMap, map, catchError } from 'rxjs/operators';
import { of, Observable, Subscription } from 'rxjs';
import { StateService } from '../../services/state.service';
import { Block, Transaction } from '../../interfaces/electrs.interface';
import { StateService } from 'src/app/services/state.service';
import { Block, Transaction } from 'src/app/interfaces/electrs.interface';
import { BisqApiService } from '../bisq-api.service';
import { SeoService } from '../../services/seo.service';
import { ElectrsApiService } from '../../services/electrs-api.service';
import { SeoService } from 'src/app/services/seo.service';
import { ElectrsApiService } from 'src/app/services/electrs-api.service';
import { HttpErrorResponse } from '@angular/common/http';
import { WebsocketService } from '../../services/websocket.service';
import { WebsocketService } from 'src/app/services/websocket.service';
@Component({
selector: 'app-bisq-transaction',

View File

@@ -4,11 +4,11 @@ import { BisqTransaction, BisqOutput } from '../bisq.interfaces';
import { Observable, Subscription } from 'rxjs';
import { switchMap, map, tap } from 'rxjs/operators';
import { BisqApiService } from '../bisq-api.service';
import { SeoService } from '../../services/seo.service';
import { UntypedFormGroup, UntypedFormBuilder } from '@angular/forms';
import { SeoService } from 'src/app/services/seo.service';
import { FormGroup, FormBuilder } from '@angular/forms';
import { Router, ActivatedRoute } from '@angular/router';
import { IMultiSelectOption, IMultiSelectSettings, IMultiSelectTexts } from '../../components/ngx-bootstrap-multiselect/types'
import { WebsocketService } from '../../services/websocket.service';
import { IMultiSelectOption, IMultiSelectSettings, IMultiSelectTexts } from 'src/app/components/ngx-bootstrap-multiselect/types'
import { WebsocketService } from 'src/app/services/websocket.service';
@Component({
selector: 'app-bisq-transactions',
@@ -23,7 +23,7 @@ export class BisqTransactionsComponent implements OnInit, OnDestroy {
fiveItemsPxSize = 250;
isLoading = true;
loadingItems: number[];
radioGroupForm: UntypedFormGroup;
radioGroupForm: FormGroup;
types: string[] = [];
radioGroupSubscription: Subscription;
@@ -70,7 +70,7 @@ export class BisqTransactionsComponent implements OnInit, OnDestroy {
private websocketService: WebsocketService,
private bisqApiService: BisqApiService,
private seoService: SeoService,
private formBuilder: UntypedFormBuilder,
private formBuilder: FormBuilder,
private route: ActivatedRoute,
private router: Router,
private cd: ChangeDetectorRef,

Some files were not shown because too many files have changed in this diff.