Fix merge conflicts on gitignore
This commit is contained in: commit 9a3929554b
.github/workflows/cypress.yml (vendored, 87 changed lines)
@@ -6,86 +6,53 @@ on:
jobs:
  cypress:
    if: "!contains(github.event.pull_request.labels.*.name, 'ops') && !contains(github.head_ref, 'ops/')"
    runs-on: ${{ matrix.os }}
    runs-on: "ubuntu-latest"
    strategy:
      fail-fast: false
      matrix:
        containers: [1, 2, 3, 4, 5]
        os: ["ubuntu-latest"]
        browser: [chrome]
    name: E2E tests on ${{ matrix.browser }} - ${{ matrix.os }}
        module: ["mempool", "liquid", "bisq"]
        include:
          - module: "mempool"
            spec: |
              cypress/e2e/mainnet/*.spec.ts
              cypress/e2e/signet/*.spec.ts
              cypress/e2e/testnet/*.spec.ts
          - module: "liquid"
            spec: |
              cypress/e2e/liquid/liquid.spec.ts
              cypress/e2e/liquidtestnet/liquidtestnet.spec.ts
          - module: "bisq"
            spec: |
              cypress/e2e/bisq/bisq.spec.ts

    name: E2E tests for ${{ matrix.module }}
    steps:
      - name: Checkout
        uses: actions/checkout@v2
        with:
          path: ${{ matrix.module }}

      - name: Setup node
        uses: actions/setup-node@v2
        with:
          node-version: 16.15.0
          cache: 'npm'
          cache-dependency-path: frontend/package-lock.json
      - name: ${{ matrix.browser }} browser tests (Mempool)
        uses: cypress-io/github-action@v4
        with:
          tag: ${{ github.event_name }}
          working-directory: frontend
          build: npm run config:defaults:mempool
          start: npm run start:local-staging
          wait-on: 'http://localhost:4200'
          wait-on-timeout: 120
          record: true
          parallel: true
          spec: |
            cypress/e2e/mainnet/*.spec.ts
            cypress/e2e/signet/*.spec.ts
            cypress/e2e/testnet/*.spec.ts
          group: Tests on ${{ matrix.browser }} (Mempool)
          browser: ${{ matrix.browser }}
          ci-build-id: '${{ github.sha }}-${{ github.workflow }}-${{ github.event_name }}'
        env:
          COMMIT_INFO_MESSAGE: ${{ github.event.pull_request.title }}
          CYPRESS_RECORD_KEY: ${{ secrets.CYPRESS_RECORD_KEY }}
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          CYPRESS_PROJECT_ID: ${{ secrets.CYPRESS_PROJECT_ID }}
          cache-dependency-path: ${{ matrix.module }}/frontend/package-lock.json

      - name: ${{ matrix.browser }} browser tests (Liquid)
      - name: Chrome browser tests (${{ matrix.module }})
        uses: cypress-io/github-action@v4
        if: always()
        with:
          tag: ${{ github.event_name }}
          working-directory: frontend
          build: npm run config:defaults:liquid
          working-directory: ${{ matrix.module }}/frontend
          build: npm run config:defaults:${{ matrix.module }}
          start: npm run start:local-staging
          wait-on: 'http://localhost:4200'
          wait-on-timeout: 120
          record: true
          parallel: true
          spec: |
            cypress/e2e/liquid/liquid.spec.ts
            cypress/e2e/liquidtestnet/liquidtestnet.spec.ts
          group: Tests on ${{ matrix.browser }} (Liquid)
          browser: ${{ matrix.browser }}
          ci-build-id: '${{ github.sha }}-${{ github.workflow }}-${{ github.event_name }}'
        env:
          COMMIT_INFO_MESSAGE: ${{ github.event.pull_request.title }}
          CYPRESS_RECORD_KEY: ${{ secrets.CYPRESS_RECORD_KEY }}
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          CYPRESS_PROJECT_ID: ${{ secrets.CYPRESS_PROJECT_ID }}

      - name: ${{ matrix.browser }} browser tests (Bisq)
        uses: cypress-io/github-action@v4
        if: always()
        with:
          tag: ${{ github.event_name }}
          working-directory: frontend
          build: npm run config:defaults:bisq
          start: npm run start:local-staging
          wait-on: 'http://localhost:4200'
          wait-on-timeout: 120
          record: true
          parallel: true
          spec: cypress/e2e/bisq/bisq.spec.ts
          group: Tests on ${{ matrix.browser }} (Bisq)
          browser: ${{ matrix.browser }}
          spec: ${{ matrix.spec }}
          group: Tests on Chrome (${{ matrix.module }})
          browser: "chrome"
          ci-build-id: '${{ github.sha }}-${{ github.workflow }}-${{ github.event_name }}'
        env:
          COMMIT_INFO_MESSAGE: ${{ github.event.pull_request.title }}
@@ -6,6 +6,8 @@ In order to clarify the intellectual property license granted with Contributions

When submitting a pull request for the first time, please create a file with a name like `/contributors/{github_username}.txt`, and in the content of that file indicate your agreement to the Contributor License Agreement terms below. An example of what that file should contain can be seen in wiz's agreement file. (This method of CLA "signing" is borrowed from Medium's open source project.)

Also, please GPG-sign all your commits (`git config commit.gpgsign true`).

# Contributor License Agreement

Last Updated: January 25, 2022
@@ -15,10 +15,11 @@
    "@typescript-eslint/ban-types": 1,
    "@typescript-eslint/no-empty-function": 1,
    "@typescript-eslint/no-explicit-any": 1,
    "@typescript-eslint/no-inferrable-types": 1,
    "@typescript-eslint/no-inferrable-types": 0,
    "@typescript-eslint/no-namespace": 1,
    "@typescript-eslint/no-this-alias": 1,
    "@typescript-eslint/no-var-requires": 1,
    "@typescript-eslint/explicit-function-return-type": 1,
    "no-console": 1,
    "no-constant-condition": 1,
    "no-dupe-else-if": 1,
@@ -28,6 +29,8 @@
    "no-useless-catch": 1,
    "no-var": 1,
    "prefer-const": 1,
    "prefer-rest-params": 1
    "prefer-rest-params": 1,
    "quotes": [1, "single", { "allowTemplateLiterals": true }],
    "semi": 1
  }
}
backend/.gitignore (vendored, 4 changed lines)
@@ -1,10 +1,10 @@
# See http://help.github.com/ignore-files/ for more about ignoring files.

# production config and external assets
*.json
!mempool-config.template.json
!mempool-config.sample.json

mempool-config.json
pools.json
icons.json

# compiled output
@@ -20,7 +20,8 @@
    "EXTERNAL_MAX_RETRY": 1,
    "EXTERNAL_RETRY_INTERVAL": 0,
    "USER_AGENT": "mempool",
    "STDOUT_LOG_MIN_PRIORITY": "debug"
    "STDOUT_LOG_MIN_PRIORITY": "debug",
    "AUTOMATIC_BLOCK_REINDEXING": false
  },
  "CORE_RPC": {
    "HOST": "127.0.0.1",
@@ -62,10 +63,25 @@
    "ENABLED": true,
    "TX_PER_SECOND_SAMPLE_PERIOD": 150
  },
  "MAXMIND": {
    "ENABLED": false,
    "GEOLITE2_CITY": "/usr/local/share/GeoIP/GeoLite2-City.mmdb",
    "GEOLITE2_ASN": "/usr/local/share/GeoIP/GeoLite2-ASN.mmdb",
    "GEOIP2_ISP": "/usr/local/share/GeoIP/GeoIP2-ISP.mmdb"
  },
  "BISQ": {
    "ENABLED": false,
    "DATA_PATH": "/bisq/statsnode-data/btc_mainnet/db"
  },
  "LIGHTNING": {
    "ENABLED": false,
    "BACKEND": "lnd"
  },
  "LND": {
    "TLS_CERT_PATH": "tls.cert",
    "MACAROON_PATH": "readonly.macaroon",
    "REST_API_URL": "https://localhost:8080"
  },
  "SOCKS5PROXY": {
    "ENABLED": false,
    "USE_ONION": true,
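The new MAXMIND block above pairs with the `maxmind` dependency added to the backend package.json below. As a rough sketch only (the reader type and lookup call come from the public `maxmind` npm package; nothing here is part of this commit), resolving an IP against the GeoLite2 city database could look like this:

import maxmind, { CityResponse } from 'maxmind';

// Illustrative only: the path mirrors the GEOLITE2_CITY value from the sample config above.
async function lookupCity(ip: string): Promise<CityResponse | null> {
  const reader = await maxmind.open<CityResponse>('/usr/local/share/GeoIP/GeoLite2-City.mmdb');
  return reader.get(ip); // returns null when the IP is not in the database
}

lookupCity('8.8.8.8').then((city) => console.log(city?.country?.iso_code));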
@@ -1,6 +1,6 @@
{
  "name": "mempool-backend",
  "version": "2.4.1-dev",
  "version": "2.5.0-dev",
  "description": "Bitcoin mempool visualizer and blockchain explorer backend",
  "license": "GNU Affero General Public License v3.0",
  "homepage": "https://mempool.space",
@@ -16,7 +16,8 @@
    "mempool",
    "blockchain",
    "explorer",
    "liquid"
    "liquid",
    "lightning"
  ],
  "main": "index.ts",
  "scripts": {
@@ -37,6 +38,8 @@
    "bitcoinjs-lib": "6.0.1",
    "crypto-js": "^4.0.0",
    "express": "^4.18.0",
    "fast-xml-parser": "^4.0.9",
    "maxmind": "^4.3.6",
    "mysql2": "2.3.3",
    "node-worker-threads-pool": "^1.5.1",
    "socks-proxy-agent": "~7.0.0",
backend/src/api/bisq/bisq.routes.ts (new file, 381 lines)
@ -0,0 +1,381 @@
|
||||
import { Application, Request, Response } from 'express';
|
||||
import config from '../../config';
|
||||
import { RequiredSpec } from '../../mempool.interfaces';
|
||||
import bisq from './bisq';
|
||||
import { MarketsApiError } from './interfaces';
|
||||
import marketsApi from './markets-api';
|
||||
|
||||
class BisqRoutes {
|
||||
public initRoutes(app: Application) {
|
||||
app
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/stats', this.getBisqStats)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/tx/:txId', this.getBisqTransaction)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/block/:hash', this.getBisqBlock)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/blocks/tip/height', this.getBisqTip)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/blocks/:index/:length', this.getBisqBlocks)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/address/:address', this.getBisqAddress)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/txs/:index/:length', this.getBisqTransactions)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/markets/currencies', this.getBisqMarketCurrencies.bind(this))
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/markets/depth', this.getBisqMarketDepth.bind(this))
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/markets/hloc', this.getBisqMarketHloc.bind(this))
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/markets/markets', this.getBisqMarketMarkets.bind(this))
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/markets/offers', this.getBisqMarketOffers.bind(this))
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/markets/ticker', this.getBisqMarketTicker.bind(this))
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/markets/trades', this.getBisqMarketTrades.bind(this))
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/markets/volumes', this.getBisqMarketVolumes.bind(this))
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/markets/volumes/7d', this.getBisqMarketVolumes7d.bind(this))
|
||||
;
|
||||
}
|
||||
|
||||
|
||||
private getBisqStats(req: Request, res: Response) {
|
||||
const result = bisq.getStats();
|
||||
res.json(result);
|
||||
}
|
||||
|
||||
private getBisqTip(req: Request, res: Response) {
|
||||
const result = bisq.getLatestBlockHeight();
|
||||
res.type('text/plain');
|
||||
res.send(result.toString());
|
||||
}
|
||||
|
||||
private getBisqTransaction(req: Request, res: Response) {
|
||||
const result = bisq.getTransaction(req.params.txId);
|
||||
if (result) {
|
||||
res.json(result);
|
||||
} else {
|
||||
res.status(404).send('Bisq transaction not found');
|
||||
}
|
||||
}
|
||||
|
||||
private getBisqTransactions(req: Request, res: Response) {
|
||||
const types: string[] = [];
|
||||
req.query.types = req.query.types || [];
|
||||
if (!Array.isArray(req.query.types)) {
|
||||
res.status(500).send('Types is not an array');
|
||||
return;
|
||||
}
|
||||
|
||||
for (const _type in req.query.types) {
|
||||
if (typeof req.query.types[_type] === 'string') {
|
||||
types.push(req.query.types[_type].toString());
|
||||
}
|
||||
}
|
||||
|
||||
const index = parseInt(req.params.index, 10) || 0;
|
||||
const length = parseInt(req.params.length, 10) > 100 ? 100 : parseInt(req.params.length, 10) || 25;
|
||||
const [transactions, count] = bisq.getTransactions(index, length, types);
|
||||
res.header('X-Total-Count', count.toString());
|
||||
res.json(transactions);
|
||||
}
|
||||
|
||||
private getBisqBlock(req: Request, res: Response) {
|
||||
const result = bisq.getBlock(req.params.hash);
|
||||
if (result) {
|
||||
res.json(result);
|
||||
} else {
|
||||
res.status(404).send('Bisq block not found');
|
||||
}
|
||||
}
|
||||
|
||||
private getBisqBlocks(req: Request, res: Response) {
|
||||
const index = parseInt(req.params.index, 10) || 0;
|
||||
const length = parseInt(req.params.length, 10) > 100 ? 100 : parseInt(req.params.length, 10) || 25;
|
||||
const [transactions, count] = bisq.getBlocks(index, length);
|
||||
res.header('X-Total-Count', count.toString());
|
||||
res.json(transactions);
|
||||
}
|
||||
|
||||
private getBisqAddress(req: Request, res: Response) {
|
||||
const result = bisq.getAddress(req.params.address.substr(1));
|
||||
if (result) {
|
||||
res.json(result);
|
||||
} else {
|
||||
res.status(404).send('Bisq address not found');
|
||||
}
|
||||
}
|
||||
|
||||
private getBisqMarketCurrencies(req: Request, res: Response) {
|
||||
const constraints: RequiredSpec = {
|
||||
'type': {
|
||||
required: false,
|
||||
types: ['crypto', 'fiat', 'all']
|
||||
},
|
||||
};
|
||||
|
||||
const p = this.parseRequestParameters(req.query, constraints);
|
||||
if (p.error) {
|
||||
res.status(400).json(this.getBisqMarketErrorResponse(p.error));
|
||||
return;
|
||||
}
|
||||
|
||||
const result = marketsApi.getCurrencies(p.type);
|
||||
if (result) {
|
||||
res.json(result);
|
||||
} else {
|
||||
res.status(500).json(this.getBisqMarketErrorResponse('getBisqMarketCurrencies error'));
|
||||
}
|
||||
}
|
||||
|
||||
private getBisqMarketDepth(req: Request, res: Response) {
|
||||
const constraints: RequiredSpec = {
|
||||
'market': {
|
||||
required: true,
|
||||
types: ['@string']
|
||||
},
|
||||
};
|
||||
|
||||
const p = this.parseRequestParameters(req.query, constraints);
|
||||
if (p.error) {
|
||||
res.status(400).json(this.getBisqMarketErrorResponse(p.error));
|
||||
return;
|
||||
}
|
||||
|
||||
const result = marketsApi.getDepth(p.market);
|
||||
if (result) {
|
||||
res.json(result);
|
||||
} else {
|
||||
res.status(500).json(this.getBisqMarketErrorResponse('getBisqMarketDepth error'));
|
||||
}
|
||||
}
|
||||
|
||||
private getBisqMarketMarkets(req: Request, res: Response) {
|
||||
const result = marketsApi.getMarkets();
|
||||
if (result) {
|
||||
res.json(result);
|
||||
} else {
|
||||
res.status(500).json(this.getBisqMarketErrorResponse('getBisqMarketMarkets error'));
|
||||
}
|
||||
}
|
||||
|
||||
private getBisqMarketTrades(req: Request, res: Response) {
|
||||
const constraints: RequiredSpec = {
|
||||
'market': {
|
||||
required: true,
|
||||
types: ['@string']
|
||||
},
|
||||
'timestamp_from': {
|
||||
required: false,
|
||||
types: ['@number']
|
||||
},
|
||||
'timestamp_to': {
|
||||
required: false,
|
||||
types: ['@number']
|
||||
},
|
||||
'trade_id_to': {
|
||||
required: false,
|
||||
types: ['@string']
|
||||
},
|
||||
'trade_id_from': {
|
||||
required: false,
|
||||
types: ['@string']
|
||||
},
|
||||
'direction': {
|
||||
required: false,
|
||||
types: ['buy', 'sell']
|
||||
},
|
||||
'limit': {
|
||||
required: false,
|
||||
types: ['@number']
|
||||
},
|
||||
'sort': {
|
||||
required: false,
|
||||
types: ['asc', 'desc']
|
||||
}
|
||||
};
|
||||
|
||||
const p = this.parseRequestParameters(req.query, constraints);
|
||||
if (p.error) {
|
||||
res.status(400).json(this.getBisqMarketErrorResponse(p.error));
|
||||
return;
|
||||
}
|
||||
|
||||
const result = marketsApi.getTrades(p.market, p.timestamp_from,
|
||||
p.timestamp_to, p.trade_id_from, p.trade_id_to, p.direction, p.limit, p.sort);
|
||||
if (result) {
|
||||
res.json(result);
|
||||
} else {
|
||||
res.status(500).json(this.getBisqMarketErrorResponse('getBisqMarketTrades error'));
|
||||
}
|
||||
}
|
||||
|
||||
private getBisqMarketOffers(req: Request, res: Response) {
|
||||
const constraints: RequiredSpec = {
|
||||
'market': {
|
||||
required: true,
|
||||
types: ['@string']
|
||||
},
|
||||
'direction': {
|
||||
required: false,
|
||||
types: ['buy', 'sell']
|
||||
},
|
||||
};
|
||||
|
||||
const p = this.parseRequestParameters(req.query, constraints);
|
||||
if (p.error) {
|
||||
res.status(400).json(this.getBisqMarketErrorResponse(p.error));
|
||||
return;
|
||||
}
|
||||
|
||||
const result = marketsApi.getOffers(p.market, p.direction);
|
||||
if (result) {
|
||||
res.json(result);
|
||||
} else {
|
||||
res.status(500).json(this.getBisqMarketErrorResponse('getBisqMarketOffers error'));
|
||||
}
|
||||
}
|
||||
|
||||
private getBisqMarketVolumes(req: Request, res: Response) {
|
||||
const constraints: RequiredSpec = {
|
||||
'market': {
|
||||
required: false,
|
||||
types: ['@string']
|
||||
},
|
||||
'interval': {
|
||||
required: false,
|
||||
types: ['minute', 'half_hour', 'hour', 'half_day', 'day', 'week', 'month', 'year', 'auto']
|
||||
},
|
||||
'timestamp_from': {
|
||||
required: false,
|
||||
types: ['@number']
|
||||
},
|
||||
'timestamp_to': {
|
||||
required: false,
|
||||
types: ['@number']
|
||||
},
|
||||
'milliseconds': {
|
||||
required: false,
|
||||
types: ['@boolean']
|
||||
},
|
||||
'timestamp': {
|
||||
required: false,
|
||||
types: ['no', 'yes']
|
||||
},
|
||||
};
|
||||
|
||||
const p = this.parseRequestParameters(req.query, constraints);
|
||||
if (p.error) {
|
||||
res.status(400).json(this.getBisqMarketErrorResponse(p.error));
|
||||
return;
|
||||
}
|
||||
|
||||
const result = marketsApi.getVolumes(p.market, p.timestamp_from, p.timestamp_to, p.interval, p.milliseconds, p.timestamp);
|
||||
if (result) {
|
||||
res.json(result);
|
||||
} else {
|
||||
res.status(500).json(this.getBisqMarketErrorResponse('getBisqMarketVolumes error'));
|
||||
}
|
||||
}
|
||||
|
||||
private getBisqMarketHloc(req: Request, res: Response) {
|
||||
const constraints: RequiredSpec = {
|
||||
'market': {
|
||||
required: true,
|
||||
types: ['@string']
|
||||
},
|
||||
'interval': {
|
||||
required: false,
|
||||
types: ['minute', 'half_hour', 'hour', 'half_day', 'day', 'week', 'month', 'year', 'auto']
|
||||
},
|
||||
'timestamp_from': {
|
||||
required: false,
|
||||
types: ['@number']
|
||||
},
|
||||
'timestamp_to': {
|
||||
required: false,
|
||||
types: ['@number']
|
||||
},
|
||||
'milliseconds': {
|
||||
required: false,
|
||||
types: ['@boolean']
|
||||
},
|
||||
'timestamp': {
|
||||
required: false,
|
||||
types: ['no', 'yes']
|
||||
},
|
||||
};
|
||||
|
||||
const p = this.parseRequestParameters(req.query, constraints);
|
||||
if (p.error) {
|
||||
res.status(400).json(this.getBisqMarketErrorResponse(p.error));
|
||||
return;
|
||||
}
|
||||
|
||||
const result = marketsApi.getHloc(p.market, p.interval, p.timestamp_from, p.timestamp_to, p.milliseconds, p.timestamp);
|
||||
if (result) {
|
||||
res.json(result);
|
||||
} else {
|
||||
res.status(500).json(this.getBisqMarketErrorResponse('getBisqMarketHloc error'));
|
||||
}
|
||||
}
|
||||
|
||||
private getBisqMarketTicker(req: Request, res: Response) {
|
||||
const constraints: RequiredSpec = {
|
||||
'market': {
|
||||
required: false,
|
||||
types: ['@string']
|
||||
},
|
||||
};
|
||||
|
||||
const p = this.parseRequestParameters(req.query, constraints);
|
||||
if (p.error) {
|
||||
res.status(400).json(this.getBisqMarketErrorResponse(p.error));
|
||||
return;
|
||||
}
|
||||
|
||||
const result = marketsApi.getTicker(p.market);
|
||||
if (result) {
|
||||
res.json(result);
|
||||
} else {
|
||||
res.status(500).json(this.getBisqMarketErrorResponse('getBisqMarketTicker error'));
|
||||
}
|
||||
}
|
||||
|
||||
private getBisqMarketVolumes7d(req: Request, res: Response) {
|
||||
const result = marketsApi.getVolumesByTime(604800);
|
||||
if (result) {
|
||||
res.json(result);
|
||||
} else {
|
||||
res.status(500).json(this.getBisqMarketErrorResponse('getBisqMarketVolumes7d error'));
|
||||
}
|
||||
}
|
||||
|
||||
private parseRequestParameters(requestParams: object, params: RequiredSpec): { [name: string]: any; } {
|
||||
const final = {};
|
||||
for (const i in params) {
|
||||
if (params.hasOwnProperty(i)) {
|
||||
if (params[i].required && requestParams[i] === undefined) {
|
||||
return { error: i + ' parameter missing'};
|
||||
}
|
||||
if (typeof requestParams[i] === 'string') {
|
||||
const str = (requestParams[i] || '').toString().toLowerCase();
|
||||
if (params[i].types.indexOf('@number') > -1) {
|
||||
const number = parseInt((str).toString(), 10);
|
||||
final[i] = number;
|
||||
} else if (params[i].types.indexOf('@string') > -1) {
|
||||
final[i] = str;
|
||||
} else if (params[i].types.indexOf('@boolean') > -1) {
|
||||
final[i] = str === 'true' || str === 'yes';
|
||||
} else if (params[i].types.indexOf(str) > -1) {
|
||||
final[i] = str;
|
||||
} else {
|
||||
return { error: i + ' parameter invalid'};
|
||||
}
|
||||
} else if (typeof requestParams[i] === 'number') {
|
||||
final[i] = requestParams[i];
|
||||
}
|
||||
}
|
||||
}
|
||||
return final;
|
||||
}
|
||||
|
||||
private getBisqMarketErrorResponse(message: string): MarketsApiError {
|
||||
return {
|
||||
'success': 0,
|
||||
'error': message
|
||||
};
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
export default new BisqRoutes;
|
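For readers unfamiliar with the pattern, here is a condensed, self-contained sketch of the validation idea behind parseRequestParameters above; all names in this snippet are illustrative and not part of the codebase:

type ParamSpec = { required: boolean; types: string[] };

// '@string' / '@number' / '@boolean' act as type wildcards; any other entry is an allowed literal value.
function parseParams(query: Record<string, unknown>, spec: Record<string, ParamSpec>): Record<string, any> {
  const out: Record<string, any> = {};
  for (const key of Object.keys(spec)) {
    const value = query[key];
    if (spec[key].required && value === undefined) {
      return { error: key + ' parameter missing' };
    }
    if (typeof value !== 'string') {
      continue;
    }
    const str = value.toLowerCase();
    if (spec[key].types.includes('@number')) {
      out[key] = parseInt(str, 10);
    } else if (spec[key].types.includes('@string')) {
      out[key] = str;
    } else if (spec[key].types.includes('@boolean')) {
      out[key] = str === 'true' || str === 'yes';
    } else if (spec[key].types.includes(str)) {
      out[key] = str;
    } else {
      return { error: key + ' parameter invalid' };
    }
  }
  return out;
}

// Example: a request like /bisq/markets/hloc?market=btc_usd&interval=day
console.log(parseParams({ market: 'btc_usd', interval: 'day' },
  { market: { required: true, types: ['@string'] }, interval: { required: false, types: ['day', 'week', 'auto'] } }));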
@@ -9,10 +9,12 @@ export interface AbstractBitcoinApi {
  $getBlockHash(height: number): Promise<string>;
  $getBlockHeader(hash: string): Promise<string>;
  $getBlock(hash: string): Promise<IEsploraApi.Block>;
  $getRawBlock(hash: string): Promise<string>;
  $getAddress(address: string): Promise<IEsploraApi.Address>;
  $getAddressTransactions(address: string, lastSeenTxId: string): Promise<IEsploraApi.Transaction[]>;
  $getAddressPrefix(prefix: string): string[];
  $sendRawTransaction(rawTransaction: string): Promise<string>;
  $getOutspend(txId: string, vout: number): Promise<IEsploraApi.Outspend>;
  $getOutspends(txId: string): Promise<IEsploraApi.Outspend[]>;
  $getBatchedOutspends(txId: string[]): Promise<IEsploraApi.Outspend[][]>;
}
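For illustration, a naive implementation of the new $getBatchedOutspends contract could simply loop over $getOutspends. The Outspend shape below is trimmed to the fields visible in this diff, and the free-standing helper is hypothetical; the real implementations live in the API classes that follow:

interface Outspend {
  spent: boolean;
  status?: { confirmed: boolean };
}

interface OutspendProvider {
  $getOutspends(txId: string): Promise<Outspend[]>;
}

// Hypothetical helper: fetch outspends for several txids sequentially,
// returning one Outspend[] per requested transaction, in request order.
async function getBatchedOutspends(api: OutspendProvider, txIds: string[]): Promise<Outspend[][]> {
  const results: Outspend[][] = [];
  for (const txId of txIds) {
    results.push(await api.$getOutspends(txId));
  }
  return results;
}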
@@ -77,7 +77,8 @@ class BitcoinApi implements AbstractBitcoinApi {
  }

  $getRawBlock(hash: string): Promise<string> {
    return this.bitcoindClient.getBlock(hash, 0);
    return this.bitcoindClient.getBlock(hash, 0)
      .then((raw: string) => Buffer.from(raw, "hex"));
  }

  $getBlockHash(height: number): Promise<string> {
@@ -130,6 +131,16 @@
    return this.bitcoindClient.sendRawTransaction(rawTransaction);
  }

  async $getOutspend(txId: string, vout: number): Promise<IEsploraApi.Outspend> {
    const txOut = await this.bitcoindClient.getTxOut(txId, vout, false);
    return {
      spent: txOut === null,
      status: {
        confirmed: true,
      }
    };
  }

  async $getOutspends(txId: string): Promise<IEsploraApi.Outspend[]> {
    const outSpends: IEsploraApi.Outspend[] = [];
    const tx = await this.$getRawTransaction(txId, true, false);
@@ -195,7 +206,9 @@ class BitcoinApi implements AbstractBitcoinApi {
      sequence: vin.sequence,
      txid: vin.txid || '',
      vout: vin.vout || 0,
      witness: vin.txinwitness,
      witness: vin.txinwitness || [],
      inner_redeemscript_asm: '',
      inner_witnessscript_asm: '',
    };
  });
backend/src/api/bitcoin/bitcoin.routes.ts (new file, 554 lines)
@ -0,0 +1,554 @@
|
||||
import { Application, Request, Response } from 'express';
|
||||
import axios from 'axios';
|
||||
import config from '../../config';
|
||||
import websocketHandler from '../websocket-handler';
|
||||
import mempool from '../mempool';
|
||||
import feeApi from '../fee-api';
|
||||
import mempoolBlocks from '../mempool-blocks';
|
||||
import bitcoinApi from './bitcoin-api-factory';
|
||||
import { Common } from '../common';
|
||||
import backendInfo from '../backend-info';
|
||||
import transactionUtils from '../transaction-utils';
|
||||
import { IEsploraApi } from './esplora-api.interface';
|
||||
import loadingIndicators from '../loading-indicators';
|
||||
import { TransactionExtended } from '../../mempool.interfaces';
|
||||
import logger from '../../logger';
|
||||
import blocks from '../blocks';
|
||||
import bitcoinClient from './bitcoin-client';
|
||||
import difficultyAdjustment from '../difficulty-adjustment';
|
||||
|
||||
class BitcoinRoutes {
|
||||
public initRoutes(app: Application) {
|
||||
app
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'transaction-times', this.getTransactionTimes)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'outspends', this.$getBatchedOutspends)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'cpfp/:txId', this.getCpfpInfo)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'difficulty-adjustment', this.getDifficultyChange)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'fees/recommended', this.getRecommendedFees)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'fees/mempool-blocks', this.getMempoolBlocks)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'backend-info', this.getBackendInfo)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'init-data', this.getInitData)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'validate-address/:address', this.validateAddress)
|
||||
.post(config.MEMPOOL.API_URL_PREFIX + 'tx/push', this.$postTransactionForm)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'donations', async (req, res) => {
|
||||
try {
|
||||
const response = await axios.get(`${config.EXTERNAL_DATA_SERVER.MEMPOOL_API}/donations`, { responseType: 'stream', timeout: 10000 });
|
||||
response.data.pipe(res);
|
||||
} catch (e) {
|
||||
res.status(500).end();
|
||||
}
|
||||
})
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'donations/images/:id', async (req, res) => {
|
||||
try {
|
||||
const response = await axios.get(`${config.EXTERNAL_DATA_SERVER.MEMPOOL_API}/donations/images/${req.params.id}`, {
|
||||
responseType: 'stream', timeout: 10000
|
||||
});
|
||||
response.data.pipe(res);
|
||||
} catch (e) {
|
||||
res.status(500).end();
|
||||
}
|
||||
})
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'contributors', async (req, res) => {
|
||||
try {
|
||||
const response = await axios.get(`${config.EXTERNAL_DATA_SERVER.MEMPOOL_API}/contributors`, { responseType: 'stream', timeout: 10000 });
|
||||
response.data.pipe(res);
|
||||
} catch (e) {
|
||||
res.status(500).end();
|
||||
}
|
||||
})
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'contributors/images/:id', async (req, res) => {
|
||||
try {
|
||||
const response = await axios.get(`${config.EXTERNAL_DATA_SERVER.MEMPOOL_API}/contributors/images/${req.params.id}`, {
|
||||
responseType: 'stream', timeout: 10000
|
||||
});
|
||||
response.data.pipe(res);
|
||||
} catch (e) {
|
||||
res.status(500).end();
|
||||
}
|
||||
})
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'translators', async (req, res) => {
|
||||
try {
|
||||
const response = await axios.get(`${config.EXTERNAL_DATA_SERVER.MEMPOOL_API}/translators`, { responseType: 'stream', timeout: 10000 });
|
||||
response.data.pipe(res);
|
||||
} catch (e) {
|
||||
res.status(500).end();
|
||||
}
|
||||
})
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'translators/images/:id', async (req, res) => {
|
||||
try {
|
||||
const response = await axios.get(`${config.EXTERNAL_DATA_SERVER.MEMPOOL_API}/translators/images/${req.params.id}`, {
|
||||
responseType: 'stream', timeout: 10000
|
||||
});
|
||||
response.data.pipe(res);
|
||||
} catch (e) {
|
||||
res.status(500).end();
|
||||
}
|
||||
})
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'blocks', this.getBlocks.bind(this))
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'blocks/:height', this.getBlocks.bind(this))
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash', this.getBlock)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash/summary', this.getStrippedBlockTransactions);
|
||||
|
||||
if (config.MEMPOOL.BACKEND !== 'esplora') {
|
||||
app
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mempool', this.getMempool)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mempool/txids', this.getMempoolTxIds)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mempool/recent', this.getRecentMempoolTransactions)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'tx/:txId', this.getTransaction)
|
||||
.post(config.MEMPOOL.API_URL_PREFIX + 'tx', this.$postTransaction)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'tx/:txId/hex', this.getRawTransaction)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'tx/:txId/status', this.getTransactionStatus)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'tx/:txId/outspends', this.getTransactionOutspends)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash/header', this.getBlockHeader)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'blocks/tip/height', this.getBlockTipHeight)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'blocks/tip/hash', this.getBlockTipHash)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash/raw', this.getRawBlock)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash/txids', this.getTxIdsForBlock)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash/txs', this.getBlockTransactions)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash/txs/:index', this.getBlockTransactions)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'block-height/:height', this.getBlockHeight)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'address/:address', this.getAddress)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'address/:address/txs', this.getAddressTransactions)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'address/:address/txs/chain/:txId', this.getAddressTransactions)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'address-prefix/:prefix', this.getAddressPrefix)
|
||||
;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
private getInitData(req: Request, res: Response) {
|
||||
try {
|
||||
const result = websocketHandler.getInitData();
|
||||
res.json(result);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private getRecommendedFees(req: Request, res: Response) {
|
||||
if (!mempool.isInSync()) {
|
||||
res.statusCode = 503;
|
||||
res.send('Service Unavailable');
|
||||
return;
|
||||
}
|
||||
const result = feeApi.getRecommendedFee();
|
||||
res.json(result);
|
||||
}
|
||||
|
||||
private getMempoolBlocks(req: Request, res: Response) {
|
||||
try {
|
||||
const result = mempoolBlocks.getMempoolBlocks();
|
||||
res.json(result);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private getTransactionTimes(req: Request, res: Response) {
|
||||
if (!Array.isArray(req.query.txId)) {
|
||||
res.status(500).send('Not an array');
|
||||
return;
|
||||
}
|
||||
const txIds: string[] = [];
|
||||
for (const _txId in req.query.txId) {
|
||||
if (typeof req.query.txId[_txId] === 'string') {
|
||||
txIds.push(req.query.txId[_txId].toString());
|
||||
}
|
||||
}
|
||||
|
||||
const times = mempool.getFirstSeenForTransactions(txIds);
|
||||
res.json(times);
|
||||
}
|
||||
|
||||
private async $getBatchedOutspends(req: Request, res: Response) {
|
||||
if (!Array.isArray(req.query.txId)) {
|
||||
res.status(500).send('Not an array');
|
||||
return;
|
||||
}
|
||||
if (req.query.txId.length > 50) {
|
||||
res.status(400).send('Too many txids requested');
|
||||
return;
|
||||
}
|
||||
const txIds: string[] = [];
|
||||
for (const _txId in req.query.txId) {
|
||||
if (typeof req.query.txId[_txId] === 'string') {
|
||||
txIds.push(req.query.txId[_txId].toString());
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
const batchedOutspends = await bitcoinApi.$getBatchedOutspends(txIds);
|
||||
res.json(batchedOutspends);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private getCpfpInfo(req: Request, res: Response) {
|
||||
if (!/^[a-fA-F0-9]{64}$/.test(req.params.txId)) {
|
||||
res.status(501).send(`Invalid transaction ID.`);
|
||||
return;
|
||||
}
|
||||
|
||||
const tx = mempool.getMempool()[req.params.txId];
|
||||
if (!tx) {
|
||||
res.status(404).send(`Transaction doesn't exist in the mempool.`);
|
||||
return;
|
||||
}
|
||||
|
||||
if (tx.cpfpChecked) {
|
||||
res.json({
|
||||
ancestors: tx.ancestors,
|
||||
bestDescendant: tx.bestDescendant || null,
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
const cpfpInfo = Common.setRelativesAndGetCpfpInfo(tx, mempool.getMempool());
|
||||
|
||||
res.json(cpfpInfo);
|
||||
}
|
||||
|
||||
private getBackendInfo(req: Request, res: Response) {
|
||||
res.json(backendInfo.getBackendInfo());
|
||||
}
|
||||
|
||||
private async getTransaction(req: Request, res: Response) {
|
||||
try {
|
||||
const transaction = await transactionUtils.$getTransactionExtended(req.params.txId, true);
|
||||
res.json(transaction);
|
||||
} catch (e) {
|
||||
let statusCode = 500;
|
||||
if (e instanceof Error && e.message && e.message.indexOf('No such mempool or blockchain transaction') > -1) {
|
||||
statusCode = 404;
|
||||
}
|
||||
res.status(statusCode).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async getRawTransaction(req: Request, res: Response) {
|
||||
try {
|
||||
const transaction: IEsploraApi.Transaction = await bitcoinApi.$getRawTransaction(req.params.txId, true);
|
||||
res.setHeader('content-type', 'text/plain');
|
||||
res.send(transaction.hex);
|
||||
} catch (e) {
|
||||
let statusCode = 500;
|
||||
if (e instanceof Error && e.message && e.message.indexOf('No such mempool or blockchain transaction') > -1) {
|
||||
statusCode = 404;
|
||||
}
|
||||
res.status(statusCode).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async getTransactionStatus(req: Request, res: Response) {
|
||||
try {
|
||||
const transaction = await transactionUtils.$getTransactionExtended(req.params.txId, true);
|
||||
res.json(transaction.status);
|
||||
} catch (e) {
|
||||
let statusCode = 500;
|
||||
if (e instanceof Error && e.message && e.message.indexOf('No such mempool or blockchain transaction') > -1) {
|
||||
statusCode = 404;
|
||||
}
|
||||
res.status(statusCode).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async getBlock(req: Request, res: Response) {
|
||||
try {
|
||||
const block = await blocks.$getBlock(req.params.hash);
|
||||
|
||||
const blockAge = new Date().getTime() / 1000 - block.timestamp;
|
||||
const day = 24 * 3600;
|
||||
let cacheDuration;
|
||||
if (blockAge > 365 * day) {
|
||||
cacheDuration = 30 * day;
|
||||
} else if (blockAge > 30 * day) {
|
||||
cacheDuration = 10 * day;
|
||||
} else {
|
||||
cacheDuration = 600;
|
||||
}
|
||||
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * cacheDuration).toUTCString());
|
||||
res.json(block);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async getBlockHeader(req: Request, res: Response) {
|
||||
try {
|
||||
const blockHeader = await bitcoinApi.$getBlockHeader(req.params.hash);
|
||||
res.setHeader('content-type', 'text/plain');
|
||||
res.send(blockHeader);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async getStrippedBlockTransactions(req: Request, res: Response) {
|
||||
try {
|
||||
const transactions = await blocks.$getStrippedBlockTransactions(req.params.hash);
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 3600 * 24 * 30).toUTCString());
|
||||
res.json(transactions);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async getBlocks(req: Request, res: Response) {
|
||||
try {
|
||||
if (['mainnet', 'testnet', 'signet'].includes(config.MEMPOOL.NETWORK)) { // Bitcoin
|
||||
const height = req.params.height === undefined ? undefined : parseInt(req.params.height, 10);
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
|
||||
res.json(await blocks.$getBlocks(height, 15));
|
||||
} else { // Liquid, Bisq
|
||||
return await this.getLegacyBlocks(req, res);
|
||||
}
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async getLegacyBlocks(req: Request, res: Response) {
|
||||
try {
|
||||
const returnBlocks: IEsploraApi.Block[] = [];
|
||||
const fromHeight = parseInt(req.params.height, 10) || blocks.getCurrentBlockHeight();
|
||||
|
||||
// Check if block height exist in local cache to skip the hash lookup
|
||||
const blockByHeight = blocks.getBlocks().find((b) => b.height === fromHeight);
|
||||
let startFromHash: string | null = null;
|
||||
if (blockByHeight) {
|
||||
startFromHash = blockByHeight.id;
|
||||
} else {
|
||||
startFromHash = await bitcoinApi.$getBlockHash(fromHeight);
|
||||
}
|
||||
|
||||
let nextHash = startFromHash;
|
||||
for (let i = 0; i < 10 && nextHash; i++) {
|
||||
const localBlock = blocks.getBlocks().find((b) => b.id === nextHash);
|
||||
if (localBlock) {
|
||||
returnBlocks.push(localBlock);
|
||||
nextHash = localBlock.previousblockhash;
|
||||
} else {
|
||||
const block = await bitcoinApi.$getBlock(nextHash);
|
||||
returnBlocks.push(block);
|
||||
nextHash = block.previousblockhash;
|
||||
}
|
||||
}
|
||||
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
|
||||
res.json(returnBlocks);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async getBlockTransactions(req: Request, res: Response) {
|
||||
try {
|
||||
loadingIndicators.setProgress('blocktxs-' + req.params.hash, 0);
|
||||
|
||||
const txIds = await bitcoinApi.$getTxIdsForBlock(req.params.hash);
|
||||
const transactions: TransactionExtended[] = [];
|
||||
const startingIndex = Math.max(0, parseInt(req.params.index || '0', 10));
|
||||
|
||||
const endIndex = Math.min(startingIndex + 10, txIds.length);
|
||||
for (let i = startingIndex; i < endIndex; i++) {
|
||||
try {
|
||||
const transaction = await transactionUtils.$getTransactionExtended(txIds[i], true, true);
|
||||
transactions.push(transaction);
|
||||
loadingIndicators.setProgress('blocktxs-' + req.params.hash, (i - startingIndex + 1) / (endIndex - startingIndex) * 100);
|
||||
} catch (e) {
|
||||
logger.debug('getBlockTransactions error: ' + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
}
|
||||
res.json(transactions);
|
||||
} catch (e) {
|
||||
loadingIndicators.setProgress('blocktxs-' + req.params.hash, 100);
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async getBlockHeight(req: Request, res: Response) {
|
||||
try {
|
||||
const blockHash = await bitcoinApi.$getBlockHash(parseInt(req.params.height, 10));
|
||||
res.send(blockHash);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async getAddress(req: Request, res: Response) {
|
||||
if (config.MEMPOOL.BACKEND === 'none') {
|
||||
res.status(405).send('Address lookups cannot be used with bitcoind as backend.');
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const addressData = await bitcoinApi.$getAddress(req.params.address);
|
||||
res.json(addressData);
|
||||
} catch (e) {
|
||||
if (e instanceof Error && e.message && (e.message.indexOf('too long') > 0 || e.message.indexOf('confirmed status') > 0)) {
|
||||
return res.status(413).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async getAddressTransactions(req: Request, res: Response) {
|
||||
if (config.MEMPOOL.BACKEND === 'none') {
|
||||
res.status(405).send('Address lookups cannot be used with bitcoind as backend.');
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const transactions = await bitcoinApi.$getAddressTransactions(req.params.address, req.params.txId);
|
||||
res.json(transactions);
|
||||
} catch (e) {
|
||||
if (e instanceof Error && e.message && (e.message.indexOf('too long') > 0 || e.message.indexOf('confirmed status') > 0)) {
|
||||
return res.status(413).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async getAdressTxChain(req: Request, res: Response) {
|
||||
res.status(501).send('Not implemented');
|
||||
}
|
||||
|
||||
private async getAddressPrefix(req: Request, res: Response) {
|
||||
try {
|
||||
const blockHash = await bitcoinApi.$getAddressPrefix(req.params.prefix);
|
||||
res.send(blockHash);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async getRecentMempoolTransactions(req: Request, res: Response) {
|
||||
const latestTransactions = Object.entries(mempool.getMempool())
|
||||
.sort((a, b) => (b[1].firstSeen || 0) - (a[1].firstSeen || 0))
|
||||
.slice(0, 10).map((tx) => Common.stripTransaction(tx[1]));
|
||||
|
||||
res.json(latestTransactions);
|
||||
}
|
||||
|
||||
private async getMempool(req: Request, res: Response) {
|
||||
const info = mempool.getMempoolInfo();
|
||||
res.json({
|
||||
count: info.size,
|
||||
vsize: info.bytes,
|
||||
total_fee: info.total_fee * 1e8,
|
||||
fee_histogram: []
|
||||
});
|
||||
}
|
||||
|
||||
private async getMempoolTxIds(req: Request, res: Response) {
|
||||
try {
|
||||
const rawMempool = await bitcoinApi.$getRawMempool();
|
||||
res.send(rawMempool);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async getBlockTipHeight(req: Request, res: Response) {
|
||||
try {
|
||||
const result = await bitcoinApi.$getBlockHeightTip();
|
||||
res.json(result);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async getBlockTipHash(req: Request, res: Response) {
|
||||
try {
|
||||
const result = await bitcoinApi.$getBlockHashTip();
|
||||
res.setHeader('content-type', 'text/plain');
|
||||
res.send(result);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async getRawBlock(req: Request, res: Response) {
|
||||
try {
|
||||
const result = await bitcoinApi.$getRawBlock(req.params.hash);
|
||||
res.setHeader('content-type', 'application/octet-stream');
|
||||
res.send(result);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async getTxIdsForBlock(req: Request, res: Response) {
|
||||
try {
|
||||
const result = await bitcoinApi.$getTxIdsForBlock(req.params.hash);
|
||||
res.json(result);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async validateAddress(req: Request, res: Response) {
|
||||
try {
|
||||
const result = await bitcoinClient.validateAddress(req.params.address);
|
||||
res.json(result);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async getTransactionOutspends(req: Request, res: Response) {
|
||||
try {
|
||||
const result = await bitcoinApi.$getOutspends(req.params.txId);
|
||||
res.json(result);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private getDifficultyChange(req: Request, res: Response) {
|
||||
try {
|
||||
res.json(difficultyAdjustment.getDifficultyAdjustment());
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async $postTransaction(req: Request, res: Response) {
|
||||
res.setHeader('content-type', 'text/plain');
|
||||
try {
|
||||
let rawTx;
|
||||
if (typeof req.body === 'object') {
|
||||
rawTx = Object.keys(req.body)[0];
|
||||
} else {
|
||||
rawTx = req.body;
|
||||
}
|
||||
const txIdResult = await bitcoinApi.$sendRawTransaction(rawTx);
|
||||
res.send(txIdResult);
|
||||
} catch (e: any) {
|
||||
res.status(400).send(e.message && e.code ? 'sendrawtransaction RPC error: ' + JSON.stringify({ code: e.code, message: e.message })
|
||||
: (e.message || 'Error'));
|
||||
}
|
||||
}
|
||||
|
||||
private async $postTransactionForm(req: Request, res: Response) {
|
||||
res.setHeader('content-type', 'text/plain');
|
||||
const matches = /tx=([a-z0-9]+)/.exec(req.body);
|
||||
let txHex = '';
|
||||
if (matches && matches[1]) {
|
||||
txHex = matches[1];
|
||||
}
|
||||
try {
|
||||
const txIdResult = await bitcoinClient.sendRawTransaction(txHex);
|
||||
res.send(txIdResult);
|
||||
} catch (e: any) {
|
||||
res.status(400).send(e.message && e.code ? 'sendrawtransaction RPC error: ' + JSON.stringify({ code: e.code, message: e.message })
|
||||
: (e.message || 'Error'));
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
export default new BitcoinRoutes();
|
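As a usage sketch of two of the routes registered above (the host, port, and '/api/v1/' prefix are assumptions about a typical local deployment where config.MEMPOOL.API_URL_PREFIX keeps its default; they are not part of this diff):

import axios from 'axios';

// Assumed local backend base URL; adjust to your HTTP_PORT and API_URL_PREFIX.
const BASE = 'http://127.0.0.1:8999/api/v1/';

async function demo(): Promise<void> {
  // GET validate-address/:address -> JSON result proxied from bitcoind's validateaddress RPC
  const { data: validation } = await axios.get(BASE + 'validate-address/bc1qexampleaddress'); // placeholder address
  console.log(validation.isvalid);

  // GET block/:hash/raw -> raw block bytes served as application/octet-stream
  const hash = '0000000000000000000000000000000000000000000000000000000000000000'; // placeholder hash
  const { data: rawBlock } = await axios.get(BASE + 'block/' + hash + '/raw', { responseType: 'arraybuffer' });
  console.log(rawBlock.byteLength);
}

demo().catch(console.error);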
@@ -25,10 +25,10 @@ export namespace IEsploraApi {
    is_coinbase: boolean;
    scriptsig: string;
    scriptsig_asm: string;
    inner_redeemscript_asm?: string;
    inner_witnessscript_asm?: string;
    inner_redeemscript_asm: string;
    inner_witnessscript_asm: string;
    sequence: any;
    witness?: string[];
    witness: string[];
    prevout: Vout | null;
    // Elements
    is_pegin?: boolean;
@@ -50,6 +50,11 @@ class ElectrsApi implements AbstractBitcoinApi {
      .then((response) => response.data);
  }

  $getRawBlock(hash: string): Promise<string> {
    return axios.get<string>(config.ESPLORA.REST_API_URL + '/block/' + hash + "/raw", this.axiosConfig)
      .then((response) => response.data);
  }

  $getAddress(address: string): Promise<IEsploraApi.Address> {
    throw new Error('Method getAddress not implemented.');
  }
@@ -66,6 +71,11 @@ class ElectrsApi implements AbstractBitcoinApi {
    throw new Error('Method not implemented.');
  }

  $getOutspend(txId: string, vout: number): Promise<IEsploraApi.Outspend> {
    return axios.get<IEsploraApi.Outspend>(config.ESPLORA.REST_API_URL + '/tx/' + txId + '/outspend/' + vout, this.axiosConfig)
      .then((response) => response.data);
  }

  $getOutspends(txId: string): Promise<IEsploraApi.Outspend[]> {
    return axios.get<IEsploraApi.Outspend[]>(config.ESPLORA.REST_API_URL + '/tx/' + txId + '/outspends', this.axiosConfig)
      .then((response) => response.data);
@ -17,11 +17,11 @@ import { prepareBlock } from '../utils/blocks-utils';
|
||||
import BlocksRepository from '../repositories/BlocksRepository';
|
||||
import HashratesRepository from '../repositories/HashratesRepository';
|
||||
import indexer from '../indexer';
|
||||
import fiatConversion from './fiat-conversion';
|
||||
import poolsParser from './pools-parser';
|
||||
import BlocksSummariesRepository from '../repositories/BlocksSummariesRepository';
|
||||
import mining from './mining';
|
||||
import mining from './mining/mining';
|
||||
import DifficultyAdjustmentsRepository from '../repositories/DifficultyAdjustmentsRepository';
|
||||
import difficultyAdjustment from './difficulty-adjustment';
|
||||
|
||||
class Blocks {
|
||||
private blocks: BlockExtended[] = [];
|
||||
@ -150,6 +150,7 @@ class Blocks {
|
||||
blockExtended.extras.reward = transactions[0].vout.reduce((acc, curr) => acc + curr.value, 0);
|
||||
blockExtended.extras.coinbaseTx = transactionUtils.stripCoinbaseTransaction(transactions[0]);
|
||||
blockExtended.extras.coinbaseRaw = blockExtended.extras.coinbaseTx.vin[0].scriptsig;
|
||||
blockExtended.extras.usd = fiatConversion.getConversionRates().USD;
|
||||
|
||||
if (block.height === 0) {
|
||||
blockExtended.extras.medianFee = 0; // 50th percentiles
|
||||
@ -280,8 +281,7 @@ class Blocks {
|
||||
const runningFor = Math.max(1, Math.round((new Date().getTime() / 1000) - startedAt));
|
||||
const blockPerSeconds = Math.max(1, indexedThisRun / elapsedSeconds);
|
||||
const progress = Math.round(totalIndexed / indexedBlocks.length * 10000) / 100;
|
||||
const timeLeft = Math.round((indexedBlocks.length - totalIndexed) / blockPerSeconds);
|
||||
logger.debug(`Indexing block summary for #${block.height} | ~${blockPerSeconds.toFixed(2)} blocks/sec | total: ${totalIndexed}/${indexedBlocks.length} (${progress}%) | elapsed: ${runningFor} seconds | left: ~${timeLeft} seconds`);
|
||||
logger.debug(`Indexing block summary for #${block.height} | ~${blockPerSeconds.toFixed(2)} blocks/sec | total: ${totalIndexed}/${indexedBlocks.length} (${progress}%) | elapsed: ${runningFor} seconds`);
|
||||
timer = new Date().getTime() / 1000;
|
||||
indexedThisRun = 0;
|
||||
}
|
||||
@ -293,7 +293,11 @@ class Blocks {
|
||||
totalIndexed++;
|
||||
newlyIndexed++;
|
||||
}
|
||||
logger.notice(`Blocks summaries indexing completed: indexed ${newlyIndexed} blocks`);
|
||||
if (newlyIndexed > 0) {
|
||||
logger.notice(`Blocks summaries indexing completed: indexed ${newlyIndexed} blocks`);
|
||||
} else {
|
||||
logger.debug(`Blocks summaries indexing completed: indexed ${newlyIndexed} blocks`);
|
||||
}
|
||||
} catch (e) {
|
||||
logger.err(`Blocks summaries indexing failed. Trying again in 10 seconds. Reason: ${(e instanceof Error ? e.message : e)}`);
|
||||
throw e;
|
||||
@ -348,8 +352,7 @@ class Blocks {
|
||||
const runningFor = Math.max(1, Math.round((new Date().getTime() / 1000) - startedAt));
|
||||
const blockPerSeconds = Math.max(1, indexedThisRun / elapsedSeconds);
|
||||
const progress = Math.round(totalIndexed / indexingBlockAmount * 10000) / 100;
|
||||
const timeLeft = Math.round((indexingBlockAmount - totalIndexed) / blockPerSeconds);
|
||||
logger.debug(`Indexing block #${blockHeight} | ~${blockPerSeconds.toFixed(2)} blocks/sec | total: ${totalIndexed}/${indexingBlockAmount} (${progress}%) | elapsed: ${runningFor} seconds | left: ~${timeLeft} seconds`);
|
||||
logger.debug(`Indexing block #${blockHeight} | ~${blockPerSeconds.toFixed(2)} blocks/sec | total: ${totalIndexed}/${indexingBlockAmount} (${progress}%) | elapsed: ${runningFor} seconds`);
|
||||
timer = new Date().getTime() / 1000;
|
||||
indexedThisRun = 0;
|
||||
loadingIndicators.setProgress('block-indexing', progress, false);
|
||||
@ -365,7 +368,11 @@ class Blocks {
|
||||
|
||||
currentBlockHeight -= chunkSize;
|
||||
}
|
||||
logger.notice(`Block indexing completed: indexed ${newlyIndexed} blocks`);
|
||||
if (newlyIndexed > 0) {
|
||||
logger.notice(`Block indexing completed: indexed ${newlyIndexed} blocks`);
|
||||
} else {
|
||||
logger.debug(`Block indexing completed: indexed ${newlyIndexed} blocks`);
|
||||
}
|
||||
loadingIndicators.setProgress('block-indexing', 100);
|
||||
} catch (e) {
|
||||
logger.err('Block indexing failed. Trying again in 10 seconds. Reason: ' + (e instanceof Error ? e.message : e));
|
||||
@ -527,13 +534,12 @@ class Blocks {
|
||||
}
|
||||
}
|
||||
|
||||
let block = await bitcoinClient.getBlock(hash);
|
||||
|
||||
// Not a Bitcoin network, return the block as-is
|
||||
if (['mainnet', 'testnet', 'signet'].includes(config.MEMPOOL.NETWORK) === false) {
|
||||
return block;
|
||||
return await bitcoinApi.$getBlock(hash);
|
||||
}
|
||||
|
||||
let block = await bitcoinClient.getBlock(hash);
|
||||
block = prepareBlock(block);
|
||||
|
||||
// Bitcoin network, add our custom data on top
|
||||
@ -547,8 +553,8 @@ class Blocks {
|
||||
return blockExtended;
|
||||
}
|
||||
|
||||
public async $getStrippedBlockTransactions(hash: string, skipMemoryCache: boolean = false,
|
||||
skipDBLookup: boolean = false): Promise<TransactionStripped[]>
|
||||
public async $getStrippedBlockTransactions(hash: string, skipMemoryCache = false,
|
||||
skipDBLookup = false): Promise<TransactionStripped[]>
|
||||
{
|
||||
if (skipMemoryCache === false) {
|
||||
// Check the memory cache
|
||||
@ -572,24 +578,20 @@ class Blocks {
|
||||
|
||||
// Index the response if needed
|
||||
if (Common.blocksSummariesIndexingEnabled() === true) {
|
||||
await BlocksSummariesRepository.$saveSummary(block.height, summary);
|
||||
await BlocksSummariesRepository.$saveSummary({height: block.height, mined: summary});
|
||||
}
|
||||
|
||||
return summary.transactions;
|
||||
}
|
||||
|
||||
public async $getBlocks(fromHeight?: number, limit: number = 15): Promise<BlockExtended[]> {
|
||||
let currentHeight = fromHeight !== undefined ? fromHeight : this.getCurrentBlockHeight();
|
||||
let currentHeight = fromHeight !== undefined ? fromHeight : await blocksRepository.$mostRecentBlockHeight();
|
||||
const returnBlocks: BlockExtended[] = [];
|
||||
|
||||
if (currentHeight < 0) {
|
||||
return returnBlocks;
|
||||
}
|
||||
|
||||
if (currentHeight === 0 && Common.indexingEnabled()) {
|
||||
currentHeight = await blocksRepository.$mostRecentBlockHeight();
|
||||
}
|
||||
|
||||
// Check if block height exist in local cache to skip the hash lookup
|
||||
const blockByHeight = this.getBlocks().find((b) => b.height === currentHeight);
|
||||
let startFromHash: string | null = null;
|
||||
|
@@ -172,7 +172,7 @@ export class Common {

  static indexingEnabled(): boolean {
    return (
      ['mainnet', 'testnet', 'signet', 'regtest'].includes(config.MEMPOOL.NETWORK) &&
      ['mainnet', 'testnet', 'signet'].includes(config.MEMPOOL.NETWORK) &&
      config.DATABASE.ENABLED === true &&
      config.MEMPOOL.INDEXING_BLOCKS_AMOUNT !== 0
    );
@ -4,15 +4,13 @@ import logger from '../logger';
|
||||
import { Common } from './common';
|
||||
|
||||
class DatabaseMigration {
|
||||
private static currentVersion = 24;
|
||||
private static currentVersion = 35;
|
||||
private queryTimeout = 120000;
|
||||
private statisticsAddedIndexed = false;
|
||||
private uniqueLogs: string[] = [];
|
||||
|
||||
private blocksTruncatedMessage = `'blocks' table has been truncated. Re-indexing from scratch.`;
|
||||
private hashratesTruncatedMessage = `'hashrates' table has been truncated. Re-indexing from scratch.`;
|
||||
|
||||
constructor() { }
|
||||
private blocksTruncatedMessage = `'blocks' table has been truncated.`;
|
||||
private hashratesTruncatedMessage = `'hashrates' table has been truncated.`;
|
||||
|
||||
/**
|
||||
* Avoid printing the same message multiple times
|
||||
@ -104,152 +102,223 @@ class DatabaseMigration {
|
||||
await this.$setStatisticsAddedIndexedFlag(databaseSchemaVersion);
|
||||
|
||||
const isBitcoin = ['mainnet', 'testnet', 'signet'].includes(config.MEMPOOL.NETWORK);
|
||||
try {
|
||||
await this.$executeQuery(this.getCreateElementsTableQuery(), await this.$checkIfTableExists('elements_pegs'));
|
||||
await this.$executeQuery(this.getCreateStatisticsQuery(), await this.$checkIfTableExists('statistics'));
|
||||
if (databaseSchemaVersion < 2 && this.statisticsAddedIndexed === false) {
|
||||
await this.$executeQuery(`CREATE INDEX added ON statistics (added);`);
|
||||
}
|
||||
if (databaseSchemaVersion < 3) {
|
||||
await this.$executeQuery(this.getCreatePoolsTableQuery(), await this.$checkIfTableExists('pools'));
|
||||
}
|
||||
if (databaseSchemaVersion < 4) {
|
||||
await this.$executeQuery('DROP table IF EXISTS blocks;');
|
||||
await this.$executeQuery(this.getCreateBlocksTableQuery(), await this.$checkIfTableExists('blocks'));
|
||||
}
|
||||
if (databaseSchemaVersion < 5 && isBitcoin === true) {
|
||||
this.uniqueLog(logger.notice, this.blocksTruncatedMessage);
|
||||
await this.$executeQuery('TRUNCATE blocks;'); // Need to re-index
|
||||
await this.$executeQuery('ALTER TABLE blocks ADD `reward` double unsigned NOT NULL DEFAULT "0"');
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 6 && isBitcoin === true) {
|
||||
this.uniqueLog(logger.notice, this.blocksTruncatedMessage);
|
||||
await this.$executeQuery('TRUNCATE blocks;'); // Need to re-index
|
||||
// Cleanup original blocks fields type
|
||||
await this.$executeQuery('ALTER TABLE blocks MODIFY `height` integer unsigned NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE blocks MODIFY `tx_count` smallint unsigned NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE blocks MODIFY `size` integer unsigned NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE blocks MODIFY `weight` integer unsigned NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE blocks MODIFY `difficulty` double NOT NULL DEFAULT "0"');
|
||||
// We also fix the pools.id type so we need to drop/re-create the foreign key
|
||||
await this.$executeQuery('ALTER TABLE blocks DROP FOREIGN KEY IF EXISTS `blocks_ibfk_1`');
|
||||
await this.$executeQuery('ALTER TABLE pools MODIFY `id` smallint unsigned AUTO_INCREMENT');
|
||||
await this.$executeQuery('ALTER TABLE blocks MODIFY `pool_id` smallint unsigned NULL');
|
||||
await this.$executeQuery('ALTER TABLE blocks ADD FOREIGN KEY (`pool_id`) REFERENCES `pools` (`id`)');
|
||||
// Add new block indexing fields
|
||||
await this.$executeQuery('ALTER TABLE blocks ADD `version` integer unsigned NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE blocks ADD `bits` integer unsigned NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE blocks ADD `nonce` bigint unsigned NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE blocks ADD `merkle_root` varchar(65) NOT NULL DEFAULT ""');
|
||||
await this.$executeQuery('ALTER TABLE blocks ADD `previous_block_hash` varchar(65) NULL');
|
||||
}
|
||||
await this.$executeQuery(this.getCreateElementsTableQuery(), await this.$checkIfTableExists('elements_pegs'));
|
||||
await this.$executeQuery(this.getCreateStatisticsQuery(), await this.$checkIfTableExists('statistics'));
|
||||
if (databaseSchemaVersion < 2 && this.statisticsAddedIndexed === false) {
|
||||
await this.$executeQuery(`CREATE INDEX added ON statistics (added);`);
|
||||
}
|
||||
if (databaseSchemaVersion < 3) {
|
||||
await this.$executeQuery(this.getCreatePoolsTableQuery(), await this.$checkIfTableExists('pools'));
|
||||
}
|
||||
if (databaseSchemaVersion < 4) {
|
||||
await this.$executeQuery('DROP table IF EXISTS blocks;');
|
||||
await this.$executeQuery(this.getCreateBlocksTableQuery(), await this.$checkIfTableExists('blocks'));
|
||||
}
|
||||
if (databaseSchemaVersion < 5 && isBitcoin === true) {
|
||||
this.uniqueLog(logger.notice, this.blocksTruncatedMessage);
|
||||
await this.$executeQuery('TRUNCATE blocks;'); // Need to re-index
|
||||
await this.$executeQuery('ALTER TABLE blocks ADD `reward` double unsigned NOT NULL DEFAULT "0"');
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 7 && isBitcoin === true) {
|
||||
await this.$executeQuery('DROP table IF EXISTS hashrates;');
|
||||
await this.$executeQuery(this.getCreateDailyStatsTableQuery(), await this.$checkIfTableExists('hashrates'));
|
||||
}
|
||||
if (databaseSchemaVersion < 6 && isBitcoin === true) {
|
||||
this.uniqueLog(logger.notice, this.blocksTruncatedMessage);
|
||||
await this.$executeQuery('TRUNCATE blocks;'); // Need to re-index
|
||||
// Clean up the original blocks field types
|
||||
await this.$executeQuery('ALTER TABLE blocks MODIFY `height` integer unsigned NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE blocks MODIFY `tx_count` smallint unsigned NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE blocks MODIFY `size` integer unsigned NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE blocks MODIFY `weight` integer unsigned NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE blocks MODIFY `difficulty` double NOT NULL DEFAULT "0"');
|
||||
// We also fix the pools.id type so we need to drop/re-create the foreign key
|
||||
await this.$executeQuery('ALTER TABLE blocks DROP FOREIGN KEY IF EXISTS `blocks_ibfk_1`');
|
||||
await this.$executeQuery('ALTER TABLE pools MODIFY `id` smallint unsigned AUTO_INCREMENT');
|
||||
await this.$executeQuery('ALTER TABLE blocks MODIFY `pool_id` smallint unsigned NULL');
|
||||
await this.$executeQuery('ALTER TABLE blocks ADD FOREIGN KEY (`pool_id`) REFERENCES `pools` (`id`)');
|
||||
// Add new block indexing fields
|
||||
await this.$executeQuery('ALTER TABLE blocks ADD `version` integer unsigned NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE blocks ADD `bits` integer unsigned NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE blocks ADD `nonce` bigint unsigned NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE blocks ADD `merkle_root` varchar(65) NOT NULL DEFAULT ""');
|
||||
await this.$executeQuery('ALTER TABLE blocks ADD `previous_block_hash` varchar(65) NULL');
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 8 && isBitcoin === true) {
|
||||
this.uniqueLog(logger.notice, this.blocksTruncatedMessage);
|
||||
await this.$executeQuery('TRUNCATE hashrates;'); // Need to re-index
|
||||
await this.$executeQuery('ALTER TABLE `hashrates` DROP INDEX `PRIMARY`');
|
||||
await this.$executeQuery('ALTER TABLE `hashrates` ADD `id` int NOT NULL AUTO_INCREMENT PRIMARY KEY FIRST');
|
||||
await this.$executeQuery('ALTER TABLE `hashrates` ADD `share` float NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE `hashrates` ADD `type` enum("daily", "weekly") DEFAULT "daily"');
|
||||
}
|
||||
if (databaseSchemaVersion < 7 && isBitcoin === true) {
|
||||
await this.$executeQuery('DROP table IF EXISTS hashrates;');
|
||||
await this.$executeQuery(this.getCreateDailyStatsTableQuery(), await this.$checkIfTableExists('hashrates'));
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 9 && isBitcoin === true) {
|
||||
this.uniqueLog(logger.notice, this.hashratesTruncatedMessage);
|
||||
await this.$executeQuery('TRUNCATE hashrates;'); // Need to re-index
|
||||
await this.$executeQuery('ALTER TABLE `state` CHANGE `name` `name` varchar(100)');
|
||||
await this.$executeQuery('ALTER TABLE `hashrates` ADD UNIQUE `hashrate_timestamp_pool_id` (`hashrate_timestamp`, `pool_id`)');
|
||||
}
|
||||
if (databaseSchemaVersion < 8 && isBitcoin === true) {
|
||||
this.uniqueLog(logger.notice, this.blocksTruncatedMessage);
|
||||
await this.$executeQuery('TRUNCATE hashrates;'); // Need to re-index
|
||||
await this.$executeQuery('ALTER TABLE `hashrates` DROP INDEX `PRIMARY`');
|
||||
await this.$executeQuery('ALTER TABLE `hashrates` ADD `id` int NOT NULL AUTO_INCREMENT PRIMARY KEY FIRST');
|
||||
await this.$executeQuery('ALTER TABLE `hashrates` ADD `share` float NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE `hashrates` ADD `type` enum("daily", "weekly") DEFAULT "daily"');
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 10 && isBitcoin === true) {
|
||||
await this.$executeQuery('ALTER TABLE `blocks` ADD INDEX `blockTimestamp` (`blockTimestamp`)');
|
||||
}
|
||||
if (databaseSchemaVersion < 9 && isBitcoin === true) {
|
||||
this.uniqueLog(logger.notice, this.hashratesTruncatedMessage);
|
||||
await this.$executeQuery('TRUNCATE hashrates;'); // Need to re-index
|
||||
await this.$executeQuery('ALTER TABLE `state` CHANGE `name` `name` varchar(100)');
|
||||
await this.$executeQuery('ALTER TABLE `hashrates` ADD UNIQUE `hashrate_timestamp_pool_id` (`hashrate_timestamp`, `pool_id`)');
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 11 && isBitcoin === true) {
|
||||
this.uniqueLog(logger.notice, this.blocksTruncatedMessage);
|
||||
await this.$executeQuery('TRUNCATE blocks;'); // Need to re-index
|
||||
await this.$executeQuery(`ALTER TABLE blocks
|
||||
ADD avg_fee INT UNSIGNED NULL,
|
||||
ADD avg_fee_rate INT UNSIGNED NULL
|
||||
`);
|
||||
await this.$executeQuery('ALTER TABLE blocks MODIFY `reward` BIGINT UNSIGNED NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE blocks MODIFY `median_fee` INT UNSIGNED NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE blocks MODIFY `fees` INT UNSIGNED NOT NULL DEFAULT "0"');
|
||||
}
|
||||
if (databaseSchemaVersion < 10 && isBitcoin === true) {
|
||||
await this.$executeQuery('ALTER TABLE `blocks` ADD INDEX `blockTimestamp` (`blockTimestamp`)');
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 12 && isBitcoin === true) {
|
||||
// No need to re-index because the new data type can contain larger values
|
||||
await this.$executeQuery('ALTER TABLE blocks MODIFY `fees` BIGINT UNSIGNED NOT NULL DEFAULT "0"');
|
||||
}
|
||||
if (databaseSchemaVersion < 11 && isBitcoin === true) {
|
||||
this.uniqueLog(logger.notice, this.blocksTruncatedMessage);
|
||||
await this.$executeQuery('TRUNCATE blocks;'); // Need to re-index
|
||||
await this.$executeQuery(`ALTER TABLE blocks
|
||||
ADD avg_fee INT UNSIGNED NULL,
|
||||
ADD avg_fee_rate INT UNSIGNED NULL
|
||||
`);
|
||||
await this.$executeQuery('ALTER TABLE blocks MODIFY `reward` BIGINT UNSIGNED NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE blocks MODIFY `median_fee` INT UNSIGNED NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE blocks MODIFY `fees` INT UNSIGNED NOT NULL DEFAULT "0"');
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 13 && isBitcoin === true) {
|
||||
await this.$executeQuery('ALTER TABLE blocks MODIFY `difficulty` DOUBLE UNSIGNED NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE blocks MODIFY `median_fee` BIGINT UNSIGNED NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE blocks MODIFY `avg_fee` BIGINT UNSIGNED NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE blocks MODIFY `avg_fee_rate` BIGINT UNSIGNED NOT NULL DEFAULT "0"');
|
||||
}
|
||||
if (databaseSchemaVersion < 12 && isBitcoin === true) {
|
||||
// No need to re-index because the new data type can contain larger values
|
||||
await this.$executeQuery('ALTER TABLE blocks MODIFY `fees` BIGINT UNSIGNED NOT NULL DEFAULT "0"');
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 14 && isBitcoin === true) {
|
||||
this.uniqueLog(logger.notice, this.hashratesTruncatedMessage);
|
||||
await this.$executeQuery('TRUNCATE hashrates;'); // Need to re-index
|
||||
await this.$executeQuery('ALTER TABLE `hashrates` DROP FOREIGN KEY `hashrates_ibfk_1`');
|
||||
await this.$executeQuery('ALTER TABLE `hashrates` MODIFY `pool_id` SMALLINT UNSIGNED NOT NULL DEFAULT "0"');
|
||||
}
|
||||
if (databaseSchemaVersion < 13 && isBitcoin === true) {
|
||||
await this.$executeQuery('ALTER TABLE blocks MODIFY `difficulty` DOUBLE UNSIGNED NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE blocks MODIFY `median_fee` BIGINT UNSIGNED NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE blocks MODIFY `avg_fee` BIGINT UNSIGNED NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE blocks MODIFY `avg_fee_rate` BIGINT UNSIGNED NOT NULL DEFAULT "0"');
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 16 && isBitcoin === true) {
|
||||
this.uniqueLog(logger.notice, this.hashratesTruncatedMessage);
|
||||
await this.$executeQuery('TRUNCATE hashrates;'); // Need to re-index because we changed timestamps
|
||||
}
|
||||
if (databaseSchemaVersion < 14 && isBitcoin === true) {
|
||||
this.uniqueLog(logger.notice, this.hashratesTruncatedMessage);
|
||||
await this.$executeQuery('TRUNCATE hashrates;'); // Need to re-index
|
||||
await this.$executeQuery('ALTER TABLE `hashrates` DROP FOREIGN KEY `hashrates_ibfk_1`');
|
||||
await this.$executeQuery('ALTER TABLE `hashrates` MODIFY `pool_id` SMALLINT UNSIGNED NOT NULL DEFAULT "0"');
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 17 && isBitcoin === true) {
|
||||
await this.$executeQuery('ALTER TABLE `pools` ADD `slug` CHAR(50) NULL');
|
||||
}
|
||||
if (databaseSchemaVersion < 16 && isBitcoin === true) {
|
||||
this.uniqueLog(logger.notice, this.hashratesTruncatedMessage);
|
||||
await this.$executeQuery('TRUNCATE hashrates;'); // Need to re-index because we changed timestamps
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 18 && isBitcoin === true) {
|
||||
await this.$executeQuery('ALTER TABLE `blocks` ADD INDEX `hash` (`hash`);');
|
||||
}
|
||||
if (databaseSchemaVersion < 17 && isBitcoin === true) {
|
||||
await this.$executeQuery('ALTER TABLE `pools` ADD `slug` CHAR(50) NULL');
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 19) {
|
||||
await this.$executeQuery(this.getCreateRatesTableQuery(), await this.$checkIfTableExists('rates'));
|
||||
}
|
||||
if (databaseSchemaVersion < 18 && isBitcoin === true) {
|
||||
await this.$executeQuery('ALTER TABLE `blocks` ADD INDEX `hash` (`hash`);');
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 20 && isBitcoin === true) {
|
||||
await this.$executeQuery(this.getCreateBlocksSummariesTableQuery(), await this.$checkIfTableExists('blocks_summaries'));
|
||||
}
|
||||
if (databaseSchemaVersion < 19) {
|
||||
await this.$executeQuery(this.getCreateRatesTableQuery(), await this.$checkIfTableExists('rates'));
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 21) {
|
||||
await this.$executeQuery('DROP TABLE IF EXISTS `rates`');
|
||||
await this.$executeQuery(this.getCreatePricesTableQuery(), await this.$checkIfTableExists('prices'));
|
||||
}
|
||||
if (databaseSchemaVersion < 20 && isBitcoin === true) {
|
||||
await this.$executeQuery(this.getCreateBlocksSummariesTableQuery(), await this.$checkIfTableExists('blocks_summaries'));
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 22 && isBitcoin === true) {
|
||||
await this.$executeQuery('DROP TABLE IF EXISTS `difficulty_adjustments`');
|
||||
await this.$executeQuery(this.getCreateDifficultyAdjustmentsTableQuery(), await this.$checkIfTableExists('difficulty_adjustments'));
|
||||
}
|
||||
if (databaseSchemaVersion < 21) {
|
||||
await this.$executeQuery('DROP TABLE IF EXISTS `rates`');
|
||||
await this.$executeQuery(this.getCreatePricesTableQuery(), await this.$checkIfTableExists('prices'));
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 23) {
|
||||
await this.$executeQuery('TRUNCATE `prices`');
|
||||
await this.$executeQuery('ALTER TABLE `prices` DROP `avg_prices`');
|
||||
await this.$executeQuery('ALTER TABLE `prices` ADD `USD` float DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE `prices` ADD `EUR` float DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE `prices` ADD `GBP` float DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE `prices` ADD `CAD` float DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE `prices` ADD `CHF` float DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE `prices` ADD `AUD` float DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE `prices` ADD `JPY` float DEFAULT "0"');
|
||||
}
|
||||
if (databaseSchemaVersion < 22 && isBitcoin === true) {
|
||||
await this.$executeQuery('DROP TABLE IF EXISTS `difficulty_adjustments`');
|
||||
await this.$executeQuery(this.getCreateDifficultyAdjustmentsTableQuery(), await this.$checkIfTableExists('difficulty_adjustments'));
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 24 && isBitcoin == true) {
|
||||
await this.$executeQuery('DROP TABLE IF EXISTS `blocks_audits`');
|
||||
await this.$executeQuery(this.getCreateBlocksAuditsTableQuery(), await this.$checkIfTableExists('blocks_audits'));
|
||||
if (databaseSchemaVersion < 23) {
|
||||
await this.$executeQuery('TRUNCATE `prices`');
|
||||
await this.$executeQuery('ALTER TABLE `prices` DROP `avg_prices`');
|
||||
await this.$executeQuery('ALTER TABLE `prices` ADD `USD` float DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE `prices` ADD `EUR` float DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE `prices` ADD `GBP` float DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE `prices` ADD `CAD` float DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE `prices` ADD `CHF` float DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE `prices` ADD `AUD` float DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE `prices` ADD `JPY` float DEFAULT "0"');
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 24 && isBitcoin == true) {
|
||||
await this.$executeQuery('DROP TABLE IF EXISTS `blocks_audits`');
|
||||
await this.$executeQuery(this.getCreateBlocksAuditsTableQuery(), await this.$checkIfTableExists('blocks_audits'));
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 25 && isBitcoin === true) {
|
||||
await this.$executeQuery(`INSERT INTO state VALUES('last_node_stats', 0, '1970-01-01');`);
|
||||
await this.$executeQuery(this.getCreateLightningStatisticsQuery(), await this.$checkIfTableExists('lightning_stats'));
|
||||
await this.$executeQuery(this.getCreateNodesQuery(), await this.$checkIfTableExists('nodes'));
|
||||
await this.$executeQuery(this.getCreateChannelsQuery(), await this.$checkIfTableExists('channels'));
|
||||
await this.$executeQuery(this.getCreateNodesStatsQuery(), await this.$checkIfTableExists('node_stats'));
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 26 && isBitcoin === true) {
|
||||
if (config.LIGHTNING.ENABLED) {
|
||||
this.uniqueLog(logger.notice, `'lightning_stats' table has been truncated.`);
|
||||
}
|
||||
} catch (e) {
|
||||
throw e;
|
||||
await this.$executeQuery(`TRUNCATE lightning_stats`);
|
||||
await this.$executeQuery('ALTER TABLE `lightning_stats` ADD tor_nodes int(11) NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE `lightning_stats` ADD clearnet_nodes int(11) NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE `lightning_stats` ADD unannounced_nodes int(11) NOT NULL DEFAULT "0"');
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 27 && isBitcoin === true) {
|
||||
await this.$executeQuery('ALTER TABLE `lightning_stats` ADD avg_capacity bigint(20) unsigned NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE `lightning_stats` ADD avg_fee_rate int(11) unsigned NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE `lightning_stats` ADD avg_base_fee_mtokens bigint(20) unsigned NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE `lightning_stats` ADD med_capacity bigint(20) unsigned NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE `lightning_stats` ADD med_fee_rate int(11) unsigned NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE `lightning_stats` ADD med_base_fee_mtokens bigint(20) unsigned NOT NULL DEFAULT "0"');
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 28 && isBitcoin === true) {
|
||||
if (config.LIGHTNING.ENABLED) {
|
||||
this.uniqueLog(logger.notice, `'lightning_stats' and 'node_stats' tables have been truncated.`);
|
||||
}
|
||||
await this.$executeQuery(`TRUNCATE lightning_stats`);
|
||||
await this.$executeQuery(`TRUNCATE node_stats`);
|
||||
await this.$executeQuery(`ALTER TABLE lightning_stats MODIFY added DATE`);
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 29 && isBitcoin === true) {
|
||||
await this.$executeQuery(this.getCreateGeoNamesTableQuery(), await this.$checkIfTableExists('geo_names'));
|
||||
await this.$executeQuery('ALTER TABLE `nodes` ADD as_number int(11) unsigned NULL DEFAULT NULL');
|
||||
await this.$executeQuery('ALTER TABLE `nodes` ADD city_id int(11) unsigned NULL DEFAULT NULL');
|
||||
await this.$executeQuery('ALTER TABLE `nodes` ADD country_id int(11) unsigned NULL DEFAULT NULL');
|
||||
await this.$executeQuery('ALTER TABLE `nodes` ADD accuracy_radius int(11) unsigned NULL DEFAULT NULL');
|
||||
await this.$executeQuery('ALTER TABLE `nodes` ADD subdivision_id int(11) unsigned NULL DEFAULT NULL');
|
||||
await this.$executeQuery('ALTER TABLE `nodes` ADD longitude double NULL DEFAULT NULL');
|
||||
await this.$executeQuery('ALTER TABLE `nodes` ADD latitude double NULL DEFAULT NULL');
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 30 && isBitcoin === true) {
|
||||
await this.$executeQuery('ALTER TABLE `geo_names` CHANGE `type` `type` enum("city","country","division","continent","as_organization") NOT NULL');
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 31 && isBitcoin == true) { // Link blocks to prices
|
||||
await this.$executeQuery('ALTER TABLE `prices` ADD `id` int NULL AUTO_INCREMENT UNIQUE');
|
||||
await this.$executeQuery('DROP TABLE IF EXISTS `blocks_prices`');
|
||||
await this.$executeQuery(this.getCreateBlocksPricesTableQuery(), await this.$checkIfTableExists('blocks_prices'));
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 32 && isBitcoin == true) {
|
||||
await this.$executeQuery('ALTER TABLE `blocks_summaries` ADD `template` JSON DEFAULT "[]"');
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 33 && isBitcoin == true) {
|
||||
await this.$executeQuery('ALTER TABLE `geo_names` CHANGE `type` `type` enum("city","country","division","continent","as_organization", "country_iso_code") NOT NULL');
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 34 && isBitcoin == true) {
|
||||
await this.$executeQuery('ALTER TABLE `lightning_stats` ADD clearnet_tor_nodes int(11) NOT NULL DEFAULT "0"');
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 35 && isBitcoin == true) {
|
||||
await this.$executeQuery('DELETE from `lightning_stats` WHERE added > "2021-09-19"');
|
||||
await this.$executeQuery('ALTER TABLE `lightning_stats` ADD CONSTRAINT added_unique UNIQUE (added);');
|
||||
}
|
||||
}
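Each `if (databaseSchemaVersion < N)` guard above applies exactly one incremental step, so a fresh database walks through every block while an already-migrated one skips them all. A minimal TypeScript sketch of that pattern, with a hypothetical step list and executor (illustrative names only, not the methods of this class):

// Hypothetical sketch of the versioned-migration pattern used above (illustrative names only).
type MigrationStep = { version: number; queries: string[] };

const steps: MigrationStep[] = [
  { version: 34, queries: ['ALTER TABLE `lightning_stats` ADD clearnet_tor_nodes int(11) NOT NULL DEFAULT "0"'] },
  { version: 35, queries: ['ALTER TABLE `lightning_stats` ADD CONSTRAINT added_unique UNIQUE (added);'] },
];

async function runMigrations(currentVersion: number, execute: (sql: string) => Promise<void>): Promise<number> {
  let version = currentVersion;
  for (const step of steps) {
    if (version < step.version) {       // same guard as `databaseSchemaVersion < N` above
      for (const sql of step.queries) {
        await execute(sql);
      }
      version = step.version;           // the real class persists this in the `state` table
    }
  }
  return version;
}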
|
||||
|
||||
@ -288,7 +357,7 @@ class DatabaseMigration {
|
||||
/**
|
||||
* Small query execution wrapper to log all executed queries
|
||||
*/
|
||||
private async $executeQuery(query: string, silent: boolean = false): Promise<any> {
|
||||
private async $executeQuery(query: string, silent = false): Promise<any> {
|
||||
if (!silent) {
|
||||
logger.debug('MIGRATIONS: Execute query:\n' + query);
|
||||
}
|
||||
@ -317,21 +386,17 @@ class DatabaseMigration {
|
||||
* Create the `state` table
|
||||
*/
|
||||
private async $createMigrationStateTable(): Promise<void> {
|
||||
try {
|
||||
const query = `CREATE TABLE IF NOT EXISTS state (
|
||||
name varchar(25) NOT NULL,
|
||||
number int(11) NULL,
|
||||
string varchar(100) NULL,
|
||||
CONSTRAINT name_unique UNIQUE (name)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
|
||||
await this.$executeQuery(query);
|
||||
const query = `CREATE TABLE IF NOT EXISTS state (
|
||||
name varchar(25) NOT NULL,
|
||||
number int(11) NULL,
|
||||
string varchar(100) NULL,
|
||||
CONSTRAINT name_unique UNIQUE (name)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
|
||||
await this.$executeQuery(query);
|
||||
|
||||
// Set initial values
|
||||
await this.$executeQuery(`INSERT INTO state VALUES('schema_version', 0, NULL);`);
|
||||
await this.$executeQuery(`INSERT INTO state VALUES('last_elements_block', 0, NULL);`);
|
||||
} catch (e) {
|
||||
throw e;
|
||||
}
|
||||
// Set initial values
|
||||
await this.$executeQuery(`INSERT INTO state VALUES('schema_version', 0, NULL);`);
|
||||
await this.$executeQuery(`INSERT INTO state VALUES('last_elements_block', 0, NULL);`);
|
||||
}
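The `state` table created here holds one row per counter, so the schema version presumably lives in the row named 'schema_version'. A hedged sketch of how that counter could be read and bumped through the same kind of query wrapper (helper names are illustrative, not this class's API):

// Illustrative only: reading and updating the 'schema_version' counter stored in `state`.
async function getSchemaVersion(query: (sql: string) => Promise<any[]>): Promise<number> {
  const rows = await query(`SELECT number FROM state WHERE name = 'schema_version';`);
  return rows.length > 0 ? rows[0].number : 0;
}

async function setSchemaVersion(query: (sql: string) => Promise<any[]>, version: number): Promise<void> {
  await query(`UPDATE state SET number = ${version} WHERE name = 'schema_version';`);
}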
|
||||
|
||||
/**
|
||||
@ -569,6 +634,82 @@ class DatabaseMigration {
|
||||
adjustment float NOT NULL,
|
||||
PRIMARY KEY (height),
|
||||
INDEX (time)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
|
||||
}
|
||||
|
||||
private getCreateLightningStatisticsQuery(): string {
|
||||
return `CREATE TABLE IF NOT EXISTS lightning_stats (
|
||||
id int(11) NOT NULL AUTO_INCREMENT,
|
||||
added datetime NOT NULL,
|
||||
channel_count int(11) NOT NULL,
|
||||
node_count int(11) NOT NULL,
|
||||
total_capacity double unsigned NOT NULL,
|
||||
PRIMARY KEY (id)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
|
||||
}
|
||||
|
||||
private getCreateNodesQuery(): string {
|
||||
return `CREATE TABLE IF NOT EXISTS nodes (
|
||||
public_key varchar(66) NOT NULL,
|
||||
first_seen datetime NOT NULL,
|
||||
updated_at datetime NOT NULL,
|
||||
alias varchar(200) CHARACTER SET utf8mb4 NOT NULL,
|
||||
color varchar(200) NOT NULL,
|
||||
sockets text DEFAULT NULL,
|
||||
PRIMARY KEY (public_key),
|
||||
KEY alias (alias(10))
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
|
||||
}
|
||||
|
||||
private getCreateChannelsQuery(): string {
|
||||
return `CREATE TABLE IF NOT EXISTS channels (
|
||||
id bigint(11) unsigned NOT NULL,
|
||||
short_id varchar(15) NOT NULL DEFAULT '',
|
||||
capacity bigint(20) unsigned NOT NULL,
|
||||
transaction_id varchar(64) NOT NULL,
|
||||
transaction_vout int(11) NOT NULL,
|
||||
updated_at datetime DEFAULT NULL,
|
||||
created datetime DEFAULT NULL,
|
||||
status int(11) NOT NULL DEFAULT 0,
|
||||
closing_transaction_id varchar(64) DEFAULT NULL,
|
||||
closing_date datetime DEFAULT NULL,
|
||||
closing_reason int(11) DEFAULT NULL,
|
||||
node1_public_key varchar(66) NOT NULL,
|
||||
node1_base_fee_mtokens bigint(20) unsigned DEFAULT NULL,
|
||||
node1_cltv_delta int(11) DEFAULT NULL,
|
||||
node1_fee_rate bigint(11) DEFAULT NULL,
|
||||
node1_is_disabled tinyint(1) DEFAULT NULL,
|
||||
node1_max_htlc_mtokens bigint(20) unsigned DEFAULT NULL,
|
||||
node1_min_htlc_mtokens bigint(20) DEFAULT NULL,
|
||||
node1_updated_at datetime DEFAULT NULL,
|
||||
node2_public_key varchar(66) NOT NULL,
|
||||
node2_base_fee_mtokens bigint(20) unsigned DEFAULT NULL,
|
||||
node2_cltv_delta int(11) DEFAULT NULL,
|
||||
node2_fee_rate bigint(11) DEFAULT NULL,
|
||||
node2_is_disabled tinyint(1) DEFAULT NULL,
|
||||
node2_max_htlc_mtokens bigint(20) unsigned DEFAULT NULL,
|
||||
node2_min_htlc_mtokens bigint(20) unsigned DEFAULT NULL,
|
||||
node2_updated_at datetime DEFAULT NULL,
|
||||
PRIMARY KEY (id),
|
||||
KEY node1_public_key (node1_public_key),
|
||||
KEY node2_public_key (node2_public_key),
|
||||
KEY status (status),
|
||||
KEY short_id (short_id),
|
||||
KEY transaction_id (transaction_id),
|
||||
KEY closing_transaction_id (closing_transaction_id)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
|
||||
}
|
||||
|
||||
private getCreateNodesStatsQuery(): string {
|
||||
return `CREATE TABLE IF NOT EXISTS node_stats (
|
||||
id int(11) unsigned NOT NULL AUTO_INCREMENT,
|
||||
public_key varchar(66) NOT NULL DEFAULT '',
|
||||
added date NOT NULL,
|
||||
capacity bigint(20) unsigned NOT NULL DEFAULT 0,
|
||||
channels int(11) unsigned NOT NULL DEFAULT 0,
|
||||
PRIMARY KEY (id),
|
||||
UNIQUE KEY added (added,public_key),
|
||||
KEY public_key (public_key)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
|
||||
}
|
||||
|
||||
@ -585,6 +726,25 @@ class DatabaseMigration {
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
|
||||
}
|
||||
|
||||
private getCreateGeoNamesTableQuery(): string {
|
||||
return `CREATE TABLE geo_names (
|
||||
id int(11) unsigned NOT NULL,
|
||||
type enum('city','country','division','continent') NOT NULL,
|
||||
names text DEFAULT NULL,
|
||||
UNIQUE KEY id (id,type),
|
||||
KEY id_2 (id)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`
|
||||
}
|
||||
|
||||
private getCreateBlocksPricesTableQuery(): string {
|
||||
return `CREATE TABLE IF NOT EXISTS blocks_prices (
|
||||
height int(10) unsigned NOT NULL,
|
||||
price_id int(10) unsigned NOT NULL,
|
||||
PRIMARY KEY (height),
|
||||
INDEX (price_id)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
|
||||
}
|
||||
|
||||
public async $truncateIndexedData(tables: string[]) {
|
||||
const allowedTables = ['blocks', 'hashrates', 'prices'];
|
||||
|
||||
|
307
backend/src/api/explorer/channels.api.ts
Normal file
@ -0,0 +1,307 @@
|
||||
import logger from '../../logger';
|
||||
import DB from '../../database';
|
||||
import nodesApi from './nodes.api';
|
||||
|
||||
class ChannelsApi {
|
||||
public async $getAllChannels(): Promise<any[]> {
|
||||
try {
|
||||
const query = `SELECT * FROM channels`;
|
||||
const [rows]: any = await DB.query(query);
|
||||
return rows;
|
||||
} catch (e) {
|
||||
logger.err('$getAllChannels error: ' + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
public async $getAllChannelsGeo(publicKey?: string): Promise<any[]> {
|
||||
try {
|
||||
const params: string[] = [];
|
||||
let query = `SELECT nodes_1.public_key as node1_public_key, nodes_1.alias AS node1_alias,
|
||||
nodes_1.latitude AS node1_latitude, nodes_1.longitude AS node1_longitude,
|
||||
nodes_2.public_key as node2_public_key, nodes_2.alias AS node2_alias,
|
||||
nodes_2.latitude AS node2_latitude, nodes_2.longitude AS node2_longitude,
|
||||
channels.capacity
|
||||
FROM channels
|
||||
JOIN nodes AS nodes_1 on nodes_1.public_key = channels.node1_public_key
|
||||
JOIN nodes AS nodes_2 on nodes_2.public_key = channels.node2_public_key
|
||||
WHERE nodes_1.latitude IS NOT NULL AND nodes_1.longitude IS NOT NULL
|
||||
AND nodes_2.latitude IS NOT NULL AND nodes_2.longitude IS NOT NULL
|
||||
`;
|
||||
|
||||
if (publicKey !== undefined) {
|
||||
query += ' AND (nodes_1.public_key = ? OR nodes_2.public_key = ?)';
|
||||
params.push(publicKey);
|
||||
params.push(publicKey);
|
||||
}
|
||||
|
||||
const [rows]: any = await DB.query(query, params);
|
||||
return rows.map((row) => [
|
||||
row.node1_public_key, row.node1_alias, row.node1_longitude, row.node1_latitude,
|
||||
row.node2_public_key, row.node2_alias, row.node2_longitude, row.node2_latitude,
|
||||
row.capacity]);
|
||||
} catch (e) {
|
||||
logger.err('$getAllChannelsGeo error: ' + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
public async $searchChannelsById(search: string): Promise<any[]> {
|
||||
try {
|
||||
const searchStripped = search.replace('%', '') + '%';
|
||||
const query = `SELECT id, short_id, capacity FROM channels WHERE id LIKE ? OR short_id LIKE ? LIMIT 10`;
|
||||
const [rows]: any = await DB.query(query, [searchStripped, searchStripped]);
|
||||
return rows;
|
||||
} catch (e) {
|
||||
logger.err('$searchChannelsById error: ' + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
public async $getChannelsByStatus(status: number): Promise<any[]> {
|
||||
try {
|
||||
const query = `SELECT * FROM channels WHERE status = ?`;
|
||||
const [rows]: any = await DB.query(query, [status]);
|
||||
return rows;
|
||||
} catch (e) {
|
||||
logger.err('$getChannelsByStatus error: ' + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
public async $getClosedChannelsWithoutReason(): Promise<any[]> {
|
||||
try {
|
||||
const query = `SELECT * FROM channels WHERE status = 2 AND closing_reason IS NULL AND closing_transaction_id != ''`;
|
||||
const [rows]: any = await DB.query(query);
|
||||
return rows;
|
||||
} catch (e) {
|
||||
logger.err('$getClosedChannelsWithoutReason error: ' + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
public async $getChannelsWithoutCreatedDate(): Promise<any[]> {
|
||||
try {
|
||||
const query = `SELECT * FROM channels WHERE created IS NULL`;
|
||||
const [rows]: any = await DB.query(query);
|
||||
return rows;
|
||||
} catch (e) {
|
||||
logger.err('$getChannelsWithoutCreatedDate error: ' + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
public async $getChannel(id: string): Promise<any> {
|
||||
try {
|
||||
const query = `SELECT n1.alias AS alias_left, n2.alias AS alias_right, channels.*, ns1.channels AS channels_left, ns1.capacity AS capacity_left, ns2.channels AS channels_right, ns2.capacity AS capacity_right FROM channels LEFT JOIN nodes AS n1 ON n1.public_key = channels.node1_public_key LEFT JOIN nodes AS n2 ON n2.public_key = channels.node2_public_key LEFT JOIN node_stats AS ns1 ON ns1.public_key = channels.node1_public_key LEFT JOIN node_stats AS ns2 ON ns2.public_key = channels.node2_public_key WHERE (ns1.id = (SELECT MAX(id) FROM node_stats WHERE public_key = channels.node1_public_key) AND ns2.id = (SELECT MAX(id) FROM node_stats WHERE public_key = channels.node2_public_key)) AND channels.id = ?`;
|
||||
const [rows]: any = await DB.query(query, [id]);
|
||||
if (rows[0]) {
|
||||
return this.convertChannel(rows[0]);
|
||||
}
|
||||
} catch (e) {
|
||||
logger.err('$getChannel error: ' + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
public async $getChannelsStats(): Promise<any> {
|
||||
try {
|
||||
// Feedback from zerofeerouting:
|
||||
// "I would argue > 5000ppm can be ignored. Channels charging more than .5% fee are ignored by CLN for example."
|
||||
const ignoredFeeRateThreshold = 5000;
|
||||
const ignoredBaseFeeThreshold = 5000;
|
||||
|
||||
// Capacity
|
||||
let query = `SELECT AVG(capacity) AS avgCapacity FROM channels WHERE status = 1 ORDER BY capacity`;
|
||||
const [avgCapacity]: any = await DB.query(query);
|
||||
|
||||
query = `SELECT capacity FROM channels WHERE status = 1 ORDER BY capacity`;
|
||||
let [capacity]: any = await DB.query(query);
|
||||
capacity = capacity.map(capacity => capacity.capacity);
|
||||
const medianCapacity = capacity[Math.floor(capacity.length / 2)];
|
||||
|
||||
// Fee rates
|
||||
query = `SELECT node1_fee_rate FROM channels WHERE node1_fee_rate < ${ignoredFeeRateThreshold} AND status = 1`;
|
||||
let [feeRates1]: any = await DB.query(query);
|
||||
feeRates1 = feeRates1.map(rate => rate.node1_fee_rate);
|
||||
query = `SELECT node2_fee_rate FROM channels WHERE node2_fee_rate < ${ignoredFeeRateThreshold} AND status = 1`;
|
||||
let [feeRates2]: any = await DB.query(query);
|
||||
feeRates2 = feeRates2.map(rate => rate.node2_fee_rate);
|
||||
|
||||
let feeRates = (feeRates1.concat(feeRates2)).sort((a, b) => a - b);
|
||||
let avgFeeRate = 0;
|
||||
for (const rate of feeRates) {
|
||||
avgFeeRate += rate;
|
||||
}
|
||||
avgFeeRate /= feeRates.length;
|
||||
const medianFeeRate = feeRates[Math.floor(feeRates.length / 2)];
|
||||
|
||||
// Base fees
|
||||
query = `SELECT node1_base_fee_mtokens FROM channels WHERE node1_base_fee_mtokens < ${ignoredBaseFeeThreshold} AND status = 1`;
|
||||
let [baseFees1]: any = await DB.query(query);
|
||||
baseFees1 = baseFees1.map(rate => rate.node1_base_fee_mtokens);
|
||||
query = `SELECT node2_base_fee_mtokens FROM channels WHERE node2_base_fee_mtokens < ${ignoredBaseFeeThreshold} AND status = 1`;
|
||||
let [baseFees2]: any = await DB.query(query);
|
||||
baseFees2 = baseFees2.map(rate => rate.node2_base_fee_mtokens);
|
||||
|
||||
let baseFees = (baseFees1.concat(baseFees2)).sort((a, b) => a - b);
|
||||
let avgBaseFee = 0;
|
||||
for (const fee of baseFees) {
|
||||
avgBaseFee += fee;
|
||||
}
|
||||
avgBaseFee /= baseFees.length;
|
||||
const medianBaseFee = baseFees[Math.floor(baseFees.length / 2)];
|
||||
|
||||
return {
|
||||
avgCapacity: parseInt(avgCapacity[0].avgCapacity, 10),
|
||||
avgFeeRate: avgFeeRate,
|
||||
avgBaseFee: avgBaseFee,
|
||||
medianCapacity: medianCapacity,
|
||||
medianFeeRate: medianFeeRate,
|
||||
medianBaseFee: medianBaseFee,
|
||||
}
|
||||
|
||||
} catch (e) {
|
||||
logger.err(`Cannot calculate channels statistics. Reason: ${e instanceof Error ? e.message : e}`);
|
||||
throw e;
|
||||
}
|
||||
}
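For intuition on the averages and medians above: with four open channels whose fee rates sort to [1, 10, 100, 998] ppm, the loop yields an average of (1 + 10 + 100 + 998) / 4 = 277.25, while the reported median is the element at index Math.floor(4 / 2) = 2, i.e. 100 ppm (the upper median for even-length arrays); rates at or above the 5000 ppm threshold never enter the array in the first place.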
|
||||
|
||||
public async $getChannelsByTransactionId(transactionIds: string[]): Promise<any[]> {
|
||||
try {
|
||||
transactionIds = transactionIds.map((id) => '\'' + id + '\'');
|
||||
const query = `SELECT n1.alias AS alias_left, n2.alias AS alias_right, channels.* FROM channels LEFT JOIN nodes AS n1 ON n1.public_key = channels.node1_public_key LEFT JOIN nodes AS n2 ON n2.public_key = channels.node2_public_key WHERE channels.transaction_id IN (${transactionIds.join(', ')}) OR channels.closing_transaction_id IN (${transactionIds.join(', ')})`;
|
||||
const [rows]: any = await DB.query(query);
|
||||
const channels = rows.map((row) => this.convertChannel(row));
|
||||
return channels;
|
||||
} catch (e) {
|
||||
logger.err('$getChannelByTransactionId error: ' + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
public async $getChannelsForNode(public_key: string, index: number, length: number, status: string): Promise<any[]> {
|
||||
try {
|
||||
let channelStatusFilter;
|
||||
if (status === 'open') {
|
||||
channelStatusFilter = '< 2';
|
||||
} else if (status === 'closed') {
|
||||
channelStatusFilter = '= 2';
|
||||
}
|
||||
|
||||
// Channels originating from node
|
||||
let query = `
|
||||
SELECT node2.alias, node2.public_key, channels.status, channels.node1_fee_rate,
|
||||
channels.capacity, channels.short_id, channels.id
|
||||
FROM channels
|
||||
JOIN nodes AS node2 ON node2.public_key = channels.node2_public_key
|
||||
WHERE node1_public_key = ? AND channels.status ${channelStatusFilter}
|
||||
`;
|
||||
const [channelsFromNode]: any = await DB.query(query, [public_key, index, length]);
|
||||
|
||||
// Channels incoming to node
|
||||
query = `
|
||||
SELECT node1.alias, node1.public_key, channels.status, channels.node2_fee_rate,
|
||||
channels.capacity, channels.short_id, channels.id
|
||||
FROM channels
|
||||
JOIN nodes AS node1 ON node1.public_key = channels.node1_public_key
|
||||
WHERE node2_public_key = ? AND channels.status ${channelStatusFilter}
|
||||
`;
|
||||
const [channelsToNode]: any = await DB.query(query, [public_key, index, length]);
|
||||
|
||||
let allChannels = channelsFromNode.concat(channelsToNode);
|
||||
allChannels.sort((a, b) => {
|
||||
return b.capacity - a.capacity;
|
||||
});
|
||||
allChannels = allChannels.slice(index, index + length);
|
||||
|
||||
const channels: any[] = []
|
||||
for (const row of allChannels) {
|
||||
const activeChannelsStats: any = await nodesApi.$getActiveChannelsStats(row.public_key);
|
||||
channels.push({
|
||||
status: row.status,
|
||||
capacity: row.capacity ?? 0,
|
||||
short_id: row.short_id,
|
||||
id: row.id,
|
||||
fee_rate: row.node1_fee_rate ?? row.node2_fee_rate ?? 0,
|
||||
node: {
|
||||
alias: row.alias.length > 0 ? row.alias : row.public_key.slice(0, 20),
|
||||
public_key: row.public_key,
|
||||
channels: activeChannelsStats.active_channel_count ?? 0,
|
||||
capacity: activeChannelsStats.capacity ?? 0,
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
return channels;
|
||||
} catch (e) {
|
||||
logger.err('$getChannelsForNode error: ' + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
public async $getChannelsCountForNode(public_key: string, status: string): Promise<any> {
|
||||
try {
|
||||
// Default active and inactive channels
|
||||
let statusQuery = '< 2';
|
||||
// Closed channels only
|
||||
if (status === 'closed') {
|
||||
statusQuery = '= 2';
|
||||
}
|
||||
const query = `
|
||||
SELECT COUNT(*) AS count
|
||||
FROM channels
|
||||
WHERE (node1_public_key = ? OR node2_public_key = ?)
|
||||
AND status ${statusQuery}
|
||||
`;
|
||||
const [rows]: any = await DB.query(query, [public_key, public_key]);
|
||||
return rows[0]['count'];
|
||||
} catch (e) {
|
||||
logger.err('$getChannelsForNode error: ' + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
private convertChannel(channel: any): any {
|
||||
return {
|
||||
'id': channel.id,
|
||||
'short_id': channel.short_id,
|
||||
'capacity': channel.capacity,
|
||||
'transaction_id': channel.transaction_id,
|
||||
'transaction_vout': channel.transaction_vout,
|
||||
'closing_transaction_id': channel.closing_transaction_id,
|
||||
'closing_reason': channel.closing_reason,
|
||||
'updated_at': channel.updated_at,
|
||||
'created': channel.created,
|
||||
'status': channel.status,
|
||||
'node_left': {
|
||||
'alias': channel.alias_left,
|
||||
'public_key': channel.node1_public_key,
|
||||
'channels': channel.channels_left,
|
||||
'capacity': channel.capacity_left,
|
||||
'base_fee_mtokens': channel.node1_base_fee_mtokens,
|
||||
'cltv_delta': channel.node1_cltv_delta,
|
||||
'fee_rate': channel.node1_fee_rate,
|
||||
'is_disabled': channel.node1_is_disabled,
|
||||
'max_htlc_mtokens': channel.node1_max_htlc_mtokens,
|
||||
'min_htlc_mtokens': channel.node1_min_htlc_mtokens,
|
||||
'updated_at': channel.node1_updated_at,
|
||||
},
|
||||
'node_right': {
|
||||
'alias': channel.alias_right,
|
||||
'public_key': channel.node2_public_key,
|
||||
'channels': channel.channels_right,
|
||||
'capacity': channel.capacity_right,
|
||||
'base_fee_mtokens': channel.node2_base_fee_mtokens,
|
||||
'cltv_delta': channel.node2_cltv_delta,
|
||||
'fee_rate': channel.node2_fee_rate,
|
||||
'is_disabled': channel.node2_is_disabled,
|
||||
'max_htlc_mtokens': channel.node2_max_htlc_mtokens,
|
||||
'min_htlc_mtokens': channel.node2_min_htlc_mtokens,
|
||||
'updated_at': channel.node2_updated_at,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
export default new ChannelsApi();
|
111
backend/src/api/explorer/channels.routes.ts
Normal file
@ -0,0 +1,111 @@
|
||||
import config from '../../config';
|
||||
import { Application, Request, Response } from 'express';
|
||||
import channelsApi from './channels.api';
|
||||
|
||||
class ChannelsRoutes {
|
||||
constructor() { }
|
||||
|
||||
public initRoutes(app: Application) {
|
||||
app
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/channels/txids', this.$getChannelsByTransactionIds)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/channels/search/:search', this.$searchChannelsById)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/channels/:short_id', this.$getChannel)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/channels', this.$getChannelsForNode)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/channels-geo', this.$getAllChannelsGeo)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/channels-geo/:publicKey', this.$getAllChannelsGeo)
|
||||
;
|
||||
}
|
||||
|
||||
private async $searchChannelsById(req: Request, res: Response) {
|
||||
try {
|
||||
const channels = await channelsApi.$searchChannelsById(req.params.search);
|
||||
res.json(channels);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async $getChannel(req: Request, res: Response) {
|
||||
try {
|
||||
const channel = await channelsApi.$getChannel(req.params.short_id);
|
||||
if (!channel) {
|
||||
res.status(404).send('Channel not found');
|
||||
return;
|
||||
}
|
||||
res.json(channel);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async $getChannelsForNode(req: Request, res: Response) {
|
||||
try {
|
||||
if (typeof req.query.public_key !== 'string') {
|
||||
res.status(400).send('Missing parameter: public_key');
|
||||
return;
|
||||
}
|
||||
const index = parseInt(typeof req.query.index === 'string' ? req.query.index : '0', 10) || 0;
|
||||
const status: string = typeof req.query.status === 'string' ? req.query.status : '';
|
||||
const channels = await channelsApi.$getChannelsForNode(req.query.public_key, index, 10, status);
|
||||
const channelsCount = await channelsApi.$getChannelsCountForNode(req.query.public_key, status);
|
||||
res.header('Pragma', 'public');
|
||||
res.header('Cache-control', 'public');
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
|
||||
res.header('X-Total-Count', channelsCount.toString());
|
||||
res.json(channels);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async $getChannelsByTransactionIds(req: Request, res: Response) {
|
||||
try {
|
||||
if (!Array.isArray(req.query.txId)) {
|
||||
res.status(400).send('Not an array');
|
||||
return;
|
||||
}
|
||||
const txIds: string[] = [];
|
||||
for (const _txId in req.query.txId) {
|
||||
if (typeof req.query.txId[_txId] === 'string') {
|
||||
txIds.push(req.query.txId[_txId].toString());
|
||||
}
|
||||
}
|
||||
const channels = await channelsApi.$getChannelsByTransactionId(txIds);
|
||||
const inputs: any[] = [];
|
||||
const outputs: any[] = [];
|
||||
for (const txid of txIds) {
|
||||
const foundChannelInputs = channels.find((channel) => channel.closing_transaction_id === txid);
|
||||
if (foundChannelInputs) {
|
||||
inputs.push(foundChannelInputs);
|
||||
} else {
|
||||
inputs.push(null);
|
||||
}
|
||||
const foundChannelOutputs = channels.find((channel) => channel.transaction_id === txid);
|
||||
if (foundChannelOutputs) {
|
||||
outputs.push(foundChannelOutputs);
|
||||
} else {
|
||||
outputs.push(null);
|
||||
}
|
||||
}
|
||||
|
||||
res.json({
|
||||
inputs: inputs,
|
||||
outputs: outputs,
|
||||
});
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
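Assuming the default '/api/v1/' value of config.MEMPOOL.API_URL_PREFIX, a client call against this handler would look roughly like the sketch below (placeholder txids; the response arrays stay aligned with the requested ids, with null where no channel matched):

// Illustrative client call only; the actual prefix depends on config.MEMPOOL.API_URL_PREFIX.
const res = await fetch('/api/v1/lightning/channels/txids?txId[]=<funding_txid>&txId[]=<closing_txid>');
const { inputs, outputs } = await res.json();
// inputs[i]  -> channel whose closing_transaction_id matches txId[i], or null
// outputs[i] -> channel whose transaction_id matches txId[i], or null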
|
||||
|
||||
private async $getAllChannelsGeo(req: Request, res: Response) {
|
||||
try {
|
||||
const channels = await channelsApi.$getAllChannelsGeo(req.params?.publicKey);
|
||||
res.json(channels);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
export default new ChannelsRoutes();
|
58
backend/src/api/explorer/general.routes.ts
Normal file
@ -0,0 +1,58 @@
|
||||
import config from '../../config';
|
||||
import { Application, Request, Response } from 'express';
|
||||
import nodesApi from './nodes.api';
|
||||
import channelsApi from './channels.api';
|
||||
import statisticsApi from './statistics.api';
|
||||
class GeneralLightningRoutes {
|
||||
constructor() { }
|
||||
|
||||
public initRoutes(app: Application) {
|
||||
app
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/search', this.$searchNodesAndChannels)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/statistics/latest', this.$getGeneralStats)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/statistics/:interval', this.$getStatistics)
|
||||
;
|
||||
}
|
||||
|
||||
private async $searchNodesAndChannels(req: Request, res: Response) {
|
||||
if (typeof req.query.searchText !== 'string') {
|
||||
res.status(400).send('Missing parameter: searchText');
|
||||
return;
|
||||
}
|
||||
try {
|
||||
const nodes = await nodesApi.$searchNodeByPublicKeyOrAlias(req.query.searchText);
|
||||
const channels = await channelsApi.$searchChannelsById(req.query.searchText);
|
||||
res.json({
|
||||
nodes: nodes,
|
||||
channels: channels,
|
||||
});
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async $getStatistics(req: Request, res: Response) {
|
||||
try {
|
||||
const statistics = await statisticsApi.$getStatistics(req.params.interval);
|
||||
const statisticsCount = await statisticsApi.$getStatisticsCount();
|
||||
res.header('Pragma', 'public');
|
||||
res.header('Cache-control', 'public');
|
||||
res.header('X-total-count', statisticsCount.toString());
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
|
||||
res.json(statistics);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async $getGeneralStats(req: Request, res: Response) {
|
||||
try {
|
||||
const statistics = await statisticsApi.$getLatestStatistics();
|
||||
res.json(statistics);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export default new GeneralLightningRoutes();
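Each of these route classes only exposes initRoutes(app), so they presumably get wired into the shared Express application by the backend's bootstrap code. A hypothetical sketch of that wiring (module paths and port are assumptions, not part of this diff):

// Hypothetical bootstrap, shown only to illustrate how the three route modules fit together.
import express from 'express';
import generalLightningRoutes from './api/explorer/general.routes';
import channelsRoutes from './api/explorer/channels.routes';
import nodesRoutes from './api/explorer/nodes.routes';

const app = express();
generalLightningRoutes.initRoutes(app);
channelsRoutes.initRoutes(app);
nodesRoutes.initRoutes(app);
app.listen(8999); // port is an assumption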
|
326
backend/src/api/explorer/nodes.api.ts
Normal file
@ -0,0 +1,326 @@
|
||||
import logger from '../../logger';
|
||||
import DB from '../../database';
|
||||
|
||||
class NodesApi {
|
||||
public async $getNode(public_key: string): Promise<any> {
|
||||
try {
|
||||
// General info
|
||||
let query = `
|
||||
SELECT public_key, alias, UNIX_TIMESTAMP(first_seen) AS first_seen,
|
||||
UNIX_TIMESTAMP(updated_at) AS updated_at, color, sockets as sockets,
|
||||
as_number, city_id, country_id, subdivision_id, longitude, latitude,
|
||||
geo_names_iso.names as iso_code, geo_names_as.names as as_organization, geo_names_city.names as city,
|
||||
geo_names_country.names as country, geo_names_subdivision.names as subdivision
|
||||
FROM nodes
|
||||
LEFT JOIN geo_names geo_names_as on geo_names_as.id = as_number
|
||||
LEFT JOIN geo_names geo_names_city on geo_names_city.id = city_id
|
||||
LEFT JOIN geo_names geo_names_subdivision on geo_names_subdivision.id = subdivision_id
|
||||
LEFT JOIN geo_names geo_names_country on geo_names_country.id = country_id
|
||||
LEFT JOIN geo_names geo_names_iso ON geo_names_iso.id = nodes.country_id AND geo_names_iso.type = 'country_iso_code'
|
||||
WHERE public_key = ?
|
||||
`;
|
||||
let [rows]: any[] = await DB.query(query, [public_key]);
|
||||
if (rows.length === 0) {
|
||||
throw new Error(`This node does not exist, or our node is not seeing it yet`);
|
||||
}
|
||||
|
||||
const node = rows[0];
|
||||
node.as_organization = JSON.parse(node.as_organization);
|
||||
node.subdivision = JSON.parse(node.subdivision);
|
||||
node.city = JSON.parse(node.city);
|
||||
node.country = JSON.parse(node.country);
|
||||
|
||||
// Active channels and capacity
|
||||
const activeChannelsStats: any = await this.$getActiveChannelsStats(public_key);
|
||||
node.active_channel_count = activeChannelsStats.active_channel_count ?? 0;
|
||||
node.capacity = activeChannelsStats.capacity ?? 0;
|
||||
|
||||
// Opened channels count
|
||||
query = `
|
||||
SELECT count(short_id) as opened_channel_count
|
||||
FROM channels
|
||||
WHERE status != 2 AND (channels.node1_public_key = ? OR channels.node2_public_key = ?)
|
||||
`;
|
||||
[rows] = await DB.query(query, [public_key, public_key]);
|
||||
node.opened_channel_count = 0;
|
||||
if (rows.length > 0) {
|
||||
node.opened_channel_count = rows[0].opened_channel_count;
|
||||
}
|
||||
|
||||
// Closed channels count
|
||||
query = `
|
||||
SELECT count(short_id) as closed_channel_count
|
||||
FROM channels
|
||||
WHERE status = 2 AND (channels.node1_public_key = ? OR channels.node2_public_key = ?)
|
||||
`;
|
||||
[rows] = await DB.query(query, [public_key, public_key]);
|
||||
node.closed_channel_count = 0;
|
||||
if (rows.length > 0) {
|
||||
node.closed_channel_count = rows[0].closed_channel_count;
|
||||
}
|
||||
|
||||
return node;
|
||||
} catch (e) {
|
||||
logger.err(`Cannot get node information for ${public_key}. Reason: ${(e instanceof Error ? e.message : e)}`);
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
public async $getActiveChannelsStats(node_public_key: string): Promise<unknown> {
|
||||
const query = `
|
||||
SELECT count(short_id) as active_channel_count, sum(capacity) as capacity
|
||||
FROM channels
|
||||
WHERE status = 1 AND (channels.node1_public_key = ? OR channels.node2_public_key = ?)
|
||||
`;
|
||||
const [rows]: any[] = await DB.query(query, [node_public_key, node_public_key]);
|
||||
if (rows.length > 0) {
|
||||
return {
|
||||
active_channel_count: rows[0].active_channel_count,
|
||||
capacity: rows[0].capacity
|
||||
};
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
public async $getAllNodes(): Promise<any> {
|
||||
try {
|
||||
const query = `SELECT * FROM nodes`;
|
||||
const [rows]: any = await DB.query(query);
|
||||
return rows;
|
||||
} catch (e) {
|
||||
logger.err('$getAllNodes error: ' + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
public async $getNodeStats(public_key: string): Promise<any> {
|
||||
try {
|
||||
const query = `
|
||||
SELECT UNIX_TIMESTAMP(added) AS added, capacity, channels
|
||||
FROM node_stats
|
||||
WHERE public_key = ?
|
||||
ORDER BY added DESC
|
||||
`;
|
||||
const [rows]: any = await DB.query(query, [public_key]);
|
||||
return rows;
|
||||
} catch (e) {
|
||||
logger.err('$getNodeStats error: ' + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
public async $getTopCapacityNodes(): Promise<any> {
|
||||
try {
|
||||
let [rows]: any[] = await DB.query('SELECT UNIX_TIMESTAMP(MAX(added)) as maxAdded FROM node_stats');
|
||||
const latestDate = rows[0].maxAdded;
|
||||
|
||||
const query = `
|
||||
SELECT nodes.public_key, IF(nodes.alias = '', SUBSTRING(nodes.public_key, 1, 20), alias) as alias, node_stats.capacity, node_stats.channels
|
||||
FROM node_stats
|
||||
JOIN nodes ON nodes.public_key = node_stats.public_key
|
||||
WHERE added = FROM_UNIXTIME(${latestDate})
|
||||
ORDER BY capacity DESC
|
||||
LIMIT 10;
|
||||
`;
|
||||
[rows] = await DB.query(query);
|
||||
|
||||
return rows;
|
||||
} catch (e) {
|
||||
logger.err('$getTopCapacityNodes error: ' + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
public async $getTopChannelsNodes(): Promise<any> {
|
||||
try {
|
||||
let [rows]: any[] = await DB.query('SELECT UNIX_TIMESTAMP(MAX(added)) as maxAdded FROM node_stats');
|
||||
const latestDate = rows[0].maxAdded;
|
||||
|
||||
const query = `
|
||||
SELECT nodes.public_key, IF(nodes.alias = '', SUBSTRING(nodes.public_key, 1, 20), alias) as alias, node_stats.capacity, node_stats.channels
|
||||
FROM node_stats
|
||||
JOIN nodes ON nodes.public_key = node_stats.public_key
|
||||
WHERE added = FROM_UNIXTIME(${latestDate})
|
||||
ORDER BY channels DESC
|
||||
LIMIT 10;
|
||||
`;
|
||||
[rows] = await DB.query(query);
|
||||
|
||||
return rows;
|
||||
} catch (e) {
|
||||
logger.err('$getTopChannelsNodes error: ' + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
public async $searchNodeByPublicKeyOrAlias(search: string) {
|
||||
try {
|
||||
const searchStripped = search.replace('%', '') + '%';
|
||||
const query = `SELECT nodes.public_key, nodes.alias, node_stats.capacity FROM nodes LEFT JOIN node_stats ON node_stats.public_key = nodes.public_key WHERE nodes.public_key LIKE ? OR nodes.alias LIKE ? GROUP BY nodes.public_key ORDER BY node_stats.capacity DESC LIMIT 10`;
|
||||
const [rows]: any = await DB.query(query, [searchStripped, searchStripped]);
|
||||
return rows;
|
||||
} catch (e) {
|
||||
logger.err('$searchNodeByPublicKeyOrAlias error: ' + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
public async $getNodesISP(groupBy: string, showTor: boolean) {
|
||||
try {
|
||||
const orderBy = groupBy === 'capacity' ? `CAST(SUM(capacity) as INT)` : `COUNT(DISTINCT nodes.public_key)`;
|
||||
|
||||
// Clearnet
|
||||
let query = `SELECT GROUP_CONCAT(DISTINCT(nodes.as_number)) as ispId, geo_names.names as names,
|
||||
COUNT(DISTINCT nodes.public_key) as nodesCount, CAST(SUM(capacity) as INT) as capacity
|
||||
FROM nodes
|
||||
JOIN geo_names ON geo_names.id = nodes.as_number
|
||||
JOIN channels ON channels.node1_public_key = nodes.public_key OR channels.node2_public_key = nodes.public_key
|
||||
GROUP BY geo_names.names
|
||||
ORDER BY ${orderBy} DESC
|
||||
`;
|
||||
const [nodesCountPerAS]: any = await DB.query(query);
|
||||
|
||||
let total = 0;
|
||||
const nodesPerAs: any[] = [];
|
||||
|
||||
for (const asGroup of nodesCountPerAS) {
|
||||
if (groupBy === 'capacity') {
|
||||
total += asGroup.capacity;
|
||||
} else {
|
||||
total += asGroup.nodesCount;
|
||||
}
|
||||
}
|
||||
|
||||
// Tor
|
||||
if (showTor) {
|
||||
query = `SELECT COUNT(DISTINCT nodes.public_key) as nodesCount, CAST(SUM(capacity) as INT) as capacity
|
||||
FROM nodes
|
||||
JOIN channels ON channels.node1_public_key = nodes.public_key OR channels.node2_public_key = nodes.public_key
|
||||
ORDER BY ${orderBy} DESC
|
||||
`;
|
||||
const [nodesCountTor]: any = await DB.query(query);
|
||||
|
||||
total += groupBy === 'capacity' ? nodesCountTor[0].capacity : nodesCountTor[0].nodesCount;
|
||||
nodesPerAs.push({
|
||||
ispId: null,
|
||||
name: 'Tor',
|
||||
count: nodesCountTor[0].nodesCount,
|
||||
share: Math.floor((groupBy === 'capacity' ? nodesCountTor[0].capacity : nodesCountTor[0].nodesCount) / total * 10000) / 100,
|
||||
capacity: nodesCountTor[0].capacity,
|
||||
});
|
||||
}
|
||||
|
||||
for (const as of nodesCountPerAS) {
|
||||
nodesPerAs.push({
|
||||
ispId: as.ispId,
|
||||
name: JSON.parse(as.names),
|
||||
count: as.nodesCount,
|
||||
share: Math.floor((groupBy === 'capacity' ? as.capacity : as.nodesCount) / total * 10000) / 100,
|
||||
capacity: as.capacity,
|
||||
});
|
||||
}
|
||||
|
||||
return nodesPerAs;
|
||||
} catch (e) {
|
||||
logger.err(`Cannot get nodes grouped by AS. Reason: ${e instanceof Error ? e.message : e}`);
|
||||
throw e;
|
||||
}
|
||||
}
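As a quick check of the share arithmetic above: Math.floor(x / total * 10000) / 100 keeps two decimal places, so 37 Tor nodes out of a total of 842 gives Math.floor(37 / 842 * 10000) / 100 = 4.39 (percent), and the per-AS entries are computed the same way against the same running total.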
|
||||
|
||||
public async $getNodesPerCountry(countryId: string) {
|
||||
try {
|
||||
const query = `
|
||||
SELECT nodes.public_key, CAST(COALESCE(node_stats.capacity, 0) as INT) as capacity, CAST(COALESCE(node_stats.channels, 0) as INT) as channels,
|
||||
nodes.alias, UNIX_TIMESTAMP(nodes.first_seen) as first_seen, UNIX_TIMESTAMP(nodes.updated_at) as updated_at,
|
||||
geo_names_city.names as city
|
||||
FROM node_stats
|
||||
JOIN (
|
||||
SELECT public_key, MAX(added) as last_added
|
||||
FROM node_stats
|
||||
GROUP BY public_key
|
||||
) as b ON b.public_key = node_stats.public_key AND b.last_added = node_stats.added
|
||||
RIGHT JOIN nodes ON nodes.public_key = node_stats.public_key
|
||||
JOIN geo_names geo_names_country ON geo_names_country.id = nodes.country_id AND geo_names_country.type = 'country'
|
||||
LEFT JOIN geo_names geo_names_city ON geo_names_city.id = nodes.city_id AND geo_names_city.type = 'city'
|
||||
WHERE geo_names_country.id = ?
|
||||
ORDER BY capacity DESC
|
||||
`;
|
||||
|
||||
const [rows]: any = await DB.query(query, [countryId]);
|
||||
for (let i = 0; i < rows.length; ++i) {
|
||||
rows[i].city = JSON.parse(rows[i].city);
|
||||
}
|
||||
return rows;
|
||||
} catch (e) {
|
||||
logger.err(`Cannot get nodes for country id ${countryId}. Reason: ${e instanceof Error ? e.message : e}`);
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
public async $getNodesPerISP(ISPId: string) {
|
||||
try {
|
||||
const query = `
|
||||
SELECT nodes.public_key, CAST(COALESCE(node_stats.capacity, 0) as INT) as capacity, CAST(COALESCE(node_stats.channels, 0) as INT) as channels,
|
||||
nodes.alias, UNIX_TIMESTAMP(nodes.first_seen) as first_seen, UNIX_TIMESTAMP(nodes.updated_at) as updated_at,
|
||||
geo_names_city.names as city, geo_names_country.names as country
|
||||
FROM node_stats
|
||||
JOIN (
|
||||
SELECT public_key, MAX(added) as last_added
|
||||
FROM node_stats
|
||||
GROUP BY public_key
|
||||
) as b ON b.public_key = node_stats.public_key AND b.last_added = node_stats.added
|
||||
RIGHT JOIN nodes ON nodes.public_key = node_stats.public_key
|
||||
JOIN geo_names geo_names_country ON geo_names_country.id = nodes.country_id AND geo_names_country.type = 'country'
|
||||
LEFT JOIN geo_names geo_names_city ON geo_names_city.id = nodes.city_id AND geo_names_city.type = 'city'
|
||||
WHERE nodes.as_number IN (?)
|
||||
ORDER BY capacity DESC
|
||||
`;
|
||||
|
||||
const [rows]: any = await DB.query(query, [ISPId.split(',')]);
|
||||
for (let i = 0; i < rows.length; ++i) {
|
||||
rows[i].country = JSON.parse(rows[i].country);
|
||||
rows[i].city = JSON.parse(rows[i].city);
|
||||
}
|
||||
return rows;
|
||||
} catch (e) {
|
||||
logger.err(`Cannot get nodes for ISP id ${ISPId}. Reason: ${e instanceof Error ? e.message : e}`);
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
public async $getNodesCountries() {
|
||||
try {
|
||||
let query = `SELECT geo_names.names as names, geo_names_iso.names as iso_code, COUNT(DISTINCT nodes.public_key) as nodesCount, SUM(capacity) as capacity
|
||||
FROM nodes
|
||||
JOIN geo_names ON geo_names.id = nodes.country_id AND geo_names.type = 'country'
|
||||
JOIN geo_names geo_names_iso ON geo_names_iso.id = nodes.country_id AND geo_names_iso.type = 'country_iso_code'
|
||||
JOIN channels ON channels.node1_public_key = nodes.public_key OR channels.node2_public_key = nodes.public_key
|
||||
GROUP BY country_id
|
||||
ORDER BY COUNT(DISTINCT nodes.public_key) DESC
|
||||
`;
|
||||
const [nodesCountPerCountry]: any = await DB.query(query);
|
||||
|
||||
query = `SELECT COUNT(*) as total FROM nodes WHERE country_id IS NOT NULL`;
|
||||
const [nodesWithAS]: any = await DB.query(query);
|
||||
|
||||
const nodesPerCountry: any[] = [];
|
||||
for (const country of nodesCountPerCountry) {
|
||||
nodesPerCountry.push({
|
||||
name: JSON.parse(country.names),
|
||||
iso: country.iso_code,
|
||||
count: country.nodesCount,
|
||||
share: Math.floor(country.nodesCount / nodesWithAS[0].total * 10000) / 100,
|
||||
capacity: country.capacity,
|
||||
})
|
||||
}
|
||||
|
||||
return nodesPerCountry;
|
||||
} catch (e) {
|
||||
logger.err(`Cannot get nodes grouped by country. Reason: ${e instanceof Error ? e.message : e}`);
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export default new NodesApi();
|
161
backend/src/api/explorer/nodes.routes.ts
Normal file
@ -0,0 +1,161 @@
|
||||
import config from '../../config';
|
||||
import { Application, Request, Response } from 'express';
|
||||
import nodesApi from './nodes.api';
|
||||
import DB from '../../database';
|
||||
|
||||
class NodesRoutes {
|
||||
constructor() { }
|
||||
|
||||
public initRoutes(app: Application) {
|
||||
app
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/country/:country', this.$getNodesPerCountry)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/search/:search', this.$searchNode)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/top', this.$getTopNodes)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/isp-ranking', this.$getISPRanking)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/isp/:isp', this.$getNodesPerISP)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/countries', this.$getNodesCountries)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/:public_key/statistics', this.$getHistoricalNodeStats)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/:public_key', this.$getNode)
|
||||
;
|
||||
}
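// Example requests (hypothetical paths, assuming API_URL_PREFIX resolves to '/api/v1/'):
//   GET /api/v1/lightning/nodes/isp-ranking?groupBy=capacity&showTor=true
//   GET /api/v1/lightning/nodes/country/US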
|
||||
|
||||
private async $searchNode(req: Request, res: Response) {
|
||||
try {
|
||||
const nodes = await nodesApi.$searchNodeByPublicKeyOrAlias(req.params.search);
|
||||
res.json(nodes);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async $getNode(req: Request, res: Response) {
|
||||
try {
|
||||
const node = await nodesApi.$getNode(req.params.public_key);
|
||||
if (!node) {
|
||||
res.status(404).send('Node not found');
|
||||
return;
|
||||
}
|
||||
res.header('Pragma', 'public');
|
||||
res.header('Cache-control', 'public');
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
|
||||
res.json(node);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async $getHistoricalNodeStats(req: Request, res: Response) {
|
||||
try {
|
||||
const statistics = await nodesApi.$getNodeStats(req.params.public_key);
|
||||
res.header('Pragma', 'public');
|
||||
res.header('Cache-control', 'public');
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
|
||||
res.json(statistics);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async $getTopNodes(req: Request, res: Response) {
|
||||
try {
|
||||
const topCapacityNodes = await nodesApi.$getTopCapacityNodes();
|
||||
const topChannelsNodes = await nodesApi.$getTopChannelsNodes();
|
||||
res.json({
|
||||
topByCapacity: topCapacityNodes,
|
||||
topByChannels: topChannelsNodes,
|
||||
});
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async $getISPRanking(req: Request, res: Response): Promise<void> {
|
||||
try {
|
||||
const groupBy = req.query.groupBy as string;
|
||||
const showTor = req.query.showTor as string === 'true' ? true : false;
|
||||
|
||||
if (!['capacity', 'node-count'].includes(groupBy)) {
|
||||
res.status(400).send(`groupBy must be one of 'capacity' or 'node-count'`);
|
||||
return;
|
||||
}
|
||||
|
||||
const nodesPerAs = await nodesApi.$getNodesISP(groupBy, showTor);
|
||||
|
||||
res.header('Pragma', 'public');
|
||||
res.header('Cache-control', 'public');
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 600).toUTCString());
|
||||
res.json(nodesPerAs);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async $getNodesPerCountry(req: Request, res: Response) {
|
||||
try {
|
||||
const [country]: any[] = await DB.query(
|
||||
`SELECT geo_names.id, geo_names_country.names as country_names
|
||||
FROM geo_names
|
||||
JOIN geo_names geo_names_country on geo_names.id = geo_names_country.id AND geo_names_country.type = 'country'
|
||||
WHERE geo_names.type = 'country_iso_code' AND geo_names.names = ?`,
|
||||
[req.params.country]
|
||||
);
|
||||
|
||||
if (country.length === 0) {
|
||||
res.status(404).send(`This country does not exist or does not host any lightning nodes on clearnet`);
|
||||
return;
|
||||
}
|
||||
|
||||
const nodes = await nodesApi.$getNodesPerCountry(country[0].id);
|
||||
res.header('Pragma', 'public');
|
||||
res.header('Cache-control', 'public');
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
|
||||
res.json({
|
||||
country: JSON.parse(country[0].country_names),
|
||||
nodes: nodes,
|
||||
});
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async $getNodesPerISP(req: Request, res: Response) {
|
||||
try {
|
||||
const [isp]: any[] = await DB.query(
|
||||
`SELECT geo_names.names as isp_name
|
||||
FROM geo_names
|
||||
WHERE geo_names.type = 'as_organization' AND geo_names.id = ?`,
|
||||
[req.params.isp]
|
||||
);
|
||||
|
||||
if (isp.length === 0) {
|
||||
res.status(404).send(`This ISP does not exist or does not host any lightning nodes on clearnet`);
|
||||
return;
|
||||
}
|
||||
|
||||
const nodes = await nodesApi.$getNodesPerISP(req.params.isp);
|
||||
res.header('Pragma', 'public');
|
||||
res.header('Cache-control', 'public');
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
|
||||
res.json({
|
||||
isp: JSON.parse(isp[0].isp_name),
|
||||
nodes: nodes,
|
||||
});
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async $getNodesCountries(req: Request, res: Response) {
|
||||
try {
|
||||
const nodesPerAs = await nodesApi.$getNodesCountries();
|
||||
res.header('Pragma', 'public');
|
||||
res.header('Cache-control', 'public');
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 600).toUTCString());
|
||||
res.json(nodesPerAs);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export default new NodesRoutes();
|
52
backend/src/api/explorer/statistics.api.ts
Normal file
@ -0,0 +1,52 @@
|
||||
import logger from '../../logger';
|
||||
import DB from '../../database';
|
||||
import { Common } from '../common';
|
||||
|
||||
class StatisticsApi {
|
||||
public async $getStatistics(interval: string | null = null): Promise<any> {
|
||||
interval = Common.getSqlInterval(interval);
|
||||
|
||||
let query = `SELECT UNIX_TIMESTAMP(added) AS added, channel_count, total_capacity, tor_nodes, clearnet_nodes, unannounced_nodes
|
||||
FROM lightning_stats`;
|
||||
|
||||
if (interval) {
|
||||
query += ` WHERE added BETWEEN DATE_SUB(NOW(), INTERVAL ${interval}) AND NOW()`;
|
||||
}
|
||||
|
||||
query += ` ORDER BY added DESC`;
|
||||
|
||||
try {
|
||||
const [rows]: any = await DB.query(query);
|
||||
return rows;
|
||||
} catch (e) {
|
||||
logger.err('$getStatistics error: ' + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
public async $getLatestStatistics(): Promise<any> {
|
||||
try {
|
||||
const [rows]: any = await DB.query(`SELECT * FROM lightning_stats ORDER BY added DESC LIMIT 1`);
|
||||
const [rows2]: any = await DB.query(`SELECT * FROM lightning_stats ORDER BY added DESC LIMIT 1 OFFSET 7`);
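// Assuming one stats snapshot per day, OFFSET 7 fetches the row from roughly one week earlier for the latest/previous comparison.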
|
||||
return {
|
||||
latest: rows[0],
|
||||
previous: rows2[0],
|
||||
};
|
||||
} catch (e) {
|
||||
logger.err('$getLatestStatistics error: ' + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
public async $getStatisticsCount(): Promise<number> {
|
||||
try {
|
||||
const [rows]: any = await DB.query(`SELECT count(*) as count FROM lightning_stats`);
|
||||
return rows[0].count;
|
||||
} catch (e) {
|
||||
logger.err('$getStatisticsCount error: ' + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export default new StatisticsApi();
|
272
backend/src/api/lightning/clightning/clightning-client.ts
Normal file
@ -0,0 +1,272 @@
|
||||
// Imported from https://github.com/shesek/lightning-client-js
|
||||
|
||||
'use strict';
|
||||
|
||||
const methods = [
|
||||
'addgossip',
|
||||
'autocleaninvoice',
|
||||
'check',
|
||||
'checkmessage',
|
||||
'close',
|
||||
'connect',
|
||||
'createinvoice',
|
||||
'createinvoicerequest',
|
||||
'createoffer',
|
||||
'createonion',
|
||||
'decode',
|
||||
'decodepay',
|
||||
'delexpiredinvoice',
|
||||
'delinvoice',
|
||||
'delpay',
|
||||
'dev-listaddrs',
|
||||
'dev-rescan-outputs',
|
||||
'disableoffer',
|
||||
'disconnect',
|
||||
'estimatefees',
|
||||
'feerates',
|
||||
'fetchinvoice',
|
||||
'fundchannel',
|
||||
'fundchannel_cancel',
|
||||
'fundchannel_complete',
|
||||
'fundchannel_start',
|
||||
'fundpsbt',
|
||||
'getchaininfo',
|
||||
'getinfo',
|
||||
'getlog',
|
||||
'getrawblockbyheight',
|
||||
'getroute',
|
||||
'getsharedsecret',
|
||||
'getutxout',
|
||||
'help',
|
||||
'invoice',
|
||||
'keysend',
|
||||
'legacypay',
|
||||
'listchannels',
|
||||
'listconfigs',
|
||||
'listforwards',
|
||||
'listfunds',
|
||||
'listinvoices',
|
||||
'listnodes',
|
||||
'listoffers',
|
||||
'listpays',
|
||||
'listpeers',
|
||||
'listsendpays',
|
||||
'listtransactions',
|
||||
'multifundchannel',
|
||||
'multiwithdraw',
|
||||
'newaddr',
|
||||
'notifications',
|
||||
'offer',
|
||||
'offerout',
|
||||
'openchannel_abort',
|
||||
'openchannel_bump',
|
||||
'openchannel_init',
|
||||
'openchannel_signed',
|
||||
'openchannel_update',
|
||||
'pay',
|
||||
'payersign',
|
||||
'paystatus',
|
||||
'ping',
|
||||
'plugin',
|
||||
'reserveinputs',
|
||||
'sendinvoice',
|
||||
'sendonion',
|
||||
'sendonionmessage',
|
||||
'sendpay',
|
||||
'sendpsbt',
|
||||
'sendrawtransaction',
|
||||
'setchannelfee',
|
||||
'signmessage',
|
||||
'signpsbt',
|
||||
'stop',
|
||||
'txdiscard',
|
||||
'txprepare',
|
||||
'txsend',
|
||||
'unreserveinputs',
|
||||
'utxopsbt',
|
||||
'waitanyinvoice',
|
||||
'waitblockheight',
|
||||
'waitinvoice',
|
||||
'waitsendpay',
|
||||
'withdraw'
|
||||
];
|
||||
|
||||
|
||||
import EventEmitter from 'events';
|
||||
import { existsSync, statSync } from 'fs';
|
||||
import { createConnection, Socket } from 'net';
|
||||
import { homedir } from 'os';
|
||||
import path from 'path';
|
||||
import { createInterface, Interface } from 'readline';
|
||||
import logger from '../../../logger';
|
||||
import { AbstractLightningApi } from '../lightning-api-abstract-factory';
|
||||
import { ILightningApi } from '../lightning-api.interface';
|
||||
import { convertAndmergeBidirectionalChannels, convertNode } from './clightning-convert';
|
||||
|
||||
class LightningError extends Error {
|
||||
type: string = 'lightning';
|
||||
message: string = 'lightning-client error';
|
||||
|
||||
constructor(error) {
|
||||
super();
|
||||
this.type = error.type;
|
||||
this.message = error.message;
|
||||
}
|
||||
}
|
||||
|
||||
const defaultRpcPath = path.join(homedir(), '.lightning')
|
||||
, fStat = (...p) => statSync(path.join(...p))
|
||||
, fExists = (...p) => existsSync(path.join(...p))
|
||||
|
||||
export default class CLightningClient extends EventEmitter implements AbstractLightningApi {
|
||||
private rpcPath: string;
|
||||
private reconnectWait: number;
|
||||
private reconnectTimeout;
|
||||
private reqcount: number;
|
||||
private client: Socket;
|
||||
private rl: Interface;
|
||||
private clientConnectionPromise: Promise<unknown>;
|
||||
|
||||
constructor(rpcPath = defaultRpcPath) {
|
||||
if (!path.isAbsolute(rpcPath)) {
|
||||
throw new Error('The rpcPath must be an absolute path');
|
||||
}
|
||||
|
||||
if (!fExists(rpcPath) || !fStat(rpcPath).isSocket()) {
|
||||
// network directory provided, use the lightning-rpc within it
|
||||
if (fExists(rpcPath, 'lightning-rpc')) {
|
||||
rpcPath = path.join(rpcPath, 'lightning-rpc');
|
||||
}
|
||||
|
||||
// main data directory provided, default to using the bitcoin mainnet subdirectory
|
||||
// to be removed in v0.2.0
|
||||
else if (fExists(rpcPath, 'bitcoin', 'lightning-rpc')) {
|
||||
logger.warn(`[CLightningClient] ${rpcPath}/lightning-rpc is missing, using the bitcoin mainnet subdirectory at ${rpcPath}/bitcoin instead.`)
|
||||
logger.warn(`[CLightningClient] specifying the main lightning data directory is deprecated, please specify the network directory explicitly.\n`)
|
||||
rpcPath = path.join(rpcPath, 'bitcoin', 'lightning-rpc')
|
||||
}
|
||||
}
|
||||
|
||||
logger.debug(`[CLightningClient] Connecting to ${rpcPath}`);
|
||||
|
||||
super();
|
||||
this.rpcPath = rpcPath;
|
||||
this.reconnectWait = 0.5;
|
||||
this.reconnectTimeout = null;
|
||||
this.reqcount = 0;
|
||||
|
||||
const _self = this;
|
||||
|
||||
this.client = createConnection(rpcPath).on(
|
||||
'error', () => {
|
||||
_self.increaseWaitTime();
|
||||
_self.reconnect();
|
||||
}
|
||||
);
|
||||
this.rl = createInterface({ input: this.client }).on(
|
||||
'error', () => {
|
||||
_self.increaseWaitTime();
|
||||
_self.reconnect();
|
||||
}
|
||||
);
|
||||
|
||||
this.clientConnectionPromise = new Promise<void>(resolve => {
|
||||
_self.client.on('connect', () => {
|
||||
logger.info(`[CLightningClient] Lightning client connected`);
|
||||
_self.reconnectWait = 1;
|
||||
resolve();
|
||||
});
|
||||
|
||||
_self.client.on('end', () => {
|
||||
logger.err('[CLightningClient] Lightning client connection closed, reconnecting');
|
||||
_self.increaseWaitTime();
|
||||
_self.reconnect();
|
||||
});
|
||||
|
||||
_self.client.on('error', error => {
|
||||
logger.err(`[CLightningClient] Lightning client connection error: ${error}`);
|
||||
_self.increaseWaitTime();
|
||||
_self.reconnect();
|
||||
});
|
||||
});
|
||||
|
||||
this.rl.on('line', line => {
|
||||
line = line.trim();
|
||||
if (!line) {
|
||||
return;
|
||||
}
|
||||
const data = JSON.parse(line);
|
||||
// logger.debug(`[CLightningClient] #${data.id} <-- ${JSON.stringify(data.error || data.result)}`);
|
||||
_self.emit('res:' + data.id, data);
|
||||
});
|
||||
}
|
||||
|
||||
increaseWaitTime(): void {
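// Exponential backoff for reconnect attempts: the wait doubles on every failure and is capped at 16 seconds.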
|
||||
if (this.reconnectWait >= 16) {
|
||||
this.reconnectWait = 16;
|
||||
} else {
|
||||
this.reconnectWait *= 2;
|
||||
}
|
||||
}
|
||||
|
||||
reconnect(): void {
|
||||
const _self = this;
|
||||
|
||||
if (this.reconnectTimeout) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.reconnectTimeout = setTimeout(() => {
|
||||
logger.debug('[CLightningClient] Trying to reconnect...');
|
||||
|
||||
_self.client.connect(_self.rpcPath);
|
||||
_self.reconnectTimeout = null;
|
||||
}, this.reconnectWait * 1000);
|
||||
}
|
||||
|
||||
call(method, args = []): Promise<any> {
|
||||
const _self = this;
|
||||
|
||||
const callInt = ++this.reqcount;
|
||||
const sendObj = {
|
||||
jsonrpc: '2.0',
|
||||
method,
|
||||
params: args,
|
||||
id: '' + callInt
|
||||
};
|
||||
|
||||
// logger.debug(`[CLightningClient] #${callInt} --> ${method} ${args}`);
|
||||
|
||||
// Wait for the client to connect
|
||||
return this.clientConnectionPromise
|
||||
.then(() => new Promise((resolve, reject) => {
|
||||
// Wait for a response
|
||||
this.once('res:' + callInt, res => res.error == null
|
||||
? resolve(res.result)
|
||||
: reject(new LightningError(res.error))
|
||||
);
|
||||
|
||||
// Send the command
|
||||
_self.client.write(JSON.stringify(sendObj));
|
||||
}));
|
||||
}
|
||||
|
||||
async $getNetworkGraph(): Promise<ILightningApi.NetworkGraph> {
|
||||
const listnodes: any[] = await this.call('listnodes');
|
||||
const listchannels: any[] = await this.call('listchannels');
|
||||
const channelsList = await convertAndmergeBidirectionalChannels(listchannels['channels']);
|
||||
|
||||
return {
|
||||
nodes: listnodes['nodes'].map(node => convertNode(node)),
|
||||
edges: channelsList,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
const protify = s => s.replace(/-([a-z])/g, m => m[1].toUpperCase());
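// protify turns kebab-case RPC names into camelCase (e.g. 'dev-listaddrs' -> 'devListaddrs');
// the loop below exposes every method listed above as an instance method that proxies to call().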
|
||||
|
||||
methods.forEach(k => {
|
||||
CLightningClient.prototype[protify(k)] = function (...args: any) {
|
||||
return this.call(k, args);
|
||||
};
|
||||
});
|
138
backend/src/api/lightning/clightning/clightning-convert.ts
Normal file
@ -0,0 +1,138 @@
|
||||
import { ILightningApi } from '../lightning-api.interface';
|
||||
import FundingTxFetcher from '../../../tasks/lightning/sync-tasks/funding-tx-fetcher';
|
||||
import logger from '../../../logger';
|
||||
|
||||
/**
|
||||
* Convert a clightning "listnodes" entry to a lnd node entry
|
||||
*/
|
||||
export function convertNode(clNode: any): ILightningApi.Node {
|
||||
return {
|
||||
alias: clNode.alias ?? '',
|
||||
color: `#${clNode.color ?? ''}`,
|
||||
features: [], // TODO parse and return clNode.feature
|
||||
pub_key: clNode.nodeid,
|
||||
addresses: clNode.addresses?.map((addr) => {
|
||||
return {
|
||||
network: addr.type,
|
||||
addr: `${addr.address}:${addr.port}`
|
||||
};
|
||||
}),
|
||||
last_update: clNode?.last_timestamp ?? 0,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert clightning "listchannels" response to lnd "describegraph.edges" format
|
||||
*/
|
||||
export async function convertAndmergeBidirectionalChannels(clChannels: any[]): Promise<ILightningApi.Channel[]> {
|
||||
logger.info('Converting clightning nodes and channels to lnd graph format');
|
||||
|
||||
let loggerTimer = new Date().getTime() / 1000;
|
||||
let channelProcessed = 0;
|
||||
|
||||
const consolidatedChannelList: ILightningApi.Channel[] = [];
|
||||
const clChannelsDict = {};
|
||||
const clChannelsDictCount = {};
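// clightning reports each channel once per direction; index the first direction by short_channel_id
// and, when the reverse direction shows up, merge both halves into a single lnd-style edge.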
|
||||
|
||||
for (const clChannel of clChannels) {
|
||||
if (!clChannelsDict[clChannel.short_channel_id]) {
|
||||
clChannelsDict[clChannel.short_channel_id] = clChannel;
|
||||
clChannelsDictCount[clChannel.short_channel_id] = 1;
|
||||
} else {
|
||||
consolidatedChannelList.push(
|
||||
await buildFullChannel(clChannel, clChannelsDict[clChannel.short_channel_id])
|
||||
);
|
||||
delete clChannelsDict[clChannel.short_channel_id];
|
||||
clChannelsDictCount[clChannel.short_channel_id]++;
|
||||
}
|
||||
|
||||
const elapsedSeconds = Math.round((new Date().getTime() / 1000) - loggerTimer);
|
||||
if (elapsedSeconds > 10) {
|
||||
logger.info(`Building complete channels from clightning output. Channels processed: ${channelProcessed + 1} of ${clChannels.length}`);
|
||||
loggerTimer = new Date().getTime() / 1000;
|
||||
}
|
||||
|
||||
++channelProcessed;
|
||||
}
|
||||
|
||||
channelProcessed = 0;
|
||||
const keys = Object.keys(clChannelsDict);
|
||||
for (const short_channel_id of keys) {
|
||||
consolidatedChannelList.push(await buildIncompleteChannel(clChannelsDict[short_channel_id]));
|
||||
|
||||
const elapsedSeconds = Math.round((new Date().getTime() / 1000) - loggerTimer);
|
||||
if (elapsedSeconds > 10) {
|
||||
logger.info(`Building partial channels from clightning output. Channels processed: ${channelProcessed + 1} of ${keys.length}`);
|
||||
loggerTimer = new Date().getTime() / 1000;
|
||||
}
|
||||
}
|
||||
|
||||
return consolidatedChannelList;
|
||||
}
|
||||
|
||||
export function convertChannelId(channelId): string {
|
||||
if (channelId.indexOf('/') !== -1) {
|
||||
channelId = channelId.slice(0, -2);
|
||||
}
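// short_channel_id has the form '<blockHeight>x<txIndex>x<outputIndex>'; pack it into the
// numeric channel id layout used by lnd: (blockHeight << 40) | (txIndex << 16) | outputIndex.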
|
||||
const s = channelId.split('x').map(part => BigInt(part));
|
||||
return ((s[0] << 40n) | (s[1] << 16n) | s[2]).toString();
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert two clightning "listchannels" entries into a full lnd "describegraph.edges" format
|
||||
* In this case, clightning knows the channel policy for both nodes
|
||||
*/
|
||||
async function buildFullChannel(clChannelA: any, clChannelB: any): Promise<ILightningApi.Channel> {
|
||||
const lastUpdate = Math.max(clChannelA.last_update ?? 0, clChannelB.last_update ?? 0);
|
||||
|
||||
const tx = await FundingTxFetcher.$fetchChannelOpenTx(clChannelA.short_channel_id);
|
||||
const parts = clChannelA.short_channel_id.split('x');
|
||||
const outputIdx = parts[2];
|
||||
|
||||
return {
|
||||
channel_id: clChannelA.short_channel_id,
|
||||
capacity: clChannelA.satoshis,
|
||||
last_update: lastUpdate,
|
||||
node1_policy: convertPolicy(clChannelA),
|
||||
node2_policy: convertPolicy(clChannelB),
|
||||
chan_point: `${tx.txid}:${outputIdx}`,
|
||||
node1_pub: clChannelA.source,
|
||||
node2_pub: clChannelB.source,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert one clightning "listchannels" entry into a full lnd "describegraph.edges" format
|
||||
* In this case, clightning knows the channel policy of only one node
|
||||
*/
|
||||
async function buildIncompleteChannel(clChannel: any): Promise<ILightningApi.Channel> {
|
||||
const tx = await FundingTxFetcher.$fetchChannelOpenTx(clChannel.short_channel_id);
|
||||
const parts = clChannel.short_channel_id.split('x');
|
||||
const outputIdx = parts[2];
|
||||
|
||||
return {
|
||||
channel_id: clChannel.short_channel_id,
|
||||
capacity: clChannel.satoshis,
|
||||
last_update: clChannel.last_update ?? 0,
|
||||
node1_policy: convertPolicy(clChannel),
|
||||
node2_policy: null,
|
||||
chan_point: `${tx.txid}:${outputIdx}`,
|
||||
node1_pub: clChannel.source,
|
||||
node2_pub: clChannel.destination,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert a clightning "listchannels" entry to a lnd channel policy format
|
||||
*/
|
||||
function convertPolicy(clChannel: any): ILightningApi.RoutingPolicy {
|
||||
return {
|
||||
time_lock_delta: 0, // TODO
|
||||
min_htlc: clChannel.htlc_minimum_msat.slice(0, -4),
|
||||
max_htlc_msat: clChannel.htlc_maximum_msat.slice(0, -4),
|
||||
fee_base_msat: clChannel.base_fee_millisatoshi,
|
||||
fee_rate_milli_msat: clChannel.fee_per_millionth,
|
||||
disabled: !clChannel.active,
|
||||
last_update: clChannel.last_update ?? 0,
|
||||
};
|
||||
}
|
@ -0,0 +1,5 @@
import { ILightningApi } from './lightning-api.interface';

export interface AbstractLightningApi {
$getNetworkGraph(): Promise<ILightningApi.NetworkGraph>;
}
16
backend/src/api/lightning/lightning-api-factory.ts
Normal file
@ -0,0 +1,16 @@
|
||||
import config from '../../config';
|
||||
import CLightningClient from './clightning/clightning-client';
|
||||
import { AbstractLightningApi } from './lightning-api-abstract-factory';
|
||||
import LndApi from './lnd/lnd-api';
|
||||
|
||||
function lightningApiFactory(): AbstractLightningApi {
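// When LIGHTNING.ENABLED is false the switch expression evaluates to the boolean false,
// so no case matches and the LndApi default is returned.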
|
||||
switch (config.LIGHTNING.ENABLED === true && config.LIGHTNING.BACKEND) {
|
||||
case 'cln':
|
||||
return new CLightningClient(config.CLIGHTNING.SOCKET);
|
||||
case 'lnd':
|
||||
default:
|
||||
return new LndApi();
|
||||
}
|
||||
}
|
||||
|
||||
export default lightningApiFactory();
|
85
backend/src/api/lightning/lightning-api.interface.ts
Normal file
@ -0,0 +1,85 @@
|
||||
export namespace ILightningApi {
|
||||
export interface NetworkInfo {
|
||||
graph_diameter: number;
|
||||
avg_out_degree: number;
|
||||
max_out_degree: number;
|
||||
num_nodes: number;
|
||||
num_channels: number;
|
||||
total_network_capacity: string;
|
||||
avg_channel_size: number;
|
||||
min_channel_size: string;
|
||||
max_channel_size: string;
|
||||
median_channel_size_sat: string;
|
||||
num_zombie_chans: string;
|
||||
}
|
||||
|
||||
export interface NetworkGraph {
|
||||
nodes: Node[];
|
||||
edges: Channel[];
|
||||
}
|
||||
|
||||
export interface Channel {
|
||||
channel_id: string;
|
||||
chan_point: string;
|
||||
last_update: number;
|
||||
node1_pub: string;
|
||||
node2_pub: string;
|
||||
capacity: string;
|
||||
node1_policy: RoutingPolicy | null;
|
||||
node2_policy: RoutingPolicy | null;
|
||||
}
|
||||
|
||||
export interface RoutingPolicy {
|
||||
time_lock_delta: number;
|
||||
min_htlc: string;
|
||||
fee_base_msat: string;
|
||||
fee_rate_milli_msat: string;
|
||||
disabled: boolean;
|
||||
max_htlc_msat: string;
|
||||
last_update: number;
|
||||
}
|
||||
|
||||
export interface Node {
|
||||
last_update: number;
|
||||
pub_key: string;
|
||||
alias: string;
|
||||
addresses: {
|
||||
network: string;
|
||||
addr: string;
|
||||
}[];
|
||||
color: string;
|
||||
features: { [key: number]: Feature };
|
||||
}
|
||||
|
||||
export interface Info {
|
||||
identity_pubkey: string;
|
||||
alias: string;
|
||||
num_pending_channels: number;
|
||||
num_active_channels: number;
|
||||
num_peers: number;
|
||||
block_height: number;
|
||||
block_hash: string;
|
||||
synced_to_chain: boolean;
|
||||
testnet: boolean;
|
||||
uris: string[];
|
||||
best_header_timestamp: string;
|
||||
version: string;
|
||||
num_inactive_channels: number;
|
||||
chains: {
|
||||
chain: string;
|
||||
network: string;
|
||||
}[];
|
||||
color: string;
|
||||
synced_to_graph: boolean;
|
||||
features: { [key: number]: Feature };
|
||||
commit_hash: string;
|
||||
/** Available on LND since v0.15.0-beta */
|
||||
require_htlc_interceptor?: boolean;
|
||||
}
|
||||
|
||||
export interface Feature {
|
||||
name: string;
|
||||
is_required: boolean;
|
||||
is_known: boolean;
|
||||
}
|
||||
}
|
41
backend/src/api/lightning/lnd/lnd-api.ts
Normal file
@ -0,0 +1,41 @@
|
||||
import axios, { AxiosRequestConfig } from 'axios';
|
||||
import { Agent } from 'https';
|
||||
import * as fs from 'fs';
|
||||
import { AbstractLightningApi } from '../lightning-api-abstract-factory';
|
||||
import { ILightningApi } from '../lightning-api.interface';
|
||||
import config from '../../../config';
|
||||
|
||||
class LndApi implements AbstractLightningApi {
|
||||
axiosConfig: AxiosRequestConfig = {};
|
||||
|
||||
constructor() {
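// Authenticate against the LND REST API with the hex-encoded macaroon header and trust
// its self-signed TLS certificate through a pinned https agent.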
|
||||
if (config.LIGHTNING.ENABLED) {
|
||||
this.axiosConfig = {
|
||||
headers: {
|
||||
'Grpc-Metadata-macaroon': fs.readFileSync(config.LND.MACAROON_PATH).toString('hex')
|
||||
},
|
||||
httpsAgent: new Agent({
|
||||
ca: fs.readFileSync(config.LND.TLS_CERT_PATH)
|
||||
}),
|
||||
timeout: 10000
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
async $getNetworkInfo(): Promise<ILightningApi.NetworkInfo> {
|
||||
return axios.get<ILightningApi.NetworkInfo>(config.LND.REST_API_URL + '/v1/graph/info', this.axiosConfig)
|
||||
.then((response) => response.data);
|
||||
}
|
||||
|
||||
async $getInfo(): Promise<ILightningApi.Info> {
|
||||
return axios.get<ILightningApi.Info>(config.LND.REST_API_URL + '/v1/getinfo', this.axiosConfig)
|
||||
.then((response) => response.data);
|
||||
}
|
||||
|
||||
async $getNetworkGraph(): Promise<ILightningApi.NetworkGraph> {
|
||||
return axios.get<ILightningApi.NetworkGraph>(config.LND.REST_API_URL + '/v1/graph', this.axiosConfig)
|
||||
.then((response) => response.data);
|
||||
}
|
||||
}
|
||||
|
||||
export default LndApi;
|
73
backend/src/api/liquid/liquid.routes.ts
Normal file
@ -0,0 +1,73 @@
|
||||
import axios from 'axios';
|
||||
import { Application, Request, Response } from 'express';
|
||||
import config from '../../config';
|
||||
import elementsParser from './elements-parser';
|
||||
import icons from './icons';
|
||||
|
||||
class LiquidRoutes {
|
||||
public initRoutes(app: Application) {
|
||||
app
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'assets/icons', this.getAllLiquidIcon)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'assets/featured', this.$getAllFeaturedLiquidAssets)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'asset/:assetId/icon', this.getLiquidIcon)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'assets/group/:id', this.$getAssetGroup)
|
||||
;
|
||||
|
||||
if (config.DATABASE.ENABLED) {
|
||||
app
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'liquid/pegs/month', this.$getElementsPegsByMonth)
|
||||
;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
private getLiquidIcon(req: Request, res: Response) {
|
||||
const result = icons.getIconByAssetId(req.params.assetId);
|
||||
if (result) {
|
||||
res.setHeader('content-type', 'image/png');
|
||||
res.setHeader('content-length', result.length);
|
||||
res.send(result);
|
||||
} else {
|
||||
res.status(404).send('Asset icon not found');
|
||||
}
|
||||
}
|
||||
|
||||
private getAllLiquidIcon(req: Request, res: Response) {
|
||||
const result = icons.getAllIconIds();
|
||||
if (result) {
|
||||
res.json(result);
|
||||
} else {
|
||||
res.status(404).send('Asset icons not found');
|
||||
}
|
||||
}
|
||||
|
||||
private async $getAllFeaturedLiquidAssets(req: Request, res: Response) {
|
||||
try {
|
||||
const response = await axios.get(`${config.EXTERNAL_DATA_SERVER.LIQUID_API}/assets/featured`, { responseType: 'stream', timeout: 10000 });
|
||||
response.data.pipe(res);
|
||||
} catch (e) {
|
||||
res.status(500).end();
|
||||
}
|
||||
}
|
||||
|
||||
private async $getAssetGroup(req: Request, res: Response) {
|
||||
try {
|
||||
const response = await axios.get(`${config.EXTERNAL_DATA_SERVER.LIQUID_API}/assets/group/${parseInt(req.params.id, 10)}`,
|
||||
{ responseType: 'stream', timeout: 10000 });
|
||||
response.data.pipe(res);
|
||||
} catch (e) {
|
||||
res.status(500).end();
|
||||
}
|
||||
}
|
||||
|
||||
private async $getElementsPegsByMonth(req: Request, res: Response) {
|
||||
try {
|
||||
const pegs = await elementsParser.$getPegDataByMonth();
|
||||
res.json(pegs);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export default new LiquidRoutes();
|
251
backend/src/api/mining/mining-routes.ts
Normal file
@ -0,0 +1,251 @@
|
||||
import { Application, Request, Response } from 'express';
|
||||
import config from "../../config";
|
||||
import logger from '../../logger';
|
||||
import BlocksAuditsRepository from '../../repositories/BlocksAuditsRepository';
|
||||
import BlocksRepository from '../../repositories/BlocksRepository';
|
||||
import DifficultyAdjustmentsRepository from '../../repositories/DifficultyAdjustmentsRepository';
|
||||
import HashratesRepository from '../../repositories/HashratesRepository';
|
||||
import bitcoinClient from '../bitcoin/bitcoin-client';
|
||||
import mining from "./mining";
|
||||
|
||||
class MiningRoutes {
|
||||
public initRoutes(app: Application) {
|
||||
app
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/pools/:interval', this.$getPools)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/pool/:slug/hashrate', this.$getPoolHistoricalHashrate)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/pool/:slug/blocks', this.$getPoolBlocks)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/pool/:slug/blocks/:height', this.$getPoolBlocks)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/pool/:slug', this.$getPool)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/hashrate/pools/:interval', this.$getPoolsHistoricalHashrate)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/hashrate/:interval', this.$getHistoricalHashrate)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/difficulty-adjustments', this.$getDifficultyAdjustments)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/reward-stats/:blockCount', this.$getRewardStats)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/fees/:interval', this.$getHistoricalBlockFees)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/rewards/:interval', this.$getHistoricalBlockRewards)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/fee-rates/:interval', this.$getHistoricalBlockFeeRates)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/sizes-weights/:interval', this.$getHistoricalBlockSizeAndWeight)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/difficulty-adjustments/:interval', this.$getDifficultyAdjustments)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/predictions/:interval', this.$getHistoricalBlockPrediction)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/audit/:hash', this.$getBlockAudit)
|
||||
;
|
||||
}
|
||||
|
||||
private async $getPool(req: Request, res: Response): Promise<void> {
|
||||
try {
|
||||
const stats = await mining.$getPoolStat(req.params.slug);
|
||||
res.header('Pragma', 'public');
|
||||
res.header('Cache-control', 'public');
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
|
||||
res.json(stats);
|
||||
} catch (e) {
|
||||
if (e instanceof Error && e.message.indexOf('This mining pool does not exist') > -1) {
|
||||
res.status(404).send(e.message);
|
||||
} else {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private async $getPoolBlocks(req: Request, res: Response) {
|
||||
try {
|
||||
const poolBlocks = await BlocksRepository.$getBlocksByPool(
|
||||
req.params.slug,
|
||||
req.params.height === undefined ? undefined : parseInt(req.params.height, 10),
|
||||
);
|
||||
res.header('Pragma', 'public');
|
||||
res.header('Cache-control', 'public');
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
|
||||
res.json(poolBlocks);
|
||||
} catch (e) {
|
||||
if (e instanceof Error && e.message.indexOf('This mining pool does not exist') > -1) {
|
||||
res.status(404).send(e.message);
|
||||
} else {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private async $getPools(req: Request, res: Response) {
|
||||
try {
|
||||
const stats = await mining.$getPoolsStats(req.params.interval);
|
||||
const blockCount = await BlocksRepository.$blockCount(null, null);
|
||||
res.header('Pragma', 'public');
|
||||
res.header('Cache-control', 'public');
|
||||
res.header('X-total-count', blockCount.toString());
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
|
||||
res.json(stats);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async $getPoolsHistoricalHashrate(req: Request, res: Response) {
|
||||
try {
|
||||
const hashrates = await HashratesRepository.$getPoolsWeeklyHashrate(req.params.interval);
|
||||
const blockCount = await BlocksRepository.$blockCount(null, null);
|
||||
res.header('Pragma', 'public');
|
||||
res.header('Cache-control', 'public');
|
||||
res.header('X-total-count', blockCount.toString());
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 300).toUTCString());
|
||||
res.json(hashrates);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async $getPoolHistoricalHashrate(req: Request, res: Response) {
|
||||
try {
|
||||
const hashrates = await HashratesRepository.$getPoolWeeklyHashrate(req.params.slug);
|
||||
const blockCount = await BlocksRepository.$blockCount(null, null);
|
||||
res.header('Pragma', 'public');
|
||||
res.header('Cache-control', 'public');
|
||||
res.header('X-total-count', blockCount.toString());
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 300).toUTCString());
|
||||
res.json(hashrates);
|
||||
} catch (e) {
|
||||
if (e instanceof Error && e.message.indexOf('This mining pool does not exist') > -1) {
|
||||
res.status(404).send(e.message);
|
||||
} else {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private async $getHistoricalHashrate(req: Request, res: Response) {
|
||||
let currentHashrate = 0, currentDifficulty = 0;
|
||||
try {
|
||||
currentHashrate = await bitcoinClient.getNetworkHashPs();
|
||||
currentDifficulty = await bitcoinClient.getDifficulty();
|
||||
} catch (e) {
|
||||
logger.debug('Bitcoin Core is not available, using zeroed values for current hashrate and difficulty');
|
||||
}
|
||||
|
||||
try {
|
||||
const hashrates = await HashratesRepository.$getNetworkDailyHashrate(req.params.interval);
|
||||
const difficulty = await DifficultyAdjustmentsRepository.$getAdjustments(req.params.interval, false);
|
||||
const blockCount = await BlocksRepository.$blockCount(null, null);
|
||||
res.header('Pragma', 'public');
|
||||
res.header('Cache-control', 'public');
|
||||
res.header('X-total-count', blockCount.toString());
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 300).toUTCString());
|
||||
res.json({
|
||||
hashrates: hashrates,
|
||||
difficulty: difficulty,
|
||||
currentHashrate: currentHashrate,
|
||||
currentDifficulty: currentDifficulty,
|
||||
});
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async $getHistoricalBlockFees(req: Request, res: Response) {
|
||||
try {
|
||||
const blockFees = await mining.$getHistoricalBlockFees(req.params.interval);
|
||||
const blockCount = await BlocksRepository.$blockCount(null, null);
|
||||
res.header('Pragma', 'public');
|
||||
res.header('Cache-control', 'public');
|
||||
res.header('X-total-count', blockCount.toString());
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
|
||||
res.json(blockFees);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async $getHistoricalBlockRewards(req: Request, res: Response) {
|
||||
try {
|
||||
const blockRewards = await mining.$getHistoricalBlockRewards(req.params.interval);
|
||||
const blockCount = await BlocksRepository.$blockCount(null, null);
|
||||
res.header('Pragma', 'public');
|
||||
res.header('Cache-control', 'public');
|
||||
res.header('X-total-count', blockCount.toString());
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
|
||||
res.json(blockRewards);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async $getHistoricalBlockFeeRates(req: Request, res: Response) {
|
||||
try {
|
||||
const blockFeeRates = await mining.$getHistoricalBlockFeeRates(req.params.interval);
|
||||
const blockCount = await BlocksRepository.$blockCount(null, null);
|
||||
res.header('Pragma', 'public');
|
||||
res.header('Cache-control', 'public');
|
||||
res.header('X-total-count', blockCount.toString());
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
|
||||
res.json(blockFeeRates);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async $getHistoricalBlockSizeAndWeight(req: Request, res: Response) {
|
||||
try {
|
||||
const blockSizes = await mining.$getHistoricalBlockSizes(req.params.interval);
|
||||
const blockWeights = await mining.$getHistoricalBlockWeights(req.params.interval);
|
||||
const blockCount = await BlocksRepository.$blockCount(null, null);
|
||||
res.header('Pragma', 'public');
|
||||
res.header('Cache-control', 'public');
|
||||
res.header('X-total-count', blockCount.toString());
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
|
||||
res.json({
|
||||
sizes: blockSizes,
|
||||
weights: blockWeights
|
||||
});
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async $getDifficultyAdjustments(req: Request, res: Response) {
|
||||
try {
|
||||
const difficulty = await DifficultyAdjustmentsRepository.$getRawAdjustments(req.params.interval, true);
|
||||
res.header('Pragma', 'public');
|
||||
res.header('Cache-control', 'public');
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 300).toUTCString());
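// Respond with compact arrays: [time, height, difficulty, adjustment].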
|
||||
res.json(difficulty.map(adj => [adj.time, adj.height, adj.difficulty, adj.adjustment]));
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async $getRewardStats(req: Request, res: Response) {
|
||||
try {
|
||||
const response = await mining.$getRewardStats(parseInt(req.params.blockCount, 10));
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
|
||||
res.json(response);
|
||||
} catch (e) {
|
||||
res.status(500).end();
|
||||
}
|
||||
}
|
||||
|
||||
private async $getHistoricalBlockPrediction(req: Request, res: Response) {
|
||||
try {
|
||||
const blockPredictions = await mining.$getBlockPredictionsHistory(req.params.interval);
|
||||
const blockCount = await BlocksAuditsRepository.$getPredictionsCount();
|
||||
res.header('Pragma', 'public');
|
||||
res.header('Cache-control', 'public');
|
||||
res.header('X-total-count', blockCount.toString());
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
|
||||
res.json(blockPredictions.map(prediction => [prediction.time, prediction.height, prediction.match_rate]));
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
public async $getBlockAudit(req: Request, res: Response) {
|
||||
try {
|
||||
const audit = await BlocksAuditsRepository.$getBlockAudit(req.params.hash);
|
||||
res.header('Pragma', 'public');
|
||||
res.header('Cache-control', 'public');
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 3600 * 24).toUTCString());
|
||||
res.json(audit);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export default new MiningRoutes();
|
@ -1,18 +1,20 @@
|
||||
import { IndexedDifficultyAdjustment, PoolInfo, PoolStats, RewardStats } from '../mempool.interfaces';
|
||||
import BlocksRepository from '../repositories/BlocksRepository';
|
||||
import PoolsRepository from '../repositories/PoolsRepository';
|
||||
import HashratesRepository from '../repositories/HashratesRepository';
|
||||
import bitcoinClient from './bitcoin/bitcoin-client';
|
||||
import logger from '../logger';
|
||||
import { Common } from './common';
|
||||
import loadingIndicators from './loading-indicators';
|
||||
import { BlockPrice, PoolInfo, PoolStats, RewardStats } from '../../mempool.interfaces';
|
||||
import BlocksRepository from '../../repositories/BlocksRepository';
|
||||
import PoolsRepository from '../../repositories/PoolsRepository';
|
||||
import HashratesRepository from '../../repositories/HashratesRepository';
|
||||
import bitcoinClient from '../bitcoin/bitcoin-client';
|
||||
import logger from '../../logger';
|
||||
import { Common } from '../common';
|
||||
import loadingIndicators from '../loading-indicators';
|
||||
import { escape } from 'mysql2';
|
||||
import indexer from '../indexer';
|
||||
import DifficultyAdjustmentsRepository from '../repositories/DifficultyAdjustmentsRepository';
|
||||
import config from '../config';
|
||||
import BlocksAuditsRepository from '../repositories/BlocksAuditsRepository';
|
||||
import DifficultyAdjustmentsRepository from '../../repositories/DifficultyAdjustmentsRepository';
|
||||
import config from '../../config';
|
||||
import BlocksAuditsRepository from '../../repositories/BlocksAuditsRepository';
|
||||
import PricesRepository from '../../repositories/PricesRepository';
|
||||
|
||||
class Mining {
|
||||
blocksPriceIndexingRunning = false;
|
||||
|
||||
constructor() {
|
||||
}
|
||||
|
||||
@ -31,7 +33,7 @@ class Mining {
|
||||
*/
|
||||
public async $getHistoricalBlockFees(interval: string | null = null): Promise<any> {
|
||||
return await BlocksRepository.$getHistoricalBlockFees(
|
||||
this.getTimeRange(interval),
|
||||
this.getTimeRange(interval, 5),
|
||||
Common.getSqlInterval(interval)
|
||||
);
|
||||
}
|
||||
@ -173,26 +175,25 @@ class Mining {
|
||||
*/
|
||||
public async $generatePoolHashrateHistory(): Promise<void> {
|
||||
const now = new Date();
|
||||
const lastestRunDate = await HashratesRepository.$getLatestRun('last_weekly_hashrates_indexing');
|
||||
|
||||
try {
|
||||
const lastestRunDate = await HashratesRepository.$getLatestRun('last_weekly_hashrates_indexing');
|
||||
|
||||
// Run only if:
|
||||
// * lastestRunDate is set to 0 (node backend restart, reorg)
|
||||
// * we started a new week (around Monday midnight)
|
||||
const runIndexing = lastestRunDate === 0 || now.getUTCDay() === 1 && lastestRunDate !== now.getUTCDate();
|
||||
if (!runIndexing) {
|
||||
return;
|
||||
}
|
||||
} catch (e) {
|
||||
throw e;
|
||||
// Run only if:
|
||||
// * lastestRunDate is set to 0 (node backend restart, reorg)
|
||||
// * we started a new week (around Monday midnight)
|
||||
const runIndexing = lastestRunDate === 0 || now.getUTCDay() === 1 && lastestRunDate !== now.getUTCDate();
|
||||
if (!runIndexing) {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const oldestConsecutiveBlockTimestamp = 1000 * (await BlocksRepository.$getOldestConsecutiveBlock()).timestamp;
|
||||
|
||||
const genesisBlock = await bitcoinClient.getBlock(await bitcoinClient.getBlockHash(0));
|
||||
const genesisTimestamp = genesisBlock.time * 1000;
|
||||
|
||||
const indexedTimestamp = await HashratesRepository.$getWeeklyHashrateTimestamps();
|
||||
const hashrates: any[] = [];
|
||||
const genesisTimestamp = 1231006505000; // bitcoin-cli getblock 000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f
|
||||
|
||||
|
||||
const lastMonday = new Date(now.setDate(now.getDate() - (now.getDay() + 6) % 7));
|
||||
const lastMondayMidnight = this.getDateMidnight(lastMonday);
|
||||
let toTimestamp = lastMondayMidnight.getTime();
|
||||
@ -207,7 +208,7 @@ class Mining {
|
||||
logger.debug(`Indexing weekly mining pool hashrate`);
|
||||
loadingIndicators.setProgress('weekly-hashrate-indexing', 0);
|
||||
|
||||
while (toTimestamp > genesisTimestamp) {
|
||||
while (toTimestamp > genesisTimestamp && toTimestamp > oldestConsecutiveBlockTimestamp) {
|
||||
const fromTimestamp = toTimestamp - 604800000;
|
||||
|
||||
// Skip already indexed weeks
|
||||
@ -217,14 +218,6 @@ class Mining {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Check if we have blocks for the previous week (which mean that the week
|
||||
// we are currently indexing has complete data)
|
||||
const blockStatsPreviousWeek: any = await BlocksRepository.$blockCountBetweenTimestamp(
|
||||
null, (fromTimestamp - 604800000) / 1000, (toTimestamp - 604800000) / 1000);
|
||||
if (blockStatsPreviousWeek.blockCount === 0) { // We are done indexing
|
||||
break;
|
||||
}
|
||||
|
||||
const blockStats: any = await BlocksRepository.$blockCountBetweenTimestamp(
|
||||
null, fromTimestamp / 1000, toTimestamp / 1000);
|
||||
const lastBlockHashrate = await bitcoinClient.getNetworkHashPs(blockStats.blockCount,
|
||||
@ -232,34 +225,35 @@ class Mining {
|
||||
|
||||
let pools = await PoolsRepository.$getPoolsInfoBetween(fromTimestamp / 1000, toTimestamp / 1000);
|
||||
const totalBlocks = pools.reduce((acc, pool) => acc + pool.blockCount, 0);
|
||||
pools = pools.map((pool: any) => {
|
||||
pool.hashrate = (pool.blockCount / totalBlocks) * lastBlockHashrate;
|
||||
pool.share = (pool.blockCount / totalBlocks);
|
||||
return pool;
|
||||
});
|
||||
|
||||
for (const pool of pools) {
|
||||
hashrates.push({
|
||||
hashrateTimestamp: toTimestamp / 1000,
|
||||
avgHashrate: pool['hashrate'],
|
||||
poolId: pool.poolId,
|
||||
share: pool['share'],
|
||||
type: 'weekly',
|
||||
if (totalBlocks > 0) {
|
||||
pools = pools.map((pool: any) => {
|
||||
pool.hashrate = (pool.blockCount / totalBlocks) * lastBlockHashrate;
|
||||
pool.share = (pool.blockCount / totalBlocks);
|
||||
return pool;
|
||||
});
|
||||
}
|
||||
|
||||
newlyIndexed += hashrates.length;
|
||||
await HashratesRepository.$saveHashrates(hashrates);
|
||||
hashrates.length = 0;
|
||||
for (const pool of pools) {
|
||||
hashrates.push({
|
||||
hashrateTimestamp: toTimestamp / 1000,
|
||||
avgHashrate: pool['hashrate'] ,
|
||||
poolId: pool.poolId,
|
||||
share: pool['share'],
|
||||
type: 'weekly',
|
||||
});
|
||||
}
|
||||
|
||||
newlyIndexed += hashrates.length;
|
||||
await HashratesRepository.$saveHashrates(hashrates);
|
||||
hashrates.length = 0;
|
||||
}
|
||||
|
||||
const elapsedSeconds = Math.max(1, Math.round((new Date().getTime() / 1000) - timer));
|
||||
if (elapsedSeconds > 1) {
|
||||
const runningFor = Math.max(1, Math.round((new Date().getTime() / 1000) - startedAt));
|
||||
const weeksPerSeconds = Math.max(1, Math.round(indexedThisRun / elapsedSeconds));
|
||||
const progress = Math.round(totalIndexed / totalWeekIndexed * 10000) / 100;
|
||||
const timeLeft = Math.round((totalWeekIndexed - totalIndexed) / weeksPerSeconds);
|
||||
const formattedDate = new Date(fromTimestamp).toUTCString();
|
||||
logger.debug(`Getting weekly pool hashrate for ${formattedDate} | ~${weeksPerSeconds.toFixed(2)} weeks/sec | total: ~${totalIndexed}/${Math.round(totalWeekIndexed)} (${progress}%) | elapsed: ${runningFor} seconds | left: ~${timeLeft} seconds`);
|
||||
logger.debug(`Getting weekly pool hashrate for ${formattedDate} | ~${weeksPerSeconds.toFixed(2)} weeks/sec | total: ~${totalIndexed}/${Math.round(totalWeekIndexed)} (${progress}%) | elapsed: ${runningFor} seconds`);
|
||||
timer = new Date().getTime() / 1000;
|
||||
indexedThisRun = 0;
|
||||
loadingIndicators.setProgress('weekly-hashrate-indexing', progress, false);
|
||||
@ -272,6 +266,8 @@ class Mining {
|
||||
await HashratesRepository.$setLatestRun('last_weekly_hashrates_indexing', new Date().getUTCDate());
|
||||
if (newlyIndexed > 0) {
|
||||
logger.notice(`Weekly mining pools hashrates indexing completed: indexed ${newlyIndexed}`);
|
||||
} else {
|
||||
logger.debug(`Weekly mining pools hashrates indexing completed: indexed ${newlyIndexed}`);
|
||||
}
|
||||
loadingIndicators.setProgress('weekly-hashrate-indexing', 100);
|
||||
} catch (e) {
|
||||
@ -285,20 +281,19 @@ class Mining {
|
||||
* [INDEXING] Generate daily hashrate data
|
||||
*/
|
||||
public async $generateNetworkHashrateHistory(): Promise<void> {
|
||||
try {
|
||||
// We only run this once a day around midnight
|
||||
const latestRunDate = await HashratesRepository.$getLatestRun('last_hashrates_indexing');
|
||||
const now = new Date().getUTCDate();
|
||||
if (now === latestRunDate) {
|
||||
return;
|
||||
}
|
||||
} catch (e) {
|
||||
throw e;
|
||||
// We only run this once a day around midnight
|
const latestRunDate = await HashratesRepository.$getLatestRun('last_hashrates_indexing');
const now = new Date().getUTCDate();
if (now === latestRunDate) {
return;
}

const oldestConsecutiveBlockTimestamp = 1000 * (await BlocksRepository.$getOldestConsecutiveBlock()).timestamp;

try {
const indexedTimestamp = (await HashratesRepository.$getNetworkDailyHashrate(null)).map(hashrate => hashrate.timestamp);
const genesisTimestamp = (config.MEMPOOL.NETWORK === 'signet') ? 1598918400000 : 1231006505000; // bitcoin-cli getblock 000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f
const genesisBlock = await bitcoinClient.getBlock(await bitcoinClient.getBlockHash(0));
const genesisTimestamp = genesisBlock.time * 1000;
const indexedTimestamp = (await HashratesRepository.$getRawNetworkDailyHashrate(null)).map(hashrate => hashrate.timestamp);
const lastMidnight = this.getDateMidnight(new Date());
let toTimestamp = Math.round(lastMidnight.getTime());
const hashrates: any[] = [];
@ -313,7 +308,7 @@ class Mining {
logger.debug(`Indexing daily network hashrate`);
loadingIndicators.setProgress('daily-hashrate-indexing', 0);

while (toTimestamp > genesisTimestamp) {
while (toTimestamp > genesisTimestamp && toTimestamp > oldestConsecutiveBlockTimestamp) {
const fromTimestamp = toTimestamp - 86400000;

// Skip already indexed days
@ -323,17 +318,9 @@ class Mining {
continue;
}

// Check if we have blocks for the previous day (which mean that the day
// we are currently indexing has complete data)
const blockStatsPreviousDay: any = await BlocksRepository.$blockCountBetweenTimestamp(
null, (fromTimestamp - 86400000) / 1000, (toTimestamp - 86400000) / 1000);
if (blockStatsPreviousDay.blockCount === 0 && config.MEMPOOL.NETWORK === 'mainnet') { // We are done indexing
break;
}

const blockStats: any = await BlocksRepository.$blockCountBetweenTimestamp(
null, fromTimestamp / 1000, toTimestamp / 1000);
const lastBlockHashrate = await bitcoinClient.getNetworkHashPs(blockStats.blockCount,
const lastBlockHashrate = blockStats.blockCount === 0 ? 0 : await bitcoinClient.getNetworkHashPs(blockStats.blockCount,
blockStats.lastBlockHeight);

hashrates.push({
@ -355,9 +342,8 @@ class Mining {
const runningFor = Math.max(1, Math.round((new Date().getTime() / 1000) - startedAt));
const daysPerSeconds = Math.max(1, Math.round(indexedThisRun / elapsedSeconds));
const progress = Math.round(totalIndexed / totalDayIndexed * 10000) / 100;
const timeLeft = Math.round((totalDayIndexed - totalIndexed) / daysPerSeconds);
const formattedDate = new Date(fromTimestamp).toUTCString();
logger.debug(`Getting network daily hashrate for ${formattedDate} | ~${daysPerSeconds.toFixed(2)} days/sec | total: ~${totalIndexed}/${Math.round(totalDayIndexed)} (${progress}%) | elapsed: ${runningFor} seconds | left: ~${timeLeft} seconds`);
logger.debug(`Getting network daily hashrate for ${formattedDate} | ~${daysPerSeconds.toFixed(2)} days/sec | total: ~${totalIndexed}/${Math.round(totalDayIndexed)} (${progress}%) | elapsed: ${runningFor} seconds`);
timer = new Date().getTime() / 1000;
indexedThisRun = 0;
loadingIndicators.setProgress('daily-hashrate-indexing', progress);
@ -368,8 +354,8 @@ class Mining {
++totalIndexed;
}

// Add genesis block manually on mainnet and testnet
if ('signet' !== config.MEMPOOL.NETWORK && toTimestamp <= genesisTimestamp && !indexedTimestamp.includes(genesisTimestamp)) {
// Add genesis block manually
if (config.MEMPOOL.INDEXING_BLOCKS_AMOUNT === -1 && !indexedTimestamp.includes(genesisTimestamp / 1000)) {
hashrates.push({
hashrateTimestamp: genesisTimestamp / 1000,
avgHashrate: await bitcoinClient.getNetworkHashPs(1, 1),
@ -385,6 +371,8 @@ class Mining {
await HashratesRepository.$setLatestRun('last_hashrates_indexing', new Date().getUTCDate());
if (newlyIndexed > 0) {
logger.notice(`Daily network hashrate indexing completed: indexed ${newlyIndexed} days`);
} else {
logger.debug(`Daily network hashrate indexing completed: indexed ${newlyIndexed} days`);
}
loadingIndicators.setProgress('daily-hashrate-indexing', 100);
} catch (e) {
|
||||
@ -405,27 +393,37 @@ class Mining {
}

const blocks: any = await BlocksRepository.$getBlocksDifficulty();

let currentDifficulty = 0;
const genesisBlock = await bitcoinClient.getBlock(await bitcoinClient.getBlockHash(0));
let currentDifficulty = genesisBlock.difficulty;
let totalIndexed = 0;

if (indexedHeights[0] !== true) {
if (config.MEMPOOL.INDEXING_BLOCKS_AMOUNT === -1 && indexedHeights[0] !== true) {
await DifficultyAdjustmentsRepository.$saveAdjustments({
time: (config.MEMPOOL.NETWORK === 'signet') ? 1598918400 : 1231006505,
time: genesisBlock.time,
height: 0,
difficulty: (config.MEMPOOL.NETWORK === 'signet') ? 0.001126515290698186 : 1.0,
difficulty: currentDifficulty,
adjustment: 0.0,
});
}

const oldestConsecutiveBlock = await BlocksRepository.$getOldestConsecutiveBlock();
if (config.MEMPOOL.INDEXING_BLOCKS_AMOUNT !== -1) {
currentDifficulty = oldestConsecutiveBlock.difficulty;
}

let totalBlockChecked = 0;
let timer = new Date().getTime() / 1000;

for (const block of blocks) {
if (block.difficulty !== currentDifficulty) {
if (block.height === 0 || indexedHeights[block.height] === true) { // Already indexed
currentDifficulty = block.difficulty;
if (indexedHeights[block.height] === true) { // Already indexed
if (block.height >= oldestConsecutiveBlock.height) {
currentDifficulty = block.difficulty;
}
continue;
}

let adjustment = block.difficulty / Math.max(1, currentDifficulty);
let adjustment = block.difficulty / currentDifficulty;
adjustment = Math.round(adjustment * 1000000) / 1000000; // Remove float point noise

await DifficultyAdjustmentsRepository.$saveAdjustments({
@ -436,15 +434,91 @@ class Mining {
});

totalIndexed++;
currentDifficulty = block.difficulty;
if (block.height >= oldestConsecutiveBlock.height) {
currentDifficulty = block.difficulty;
}
}

totalBlockChecked++;
const elapsedSeconds = Math.max(1, Math.round((new Date().getTime() / 1000) - timer));
if (elapsedSeconds > 5) {
const progress = Math.round(totalBlockChecked / blocks.length * 100);
logger.info(`Indexing difficulty adjustment at block #${block.height} | Progress: ${progress}%`);
timer = new Date().getTime() / 1000;
}
}

if (totalIndexed > 0) {
logger.notice(`Indexed ${totalIndexed} difficulty adjustments`);
} else {
logger.debug(`Indexed ${totalIndexed} difficulty adjustments`);
}
}
|
||||
|
||||
/**
* Create a link between blocks and the latest price at when they were mined
*/
public async $indexBlockPrices() {
if (this.blocksPriceIndexingRunning === true) {
return;
}
this.blocksPriceIndexingRunning = true;

try {
const prices: any[] = await PricesRepository.$getPricesTimesAndId();
const blocksWithoutPrices: any[] = await BlocksRepository.$getBlocksWithoutPrice();

let totalInserted = 0;
const blocksPrices: BlockPrice[] = [];

for (const block of blocksWithoutPrices) {
// Quick optimisation, out mtgox feed only goes back to 2010-07-19 02:00:00, so skip the first 68951 blocks
if (block.height < 68951) {
blocksPrices.push({
height: block.height,
priceId: prices[0].id,
});
continue;
}
for (const price of prices) {
if (block.timestamp < price.time) {
blocksPrices.push({
height: block.height,
priceId: price.id,
});
break;
};
}

if (blocksPrices.length >= 100000) {
totalInserted += blocksPrices.length;
if (blocksWithoutPrices.length > 200000) {
logger.debug(`Linking ${blocksPrices.length} newly indexed blocks to their closest price | Progress ${Math.round(totalInserted / blocksWithoutPrices.length * 100)}%`);
} else {
logger.debug(`Linking ${blocksPrices.length} newly indexed blocks to their closest price`);
}
await BlocksRepository.$saveBlockPrices(blocksPrices);
blocksPrices.length = 0;
}
}

if (blocksPrices.length > 0) {
totalInserted += blocksPrices.length;
if (blocksWithoutPrices.length > 200000) {
logger.debug(`Linking ${blocksPrices.length} newly indexed blocks to their closest price | Progress ${Math.round(totalInserted / blocksWithoutPrices.length * 100)}%`);
} else {
logger.debug(`Linking ${blocksPrices.length} newly indexed blocks to their closest price`);
}
await BlocksRepository.$saveBlockPrices(blocksPrices);
}
} catch (e) {
this.blocksPriceIndexingRunning = false;
throw e;
}

this.blocksPriceIndexingRunning = false;
}
|
||||
|
||||
private getDateMidnight(date: Date): Date {
date.setUTCHours(0);
date.setUTCMinutes(0);
@ -454,18 +528,18 @@ class Mining {
return date;
}

private getTimeRange(interval: string | null): number {
private getTimeRange(interval: string | null, scale = 1): number {
switch (interval) {
case '3y': return 43200; // 12h
case '2y': return 28800; // 8h
case '1y': return 28800; // 8h
case '6m': return 10800; // 3h
case '3m': return 7200; // 2h
case '1m': return 1800; // 30min
case '1w': return 300; // 5min
case '3d': return 1;
case '24h': return 1;
default: return 86400; // 24h
case '3y': return 43200 * scale; // 12h
case '2y': return 28800 * scale; // 8h
case '1y': return 28800 * scale; // 8h
case '6m': return 10800 * scale; // 3h
case '3m': return 7200 * scale; // 2h
case '1m': return 1800 * scale; // 30min
case '1w': return 300 * scale; // 5min
case '3d': return 1 * scale;
case '24h': return 1 * scale;
default: return 86400 * scale;
}
}
}
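The new scale argument multiplies every base sampling interval, so existing callers keep their behaviour with the default of 1 while chart endpoints can ask for coarser buckets. A worked example, purely illustrative since the call sites are not shown in this diff:

// Worked example only - not code from this commit.
const base = 28800;       // getTimeRange('1y') -> 8h buckets, unchanged default
const scaled = base * 2;  // getTimeRange('1y', 2) -> 57600, i.e. 16h buckets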
|
@ -222,6 +222,10 @@ class PoolsParser {
* Delete blocks which needs to be reindexed
*/
private async $deleteBlocskToReindex(finalPoolDataUpdate: any[]) {
if (config.MEMPOOL.AUTOMATIC_BLOCK_REINDEXING === false) {
return;
}

const blockCount = await BlocksRepository.$blockCount(null, null);
if (blockCount === 0) {
return;
|
||||
|
@ -1,160 +1,11 @@
|
||||
import memPool from './mempool';
|
||||
import DB from '../database';
|
||||
import logger from '../logger';
|
||||
import DB from '../../database';
|
||||
import logger from '../../logger';
|
||||
import { Statistic, OptimizedStatistic } from '../../mempool.interfaces';
|
||||
|
||||
import { Statistic, TransactionExtended, OptimizedStatistic } from '../mempool.interfaces';
|
||||
import config from '../config';
|
||||
import { Common } from './common';
|
||||
|
||||
class Statistics {
|
||||
protected intervalTimer: NodeJS.Timer | undefined;
|
||||
protected newStatisticsEntryCallback: ((stats: OptimizedStatistic) => void) | undefined;
|
||||
class StatisticsApi {
|
||||
protected queryTimeout = 120000;
|
||||
|
||||
public setNewStatisticsEntryCallback(fn: (stats: OptimizedStatistic) => void) {
|
||||
this.newStatisticsEntryCallback = fn;
|
||||
}
|
||||
|
||||
constructor() { }
|
||||
|
||||
public startStatistics(): void {
|
||||
logger.info('Starting statistics service');
|
||||
|
||||
const now = new Date();
|
||||
const nextInterval = new Date(now.getFullYear(), now.getMonth(), now.getDate(), now.getHours(),
|
||||
Math.floor(now.getMinutes() / 1) * 1 + 1, 0, 0);
|
||||
const difference = nextInterval.getTime() - now.getTime();
|
||||
|
||||
setTimeout(() => {
|
||||
this.runStatistics();
|
||||
this.intervalTimer = setInterval(() => {
|
||||
this.runStatistics();
|
||||
}, 1 * 60 * 1000);
|
||||
}, difference);
|
||||
}
|
||||
|
||||
private async runStatistics(): Promise<void> {
|
||||
if (!memPool.isInSync()) {
|
||||
return;
|
||||
}
|
||||
const currentMempool = memPool.getMempool();
|
||||
const txPerSecond = memPool.getTxPerSecond();
|
||||
const vBytesPerSecond = memPool.getVBytesPerSecond();
|
||||
|
||||
logger.debug('Running statistics');
|
||||
|
||||
let memPoolArray: TransactionExtended[] = [];
|
||||
for (const i in currentMempool) {
|
||||
if (currentMempool.hasOwnProperty(i)) {
|
||||
memPoolArray.push(currentMempool[i]);
|
||||
}
|
||||
}
|
||||
// Remove 0 and undefined
|
||||
memPoolArray = memPoolArray.filter((tx) => tx.effectiveFeePerVsize);
|
||||
|
||||
if (!memPoolArray.length) {
|
||||
try {
|
||||
const insertIdZeroed = await this.$createZeroedStatistic();
|
||||
if (this.newStatisticsEntryCallback && insertIdZeroed) {
|
||||
const newStats = await this.$get(insertIdZeroed);
|
||||
if (newStats) {
|
||||
this.newStatisticsEntryCallback(newStats);
|
||||
}
|
||||
}
|
||||
} catch (e) {
|
||||
logger.err('Unable to insert zeroed statistics. ' + e);
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
memPoolArray.sort((a, b) => a.effectiveFeePerVsize - b.effectiveFeePerVsize);
|
||||
const totalWeight = memPoolArray.map((tx) => tx.vsize).reduce((acc, curr) => acc + curr) * 4;
|
||||
const totalFee = memPoolArray.map((tx) => tx.fee).reduce((acc, curr) => acc + curr);
|
||||
|
||||
const logFees = [1, 2, 3, 4, 5, 6, 8, 10, 12, 15, 20, 30, 40, 50, 60, 70, 80, 90, 100, 125, 150, 175, 200,
|
||||
250, 300, 350, 400, 500, 600, 700, 800, 900, 1000, 1200, 1400, 1600, 1800, 2000];
|
||||
|
||||
const weightVsizeFees: { [feePerWU: number]: number } = {};
|
||||
const lastItem = logFees.length - 1;
|
||||
|
||||
memPoolArray.forEach((transaction) => {
|
||||
for (let i = 0; i < logFees.length; i++) {
|
||||
if (
|
||||
(Common.isLiquid() && (i === lastItem || transaction.effectiveFeePerVsize * 10 < logFees[i + 1]))
|
||||
||
|
||||
(!Common.isLiquid() && (i === lastItem || transaction.effectiveFeePerVsize < logFees[i + 1]))
|
||||
) {
|
||||
if (weightVsizeFees[logFees[i]]) {
|
||||
weightVsizeFees[logFees[i]] += transaction.vsize;
|
||||
} else {
|
||||
weightVsizeFees[logFees[i]] = transaction.vsize;
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
try {
|
||||
const insertId = await this.$create({
|
||||
added: 'NOW()',
|
||||
unconfirmed_transactions: memPoolArray.length,
|
||||
tx_per_second: txPerSecond,
|
||||
vbytes_per_second: Math.round(vBytesPerSecond),
|
||||
mempool_byte_weight: totalWeight,
|
||||
total_fee: totalFee,
|
||||
fee_data: '',
|
||||
vsize_1: weightVsizeFees['1'] || 0,
|
||||
vsize_2: weightVsizeFees['2'] || 0,
|
||||
vsize_3: weightVsizeFees['3'] || 0,
|
||||
vsize_4: weightVsizeFees['4'] || 0,
|
||||
vsize_5: weightVsizeFees['5'] || 0,
|
||||
vsize_6: weightVsizeFees['6'] || 0,
|
||||
vsize_8: weightVsizeFees['8'] || 0,
|
||||
vsize_10: weightVsizeFees['10'] || 0,
|
||||
vsize_12: weightVsizeFees['12'] || 0,
|
||||
vsize_15: weightVsizeFees['15'] || 0,
|
||||
vsize_20: weightVsizeFees['20'] || 0,
|
||||
vsize_30: weightVsizeFees['30'] || 0,
|
||||
vsize_40: weightVsizeFees['40'] || 0,
|
||||
vsize_50: weightVsizeFees['50'] || 0,
|
||||
vsize_60: weightVsizeFees['60'] || 0,
|
||||
vsize_70: weightVsizeFees['70'] || 0,
|
||||
vsize_80: weightVsizeFees['80'] || 0,
|
||||
vsize_90: weightVsizeFees['90'] || 0,
|
||||
vsize_100: weightVsizeFees['100'] || 0,
|
||||
vsize_125: weightVsizeFees['125'] || 0,
|
||||
vsize_150: weightVsizeFees['150'] || 0,
|
||||
vsize_175: weightVsizeFees['175'] || 0,
|
||||
vsize_200: weightVsizeFees['200'] || 0,
|
||||
vsize_250: weightVsizeFees['250'] || 0,
|
||||
vsize_300: weightVsizeFees['300'] || 0,
|
||||
vsize_350: weightVsizeFees['350'] || 0,
|
||||
vsize_400: weightVsizeFees['400'] || 0,
|
||||
vsize_500: weightVsizeFees['500'] || 0,
|
||||
vsize_600: weightVsizeFees['600'] || 0,
|
||||
vsize_700: weightVsizeFees['700'] || 0,
|
||||
vsize_800: weightVsizeFees['800'] || 0,
|
||||
vsize_900: weightVsizeFees['900'] || 0,
|
||||
vsize_1000: weightVsizeFees['1000'] || 0,
|
||||
vsize_1200: weightVsizeFees['1200'] || 0,
|
||||
vsize_1400: weightVsizeFees['1400'] || 0,
|
||||
vsize_1600: weightVsizeFees['1600'] || 0,
|
||||
vsize_1800: weightVsizeFees['1800'] || 0,
|
||||
vsize_2000: weightVsizeFees['2000'] || 0,
|
||||
});
|
||||
|
||||
if (this.newStatisticsEntryCallback && insertId) {
|
||||
const newStats = await this.$get(insertId);
|
||||
if (newStats) {
|
||||
this.newStatisticsEntryCallback(newStats);
|
||||
}
|
||||
}
|
||||
} catch (e) {
|
||||
logger.err('Unable to insert statistics. ' + e);
|
||||
}
|
||||
}
|
||||
|
||||
private async $createZeroedStatistic(): Promise<number | undefined> {
|
||||
public async $createZeroedStatistic(): Promise<number | undefined> {
|
||||
try {
|
||||
const query = `INSERT INTO statistics(
|
||||
added,
|
||||
@ -212,7 +63,7 @@ class Statistics {
|
||||
}
|
||||
}
|
||||
|
||||
private async $create(statistics: Statistic): Promise<number | undefined> {
|
||||
public async $create(statistics: Statistic): Promise<number | undefined> {
|
||||
try {
|
||||
const query = `INSERT INTO statistics(
|
||||
added,
|
||||
@ -413,7 +264,7 @@ class Statistics {
|
||||
ORDER BY statistics.added DESC;`;
|
||||
}
|
||||
|
||||
private async $get(id: number): Promise<OptimizedStatistic | undefined> {
|
||||
public async $get(id: number): Promise<OptimizedStatistic | undefined> {
|
||||
try {
|
||||
const query = `SELECT *, UNIX_TIMESTAMP(added) as added FROM statistics WHERE id = ?`;
|
||||
const [rows] = await DB.query(query, [id]);
|
||||
@ -574,7 +425,6 @@ class Statistics {
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
export default new Statistics();
|
||||
export default new StatisticsApi();
|
67 backend/src/api/statistics/statistics.routes.ts Normal file
@ -0,0 +1,67 @@
import { Application, Request, Response } from 'express';
import config from '../../config';
import statisticsApi from './statistics-api';

class StatisticsRoutes {
public initRoutes(app: Application) {
app
.get(config.MEMPOOL.API_URL_PREFIX + 'statistics/2h', this.$getStatisticsByTime.bind(this, '2h'))
.get(config.MEMPOOL.API_URL_PREFIX + 'statistics/24h', this.$getStatisticsByTime.bind(this, '24h'))
.get(config.MEMPOOL.API_URL_PREFIX + 'statistics/1w', this.$getStatisticsByTime.bind(this, '1w'))
.get(config.MEMPOOL.API_URL_PREFIX + 'statistics/1m', this.$getStatisticsByTime.bind(this, '1m'))
.get(config.MEMPOOL.API_URL_PREFIX + 'statistics/3m', this.$getStatisticsByTime.bind(this, '3m'))
.get(config.MEMPOOL.API_URL_PREFIX + 'statistics/6m', this.$getStatisticsByTime.bind(this, '6m'))
.get(config.MEMPOOL.API_URL_PREFIX + 'statistics/1y', this.$getStatisticsByTime.bind(this, '1y'))
.get(config.MEMPOOL.API_URL_PREFIX + 'statistics/2y', this.$getStatisticsByTime.bind(this, '2y'))
.get(config.MEMPOOL.API_URL_PREFIX + 'statistics/3y', this.$getStatisticsByTime.bind(this, '3y'))
;
}

private async $getStatisticsByTime(time: '2h' | '24h' | '1w' | '1m' | '3m' | '6m' | '1y' | '2y' | '3y', req: Request, res: Response) {
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 1000 * 300).toUTCString());

try {
let result;
switch (time as string) {
case '2h':
result = await statisticsApi.$list2H();
res.setHeader('Expires', new Date(Date.now() + 1000 * 30).toUTCString());
break;
case '24h':
result = await statisticsApi.$list24H();
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
break;
case '1w':
result = await statisticsApi.$list1W();
break;
case '1m':
result = await statisticsApi.$list1M();
break;
case '3m':
result = await statisticsApi.$list3M();
break;
case '6m':
result = await statisticsApi.$list6M();
break;
case '1y':
result = await statisticsApi.$list1Y();
break;
case '2y':
result = await statisticsApi.$list2Y();
break;
case '3y':
result = await statisticsApi.$list3Y();
break;
default:
result = await statisticsApi.$list2H();
}
res.json(result);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
}

export default new StatisticsRoutes();
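A hedged usage sketch of the routes registered above; the exact prefix comes from config.MEMPOOL.API_URL_PREFIX (assumed here to be '/api/v1/'), and the rows follow the OptimizedStatistic interface referenced elsewhere in this commit:

// Illustrative client call, assuming the default '/api/v1/' prefix.
async function fetchWeeklyStats(): Promise<unknown[]> {
  const res = await fetch('/api/v1/statistics/1w');
  if (!res.ok) {
    throw new Error(`statistics/1w failed with HTTP ${res.status}`);
  }
  return res.json(); // array of OptimizedStatistic rows, cached for 5 minutes via the Expires header
}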
|
153 backend/src/api/statistics/statistics.ts Normal file
@ -0,0 +1,153 @@
|
||||
import memPool from '../mempool';
|
||||
import logger from '../../logger';
|
||||
import { TransactionExtended, OptimizedStatistic } from '../../mempool.interfaces';
|
||||
import { Common } from '../common';
|
||||
import statisticsApi from './statistics-api';
|
||||
|
||||
class Statistics {
|
||||
protected intervalTimer: NodeJS.Timer | undefined;
|
||||
protected newStatisticsEntryCallback: ((stats: OptimizedStatistic) => void) | undefined;
|
||||
|
||||
public setNewStatisticsEntryCallback(fn: (stats: OptimizedStatistic) => void) {
|
||||
this.newStatisticsEntryCallback = fn;
|
||||
}
|
||||
|
||||
public startStatistics(): void {
|
||||
logger.info('Starting statistics service');
|
||||
|
||||
const now = new Date();
|
||||
const nextInterval = new Date(now.getFullYear(), now.getMonth(), now.getDate(), now.getHours(),
|
||||
Math.floor(now.getMinutes() / 1) * 1 + 1, 0, 0);
|
||||
const difference = nextInterval.getTime() - now.getTime();
|
||||
|
||||
setTimeout(() => {
|
||||
this.runStatistics();
|
||||
this.intervalTimer = setInterval(() => {
|
||||
this.runStatistics();
|
||||
}, 1 * 60 * 1000);
|
||||
}, difference);
|
||||
}
|
||||
|
||||
private async runStatistics(): Promise<void> {
|
||||
if (!memPool.isInSync()) {
|
||||
return;
|
||||
}
|
||||
const currentMempool = memPool.getMempool();
|
||||
const txPerSecond = memPool.getTxPerSecond();
|
||||
const vBytesPerSecond = memPool.getVBytesPerSecond();
|
||||
|
||||
logger.debug('Running statistics');
|
||||
|
||||
let memPoolArray: TransactionExtended[] = [];
|
||||
for (const i in currentMempool) {
|
||||
if (currentMempool.hasOwnProperty(i)) {
|
||||
memPoolArray.push(currentMempool[i]);
|
||||
}
|
||||
}
|
||||
// Remove 0 and undefined
|
||||
memPoolArray = memPoolArray.filter((tx) => tx.effectiveFeePerVsize);
|
||||
|
||||
if (!memPoolArray.length) {
|
||||
try {
|
||||
const insertIdZeroed = await statisticsApi.$createZeroedStatistic();
|
||||
if (this.newStatisticsEntryCallback && insertIdZeroed) {
|
||||
const newStats = await statisticsApi.$get(insertIdZeroed);
|
||||
if (newStats) {
|
||||
this.newStatisticsEntryCallback(newStats);
|
||||
}
|
||||
}
|
||||
} catch (e) {
|
||||
logger.err('Unable to insert zeroed statistics. ' + e);
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
memPoolArray.sort((a, b) => a.effectiveFeePerVsize - b.effectiveFeePerVsize);
|
||||
const totalWeight = memPoolArray.map((tx) => tx.vsize).reduce((acc, curr) => acc + curr) * 4;
|
||||
const totalFee = memPoolArray.map((tx) => tx.fee).reduce((acc, curr) => acc + curr);
|
||||
|
||||
const logFees = [1, 2, 3, 4, 5, 6, 8, 10, 12, 15, 20, 30, 40, 50, 60, 70, 80, 90, 100, 125, 150, 175, 200,
|
||||
250, 300, 350, 400, 500, 600, 700, 800, 900, 1000, 1200, 1400, 1600, 1800, 2000];
|
||||
|
||||
const weightVsizeFees: { [feePerWU: number]: number } = {};
|
||||
const lastItem = logFees.length - 1;
|
||||
|
||||
memPoolArray.forEach((transaction) => {
|
||||
for (let i = 0; i < logFees.length; i++) {
|
||||
if (
|
||||
(Common.isLiquid() && (i === lastItem || transaction.effectiveFeePerVsize * 10 < logFees[i + 1]))
|
||||
||
|
||||
(!Common.isLiquid() && (i === lastItem || transaction.effectiveFeePerVsize < logFees[i + 1]))
|
||||
) {
|
||||
if (weightVsizeFees[logFees[i]]) {
|
||||
weightVsizeFees[logFees[i]] += transaction.vsize;
|
||||
} else {
|
||||
weightVsizeFees[logFees[i]] = transaction.vsize;
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
try {
|
||||
const insertId = await statisticsApi.$create({
|
||||
added: 'NOW()',
|
||||
unconfirmed_transactions: memPoolArray.length,
|
||||
tx_per_second: txPerSecond,
|
||||
vbytes_per_second: Math.round(vBytesPerSecond),
|
||||
mempool_byte_weight: totalWeight,
|
||||
total_fee: totalFee,
|
||||
fee_data: '',
|
||||
vsize_1: weightVsizeFees['1'] || 0,
|
||||
vsize_2: weightVsizeFees['2'] || 0,
|
||||
vsize_3: weightVsizeFees['3'] || 0,
|
||||
vsize_4: weightVsizeFees['4'] || 0,
|
||||
vsize_5: weightVsizeFees['5'] || 0,
|
||||
vsize_6: weightVsizeFees['6'] || 0,
|
||||
vsize_8: weightVsizeFees['8'] || 0,
|
||||
vsize_10: weightVsizeFees['10'] || 0,
|
||||
vsize_12: weightVsizeFees['12'] || 0,
|
||||
vsize_15: weightVsizeFees['15'] || 0,
|
||||
vsize_20: weightVsizeFees['20'] || 0,
|
||||
vsize_30: weightVsizeFees['30'] || 0,
|
||||
vsize_40: weightVsizeFees['40'] || 0,
|
||||
vsize_50: weightVsizeFees['50'] || 0,
|
||||
vsize_60: weightVsizeFees['60'] || 0,
|
||||
vsize_70: weightVsizeFees['70'] || 0,
|
||||
vsize_80: weightVsizeFees['80'] || 0,
|
||||
vsize_90: weightVsizeFees['90'] || 0,
|
||||
vsize_100: weightVsizeFees['100'] || 0,
|
||||
vsize_125: weightVsizeFees['125'] || 0,
|
||||
vsize_150: weightVsizeFees['150'] || 0,
|
||||
vsize_175: weightVsizeFees['175'] || 0,
|
||||
vsize_200: weightVsizeFees['200'] || 0,
|
||||
vsize_250: weightVsizeFees['250'] || 0,
|
||||
vsize_300: weightVsizeFees['300'] || 0,
|
||||
vsize_350: weightVsizeFees['350'] || 0,
|
||||
vsize_400: weightVsizeFees['400'] || 0,
|
||||
vsize_500: weightVsizeFees['500'] || 0,
|
||||
vsize_600: weightVsizeFees['600'] || 0,
|
||||
vsize_700: weightVsizeFees['700'] || 0,
|
||||
vsize_800: weightVsizeFees['800'] || 0,
|
||||
vsize_900: weightVsizeFees['900'] || 0,
|
||||
vsize_1000: weightVsizeFees['1000'] || 0,
|
||||
vsize_1200: weightVsizeFees['1200'] || 0,
|
||||
vsize_1400: weightVsizeFees['1400'] || 0,
|
||||
vsize_1600: weightVsizeFees['1600'] || 0,
|
||||
vsize_1800: weightVsizeFees['1800'] || 0,
|
||||
vsize_2000: weightVsizeFees['2000'] || 0,
|
||||
});
|
||||
|
||||
if (this.newStatisticsEntryCallback && insertId) {
|
||||
const newStats = await statisticsApi.$get(insertId);
|
||||
if (newStats) {
|
||||
this.newStatisticsEntryCallback(newStats);
|
||||
}
|
||||
}
|
||||
} catch (e) {
|
||||
logger.err('Unable to insert statistics. ' + e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export default new Statistics();
|
@ -17,6 +17,7 @@ import rbfCache from './rbf-cache';
import difficultyAdjustment from './difficulty-adjustment';
import feeApi from './fee-api';
import BlocksAuditsRepository from '../repositories/BlocksAuditsRepository';
import BlocksSummariesRepository from '../repositories/BlocksSummariesRepository';

class WebsocketHandler {
private wss: WebSocket.Server | undefined;
@ -442,6 +443,22 @@ class WebsocketHandler {
mBlockDeltas = mempoolBlocks.getMempoolBlockDeltas();

if (Common.indexingEnabled()) {
const stripped = _mempoolBlocks[0].transactions.map((tx) => {
return {
txid: tx.txid,
vsize: tx.vsize,
fee: tx.fee ? Math.round(tx.fee) : 0,
value: tx.value,
};
});
BlocksSummariesRepository.$saveSummary({
height: block.height,
template: {
id: block.id,
transactions: stripped
}
});

BlocksAuditsRepository.$saveAudit({
time: block.timestamp,
height: block.height,
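For reference, each template transaction persisted above reduces to four fields. The interface name below is illustrative only, not part of the commit:

// Illustrative type for the stripped template transactions saved above.
interface StrippedTemplateTx {
  txid: string;
  vsize: number;
  fee: number;   // rounded to a whole number, 0 when missing
  value: number;
}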
|
||||
|
@ -23,10 +23,25 @@ interface IConfig {
EXTERNAL_RETRY_INTERVAL: number;
USER_AGENT: string;
STDOUT_LOG_MIN_PRIORITY: 'emerg' | 'alert' | 'crit' | 'err' | 'warn' | 'notice' | 'info' | 'debug';
AUTOMATIC_BLOCK_REINDEXING: boolean;
};
ESPLORA: {
REST_API_URL: string;
};
LIGHTNING: {
ENABLED: boolean;
BACKEND: 'lnd' | 'cln' | 'ldk';
TOPOLOGY_FOLDER: string;
NODE_STATS_REFRESH_INTERVAL: number;
};
LND: {
TLS_CERT_PATH: string;
MACAROON_PATH: string;
REST_API_URL: string;
};
CLIGHTNING: {
SOCKET: string;
};
ELECTRUM: {
HOST: string;
PORT: number;
@ -88,6 +103,12 @@ interface IConfig {
BISQ_URL: string;
BISQ_ONION: string;
};
MAXMIND: {
ENABLED: boolean;
GEOLITE2_CITY: string;
GEOLITE2_ASN: string;
GEOIP2_ISP: string;
},
}

const defaults: IConfig = {
@ -113,6 +134,7 @@ const defaults: IConfig = {
'EXTERNAL_RETRY_INTERVAL': 0,
'USER_AGENT': 'mempool',
'STDOUT_LOG_MIN_PRIORITY': 'debug',
'AUTOMATIC_BLOCK_REINDEXING': false,
},
'ESPLORA': {
'REST_API_URL': 'http://127.0.0.1:3000',
@ -158,6 +180,20 @@ const defaults: IConfig = {
'ENABLED': false,
'DATA_PATH': '/bisq/statsnode-data/btc_mainnet/db'
},
'LIGHTNING': {
'ENABLED': false,
'BACKEND': 'lnd',
'TOPOLOGY_FOLDER': '',
'NODE_STATS_REFRESH_INTERVAL': 600,
},
'LND': {
'TLS_CERT_PATH': '',
'MACAROON_PATH': '',
'REST_API_URL': 'https://localhost:8080',
},
'CLIGHTNING': {
'SOCKET': '',
},
'SOCKS5PROXY': {
'ENABLED': false,
'USE_ONION': true,
@ -166,18 +202,24 @@ const defaults: IConfig = {
'USERNAME': '',
'PASSWORD': ''
},
"PRICE_DATA_SERVER": {
'PRICE_DATA_SERVER': {
'TOR_URL': 'http://wizpriceje6q5tdrxkyiazsgu7irquiqjy2dptezqhrtu7l2qelqktid.onion/getAllMarketPrices',
'CLEARNET_URL': 'https://price.bisq.wiz.biz/getAllMarketPrices'
},
"EXTERNAL_DATA_SERVER": {
'EXTERNAL_DATA_SERVER': {
'MEMPOOL_API': 'https://mempool.space/api/v1',
'MEMPOOL_ONION': 'http://mempoolhqx4isw62xs7abwphsq7ldayuidyx2v2oethdhhj6mlo2r6ad.onion/api/v1',
'LIQUID_API': 'https://liquid.network/api/v1',
'LIQUID_ONION': 'http://liquidmom47f6s3m53ebfxn47p76a6tlnxib3wp6deux7wuzotdr6cyd.onion/api/v1',
'BISQ_URL': 'https://bisq.markets/api',
'BISQ_ONION': 'http://bisqmktse2cabavbr2xjq7xw3h6g5ottemo5rolfcwt6aly6tp5fdryd.onion/api'
}
},
"MAXMIND": {
'ENABLED': false,
"GEOLITE2_CITY": "/usr/local/share/GeoIP/GeoLite2-City.mmdb",
"GEOLITE2_ASN": "/usr/local/share/GeoIP/GeoLite2-ASN.mmdb",
"GEOIP2_ISP": "/usr/local/share/GeoIP/GeoIP2-ISP.mmdb"
},
};

class Config implements IConfig {
@ -190,9 +232,13 @@ class Config implements IConfig {
SYSLOG: IConfig['SYSLOG'];
STATISTICS: IConfig['STATISTICS'];
BISQ: IConfig['BISQ'];
LIGHTNING: IConfig['LIGHTNING'];
LND: IConfig['LND'];
CLIGHTNING: IConfig['CLIGHTNING'];
SOCKS5PROXY: IConfig['SOCKS5PROXY'];
PRICE_DATA_SERVER: IConfig['PRICE_DATA_SERVER'];
EXTERNAL_DATA_SERVER: IConfig['EXTERNAL_DATA_SERVER'];
MAXMIND: IConfig['MAXMIND'];

constructor() {
const configs = this.merge(configFile, defaults);
@ -205,9 +251,13 @@ class Config implements IConfig {
this.SYSLOG = configs.SYSLOG;
this.STATISTICS = configs.STATISTICS;
this.BISQ = configs.BISQ;
this.LIGHTNING = configs.LIGHTNING;
this.LND = configs.LND;
this.CLIGHTNING = configs.CLIGHTNING;
this.SOCKS5PROXY = configs.SOCKS5PROXY;
this.PRICE_DATA_SERVER = configs.PRICE_DATA_SERVER;
this.EXTERNAL_DATA_SERVER = configs.EXTERNAL_DATA_SERVER;
this.MAXMIND = configs.MAXMIND;
}

merge = (...objects: object[]): IConfig => {
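A hedged sketch of a config fragment exercising the new sections; only the key names come from the defaults above, the paths and hosts are placeholders:

// Placeholder values - only the key structure is taken from the defaults above.
const exampleOverrides = {
  LIGHTNING: { ENABLED: true, BACKEND: 'lnd', TOPOLOGY_FOLDER: '', NODE_STATS_REFRESH_INTERVAL: 600 },
  LND: { TLS_CERT_PATH: '/path/to/tls.cert', MACAROON_PATH: '/path/to/readonly.macaroon', REST_API_URL: 'https://localhost:8080' },
  MAXMIND: { ENABLED: false, GEOLITE2_CITY: '', GEOLITE2_ASN: '', GEOIP2_ISP: '' },
};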
|
||||
|
@ -1,17 +1,14 @@
|
||||
import express from "express";
|
||||
import { Application, Request, Response, NextFunction, Express } from 'express';
|
||||
import { Application, Request, Response, NextFunction } from 'express';
|
||||
import * as http from 'http';
|
||||
import * as WebSocket from 'ws';
|
||||
import cluster from 'cluster';
|
||||
import axios from 'axios';
|
||||
|
||||
import DB from './database';
|
||||
import config from './config';
|
||||
import routes from './routes';
|
||||
import blocks from './api/blocks';
|
||||
import memPool from './api/mempool';
|
||||
import diskCache from './api/disk-cache';
|
||||
import statistics from './api/statistics';
|
||||
import statistics from './api/statistics/statistics';
|
||||
import websocketHandler from './api/websocket-handler';
|
||||
import fiatConversion from './api/fiat-conversion';
|
||||
import bisq from './api/bisq/bisq';
|
||||
@ -27,8 +24,17 @@ import icons from './api/liquid/icons';
|
||||
import { Common } from './api/common';
|
||||
import poolsUpdater from './tasks/pools-updater';
|
||||
import indexer from './indexer';
|
||||
import priceUpdater from './tasks/price-updater';
|
||||
import BlocksAuditsRepository from './repositories/BlocksAuditsRepository';
|
||||
import nodesRoutes from './api/explorer/nodes.routes';
|
||||
import channelsRoutes from './api/explorer/channels.routes';
|
||||
import generalLightningRoutes from './api/explorer/general.routes';
|
||||
import lightningStatsUpdater from './tasks/lightning/stats-updater.service';
|
||||
import networkSyncService from './tasks/lightning/network-sync.service';
|
||||
import statisticsRoutes from './api/statistics/statistics.routes';
|
||||
import miningRoutes from './api/mining/mining-routes';
|
||||
import bisqRoutes from './api/bisq/bisq.routes';
|
||||
import liquidRoutes from './api/liquid/liquid.routes';
|
||||
import bitcoinRoutes from './api/bitcoin/bitcoin.routes';
|
||||
import fundingTxFetcher from "./tasks/lightning/sync-tasks/funding-tx-fetcher";
|
||||
|
||||
class Server {
|
||||
private wss: WebSocket.Server | undefined;
|
||||
@ -130,6 +136,10 @@ class Server {
|
||||
bisqMarkets.startBisqService();
|
||||
}
|
||||
|
||||
if (config.LIGHTNING.ENABLED) {
|
||||
this.$runLightningBackend();
|
||||
}
|
||||
|
||||
this.server.listen(config.MEMPOOL.HTTP_PORT, () => {
|
||||
if (worker) {
|
||||
logger.info(`Mempool Server worker #${process.pid} started`);
|
||||
@ -155,7 +165,6 @@ class Server {
|
||||
await blocks.$updateBlocks();
|
||||
await memPool.$updateMempool();
|
||||
indexer.$run();
|
||||
priceUpdater.$run();
|
||||
|
||||
setTimeout(this.runMainUpdateLoop.bind(this), config.MEMPOOL.POLL_RATE_MS);
|
||||
this.currentBackendRetryInterval = 5;
|
||||
@ -174,6 +183,18 @@ class Server {
|
||||
}
|
||||
}
|
||||
|
||||
async $runLightningBackend() {
|
||||
try {
|
||||
await fundingTxFetcher.$init();
|
||||
await networkSyncService.$startService();
|
||||
await lightningStatsUpdater.$startService();
|
||||
} catch(e) {
|
||||
logger.err(`Lightning backend crashed. Restarting in 1 minute. Reason: ${(e instanceof Error ? e.message : e)}`);
|
||||
await Common.sleep$(1000 * 60);
|
||||
this.$runLightningBackend();
|
||||
};
|
||||
}
|
||||
|
||||
setUpWebsocketHandling() {
|
||||
if (this.wss) {
|
||||
websocketHandler.setWebsocketServer(this.wss);
|
||||
@ -196,171 +217,23 @@ class Server {
|
||||
}
|
||||
|
||||
setUpHttpApiRoutes() {
|
||||
this.app
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'transaction-times', routes.getTransactionTimes)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'outspends', routes.$getBatchedOutspends)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'cpfp/:txId', routes.getCpfpInfo)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'difficulty-adjustment', routes.getDifficultyChange)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'fees/recommended', routes.getRecommendedFees)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'fees/mempool-blocks', routes.getMempoolBlocks)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'backend-info', routes.getBackendInfo)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'init-data', routes.getInitData)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'validate-address/:address', routes.validateAddress)
|
||||
.post(config.MEMPOOL.API_URL_PREFIX + 'tx/push', routes.$postTransactionForm)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'donations', async (req, res) => {
|
||||
try {
|
||||
const response = await axios.get(`${config.EXTERNAL_DATA_SERVER.MEMPOOL_API}/donations`, { responseType: 'stream', timeout: 10000 });
|
||||
response.data.pipe(res);
|
||||
} catch (e) {
|
||||
res.status(500).end();
|
||||
}
|
||||
})
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'donations/images/:id', async (req, res) => {
|
||||
try {
|
||||
const response = await axios.get(`${config.EXTERNAL_DATA_SERVER.MEMPOOL_API}/donations/images/${req.params.id}`, {
|
||||
responseType: 'stream', timeout: 10000
|
||||
});
|
||||
response.data.pipe(res);
|
||||
} catch (e) {
|
||||
res.status(500).end();
|
||||
}
|
||||
})
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'contributors', async (req, res) => {
|
||||
try {
|
||||
const response = await axios.get(`${config.EXTERNAL_DATA_SERVER.MEMPOOL_API}/contributors`, { responseType: 'stream', timeout: 10000 });
|
||||
response.data.pipe(res);
|
||||
} catch (e) {
|
||||
res.status(500).end();
|
||||
}
|
||||
})
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'contributors/images/:id', async (req, res) => {
|
||||
try {
|
||||
const response = await axios.get(`${config.EXTERNAL_DATA_SERVER.MEMPOOL_API}/contributors/images/${req.params.id}`, {
|
||||
responseType: 'stream', timeout: 10000
|
||||
});
|
||||
response.data.pipe(res);
|
||||
} catch (e) {
|
||||
res.status(500).end();
|
||||
}
|
||||
})
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'translators', async (req, res) => {
|
||||
try {
|
||||
const response = await axios.get(`${config.EXTERNAL_DATA_SERVER.MEMPOOL_API}/translators`, { responseType: 'stream', timeout: 10000 });
|
||||
response.data.pipe(res);
|
||||
} catch (e) {
|
||||
res.status(500).end();
|
||||
}
|
||||
})
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'translators/images/:id', async (req, res) => {
|
||||
try {
|
||||
const response = await axios.get(`${config.EXTERNAL_DATA_SERVER.MEMPOOL_API}/translators/images/${req.params.id}`, {
|
||||
responseType: 'stream', timeout: 10000
|
||||
});
|
||||
response.data.pipe(res);
|
||||
} catch (e) {
|
||||
res.status(500).end();
|
||||
}
|
||||
})
|
||||
;
|
||||
|
||||
bitcoinRoutes.initRoutes(this.app);
|
||||
if (config.STATISTICS.ENABLED && config.DATABASE.ENABLED) {
|
||||
this.app
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'statistics/2h', routes.$getStatisticsByTime.bind(routes, '2h'))
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'statistics/24h', routes.$getStatisticsByTime.bind(routes, '24h'))
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'statistics/1w', routes.$getStatisticsByTime.bind(routes, '1w'))
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'statistics/1m', routes.$getStatisticsByTime.bind(routes, '1m'))
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'statistics/3m', routes.$getStatisticsByTime.bind(routes, '3m'))
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'statistics/6m', routes.$getStatisticsByTime.bind(routes, '6m'))
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'statistics/1y', routes.$getStatisticsByTime.bind(routes, '1y'))
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'statistics/2y', routes.$getStatisticsByTime.bind(routes, '2y'))
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'statistics/3y', routes.$getStatisticsByTime.bind(routes, '3y'))
|
||||
;
|
||||
statisticsRoutes.initRoutes(this.app);
|
||||
}
|
||||
|
||||
if (Common.indexingEnabled()) {
|
||||
this.app
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/pools/:interval', routes.$getPools)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/pool/:slug/hashrate', routes.$getPoolHistoricalHashrate)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/pool/:slug/blocks', routes.$getPoolBlocks)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/pool/:slug/blocks/:height', routes.$getPoolBlocks)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/pool/:slug', routes.$getPool)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/hashrate/pools/:interval', routes.$getPoolsHistoricalHashrate)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/hashrate/:interval', routes.$getHistoricalHashrate)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/difficulty-adjustments', routes.$getDifficultyAdjustments)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/reward-stats/:blockCount', routes.$getRewardStats)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/fees/:interval', routes.$getHistoricalBlockFees)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/rewards/:interval', routes.$getHistoricalBlockRewards)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/fee-rates/:interval', routes.$getHistoricalBlockFeeRates)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/sizes-weights/:interval', routes.$getHistoricalBlockSizeAndWeight)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/difficulty-adjustments/:interval', routes.$getDifficultyAdjustments)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/predictions/:interval', routes.$getHistoricalBlockPrediction)
|
||||
;
|
||||
miningRoutes.initRoutes(this.app);
|
||||
}
|
||||
|
||||
if (config.BISQ.ENABLED) {
|
||||
this.app
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/stats', routes.getBisqStats)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/tx/:txId', routes.getBisqTransaction)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/block/:hash', routes.getBisqBlock)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/blocks/tip/height', routes.getBisqTip)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/blocks/:index/:length', routes.getBisqBlocks)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/address/:address', routes.getBisqAddress)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/txs/:index/:length', routes.getBisqTransactions)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/markets/currencies', routes.getBisqMarketCurrencies.bind(routes))
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/markets/depth', routes.getBisqMarketDepth.bind(routes))
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/markets/hloc', routes.getBisqMarketHloc.bind(routes))
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/markets/markets', routes.getBisqMarketMarkets.bind(routes))
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/markets/offers', routes.getBisqMarketOffers.bind(routes))
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/markets/ticker', routes.getBisqMarketTicker.bind(routes))
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/markets/trades', routes.getBisqMarketTrades.bind(routes))
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/markets/volumes', routes.getBisqMarketVolumes.bind(routes))
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/markets/volumes/7d', routes.getBisqMarketVolumes7d.bind(routes))
|
||||
;
|
||||
bisqRoutes.initRoutes(this.app);
|
||||
}
|
||||
|
||||
this.app
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'blocks', routes.getBlocks.bind(routes))
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'blocks/:height', routes.getBlocks.bind(routes))
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash', routes.getBlock)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash/summary', routes.getStrippedBlockTransactions);
|
||||
|
||||
if (config.MEMPOOL.BACKEND !== 'esplora') {
|
||||
this.app
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mempool', routes.getMempool)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mempool/txids', routes.getMempoolTxIds)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mempool/recent', routes.getRecentMempoolTransactions)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'tx/:txId', routes.getTransaction)
|
||||
.post(config.MEMPOOL.API_URL_PREFIX + 'tx', routes.$postTransaction)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'tx/:txId/hex', routes.getRawTransaction)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'tx/:txId/status', routes.getTransactionStatus)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'tx/:txId/outspends', routes.getTransactionOutspends)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash/header', routes.getBlockHeader)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'blocks/tip/height', routes.getBlockTipHeight)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'blocks/tip/hash', routes.getBlockTipHash)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash/txs', routes.getBlockTransactions)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash/txs/:index', routes.getBlockTransactions)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash/txids', routes.getTxIdsForBlock)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'block-height/:height', routes.getBlockHeight)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'address/:address', routes.getAddress)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'address/:address/txs', routes.getAddressTransactions)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'address/:address/txs/chain/:txId', routes.getAddressTransactions)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'address-prefix/:prefix', routes.getAddressPrefix)
|
||||
;
|
||||
}
|
||||
|
||||
if (Common.isLiquid()) {
|
||||
this.app
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'assets/icons', routes.getAllLiquidIcon)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'assets/featured', routes.$getAllFeaturedLiquidAssets)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'asset/:assetId/icon', routes.getLiquidIcon)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'assets/group/:id', routes.$getAssetGroup)
|
||||
;
|
||||
liquidRoutes.initRoutes(this.app);
|
||||
}
|
||||
|
||||
if (Common.isLiquid() && config.DATABASE.ENABLED) {
|
||||
this.app
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'liquid/pegs/month', routes.$getElementsPegsByMonth)
|
||||
;
|
||||
if (config.LIGHTNING.ENABLED) {
|
||||
generalLightningRoutes.initRoutes(this.app);
|
||||
nodesRoutes.initRoutes(this.app);
|
||||
channelsRoutes.initRoutes(this.app);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1,10 +1,11 @@
import { Common } from './api/common';
import blocks from './api/blocks';
import mempool from './api/mempool';
import mining from './api/mining';
import mining from './api/mining/mining';
import logger from './logger';
import HashratesRepository from './repositories/HashratesRepository';
import bitcoinClient from './api/bitcoin/bitcoin-client';
import priceUpdater from './tasks/price-updater';

class Indexer {
runIndexer = true;
@ -35,7 +36,11 @@ class Indexer {
this.runIndexer = false;
this.indexerRunning = true;

logger.debug(`Running mining indexer`);

try {
await priceUpdater.$run();

const chainValid = await blocks.$generateBlockDatabase();
if (chainValid === false) {
// Chain of block hash was invalid, so we need to reindex. Stop here and continue at the next iteration
@ -45,8 +50,9 @@ class Indexer {
return;
}

await mining.$indexBlockPrices();
await mining.$indexDifficultyAdjustments();
await this.$resetHashratesIndexingState();
await this.$resetHashratesIndexingState(); // TODO - Remove this as it's not efficient
await mining.$generateNetworkHashrateHistory();
await mining.$generatePoolHashrateHistory();
await blocks.$generateBlocksSummariesDatabase();
@ -54,9 +60,15 @@ class Indexer {
this.indexerRunning = false;
logger.err(`Indexer failed, trying again in 10 seconds. Reason: ` + (e instanceof Error ? e.message : e));
setTimeout(() => this.reindex(), 10000);
this.indexerRunning = false;
return;
}

this.indexerRunning = false;

const runEvery = 1000 * 3600; // 1 hour
logger.debug(`Indexing completed. Next run planned at ${new Date(new Date().getTime() + runEvery).toUTCString()}`);
setTimeout(() => this.reindex(), runEvery);
}

async $resetHashratesIndexingState() {
|
||||
|
@ -73,6 +73,9 @@ class Logger {
}

private getNetwork(): string {
if (config.LIGHTNING.ENABLED) {
return 'lightning';
}
if (config.BISQ.ENABLED) {
return 'bisq';
}
|
||||
|
@ -109,6 +109,7 @@ export interface BlockExtension {
avgFee?: number;
avgFeeRate?: number;
coinbaseRaw?: string;
usd?: number | null;
}

export interface BlockExtended extends IEsploraApi.Block {
@ -120,6 +121,11 @@ export interface BlockSummary {
transactions: TransactionStripped[];
}

export interface BlockPrice {
height: number;
priceId: number;
}

export interface TransactionMinerInfo {
vin: VinStrippedToScriptsig[];
vout: VoutStrippedToScriptPubkey[];
|
||||
|
@ -1,3 +1,4 @@
import transactionUtils from '../api/transaction-utils';
import DB from '../database';
import logger from '../logger';
import { BlockAudit } from '../mempool.interfaces';
@ -45,6 +46,30 @@ class BlocksAuditRepositories {
throw e;
}
}

public async $getBlockAudit(hash: string): Promise<any> {
try {
const [rows]: any[] = await DB.query(
`SELECT blocks.height, blocks.hash as id, UNIX_TIMESTAMP(blocks.blockTimestamp) as timestamp, blocks.size,
blocks.weight, blocks.tx_count,
transactions, template, missing_txs as missingTxs, added_txs as addedTxs, match_rate as matchRate
FROM blocks_audits
JOIN blocks ON blocks.hash = blocks_audits.hash
JOIN blocks_summaries ON blocks_summaries.id = blocks_audits.hash
WHERE blocks_audits.hash = "${hash}"
`);

rows[0].missingTxs = JSON.parse(rows[0].missingTxs);
rows[0].addedTxs = JSON.parse(rows[0].addedTxs);
rows[0].transactions = JSON.parse(rows[0].transactions);
rows[0].template = JSON.parse(rows[0].template);

return rows[0];
} catch (e: any) {
logger.err(`Cannot fetch block audit from db. Reason: ` + (e instanceof Error ? e.message : e));
throw e;
}
}
}

export default new BlocksAuditRepositories();
|
||||
|
@ -1,4 +1,4 @@
|
||||
import { BlockExtended } from '../mempool.interfaces';
|
||||
import { BlockExtended, BlockPrice } from '../mempool.interfaces';
|
||||
import DB from '../database';
|
||||
import logger from '../logger';
|
||||
import { Common } from '../api/common';
|
||||
@ -256,7 +256,7 @@ class BlocksRepository {
|
||||
|
||||
const params: any[] = [];
|
||||
let query = ` SELECT
|
||||
height,
|
||||
blocks.height,
|
||||
hash as id,
|
||||
UNIX_TIMESTAMP(blocks.blockTimestamp) as blockTimestamp,
|
||||
size,
|
||||
@ -308,7 +308,7 @@ class BlocksRepository {
|
||||
public async $getBlockByHeight(height: number): Promise<object | null> {
|
||||
try {
|
||||
const [rows]: any[] = await DB.query(`SELECT
|
||||
height,
|
||||
blocks.height,
|
||||
hash,
|
||||
hash as id,
|
||||
UNIX_TIMESTAMP(blocks.blockTimestamp) as blockTimestamp,
|
||||
@ -336,7 +336,7 @@ class BlocksRepository {
|
||||
avg_fee_rate
|
||||
FROM blocks
|
||||
JOIN pools ON blocks.pool_id = pools.id
|
||||
WHERE height = ${height};
|
||||
WHERE blocks.height = ${height}
|
||||
`);
|
||||
|
||||
if (rows.length <= 0) {
|
||||
@ -357,15 +357,15 @@ class BlocksRepository {
|
||||
public async $getBlockByHash(hash: string): Promise<object | null> {
|
||||
try {
|
||||
const query = `
|
||||
SELECT *, UNIX_TIMESTAMP(blocks.blockTimestamp) as blockTimestamp, hash as id,
|
||||
SELECT *, blocks.height, UNIX_TIMESTAMP(blocks.blockTimestamp) as blockTimestamp, hash as id,
|
||||
pools.id as pool_id, pools.name as pool_name, pools.link as pool_link, pools.slug as pool_slug,
|
||||
pools.addresses as pool_addresses, pools.regexes as pool_regexes,
|
||||
previous_block_hash as previousblockhash
|
||||
FROM blocks
|
||||
JOIN pools ON blocks.pool_id = pools.id
|
||||
WHERE hash = '${hash}';
|
||||
WHERE hash = ?;
|
||||
`;
|
||||
const [rows]: any[] = await DB.query(query);
|
||||
const [rows]: any[] = await DB.query(query, [hash]);
|
||||
|
||||
if (rows.length <= 0) {
|
||||
return null;
|
||||
@ -387,7 +387,20 @@ class BlocksRepository {
|
||||
const [rows]: any[] = await DB.query(`SELECT UNIX_TIMESTAMP(blockTimestamp) as time, height, difficulty FROM blocks`);
|
||||
return rows;
|
||||
} catch (e) {
|
||||
logger.err('Cannot generate difficulty history. Reason: ' + (e instanceof Error ? e.message : e));
|
||||
logger.err('Cannot get blocks difficulty list from the db. Reason: ' + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Return blocks height
|
||||
*/
|
||||
public async $getBlocksHeightsAndTimestamp(): Promise<object[]> {
|
||||
try {
|
||||
const [rows]: any[] = await DB.query(`SELECT height, blockTimestamp as timestamp FROM blocks`);
|
||||
return rows;
|
||||
} catch (e) {
|
||||
logger.err('Cannot get blocks height and timestamp from the db. Reason: ' + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
@ -446,7 +459,7 @@ class BlocksRepository {
|
||||
++idx;
|
||||
}
|
||||
|
||||
logger.info(`${idx} blocks hash validated in ${new Date().getTime() - start} ms`);
|
||||
logger.debug(`${idx} blocks hash validated in ${new Date().getTime() - start} ms`);
|
||||
return true;
|
||||
} catch (e) {
|
||||
logger.err('Cannot validate chain of block hash. Reason: ' + (e instanceof Error ? e.message : e));
|
||||
@ -473,10 +486,14 @@ class BlocksRepository {
|
||||
public async $getHistoricalBlockFees(div: number, interval: string | null): Promise<any> {
|
||||
try {
|
||||
let query = `SELECT
|
||||
CAST(AVG(height) as INT) as avgHeight,
|
||||
CAST(AVG(blocks.height) as INT) as avgHeight,
|
||||
CAST(AVG(UNIX_TIMESTAMP(blockTimestamp)) as INT) as timestamp,
|
||||
CAST(AVG(fees) as INT) as avgFees
|
||||
FROM blocks`;
|
||||
CAST(AVG(fees) as INT) as avgFees,
|
||||
prices.USD
|
||||
FROM blocks
|
||||
JOIN blocks_prices on blocks_prices.height = blocks.height
|
||||
JOIN prices on prices.id = blocks_prices.price_id
|
||||
`;
|
||||
|
||||
if (interval !== null) {
|
||||
query += ` WHERE blockTimestamp BETWEEN DATE_SUB(NOW(), INTERVAL ${interval}) AND NOW()`;
|
||||
@ -498,10 +515,14 @@ class BlocksRepository {
|
||||
public async $getHistoricalBlockRewards(div: number, interval: string | null): Promise<any> {
|
||||
try {
|
||||
let query = `SELECT
|
||||
CAST(AVG(height) as INT) as avgHeight,
|
||||
CAST(AVG(blocks.height) as INT) as avgHeight,
|
||||
CAST(AVG(UNIX_TIMESTAMP(blockTimestamp)) as INT) as timestamp,
|
||||
CAST(AVG(reward) as INT) as avgRewards
|
||||
FROM blocks`;
|
||||
CAST(AVG(reward) as INT) as avgRewards,
|
||||
prices.USD
|
||||
FROM blocks
|
||||
JOIN blocks_prices on blocks_prices.height = blocks.height
|
||||
JOIN prices on prices.id = blocks_prices.price_id
|
||||
`;
|
||||
|
||||
if (interval !== null) {
|
||||
query += ` WHERE blockTimestamp BETWEEN DATE_SUB(NOW(), INTERVAL ${interval}) AND NOW()`;
|
||||
@ -610,6 +631,64 @@ class BlocksRepository {
throw e;
}
}

/**
* Return the oldest block from a consecutive chain of block from the most recent one
*/
public async $getOldestConsecutiveBlock(): Promise<any> {
try {
const [rows]: any = await DB.query(`SELECT height, UNIX_TIMESTAMP(blockTimestamp) as timestamp, difficulty FROM blocks ORDER BY height DESC`);
for (let i = 0; i < rows.length - 1; ++i) {
if (rows[i].height - rows[i + 1].height > 1) {
return rows[i];
}
}
return rows[rows.length - 1];
} catch (e) {
logger.err('Cannot generate block size and weight history. Reason: ' + (e instanceof Error ? e.message : e));
throw e;
}
}

/**
* Get all blocks which have not be linked to a price yet
*/
public async $getBlocksWithoutPrice(): Promise<object[]> {
try {
const [rows]: any[] = await DB.query(`
SELECT UNIX_TIMESTAMP(blocks.blockTimestamp) as timestamp, blocks.height
FROM blocks
LEFT JOIN blocks_prices ON blocks.height = blocks_prices.height
WHERE blocks_prices.height IS NULL
ORDER BY blocks.height
`);
return rows;
} catch (e) {
logger.err('Cannot get blocks height and timestamp from the db. Reason: ' + (e instanceof Error ? e.message : e));
throw e;
}
}

/**
* Save block price by batch
*/
public async $saveBlockPrices(blockPrices: BlockPrice[]): Promise<void> {
try {
let query = `INSERT INTO blocks_prices(height, price_id) VALUES`;
for (const price of blockPrices) {
query += ` (${price.height}, ${price.priceId}),`
}
query = query.slice(0, -1);
await DB.query(query);
} catch (e: any) {
if (e.errno === 1062) { // ER_DUP_ENTRY - This scenario is possible upon node backend restart
logger.debug(`Cannot save blocks prices for blocks [${blockPrices[0].height} to ${blockPrices[blockPrices.length - 1].height}] because it has already been indexed, ignoring`);
} else {
logger.err(`Cannot save blocks prices for blocks [${blockPrices[0].height} to ${blockPrices[blockPrices.length - 1].height}] into db. Reason: ` + (e instanceof Error ? e.message : e));
throw e;
}
}
}
}

export default new BlocksRepository();
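A minimal usage sketch for the new batch insert, matching the BlockPrice shape added to mempool.interfaces in this commit; the heights and price ids below are made up:

// Illustrative call with made-up values; duplicates are ignored via the ER_DUP_ENTRY branch above.
await BlocksRepository.$saveBlockPrices([
  { height: 100000, priceId: 42 },
  { height: 100001, priceId: 42 },
]);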
|
||||
|
@ -17,14 +17,24 @@ class BlocksSummariesRepository {
return undefined;
}

public async $saveSummary(height: number, summary: BlockSummary) {
public async $saveSummary(params: { height: number, mined?: BlockSummary, template?: BlockSummary}) {
const blockId = params.mined?.id ?? params.template?.id;
try {
await DB.query(`INSERT INTO blocks_summaries VALUE (?, ?, ?)`, [height, summary.id, JSON.stringify(summary.transactions)]);
const [dbSummary]: any[] = await DB.query(`SELECT * FROM blocks_summaries WHERE id = "${blockId}"`);
if (dbSummary.length === 0) { // First insertion
await DB.query(`INSERT INTO blocks_summaries VALUE (?, ?, ?, ?)`, [
params.height, blockId, JSON.stringify(params.mined?.transactions ?? []), JSON.stringify(params.template?.transactions ?? [])
]);
} else if (params.mined !== undefined) { // Update mined block summary
await DB.query(`UPDATE blocks_summaries SET transactions = ? WHERE id = "${params.mined.id}"`, [JSON.stringify(params.mined.transactions)]);
} else if (params.template !== undefined) { // Update template block summary
await DB.query(`UPDATE blocks_summaries SET template = ? WHERE id = "${params.template.id}"`, [JSON.stringify(params.template?.transactions)]);
}
} catch (e: any) {
if (e.errno === 1062) { // ER_DUP_ENTRY - This scenario is possible upon node backend restart
logger.debug(`Cannot save block summary for ${summary.id} because it has already been indexed, ignoring`);
logger.debug(`Cannot save block summary for ${blockId} because it has already been indexed, ignoring`);
} else {
logger.debug(`Cannot save block summary for ${summary.id}. Reason: ${e instanceof Error ? e.message : e}`);
logger.debug(`Cannot save block summary for ${blockId}. Reason: ${e instanceof Error ? e.message : e}`);
throw e;
}
}
@ -44,7 +54,7 @@ class BlocksSummariesRepository {
/**
* Delete blocks from the database from blockHeight
*/
public async $deleteBlocksFrom(blockHeight: number) {
public async $deleteBlocksFrom(blockHeight: number) {
logger.info(`Delete newer blocks summary from height ${blockHeight} from the database`);

try {
|
||||
|
@ -46,9 +46,38 @@ class DifficultyAdjustmentsRepository {
|
||||
query += ` GROUP BY UNIX_TIMESTAMP(time) DIV ${86400}`;
|
||||
|
||||
if (descOrder === true) {
|
||||
query += ` ORDER BY time DESC`;
|
||||
query += ` ORDER BY height DESC`;
|
||||
} else {
|
||||
query += ` ORDER BY time`;
|
||||
query += ` ORDER BY height`;
|
||||
}
|
||||
|
||||
try {
|
||||
const [rows] = await DB.query(query);
|
||||
return rows as IndexedDifficultyAdjustment[];
|
||||
} catch (e) {
|
||||
logger.err(`Cannot get difficulty adjustments from the database. Reason: ` + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
public async $getRawAdjustments(interval: string | null, descOrder: boolean = false): Promise<IndexedDifficultyAdjustment[]> {
|
||||
interval = Common.getSqlInterval(interval);
|
||||
|
||||
let query = `SELECT
|
||||
UNIX_TIMESTAMP(time) as time,
|
||||
height as height,
|
||||
difficulty as difficulty,
|
||||
adjustment as adjustment
|
||||
FROM difficulty_adjustments`;
|
||||
|
||||
if (interval) {
|
||||
query += ` WHERE time BETWEEN DATE_SUB(NOW(), INTERVAL ${interval}) AND NOW()`;
|
||||
}
|
||||
|
||||
if (descOrder === true) {
|
||||
query += ` ORDER BY height DESC`;
|
||||
} else {
|
||||
query += ` ORDER BY height`;
|
||||
}
|
||||
|
||||
try {
|
||||
|
@ -1,6 +1,5 @@
|
||||
import { escape } from 'mysql2';
|
||||
import { Common } from '../api/common';
|
||||
import config from '../config';
|
||||
import DB from '../database';
|
||||
import logger from '../logger';
|
||||
import PoolsRepository from './PoolsRepository';
|
||||
@ -30,6 +29,32 @@ class HashratesRepository {
|
||||
}
|
||||
}
|
||||
|
||||
public async $getRawNetworkDailyHashrate(interval: string | null): Promise<any[]> {
|
||||
interval = Common.getSqlInterval(interval);
|
||||
|
||||
let query = `SELECT
|
||||
UNIX_TIMESTAMP(hashrate_timestamp) as timestamp,
|
||||
avg_hashrate as avgHashrate
|
||||
FROM hashrates`;
|
||||
|
||||
if (interval) {
|
||||
query += ` WHERE hashrate_timestamp BETWEEN DATE_SUB(NOW(), INTERVAL ${interval}) AND NOW()
|
||||
AND hashrates.type = 'daily'`;
|
||||
} else {
|
||||
query += ` WHERE hashrates.type = 'daily'`;
|
||||
}
|
||||
|
||||
query += ` ORDER by hashrate_timestamp`;
|
||||
|
||||
try {
|
||||
const [rows]: any[] = await DB.query(query);
|
||||
return rows;
|
||||
} catch (e) {
|
||||
logger.err('Cannot fetch network hashrate history. Reason: ' + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
public async $getNetworkDailyHashrate(interval: string | null): Promise<any[]> {
|
||||
interval = Common.getSqlInterval(interval);
|
||||
|
||||
|
@ -4,6 +4,12 @@ import { Prices } from '../tasks/price-updater';
|
||||
|
||||
class PricesRepository {
|
||||
public async $savePrices(time: number, prices: Prices): Promise<void> {
|
||||
if (prices.USD === -1) {
|
||||
// Some historical price entries do not have USD prices, so we just ignore them to avoid future UX issues
|
||||
// As of today there are only 4 (on 2013-09-05, 2013-09-12, 2013-09-19 and 2013-09-26) so that's fine
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
await DB.query(`
|
||||
INSERT INTO prices(time, USD, EUR, GBP, CAD, CHF, AUD, JPY)
|
||||
@ -17,19 +23,24 @@ class PricesRepository {
|
||||
}
|
||||
|
||||
public async $getOldestPriceTime(): Promise<number> {
|
||||
const [oldestRow] = await DB.query(`SELECT UNIX_TIMESTAMP(time) as time from prices ORDER BY time LIMIT 1`);
|
||||
const [oldestRow] = await DB.query(`SELECT UNIX_TIMESTAMP(time) as time from prices WHERE USD != -1 ORDER BY time LIMIT 1`);
|
||||
return oldestRow[0] ? oldestRow[0].time : 0;
|
||||
}
|
||||
|
||||
public async $getLatestPriceTime(): Promise<number> {
|
||||
const [oldestRow] = await DB.query(`SELECT UNIX_TIMESTAMP(time) as time from prices ORDER BY time DESC LIMIT 1`);
|
||||
const [oldestRow] = await DB.query(`SELECT UNIX_TIMESTAMP(time) as time from prices WHERE USD != -1 ORDER BY time DESC LIMIT 1`);
|
||||
return oldestRow[0] ? oldestRow[0].time : 0;
|
||||
}
|
||||
|
||||
public async $getPricesTimes(): Promise<number[]> {
|
||||
const [times]: any[] = await DB.query(`SELECT UNIX_TIMESTAMP(time) as time from prices`);
|
||||
const [times]: any[] = await DB.query(`SELECT UNIX_TIMESTAMP(time) as time from prices WHERE USD != -1 ORDER BY time`);
|
||||
return times.map(time => time.time);
|
||||
}
|
||||
|
||||
public async $getPricesTimesAndId(): Promise<number[]> {
|
||||
const [times]: any[] = await DB.query(`SELECT UNIX_TIMESTAMP(time) as time, id, USD from prices ORDER BY time`);
|
||||
return times;
|
||||
}
|
||||
}
|
||||
|
||||
export default new PricesRepository();
|
||||
|
File diff suppressed because it is too large
464
backend/src/tasks/lightning/network-sync.service.ts
Normal file
464
backend/src/tasks/lightning/network-sync.service.ts
Normal file
@ -0,0 +1,464 @@
|
||||
import DB from '../../database';
|
||||
import logger from '../../logger';
|
||||
import channelsApi from '../../api/explorer/channels.api';
|
||||
import bitcoinClient from '../../api/bitcoin/bitcoin-client';
|
||||
import bitcoinApi from '../../api/bitcoin/bitcoin-api-factory';
|
||||
import config from '../../config';
|
||||
import { IEsploraApi } from '../../api/bitcoin/esplora-api.interface';
|
||||
import { ILightningApi } from '../../api/lightning/lightning-api.interface';
|
||||
import { $lookupNodeLocation } from './sync-tasks/node-locations';
|
||||
import lightningApi from '../../api/lightning/lightning-api-factory';
|
||||
import { convertChannelId } from '../../api/lightning/clightning/clightning-convert';
|
||||
import { Common } from '../../api/common';
|
||||
|
||||
class NetworkSyncService {
|
||||
constructor() {}
|
||||
|
||||
public async $startService() {
|
||||
logger.info('Starting node sync service');
|
||||
|
||||
await this.$runUpdater();
|
||||
|
||||
setInterval(async () => {
|
||||
await this.$runUpdater();
|
||||
}, 1000 * 60 * 60);
|
||||
}
|
||||
|
||||
private async $runUpdater(): Promise<void> {
|
||||
try {
|
||||
logger.info(`Updating nodes and channels...`);
|
||||
|
||||
const networkGraph = await lightningApi.$getNetworkGraph();
|
||||
if (networkGraph.nodes.length === 0 || networkGraph.edges.length === 0) {
|
||||
logger.info(`LN Network graph is empty, retrying in 10 seconds`);
|
||||
await Common.sleep$(10000);
|
||||
this.$runUpdater();
|
||||
return;
|
||||
}
|
||||
|
||||
for (const node of networkGraph.nodes) {
|
||||
await this.$saveNode(node);
|
||||
}
|
||||
logger.info(`Nodes updated.`);
|
||||
|
||||
if (config.MAXMIND.ENABLED) {
|
||||
await $lookupNodeLocation();
|
||||
}
|
||||
|
||||
const graphChannelsIds: string[] = [];
|
||||
for (const channel of networkGraph.edges) {
|
||||
await this.$saveChannel(channel);
|
||||
graphChannelsIds.push(channel.channel_id);
|
||||
}
|
||||
await this.$setChannelsInactive(graphChannelsIds);
|
||||
|
||||
logger.info(`Channels updated.`);
|
||||
|
||||
await this.$findInactiveNodesAndChannels();
|
||||
await this.$lookUpCreationDateFromChain();
|
||||
await this.$updateNodeFirstSeen();
|
||||
await this.$scanForClosedChannels();
|
||||
if (config.MEMPOOL.BACKEND === 'esplora') {
|
||||
await this.$runClosedChannelsForensics();
|
||||
}
|
||||
|
||||
} catch (e) {
|
||||
logger.err('$runUpdater() error: ' + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
}
|
||||
|
||||
// This method looks up the creation date of the earliest channel of the node
|
||||
// and updates the node's first seen date to that earlier date
|
||||
private async $updateNodeFirstSeen() {
|
||||
try {
|
||||
const [nodes]: any[] = await DB.query(`SELECT nodes.public_key, UNIX_TIMESTAMP(nodes.first_seen) AS first_seen, (SELECT UNIX_TIMESTAMP(created) FROM channels WHERE channels.node1_public_key = nodes.public_key ORDER BY created ASC LIMIT 1) AS created1, (SELECT UNIX_TIMESTAMP(created) FROM channels WHERE channels.node2_public_key = nodes.public_key ORDER BY created ASC LIMIT 1) AS created2 FROM nodes`);
|
||||
for (const node of nodes) {
|
||||
let lowest = 0;
|
||||
if (node.created1) {
|
||||
if (node.created2 && node.created2 < node.created1) {
|
||||
lowest = node.created2;
|
||||
} else {
|
||||
lowest = node.created1;
|
||||
}
|
||||
} else if (node.created2) {
|
||||
lowest = node.created2;
|
||||
}
|
||||
if (lowest && lowest < node.first_seen) {
|
||||
const query = `UPDATE nodes SET first_seen = FROM_UNIXTIME(?) WHERE public_key = ?`;
|
||||
const params = [lowest, node.public_key];
|
||||
await DB.query(query, params);
|
||||
}
|
||||
}
|
||||
logger.info(`Node first seen dates scan complete.`);
|
||||
} catch (e) {
|
||||
logger.err('$updateNodeFirstSeen() error: ' + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
}
|
||||
|
||||
private async $lookUpCreationDateFromChain() {
|
||||
logger.info(`Running channel creation date lookup...`);
|
||||
try {
|
||||
const channels = await channelsApi.$getChannelsWithoutCreatedDate();
|
||||
for (const channel of channels) {
|
||||
const transaction = await bitcoinClient.getRawTransaction(channel.transaction_id, 1);
|
||||
await DB.query(`UPDATE channels SET created = FROM_UNIXTIME(?) WHERE channels.id = ?`, [transaction.blocktime, channel.id]);
|
||||
}
|
||||
logger.info(`Channel creation dates scan complete.`);
|
||||
} catch (e) {
|
||||
logger.err('$setCreationDateFromChain() error: ' + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
}
|
||||
|
||||
// Looking for channels whose nodes are inactive
|
||||
private async $findInactiveNodesAndChannels(): Promise<void> {
|
||||
logger.info(`Running inactive channels scan...`);
|
||||
|
||||
try {
|
||||
const [channels]: [{ id: string }[]] = await <any>DB.query(`
|
||||
SELECT channels.id
|
||||
FROM channels
|
||||
WHERE channels.status = 1
|
||||
AND (
|
||||
(
|
||||
SELECT COUNT(*)
|
||||
FROM nodes
|
||||
WHERE nodes.public_key = channels.node1_public_key
|
||||
) = 0
|
||||
OR (
|
||||
SELECT COUNT(*)
|
||||
FROM nodes
|
||||
WHERE nodes.public_key = channels.node2_public_key
|
||||
) = 0)
|
||||
`);
|
||||
|
||||
for (const channel of channels) {
|
||||
await this.$updateChannelStatus(channel.id, 0);
|
||||
}
|
||||
logger.info(`Inactive channels scan complete.`);
|
||||
} catch (e) {
|
||||
logger.err('$findInactiveNodesAndChannels() error: ' + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
}
|
||||
|
||||
private async $scanForClosedChannels(): Promise<void> {
|
||||
try {
|
||||
logger.info(`Starting closed channels scan...`);
|
||||
const channels = await channelsApi.$getChannelsByStatus(0);
|
||||
for (const channel of channels) {
|
||||
const spendingTx = await bitcoinApi.$getOutspend(channel.transaction_id, channel.transaction_vout);
|
||||
if (spendingTx.spent === true && spendingTx.status?.confirmed === true) {
|
||||
logger.debug('Marking channel: ' + channel.id + ' as closed.');
|
||||
await DB.query(`UPDATE channels SET status = 2, closing_date = FROM_UNIXTIME(?) WHERE id = ?`,
|
||||
[spendingTx.status.block_time, channel.id]);
|
||||
if (spendingTx.txid && !channel.closing_transaction_id) {
|
||||
await DB.query(`UPDATE channels SET closing_transaction_id = ? WHERE id = ?`, [spendingTx.txid, channel.id]);
|
||||
}
|
||||
}
|
||||
}
|
||||
logger.info(`Closed channels scan complete.`);
|
||||
} catch (e) {
|
||||
logger.err('$scanForClosedChannels() error: ' + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
1. Mutually closed
|
||||
2. Forced closed
|
||||
3. Forced closed with penalty
|
||||
*/
|
||||
|
||||
private async $runClosedChannelsForensics(): Promise<void> {
|
||||
if (!config.ESPLORA.REST_API_URL) {
|
||||
return;
|
||||
}
|
||||
try {
|
||||
logger.info(`Started running closed channel forensics...`);
|
||||
const channels = await channelsApi.$getClosedChannelsWithoutReason();
|
||||
for (const channel of channels) {
|
||||
let reason = 0;
|
||||
// Only Esplora backend can retrieve spent transaction outputs
|
||||
const outspends = await bitcoinApi.$getOutspends(channel.closing_transaction_id);
|
||||
const lightningScriptReasons: number[] = [];
|
||||
for (const outspend of outspends) {
|
||||
if (outspend.spent && outspend.txid) {
|
||||
const spendingTx = await bitcoinApi.$getRawTransaction(outspend.txid);
|
||||
const lightningScript = this.findLightningScript(spendingTx.vin[outspend.vin || 0]);
|
||||
lightningScriptReasons.push(lightningScript);
|
||||
}
|
||||
}
|
||||
if (lightningScriptReasons.length === outspends.length
|
||||
&& lightningScriptReasons.filter((r) => r === 1).length === outspends.length) {
|
||||
reason = 1;
|
||||
} else {
|
||||
const filteredReasons = lightningScriptReasons.filter((r) => r !== 1);
|
||||
if (filteredReasons.length) {
|
||||
if (filteredReasons.some((r) => r === 2 || r === 4)) {
|
||||
reason = 3;
|
||||
} else {
|
||||
reason = 2;
|
||||
}
|
||||
} else {
|
||||
/*
|
||||
We can detect a commitment transaction (force close) by reading Sequence and Locktime
|
||||
https://github.com/lightning/bolts/blob/master/03-transactions.md#commitment-transaction
|
||||
*/
|
||||
const closingTx = await bitcoinApi.$getRawTransaction(channel.closing_transaction_id);
|
||||
const sequenceHex: string = closingTx.vin[0].sequence.toString(16);
|
||||
const locktimeHex: string = closingTx.locktime.toString(16);
|
||||
if (sequenceHex.substring(0, 2) === '80' && locktimeHex.substring(0, 2) === '20') {
|
||||
reason = 2; // Here we can't be sure if it's a penalty or not
|
||||
} else {
|
||||
reason = 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (reason) {
|
||||
logger.debug('Setting closing reason ' + reason + ' for channel: ' + channel.id + '.');
|
||||
await DB.query(`UPDATE channels SET closing_reason = ? WHERE id = ?`, [reason, channel.id]);
|
||||
}
|
||||
}
|
||||
logger.info(`Closed channels forensics scan complete.`);
|
||||
} catch (e) {
|
||||
logger.err('$runClosedChannelsForensics() error: ' + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
}
|
||||
|
||||
private findLightningScript(vin: IEsploraApi.Vin): number {
|
||||
const topElement = vin.witness[vin.witness.length - 2];
|
||||
if (/^OP_IF OP_PUSHBYTES_33 \w{66} OP_ELSE OP_PUSH(NUM_\d+|BYTES_(1 \w{2}|2 \w{4})) OP_CSV OP_DROP OP_PUSHBYTES_33 \w{66} OP_ENDIF OP_CHECKSIG$/.test(vin.inner_witnessscript_asm)) {
|
||||
// https://github.com/lightning/bolts/blob/master/03-transactions.md#commitment-transaction-outputs
|
||||
if (topElement === '01') {
|
||||
// top element is '01', taking the revocation path
|
||||
// 'Revoked Lightning Force Close';
|
||||
// Penalty force closed
|
||||
return 2;
|
||||
} else {
|
||||
// top element is '', this is a delayed to_local output
|
||||
// 'Lightning Force Close';
|
||||
return 3;
|
||||
}
|
||||
} else if (
|
||||
/^OP_DUP OP_HASH160 OP_PUSHBYTES_20 \w{40} OP_EQUAL OP_IF OP_CHECKSIG OP_ELSE OP_PUSHBYTES_33 \w{66} OP_SWAP OP_SIZE OP_PUSHBYTES_1 20 OP_EQUAL OP_NOTIF OP_DROP OP_PUSHNUM_2 OP_SWAP OP_PUSHBYTES_33 \w{66} OP_PUSHNUM_2 OP_CHECKMULTISIG OP_ELSE OP_HASH160 OP_PUSHBYTES_20 \w{40} OP_EQUALVERIFY OP_CHECKSIG OP_ENDIF (OP_PUSHNUM_1 OP_CSV OP_DROP |)OP_ENDIF$/.test(vin.inner_witnessscript_asm) ||
|
||||
/^OP_DUP OP_HASH160 OP_PUSHBYTES_20 \w{40} OP_EQUAL OP_IF OP_CHECKSIG OP_ELSE OP_PUSHBYTES_33 \w{66} OP_SWAP OP_SIZE OP_PUSHBYTES_1 20 OP_EQUAL OP_IF OP_HASH160 OP_PUSHBYTES_20 \w{40} OP_EQUALVERIFY OP_PUSHNUM_2 OP_SWAP OP_PUSHBYTES_33 \w{66} OP_PUSHNUM_2 OP_CHECKMULTISIG OP_ELSE OP_DROP OP_PUSHBYTES_3 \w{6} OP_CLTV OP_DROP OP_CHECKSIG OP_ENDIF (OP_PUSHNUM_1 OP_CSV OP_DROP |)OP_ENDIF$/.test(vin.inner_witnessscript_asm)
|
||||
) {
|
||||
// https://github.com/lightning/bolts/blob/master/03-transactions.md#offered-htlc-outputs
|
||||
// https://github.com/lightning/bolts/blob/master/03-transactions.md#received-htlc-outputs
|
||||
if (topElement.length === 66) {
|
||||
// top element is a public key
|
||||
// 'Revoked Lightning HTLC'; Penalty force closed
|
||||
return 4;
|
||||
} else if (topElement) {
|
||||
// top element is a preimage
|
||||
// 'Lightning HTLC';
|
||||
return 5;
|
||||
} else {
|
||||
// top element is '', taking the expiry path of the script
|
||||
// 'Expired Lightning HTLC';
|
||||
return 6;
|
||||
}
|
||||
} else if (/^OP_PUSHBYTES_33 \w{66} OP_CHECKSIG OP_IFDUP OP_NOTIF OP_PUSHNUM_16 OP_CSV OP_ENDIF$/.test(vin.inner_witnessscript_asm)) {
|
||||
// https://github.com/lightning/bolts/blob/master/03-transactions.md#to_local_anchor-and-to_remote_anchor-output-option_anchors
|
||||
if (topElement) {
|
||||
// top element is a signature
|
||||
// 'Lightning Anchor';
|
||||
return 7;
|
||||
} else {
|
||||
// top element is '', it has been swept after 16 blocks
|
||||
// 'Swept Lightning Anchor';
|
||||
return 8;
|
||||
}
|
||||
}
|
||||
return 1;
|
||||
}
|
||||
|
||||
private async $saveChannel(channel: ILightningApi.Channel): Promise<void> {
|
||||
const [ txid, vout ] = channel.chan_point.split(':');
|
||||
|
||||
const policy1: Partial<ILightningApi.RoutingPolicy> = channel.node1_policy || {};
|
||||
const policy2: Partial<ILightningApi.RoutingPolicy> = channel.node2_policy || {};
|
||||
|
||||
try {
|
||||
const query = `INSERT INTO channels
|
||||
(
|
||||
id,
|
||||
short_id,
|
||||
capacity,
|
||||
transaction_id,
|
||||
transaction_vout,
|
||||
updated_at,
|
||||
status,
|
||||
node1_public_key,
|
||||
node1_base_fee_mtokens,
|
||||
node1_cltv_delta,
|
||||
node1_fee_rate,
|
||||
node1_is_disabled,
|
||||
node1_max_htlc_mtokens,
|
||||
node1_min_htlc_mtokens,
|
||||
node1_updated_at,
|
||||
node2_public_key,
|
||||
node2_base_fee_mtokens,
|
||||
node2_cltv_delta,
|
||||
node2_fee_rate,
|
||||
node2_is_disabled,
|
||||
node2_max_htlc_mtokens,
|
||||
node2_min_htlc_mtokens,
|
||||
node2_updated_at
|
||||
)
|
||||
VALUES (?, ?, ?, ?, ?, ?, 1, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||
ON DUPLICATE KEY UPDATE
|
||||
capacity = ?,
|
||||
updated_at = ?,
|
||||
status = 1,
|
||||
node1_public_key = ?,
|
||||
node1_base_fee_mtokens = ?,
|
||||
node1_cltv_delta = ?,
|
||||
node1_fee_rate = ?,
|
||||
node1_is_disabled = ?,
|
||||
node1_max_htlc_mtokens = ?,
|
||||
node1_min_htlc_mtokens = ?,
|
||||
node1_updated_at = ?,
|
||||
node2_public_key = ?,
|
||||
node2_base_fee_mtokens = ?,
|
||||
node2_cltv_delta = ?,
|
||||
node2_fee_rate = ?,
|
||||
node2_is_disabled = ?,
|
||||
node2_max_htlc_mtokens = ?,
|
||||
node2_min_htlc_mtokens = ?,
|
||||
node2_updated_at = ?
|
||||
;`;
|
||||
|
||||
await DB.query(query, [
|
||||
this.toIntegerId(channel.channel_id),
|
||||
this.toShortId(channel.channel_id),
|
||||
channel.capacity,
|
||||
txid,
|
||||
vout,
|
||||
this.utcDateToMysql(channel.last_update),
|
||||
channel.node1_pub,
|
||||
policy1.fee_base_msat,
|
||||
policy1.time_lock_delta,
|
||||
policy1.fee_rate_milli_msat,
|
||||
policy1.disabled,
|
||||
policy1.max_htlc_msat,
|
||||
policy1.min_htlc,
|
||||
this.utcDateToMysql(policy1.last_update),
|
||||
channel.node2_pub,
|
||||
policy2.fee_base_msat,
|
||||
policy2.time_lock_delta,
|
||||
policy2.fee_rate_milli_msat,
|
||||
policy2.disabled,
|
||||
policy2.max_htlc_msat,
|
||||
policy2.min_htlc,
|
||||
this.utcDateToMysql(policy2.last_update),
|
||||
channel.capacity,
|
||||
this.utcDateToMysql(channel.last_update),
|
||||
channel.node1_pub,
|
||||
policy1.fee_base_msat,
|
||||
policy1.time_lock_delta,
|
||||
policy1.fee_rate_milli_msat,
|
||||
policy1.disabled,
|
||||
policy1.max_htlc_msat,
|
||||
policy1.min_htlc,
|
||||
this.utcDateToMysql(policy1.last_update),
|
||||
channel.node2_pub,
|
||||
policy2.fee_base_msat,
|
||||
policy2.time_lock_delta,
|
||||
policy2.fee_rate_milli_msat,
|
||||
policy2.disabled,
|
||||
policy2.max_htlc_msat,
|
||||
policy2.min_htlc,
|
||||
this.utcDateToMysql(policy2.last_update)
|
||||
]);
|
||||
} catch (e) {
|
||||
logger.err('$saveChannel() error: ' + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
}
|
||||
|
||||
private async $updateChannelStatus(channelId: string, status: number): Promise<void> {
|
||||
try {
|
||||
await DB.query(`UPDATE channels SET status = ? WHERE id = ?`, [status, channelId]);
|
||||
} catch (e) {
|
||||
logger.err('$updateChannelStatus() error: ' + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
}
|
||||
|
||||
private async $setChannelsInactive(graphChannelsIds: string[]): Promise<void> {
|
||||
if (graphChannelsIds.length === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
await DB.query(`
|
||||
UPDATE channels
|
||||
SET status = 0
|
||||
WHERE short_id NOT IN (
|
||||
${graphChannelsIds.map(id => `"${id}"`).join(',')}
|
||||
)
|
||||
AND status != 2
|
||||
`);
|
||||
} catch (e) {
|
||||
logger.err('$setChannelsInactive() error: ' + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
}
|
||||
|
||||
private async $saveNode(node: ILightningApi.Node): Promise<void> {
|
||||
try {
|
||||
const sockets = (node.addresses?.map(a => a.addr).join(',')) ?? '';
|
||||
const query = `INSERT INTO nodes(
|
||||
public_key,
|
||||
first_seen,
|
||||
updated_at,
|
||||
alias,
|
||||
color,
|
||||
sockets
|
||||
)
|
||||
VALUES (?, NOW(), FROM_UNIXTIME(?), ?, ?, ?)
|
||||
ON DUPLICATE KEY UPDATE updated_at = FROM_UNIXTIME(?), alias = ?, color = ?, sockets = ?`;
|
||||
|
||||
await DB.query(query, [
|
||||
node.pub_key,
|
||||
node.last_update,
|
||||
node.alias,
|
||||
node.color,
|
||||
sockets,
|
||||
node.last_update,
|
||||
node.alias,
|
||||
node.color,
|
||||
sockets,
|
||||
]);
|
||||
} catch (e) {
|
||||
logger.err('$saveNode() error: ' + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
}
|
||||
|
||||
private toIntegerId(id: string): string {
|
||||
if (config.LIGHTNING.BACKEND === 'cln') {
|
||||
return convertChannelId(id);
|
||||
}
|
||||
else if (config.LIGHTNING.BACKEND === 'lnd') {
|
||||
return id;
|
||||
}
|
||||
return '';
|
||||
}
|
||||
|
||||
/** Decodes a channel id returned by lnd as uint64 to a short channel id */
|
||||
private toShortId(id: string): string {
|
||||
if (config.LIGHTNING.BACKEND === 'cln') {
|
||||
return id;
|
||||
}
|
||||
|
||||
const n = BigInt(id);
|
||||
return [
|
||||
n >> 40n, // nth block
|
||||
(n >> 16n) & 0xffffffn, // nth tx of the block
|
||||
n & 0xffffn // nth output of the tx
|
||||
].join('x');
|
||||
}
|
||||
|
||||
private utcDateToMysql(date?: number): string {
|
||||
const d = new Date((date || 0) * 1000);
|
||||
return d.toISOString().split('T')[0] + ' ' + d.toTimeString().split(' ')[0];
|
||||
}
|
||||
}
|
||||
|
||||
export default new NetworkSyncService();
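For reference, a worked example of the short channel id decoding done by toShortId() above. The uint64 value below is made up; the bit layout (block height << 40 | tx index << 16 | output index) matches the shifts used in the code.

// Hypothetical lnd channel_id
const id = 769658139524071425n;
const block = id >> 40n;                 // 700000n -> funding block height
const txIndex = (id >> 16n) & 0xffffffn; // 1234n   -> nth tx of the block
const vout = id & 0xffffn;               // 1n      -> nth output of the tx
console.log(`${block}x${txIndex}x${vout}`); // "700000x1234x1"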
|
43
backend/src/tasks/lightning/stats-updater.service.ts
Normal file
43
backend/src/tasks/lightning/stats-updater.service.ts
Normal file
@ -0,0 +1,43 @@
|
||||
import DB from '../../database';
|
||||
import logger from '../../logger';
|
||||
import lightningApi from '../../api/lightning/lightning-api-factory';
|
||||
import LightningStatsImporter from './sync-tasks/stats-importer';
|
||||
import config from '../../config';
|
||||
|
||||
class LightningStatsUpdater {
|
||||
public async $startService(): Promise<void> {
|
||||
logger.info('Starting Lightning Stats service');
|
||||
|
||||
await this.$runTasks();
|
||||
LightningStatsImporter.$run();
|
||||
}
|
||||
|
||||
private setDateMidnight(date: Date): void {
|
||||
date.setUTCHours(0);
|
||||
date.setUTCMinutes(0);
|
||||
date.setUTCSeconds(0);
|
||||
date.setUTCMilliseconds(0);
|
||||
}
|
||||
|
||||
private async $runTasks(): Promise<void> {
|
||||
await this.$logStatsDaily();
|
||||
|
||||
setTimeout(() => {
|
||||
this.$runTasks();
|
||||
}, 1000 * config.LIGHTNING.NODE_STATS_REFRESH_INTERVAL);
|
||||
}
|
||||
|
||||
/**
|
||||
* Update the latest entry for each node every config.LIGHTNING.NODE_STATS_REFRESH_INTERVAL seconds
|
||||
*/
|
||||
private async $logStatsDaily(): Promise<void> {
|
||||
const date = new Date();
|
||||
this.setDateMidnight(date);
|
||||
|
||||
logger.info(`Updating latest networks stats`);
|
||||
const networkGraph = await lightningApi.$getNetworkGraph();
|
||||
LightningStatsImporter.computeNetworkStats(date.getTime() / 1000, networkGraph);
|
||||
}
|
||||
}
|
||||
|
||||
export default new LightningStatsUpdater();
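To make the scheduling above concrete: $logStatsDaily() truncates the current date to 00:00:00 UTC before handing it to computeNetworkStats(), so every run within the same UTC day targets the same `added` timestamp and therefore updates the same lightning_stats row (assuming `added` is the unique key hit by the ON DUPLICATE KEY UPDATE in the importer). A small sketch with an example instant:

const date = new Date('2022-07-25T14:33:12Z'); // example instant, value is illustrative
date.setUTCHours(0);
date.setUTCMinutes(0);
date.setUTCSeconds(0);
date.setUTCMilliseconds(0);
console.log(date.getTime() / 1000); // 1658707200 -> 2022-07-25 00:00:00 UTC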
|
113
backend/src/tasks/lightning/sync-tasks/funding-tx-fetcher.ts
Normal file
113
backend/src/tasks/lightning/sync-tasks/funding-tx-fetcher.ts
Normal file
@ -0,0 +1,113 @@
|
||||
import { existsSync, promises } from 'fs';
|
||||
import bitcoinClient from '../../../api/bitcoin/bitcoin-client';
|
||||
import config from '../../../config';
|
||||
import logger from '../../../logger';
|
||||
|
||||
const fsPromises = promises;
|
||||
|
||||
const BLOCKS_CACHE_MAX_SIZE = 100;
|
||||
const CACHE_FILE_NAME = config.MEMPOOL.CACHE_DIR + '/ln-funding-txs-cache.json';
|
||||
|
||||
class FundingTxFetcher {
|
||||
private running = false;
|
||||
private blocksCache = {};
|
||||
private channelNewlyProcessed = 0;
|
||||
public fundingTxCache = {};
|
||||
|
||||
async $init(): Promise<void> {
|
||||
// Load funding tx disk cache
|
||||
if (Object.keys(this.fundingTxCache).length === 0 && existsSync(CACHE_FILE_NAME)) {
|
||||
try {
|
||||
this.fundingTxCache = JSON.parse(await fsPromises.readFile(CACHE_FILE_NAME, 'utf-8'));
|
||||
} catch (e) {
|
||||
logger.err(`Unable to parse channels funding txs disk cache. Starting from scratch`);
|
||||
this.fundingTxCache = {};
|
||||
}
|
||||
logger.debug(`Imported ${Object.keys(this.fundingTxCache).length} funding tx amounts from the disk cache`);
|
||||
}
|
||||
}
|
||||
|
||||
async $fetchChannelsFundingTxs(channelIds: string[]): Promise<void> {
|
||||
if (this.running) {
|
||||
return;
|
||||
}
|
||||
this.running = true;
|
||||
|
||||
const globalTimer = new Date().getTime() / 1000;
|
||||
let cacheTimer = new Date().getTime() / 1000;
|
||||
let loggerTimer = new Date().getTime() / 1000;
|
||||
let channelProcessed = 0;
|
||||
this.channelNewlyProcessed = 0;
|
||||
for (const channelId of channelIds) {
|
||||
await this.$fetchChannelOpenTx(channelId);
|
||||
++channelProcessed;
|
||||
|
||||
let elapsedSeconds = Math.round((new Date().getTime() / 1000) - loggerTimer);
|
||||
if (elapsedSeconds > 10) {
|
||||
elapsedSeconds = Math.round((new Date().getTime() / 1000) - globalTimer);
|
||||
logger.info(`Indexing channels funding tx ${channelProcessed + 1} of ${channelIds.length} ` +
|
||||
`(${Math.floor(channelProcessed / channelIds.length * 10000) / 100}%) | ` +
|
||||
`elapsed: ${elapsedSeconds} seconds`
|
||||
);
|
||||
loggerTimer = new Date().getTime() / 1000;
|
||||
}
|
||||
|
||||
elapsedSeconds = Math.round((new Date().getTime() / 1000) - cacheTimer);
|
||||
if (elapsedSeconds > 60) {
|
||||
logger.debug(`Saving ${Object.keys(this.fundingTxCache).length} funding txs cache into disk`);
|
||||
fsPromises.writeFile(CACHE_FILE_NAME, JSON.stringify(this.fundingTxCache));
|
||||
cacheTimer = new Date().getTime() / 1000;
|
||||
}
|
||||
}
|
||||
|
||||
if (this.channelNewlyProcessed > 0) {
|
||||
logger.info(`Indexed ${this.channelNewlyProcessed} additional channels funding tx`);
|
||||
logger.debug(`Saving ${Object.keys(this.fundingTxCache).length} funding txs cache into disk`);
|
||||
fsPromises.writeFile(CACHE_FILE_NAME, JSON.stringify(this.fundingTxCache));
|
||||
}
|
||||
|
||||
this.running = false;
|
||||
}
|
||||
|
||||
public async $fetchChannelOpenTx(channelId: string): Promise<any> {
|
||||
if (this.fundingTxCache[channelId]) {
|
||||
return this.fundingTxCache[channelId];
|
||||
}
|
||||
|
||||
const parts = channelId.split('x');
|
||||
const blockHeight = parts[0];
|
||||
const txIdx = parts[1];
|
||||
const outputIdx = parts[2];
|
||||
|
||||
let block = this.blocksCache[blockHeight];
|
||||
// Fetch it from core
|
||||
if (!block) {
|
||||
const blockHash = await bitcoinClient.getBlockHash(parseInt(blockHeight, 10));
|
||||
block = await bitcoinClient.getBlock(blockHash, 1);
|
||||
}
|
||||
this.blocksCache[block.height] = block;
|
||||
|
||||
const blocksCacheHashes = Object.keys(this.blocksCache).sort((a, b) => parseInt(b) - parseInt(a)).reverse();
|
||||
if (blocksCacheHashes.length > BLOCKS_CACHE_MAX_SIZE) {
|
||||
for (let i = 0; i < 10; ++i) {
|
||||
delete this.blocksCache[blocksCacheHashes[i]];
|
||||
}
|
||||
}
|
||||
|
||||
const txid = block.tx[txIdx];
|
||||
const rawTx = await bitcoinClient.getRawTransaction(txid);
|
||||
const tx = await bitcoinClient.decodeRawTransaction(rawTx);
|
||||
|
||||
this.fundingTxCache[channelId] = {
|
||||
timestamp: block.time,
|
||||
txid: txid,
|
||||
value: tx.vout[outputIdx].value,
|
||||
};
|
||||
|
||||
++this.channelNewlyProcessed;
|
||||
|
||||
return this.fundingTxCache[channelId];
|
||||
}
|
||||
}
|
||||
|
||||
export default new FundingTxFetcher;
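A usage sketch for the fetcher above, with a made-up short channel id, showing how the 'heightxtxIndexxvout' format drives the Core RPC lookups and what ends up in the cache:

// '736000x55x1' is hypothetical: block 736000, 56th transaction of that block, output #1
const [blockHeight, txIdx, outputIdx] = '736000x55x1'.split('x');
console.log(blockHeight, txIdx, outputIdx); // '736000' '55' '1'
// $fetchChannelOpenTx() then resolves block.tx[55] via getBlockHash/getBlock,
// decodes the raw transaction, and caches:
// { timestamp: block.time, txid: block.tx[55], value: tx.vout[1].value }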
|
98
backend/src/tasks/lightning/sync-tasks/node-locations.ts
Normal file
98
backend/src/tasks/lightning/sync-tasks/node-locations.ts
Normal file
@ -0,0 +1,98 @@
|
||||
import * as net from 'net';
|
||||
import maxmind, { CityResponse, AsnResponse, IspResponse } from 'maxmind';
|
||||
import nodesApi from '../../../api/explorer/nodes.api';
|
||||
import config from '../../../config';
|
||||
import DB from '../../../database';
|
||||
import logger from '../../../logger';
|
||||
|
||||
export async function $lookupNodeLocation(): Promise<void> {
|
||||
logger.info(`Running node location updater using Maxmind...`);
|
||||
try {
|
||||
const nodes = await nodesApi.$getAllNodes();
|
||||
const lookupCity = await maxmind.open<CityResponse>(config.MAXMIND.GEOLITE2_CITY);
|
||||
const lookupAsn = await maxmind.open<AsnResponse>(config.MAXMIND.GEOLITE2_ASN);
|
||||
const lookupIsp = await maxmind.open<IspResponse>(config.MAXMIND.GEOIP2_ISP);
|
||||
|
||||
for (const node of nodes) {
|
||||
const sockets: string[] = node.sockets.split(',');
|
||||
for (const socket of sockets) {
|
||||
const ip = socket.substring(0, socket.lastIndexOf(':')).replace('[', '').replace(']', '');
|
||||
const hasClearnet = [4, 6].includes(net.isIP(ip));
|
||||
if (hasClearnet && ip !== '127.0.1.1' && ip !== '127.0.0.1') {
|
||||
const city = lookupCity.get(ip);
|
||||
const asn = lookupAsn.get(ip);
|
||||
const isp = lookupIsp.get(ip);
|
||||
|
||||
if (city && (asn || isp)) {
|
||||
const query = `UPDATE nodes SET
|
||||
as_number = ?,
|
||||
city_id = ?,
|
||||
country_id = ?,
|
||||
subdivision_id = ?,
|
||||
longitude = ?,
|
||||
latitude = ?,
|
||||
accuracy_radius = ?
|
||||
WHERE public_key = ?`;
|
||||
|
||||
const params = [
|
||||
isp?.autonomous_system_number ?? asn?.autonomous_system_number,
|
||||
city.city?.geoname_id,
|
||||
city.country?.geoname_id,
|
||||
city.subdivisions ? city.subdivisions[0].geoname_id : null,
|
||||
city.location?.longitude,
|
||||
city.location?.latitude,
|
||||
city.location?.accuracy_radius,
|
||||
node.public_key
|
||||
];
|
||||
await DB.query(query, params);
|
||||
|
||||
// Store Continent
|
||||
if (city.continent?.geoname_id) {
|
||||
await DB.query(
|
||||
`INSERT IGNORE INTO geo_names (id, type, names) VALUES (?, 'continent', ?)`,
|
||||
[city.continent?.geoname_id, JSON.stringify(city.continent?.names)]);
|
||||
}
|
||||
|
||||
// Store Country
|
||||
if (city.country?.geoname_id) {
|
||||
await DB.query(
|
||||
`INSERT IGNORE INTO geo_names (id, type, names) VALUES (?, 'country', ?)`,
|
||||
[city.country?.geoname_id, JSON.stringify(city.country?.names)]);
|
||||
}
|
||||
|
||||
// Store Country ISO code
|
||||
if (city.country?.iso_code) {
|
||||
await DB.query(
|
||||
`INSERT IGNORE INTO geo_names (id, type, names) VALUES (?, 'country_iso_code', ?)`,
|
||||
[city.country?.geoname_id, city.country?.iso_code]);
|
||||
}
|
||||
|
||||
// Store Division
|
||||
if (city.subdivisions && city.subdivisions[0]) {
|
||||
await DB.query(
|
||||
`INSERT IGNORE INTO geo_names (id, type, names) VALUES (?, 'division', ?)`,
|
||||
[city.subdivisions[0].geoname_id, JSON.stringify(city.subdivisions[0]?.names)]);
|
||||
}
|
||||
|
||||
// Store City
|
||||
if (city.city?.geoname_id) {
|
||||
await DB.query(
|
||||
`INSERT IGNORE INTO geo_names (id, type, names) VALUES (?, 'city', ?)`,
|
||||
[city.city?.geoname_id, JSON.stringify(city.city?.names)]);
|
||||
}
|
||||
|
||||
// Store AS name
|
||||
if (isp?.autonomous_system_organization ?? asn?.autonomous_system_organization) {
|
||||
await DB.query(
|
||||
`INSERT IGNORE INTO geo_names (id, type, names) VALUES (?, 'as_organization', ?)`,
|
||||
[isp?.autonomous_system_number ?? asn?.autonomous_system_number, JSON.stringify(isp?.isp ?? asn?.autonomous_system_organization)]);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
logger.info(`Node location data updated.`);
|
||||
} catch (e) {
|
||||
logger.err('$lookupNodeLocation() error: ' + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
}
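The socket parsing above strips the port and any IPv6 brackets before the Maxmind lookups; only clearnet addresses pass the net.isIP() check. A short sketch with made-up sockets:

import * as net from 'net';

const extractIp = (socket: string): string =>
  socket.substring(0, socket.lastIndexOf(':')).replace('[', '').replace(']', '');

console.log(extractIp('203.0.113.7:9735'), net.isIP('203.0.113.7'));   // '203.0.113.7' 4 -> looked up
console.log(extractIp('[2001:db8::1]:9735'), net.isIP('2001:db8::1')); // '2001:db8::1' 6 -> looked up
console.log(extractIp('examplev3addr.onion:9735'));                    // onion host -> net.isIP() is 0, skipped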
|
408
backend/src/tasks/lightning/sync-tasks/stats-importer.ts
Normal file
408
backend/src/tasks/lightning/sync-tasks/stats-importer.ts
Normal file
@ -0,0 +1,408 @@
|
||||
import DB from '../../../database';
|
||||
import { promises } from 'fs';
|
||||
import { XMLParser } from 'fast-xml-parser';
|
||||
import logger from '../../../logger';
|
||||
import fundingTxFetcher from './funding-tx-fetcher';
|
||||
import config from '../../../config';
|
||||
|
||||
const fsPromises = promises;
|
||||
|
||||
interface Node {
|
||||
id: string;
|
||||
timestamp: number;
|
||||
features: string;
|
||||
rgb_color: string;
|
||||
alias: string;
|
||||
addresses: unknown[];
|
||||
out_degree: number;
|
||||
in_degree: number;
|
||||
}
|
||||
|
||||
interface Channel {
|
||||
channel_id: string;
|
||||
node1_pub: string;
|
||||
node2_pub: string;
|
||||
timestamp: number;
|
||||
features: string;
|
||||
fee_base_msat: number;
|
||||
fee_rate_milli_msat: number;
|
||||
htlc_minimim_msat: number;
|
||||
cltv_expiry_delta: number;
|
||||
htlc_maximum_msat: number;
|
||||
}
|
||||
|
||||
class LightningStatsImporter {
|
||||
topologiesFolder = config.LIGHTNING.TOPOLOGY_FOLDER;
|
||||
parser = new XMLParser();
|
||||
|
||||
async $run(): Promise<void> {
|
||||
logger.info(`Importing historical lightning stats`);
|
||||
|
||||
const [channels]: any[] = await DB.query('SELECT short_id from channels;');
|
||||
logger.info('Caching funding txs for currently existing channels');
|
||||
await fundingTxFetcher.$fetchChannelsFundingTxs(channels.map(channel => channel.short_id));
|
||||
|
||||
await this.$importHistoricalLightningStats();
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate LN network stats for one day
|
||||
*/
|
||||
public async computeNetworkStats(timestamp: number, networkGraph): Promise<unknown> {
|
||||
// Node counts and network shares
|
||||
let clearnetNodes = 0;
|
||||
let torNodes = 0;
|
||||
let clearnetTorNodes = 0;
|
||||
let unannouncedNodes = 0;
|
||||
|
||||
for (const node of networkGraph.nodes) {
|
||||
let hasOnion = false;
|
||||
let hasClearnet = false;
|
||||
let isUnnanounced = true;
|
||||
|
||||
for (const socket of (node.addresses ?? [])) {
|
||||
hasOnion = hasOnion || ['torv2', 'torv3'].includes(socket.network);
|
||||
hasClearnet = hasClearnet || ['ipv4', 'ipv6'].includes(socket.network);
|
||||
}
|
||||
if (hasOnion && hasClearnet) {
|
||||
clearnetTorNodes++;
|
||||
isUnnanounced = false;
|
||||
} else if (hasOnion) {
|
||||
torNodes++;
|
||||
isUnnanounced = false;
|
||||
} else if (hasClearnet) {
|
||||
clearnetNodes++;
|
||||
isUnnanounced = false;
|
||||
}
|
||||
if (isUnnanounced) {
|
||||
unannouncedNodes++;
|
||||
}
|
||||
}
|
||||
|
||||
// Channels and node historical stats
|
||||
const nodeStats = {};
|
||||
let capacity = 0;
|
||||
let avgFeeRate = 0;
|
||||
let avgBaseFee = 0;
|
||||
const capacities: number[] = [];
|
||||
const feeRates: number[] = [];
|
||||
const baseFees: number[] = [];
|
||||
const alreadyCountedChannels = {};
|
||||
|
||||
for (const channel of networkGraph.edges) {
|
||||
let short_id = channel.channel_id;
|
||||
if (short_id.indexOf('/') !== -1) {
|
||||
short_id = short_id.slice(0, -2);
|
||||
}
|
||||
|
||||
const tx = await fundingTxFetcher.$fetchChannelOpenTx(short_id);
|
||||
if (!tx) {
|
||||
logger.err(`Unable to fetch funding tx for channel ${short_id}. Capacity and creation date are unknown. Skipping channel.`);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!nodeStats[channel.node1_pub]) {
|
||||
nodeStats[channel.node1_pub] = {
|
||||
capacity: 0,
|
||||
channels: 0,
|
||||
};
|
||||
}
|
||||
if (!nodeStats[channel.node2_pub]) {
|
||||
nodeStats[channel.node2_pub] = {
|
||||
capacity: 0,
|
||||
channels: 0,
|
||||
};
|
||||
}
|
||||
|
||||
if (!alreadyCountedChannels[short_id]) {
|
||||
capacity += Math.round(tx.value * 100000000);
|
||||
capacities.push(Math.round(tx.value * 100000000));
|
||||
alreadyCountedChannels[short_id] = true;
|
||||
|
||||
nodeStats[channel.node1_pub].capacity += Math.round(tx.value * 100000000);
|
||||
nodeStats[channel.node1_pub].channels++;
|
||||
nodeStats[channel.node2_pub].capacity += Math.round(tx.value * 100000000);
|
||||
nodeStats[channel.node2_pub].channels++;
|
||||
}
|
||||
|
||||
if (channel.node1_policy !== undefined) { // Coming from the node
|
||||
for (const policy of [channel.node1_policy, channel.node2_policy]) {
|
||||
if (policy && policy.fee_rate_milli_msat < 5000) {
|
||||
avgFeeRate += policy.fee_rate_milli_msat;
|
||||
feeRates.push(policy.fee_rate_milli_msat);
|
||||
}
|
||||
if (policy && policy.fee_base_msat < 5000) {
|
||||
avgBaseFee += policy.fee_base_msat;
|
||||
baseFees.push(policy.fee_base_msat);
|
||||
}
|
||||
}
|
||||
} else { // Coming from the historical import
|
||||
if (channel.fee_rate_milli_msat < 5000) {
|
||||
avgFeeRate += channel.fee_rate_milli_msat;
|
||||
feeRates.push(channel.fee_rate_milli_msat);
|
||||
}
|
||||
if (channel.fee_base_msat < 5000) {
|
||||
avgBaseFee += channel.fee_base_msat;
|
||||
baseFees.push(channel.fee_base_msat);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
avgFeeRate /= networkGraph.edges.length;
|
||||
avgBaseFee /= networkGraph.edges.length;
|
||||
const medCapacity = capacities.sort((a, b) => b - a)[Math.round(capacities.length / 2 - 1)];
|
||||
const medFeeRate = feeRates.sort((a, b) => b - a)[Math.round(feeRates.length / 2 - 1)];
|
||||
const medBaseFee = baseFees.sort((a, b) => b - a)[Math.round(baseFees.length / 2 - 1)];
|
||||
const avgCapacity = Math.round(capacity / capacities.length);
|
||||
|
||||
let query = `INSERT INTO lightning_stats(
|
||||
added,
|
||||
channel_count,
|
||||
node_count,
|
||||
total_capacity,
|
||||
tor_nodes,
|
||||
clearnet_nodes,
|
||||
unannounced_nodes,
|
||||
clearnet_tor_nodes,
|
||||
avg_capacity,
|
||||
avg_fee_rate,
|
||||
avg_base_fee_mtokens,
|
||||
med_capacity,
|
||||
med_fee_rate,
|
||||
med_base_fee_mtokens
|
||||
)
|
||||
VALUES (FROM_UNIXTIME(?), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||
ON DUPLICATE KEY UPDATE
|
||||
added = FROM_UNIXTIME(?),
|
||||
channel_count = ?,
|
||||
node_count = ?,
|
||||
total_capacity = ?,
|
||||
tor_nodes = ?,
|
||||
clearnet_nodes = ?,
|
||||
unannounced_nodes = ?,
|
||||
clearnet_tor_nodes = ?,
|
||||
avg_capacity = ?,
|
||||
avg_fee_rate = ?,
|
||||
avg_base_fee_mtokens = ?,
|
||||
med_capacity = ?,
|
||||
med_fee_rate = ?,
|
||||
med_base_fee_mtokens = ?
|
||||
`;
|
||||
|
||||
await DB.query(query, [
|
||||
timestamp,
|
||||
capacities.length,
|
||||
networkGraph.nodes.length,
|
||||
capacity,
|
||||
torNodes,
|
||||
clearnetNodes,
|
||||
unannouncedNodes,
|
||||
clearnetTorNodes,
|
||||
avgCapacity,
|
||||
avgFeeRate,
|
||||
avgBaseFee,
|
||||
medCapacity,
|
||||
medFeeRate,
|
||||
medBaseFee,
|
||||
timestamp,
|
||||
capacities.length,
|
||||
networkGraph.nodes.length,
|
||||
capacity,
|
||||
torNodes,
|
||||
clearnetNodes,
|
||||
unannouncedNodes,
|
||||
clearnetTorNodes,
|
||||
avgCapacity,
|
||||
avgFeeRate,
|
||||
avgBaseFee,
|
||||
medCapacity,
|
||||
medFeeRate,
|
||||
medBaseFee,
|
||||
]);
|
||||
|
||||
for (const public_key of Object.keys(nodeStats)) {
|
||||
query = `INSERT INTO node_stats(
|
||||
public_key,
|
||||
added,
|
||||
capacity,
|
||||
channels
|
||||
)
|
||||
VALUES (?, FROM_UNIXTIME(?), ?, ?)
|
||||
ON DUPLICATE KEY UPDATE
|
||||
added = FROM_UNIXTIME(?),
|
||||
capacity = ?,
|
||||
channels = ?
|
||||
`;
|
||||
|
||||
await DB.query(query, [
|
||||
public_key,
|
||||
timestamp,
|
||||
nodeStats[public_key].capacity,
|
||||
nodeStats[public_key].channels,
|
||||
timestamp,
|
||||
nodeStats[public_key].capacity,
|
||||
nodeStats[public_key].channels,
|
||||
]);
|
||||
}
|
||||
|
||||
return {
|
||||
added: timestamp,
|
||||
node_count: networkGraph.nodes.length
|
||||
};
|
||||
}
|
||||
|
||||
async $importHistoricalLightningStats(): Promise<void> {
|
||||
let latestNodeCount = 1;
|
||||
|
||||
const fileList = await fsPromises.readdir(this.topologiesFolder);
|
||||
// Insert history from the most recent to the oldest
|
||||
// This also puts the .json cached files first
|
||||
fileList.sort().reverse();
|
||||
|
||||
const [rows]: any[] = await DB.query(`
|
||||
SELECT UNIX_TIMESTAMP(added) AS added, node_count
|
||||
FROM lightning_stats
|
||||
ORDER BY added DESC
|
||||
`);
|
||||
const existingStatsTimestamps = {};
|
||||
for (const row of rows) {
|
||||
existingStatsTimestamps[row.added] = row;
|
||||
}
|
||||
|
||||
// For logging purposes
|
||||
let processed = 10;
|
||||
let totalProcessed = -1;
|
||||
|
||||
for (const filename of fileList) {
|
||||
processed++;
|
||||
totalProcessed++;
|
||||
|
||||
const timestamp = parseInt(filename.split('_')[1], 10);
|
||||
|
||||
// Stats exist already, don't calculate/insert them
|
||||
if (existingStatsTimestamps[timestamp] !== undefined) {
|
||||
latestNodeCount = existingStatsTimestamps[timestamp].node_count;
|
||||
continue;
|
||||
}
|
||||
|
||||
logger.debug(`Reading ${this.topologiesFolder}/${filename}`);
|
||||
const fileContent = await fsPromises.readFile(`${this.topologiesFolder}/${filename}`, 'utf8');
|
||||
|
||||
let graph;
|
||||
if (filename.indexOf('.json') !== -1) {
|
||||
try {
|
||||
graph = JSON.parse(fileContent);
|
||||
} catch (e) {
|
||||
logger.debug(`Invalid topology file ${this.topologiesFolder}/${filename}, cannot parse the content`);
|
||||
continue;
|
||||
}
|
||||
} else {
|
||||
graph = this.parseFile(fileContent);
|
||||
if (!graph) {
|
||||
logger.debug(`Invalid topology file ${this.topologiesFolder}/${filename}, cannot parse the content`);
|
||||
continue;
|
||||
}
|
||||
await fsPromises.writeFile(`${this.topologiesFolder}/${filename}.json`, JSON.stringify(graph));
|
||||
}
|
||||
|
||||
if (timestamp > 1556316000) {
|
||||
// "No, the reason most likely is just that I started collection in 2019,
|
||||
// so what I had before that is just the survivors from before, which weren't that many"
|
||||
const diffRatio = graph.nodes.length / latestNodeCount;
|
||||
if (diffRatio < 0.9) {
|
||||
// Ignore a drop of more than 10% of the node count as it's probably a missing data point
|
||||
logger.debug(`Nodes count diff ratio threshold reached, ignoring the data for this day: ${graph.nodes.length} nodes vs ${latestNodeCount}`);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
latestNodeCount = graph.nodes.length;
|
||||
|
||||
const datestr = `${new Date(timestamp * 1000).toUTCString()} (${timestamp})`;
|
||||
logger.debug(`${datestr}: Found ${graph.nodes.length} nodes and ${graph.edges.length} channels`);
|
||||
|
||||
if (processed > 10) {
|
||||
logger.info(`Generating LN network stats for ${datestr}. Processed ${totalProcessed}/${fileList.length} files`);
|
||||
processed = 0;
|
||||
} else {
|
||||
logger.debug(`Generating LN network stats for ${datestr}. Processed ${totalProcessed}/${fileList.length} files`);
|
||||
}
|
||||
await fundingTxFetcher.$fetchChannelsFundingTxs(graph.edges.map(channel => channel.channel_id.slice(0, -2)));
|
||||
const stat = await this.computeNetworkStats(timestamp, graph);
|
||||
|
||||
existingStatsTimestamps[timestamp] = stat;
|
||||
}
|
||||
|
||||
logger.info(`Lightning network stats historical import completed`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse the file content into XML, and return a list of nodes and channels
|
||||
*/
|
||||
private parseFile(fileContent): any {
|
||||
const graph = this.parser.parse(fileContent);
|
||||
if (Object.keys(graph).length === 0) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const nodes: Node[] = [];
|
||||
const channels: Channel[] = [];
|
||||
|
||||
// If there is only one entry, the parser does not return an array, so we override this
|
||||
if (!Array.isArray(graph.graphml.graph.node)) {
|
||||
graph.graphml.graph.node = [graph.graphml.graph.node];
|
||||
}
|
||||
if (!Array.isArray(graph.graphml.graph.edge)) {
|
||||
graph.graphml.graph.edge = [graph.graphml.graph.edge];
|
||||
}
|
||||
|
||||
for (const node of graph.graphml.graph.node) {
|
||||
if (!node.data) {
|
||||
continue;
|
||||
}
|
||||
const addresses: unknown[] = [];
|
||||
const sockets = node.data[5].split(',');
|
||||
for (const socket of sockets) {
|
||||
const parts = socket.split('://');
|
||||
addresses.push({
|
||||
network: parts[0],
|
||||
addr: parts[1],
|
||||
});
|
||||
}
|
||||
nodes.push({
|
||||
id: node.data[0],
|
||||
timestamp: node.data[1],
|
||||
features: node.data[2],
|
||||
rgb_color: node.data[3],
|
||||
alias: node.data[4],
|
||||
addresses: addresses,
|
||||
out_degree: node.data[6],
|
||||
in_degree: node.data[7],
|
||||
});
|
||||
}
|
||||
|
||||
for (const channel of graph.graphml.graph.edge) {
|
||||
if (!channel.data) {
|
||||
continue;
|
||||
}
|
||||
channels.push({
|
||||
channel_id: channel.data[0],
|
||||
node1_pub: channel.data[1],
|
||||
node2_pub: channel.data[2],
|
||||
timestamp: channel.data[3],
|
||||
features: channel.data[4],
|
||||
fee_base_msat: channel.data[5],
|
||||
fee_rate_milli_msat: channel.data[6],
|
||||
htlc_minimim_msat: channel.data[7],
|
||||
cltv_expiry_delta: channel.data[8],
|
||||
htlc_maximum_msat: channel.data[9],
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
nodes: nodes,
|
||||
edges: channels,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
export default new LightningStatsImporter;
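On the med_* values computed above: the importer sorts each series in descending order and picks the element at Math.round(length / 2 - 1), which is the true median for odd-length arrays and the upper-middle element (no averaging) for even-length ones. Example with made-up capacities in sats:

const capacities = [1_000_000, 250_000, 5_000_000, 75_000, 600_000]; // illustrative values
const medCapacity = capacities.sort((a, b) => b - a)[Math.round(capacities.length / 2 - 1)];
// sorted desc: [5000000, 1000000, 600000, 250000, 75000]; index Math.round(1.5) = 2
console.log(medCapacity); // 600000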
|
@ -16,7 +16,7 @@ class BitfinexApi implements PriceFeed {
|
||||
return response ? parseInt(response['last_price'], 10) : -1;
|
||||
}
|
||||
|
||||
public async $fetchRecentHourlyPrice(currencies: string[]): Promise<PriceHistory> {
|
||||
public async $fetchRecentPrice(currencies: string[], type: 'hour' | 'day'): Promise<PriceHistory> {
|
||||
const priceHistory: PriceHistory = {};
|
||||
|
||||
for (const currency of currencies) {
|
||||
@ -24,7 +24,7 @@ class BitfinexApi implements PriceFeed {
|
||||
continue;
|
||||
}
|
||||
|
||||
const response = await query(this.urlHist.replace('{GRANULARITY}', '1h').replace('{CURRENCY}', currency));
|
||||
const response = await query(this.urlHist.replace('{GRANULARITY}', type === 'hour' ? '1h' : '1D').replace('{CURRENCY}', currency));
|
||||
const pricesRaw = response ? response : [];
|
||||
|
||||
for (const price of pricesRaw as any[]) {
|
||||
|
@ -16,7 +16,7 @@ class BitflyerApi implements PriceFeed {
|
||||
return response ? parseInt(response['ltp'], 10) : -1;
|
||||
}
|
||||
|
||||
public async $fetchRecentHourlyPrice(currencies: string[]): Promise<PriceHistory> {
|
||||
public async $fetchRecentPrice(currencies: string[], type: 'hour' | 'day'): Promise<PriceHistory> {
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
@ -16,7 +16,7 @@ class CoinbaseApi implements PriceFeed {
|
||||
return response ? parseInt(response['data']['amount'], 10) : -1;
|
||||
}
|
||||
|
||||
public async $fetchRecentHourlyPrice(currencies: string[]): Promise<PriceHistory> {
|
||||
public async $fetchRecentPrice(currencies: string[], type: 'hour' | 'day'): Promise<PriceHistory> {
|
||||
const priceHistory: PriceHistory = {};
|
||||
|
||||
for (const currency of currencies) {
|
||||
@ -24,7 +24,7 @@ class CoinbaseApi implements PriceFeed {
|
||||
continue;
|
||||
}
|
||||
|
||||
const response = await query(this.urlHist.replace('{GRANULARITY}', '3600').replace('{CURRENCY}', currency));
|
||||
const response = await query(this.urlHist.replace('{GRANULARITY}', type === 'hour' ? '3600' : '86400').replace('{CURRENCY}', currency));
|
||||
const pricesRaw = response ? response : [];
|
||||
|
||||
for (const price of pricesRaw as any[]) {
|
||||
|
@ -16,7 +16,7 @@ class FtxApi implements PriceFeed {
|
||||
return response ? parseInt(response['result']['last'], 10) : -1;
|
||||
}
|
||||
|
||||
public async $fetchRecentHourlyPrice(currencies: string[]): Promise<PriceHistory> {
|
||||
public async $fetchRecentPrice(currencies: string[], type: 'hour' | 'day'): Promise<PriceHistory> {
|
||||
const priceHistory: PriceHistory = {};
|
||||
|
||||
for (const currency of currencies) {
|
||||
@ -24,7 +24,7 @@ class FtxApi implements PriceFeed {
|
||||
continue;
|
||||
}
|
||||
|
||||
const response = await query(this.urlHist.replace('{GRANULARITY}', '3600').replace('{CURRENCY}', currency));
|
||||
const response = await query(this.urlHist.replace('{GRANULARITY}', type === 'hour' ? '3600' : '86400').replace('{CURRENCY}', currency));
|
||||
const pricesRaw = response ? response['result'] : [];
|
||||
|
||||
for (const price of pricesRaw as any[]) {
|
||||
|
@ -16,7 +16,7 @@ class GeminiApi implements PriceFeed {
|
||||
return response ? parseInt(response['last'], 10) : -1;
|
||||
}
|
||||
|
||||
public async $fetchRecentHourlyPrice(currencies: string[]): Promise<PriceHistory> {
|
||||
public async $fetchRecentPrice(currencies: string[], type: 'hour' | 'day'): Promise<PriceHistory> {
|
||||
const priceHistory: PriceHistory = {};
|
||||
|
||||
for (const currency of currencies) {
|
||||
@ -24,7 +24,7 @@ class GeminiApi implements PriceFeed {
|
||||
continue;
|
||||
}
|
||||
|
||||
const response = await query(this.urlHist.replace('{GRANULARITY}', '1hr').replace('{CURRENCY}', currency));
|
||||
const response = await query(this.urlHist.replace('{GRANULARITY}', type === 'hour' ? '1hr' : '1day').replace('{CURRENCY}', currency));
|
||||
const pricesRaw = response ? response : [];
|
||||
|
||||
for (const price of pricesRaw as any[]) {
|
||||
|
@ -26,7 +26,7 @@ class KrakenApi implements PriceFeed {
|
||||
return response ? parseInt(response['result'][this.getTicker(currency)]['c'][0], 10) : -1;
|
||||
}
|
||||
|
||||
public async $fetchRecentHourlyPrice(currencies: string[]): Promise<PriceHistory> {
|
||||
public async $fetchRecentPrice(currencies: string[], type: 'hour' | 'day'): Promise<PriceHistory> {
|
||||
const priceHistory: PriceHistory = {};
|
||||
|
||||
for (const currency of currencies) {
|
||||
@ -62,7 +62,7 @@ class KrakenApi implements PriceFeed {
|
||||
// CHF weekly price history goes back to timestamp 1575504000 (December 5, 2019)
|
||||
// AUD weekly price history goes back to timestamp 1591833600 (June 11, 2020)
|
||||
|
||||
const priceHistory: any = {}; // map: timestamp -> Prices
|
||||
let priceHistory: any = {}; // map: timestamp -> Prices
|
||||
|
||||
for (const currency of this.currencies) {
|
||||
const response = await query(this.urlHist.replace('{GRANULARITY}', '10080') + currency);
|
||||
@ -83,6 +83,10 @@ class KrakenApi implements PriceFeed {
|
||||
}
|
||||
|
||||
for (const time in priceHistory) {
|
||||
if (priceHistory[time].USD === -1) {
|
||||
delete priceHistory[time];
|
||||
continue;
|
||||
}
|
||||
await PricesRepository.$savePrices(parseInt(time, 10), priceHistory[time]);
|
||||
}
|
||||
|
||||
|
@ -16,7 +16,7 @@ export interface PriceFeed {
|
||||
currencies: string[];
|
||||
|
||||
$fetchPrice(currency): Promise<number>;
|
||||
$fetchRecentHourlyPrice(currencies: string[]): Promise<PriceHistory>;
|
||||
$fetchRecentPrice(currencies: string[], type: string): Promise<PriceHistory>;
|
||||
}
|
||||
|
||||
export interface PriceHistory {
|
||||
@ -177,13 +177,16 @@ class PriceUpdater {
|
||||
}
|
||||
if (insertedCount > 0) {
|
||||
logger.notice(`Inserted ${insertedCount} MtGox USD weekly price history into db`);
|
||||
} else {
|
||||
logger.debug(`Inserted ${insertedCount} MtGox USD weekly price history into db`);
|
||||
}
|
||||
|
||||
// Insert Kraken weekly prices
|
||||
await new KrakenApi().$insertHistoricalPrice();
|
||||
|
||||
// Insert missing recent hourly prices
|
||||
await this.$insertMissingRecentPrices();
|
||||
await this.$insertMissingRecentPrices('day');
|
||||
await this.$insertMissingRecentPrices('hour');
|
||||
|
||||
this.historyInserted = true;
|
||||
this.lastHistoricalRun = new Date().getTime();
|
||||
@ -193,17 +196,17 @@ class PriceUpdater {
|
||||
* Find missing hourly prices and insert them in the database
|
||||
* It has a limited backward range and it depends on which API are available
|
||||
*/
|
||||
private async $insertMissingRecentPrices(): Promise<void> {
|
||||
private async $insertMissingRecentPrices(type: 'hour' | 'day'): Promise<void> {
|
||||
const existingPriceTimes = await PricesRepository.$getPricesTimes();
|
||||
|
||||
logger.info(`Fetching hourly price history from exchanges and saving missing ones into the database, this may take a while`);
|
||||
logger.info(`Fetching ${type === 'day' ? 'dai' : 'hour'}ly price history from exchanges and saving missing ones into the database, this may take a while`);
|
||||
|
||||
const historicalPrices: PriceHistory[] = [];
|
||||
|
||||
// Fetch all historical hourly prices
|
||||
for (const feed of this.feeds) {
|
||||
try {
|
||||
historicalPrices.push(await feed.$fetchRecentHourlyPrice(this.currencies));
|
||||
historicalPrices.push(await feed.$fetchRecentPrice(this.currencies, type));
|
||||
} catch (e) {
|
||||
logger.err(`Cannot fetch hourly historical price from ${feed.name}. Ignoring this feed. Reason: ${e instanceof Error ? e.message : e}`);
|
||||
}
|
||||
@ -250,7 +253,9 @@ class PriceUpdater {
|
||||
}
|
||||
|
||||
if (totalInserted > 0) {
|
||||
logger.notice(`Inserted ${totalInserted} hourly historical prices into the db`);
|
||||
logger.notice(`Inserted ${totalInserted} ${type === 'day' ? 'dai' : 'hour'}ly historical prices into the db`);
|
||||
} else {
|
||||
logger.debug(`Inserted ${totalInserted} ${type === 'day' ? 'dai' : 'hour'}ly historical prices into the db`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -27,6 +27,7 @@ export function prepareBlock(block: any): BlockExtended {
|
||||
name: block.pool_name,
|
||||
slug: block.pool_slug,
|
||||
} : undefined),
|
||||
usd: block?.extras?.usd ?? block.usd ?? null,
|
||||
}
|
||||
};
|
||||
}
|
||||
|
3
contributors/oleonardolima.txt
Normal file
3
contributors/oleonardolima.txt
Normal file
@ -0,0 +1,3 @@
|
||||
I hereby accept the terms of the Contributor License Agreement in the CONTRIBUTING.md file of the mempool/mempool git repository as of July 25, 2022.
|
||||
|
||||
Signed: oleonardolima
|
@ -20,7 +20,8 @@
"USER_AGENT": "__MEMPOOL_USER_AGENT__",
"STDOUT_LOG_MIN_PRIORITY": "__MEMPOOL_STDOUT_LOG_MIN_PRIORITY__",
"INDEXING_BLOCKS_AMOUNT": __MEMPOOL_INDEXING_BLOCKS_AMOUNT__,
"BLOCKS_SUMMARIES_INDEXING": __MEMPOOL_BLOCKS_SUMMARIES_INDEXING__
"BLOCKS_SUMMARIES_INDEXING": __MEMPOOL_BLOCKS_SUMMARIES_INDEXING__,
"AUTOMATIC_BLOCK_REINDEXING": __MEMPOOL_AUTOMATIC_BLOCK_REINDEXING__
},
"CORE_RPC": {
"HOST": "__CORE_RPC_HOST__",

@ -22,6 +22,8 @@ __MEMPOOL_EXTERNAL_MAX_RETRY__=${MEMPOOL_EXTERNAL_MAX_RETRY:=1}
__MEMPOOL_EXTERNAL_RETRY_INTERVAL__=${MEMPOOL_EXTERNAL_RETRY_INTERVAL:=0}
__MEMPOOL_USER_AGENT__=${MEMPOOL_USER_AGENT:=mempool}
__MEMPOOL_STDOUT_LOG_MIN_PRIORITY__=${MEMPOOL_STDOUT_LOG_MIN_PRIORITY:=info}
__MEMPOOL_INDEXING_BLOCKS_AMOUNT__=${MEMPOOL_INDEXING_BLOCKS_AMOUNT:=false}
__MEMPOOL_AUTOMATIC_BLOCK_REINDEXING__=${MEMPOOL_AUTOMATIC_BLOCK_REINDEXING:=false}

# CORE_RPC
__CORE_RPC_HOST__=${CORE_RPC_HOST:=127.0.0.1}
@ -110,6 +112,8 @@ sed -i "s!__MEMPOOL_EXTERNAL_MAX_RETRY__!${__MEMPOOL_EXTERNAL_MAX_RETRY__}!g" me
sed -i "s!__MEMPOOL_EXTERNAL_RETRY_INTERVAL__!${__MEMPOOL_EXTERNAL_RETRY_INTERVAL__}!g" mempool-config.json
sed -i "s!__MEMPOOL_USER_AGENT__!${__MEMPOOL_USER_AGENT__}!g" mempool-config.json
sed -i "s/__MEMPOOL_STDOUT_LOG_MIN_PRIORITY__/${__MEMPOOL_STDOUT_LOG_MIN_PRIORITY__}/g" mempool-config.json
sed -i "s/__MEMPOOL_INDEXING_BLOCKS_AMOUNT__/${__MEMPOOL_INDEXING_BLOCKS_AMOUNT__}/g" mempool-config.json
sed -i "s/__MEMPOOL_AUTOMATIC_BLOCK_REINDEXING__/${__MEMPOOL_AUTOMATIC_BLOCK_REINDEXING__}/g" mempool-config.json

sed -i "s/__CORE_RPC_HOST__/${__CORE_RPC_HOST__}/g" mempool-config.json
sed -i "s/__CORE_RPC_PORT__/${__CORE_RPC_PORT__}/g" mempool-config.json
@ -14,10 +14,11 @@
"@typescript-eslint/ban-types": 1,
"@typescript-eslint/no-empty-function": 1,
"@typescript-eslint/no-explicit-any": 1,
"@typescript-eslint/no-inferrable-types": 1,
"@typescript-eslint/no-inferrable-types": 0,
"@typescript-eslint/no-namespace": 1,
"@typescript-eslint/no-this-alias": 1,
"@typescript-eslint/no-var-requires": 1,
"@typescript-eslint/explicit-function-return-type": 1,
"no-case-declarations": 1,
"no-console": 1,
"no-constant-condition": 1,
@ -29,6 +30,8 @@
"no-useless-catch": 1,
"no-var": 1,
"prefer-const": 1,
"prefer-rest-params": 1
"prefer-rest-params": 1,
"quotes": [1, "single", { "allowTemplateLiterals": true }],
"semi": 1
}
}
@ -35,21 +35,23 @@ const getRectangle = ($el) => $el[0].getBoundingClientRect();
describe('Mainnet', () => {
beforeEach(() => {
//cy.intercept('/sockjs-node/info*').as('socket');
cy.intercept('/api/block-height/*').as('block-height');
cy.intercept('/api/block/*').as('block');
cy.intercept('/api/block/*/txs/0').as('block-txs');
cy.intercept('/api/tx/*/outspends').as('tx-outspends');
cy.intercept('/resources/pools.json').as('pools');
// cy.intercept('/api/block-height/*').as('block-height');
// cy.intercept('/api/v1/block/*').as('block');
// cy.intercept('/api/block/*/txs/0').as('block-txs');
// cy.intercept('/api/v1/block/*/summary').as('block-summary');
// cy.intercept('/api/v1/outspends/*').as('outspends');
// cy.intercept('/api/tx/*/outspends').as('tx-outspends');
// cy.intercept('/resources/pools.json').as('pools');

// Search Auto Complete
cy.intercept('/api/address-prefix/1wiz').as('search-1wiz');
cy.intercept('/api/address-prefix/1wizS').as('search-1wizS');
cy.intercept('/api/address-prefix/1wizSA').as('search-1wizSA');

Cypress.Commands.add('waitForBlockData', () => {
cy.wait('@tx-outspends');
cy.wait('@pools');
});
// Cypress.Commands.add('waitForBlockData', () => {
//   cy.wait('@tx-outspends');
//   cy.wait('@pools');
// });
});

if (baseModule === 'mempool') {
@ -121,20 +123,20 @@ describe('Mainnet', () => {
cy.visit('/');
cy.get('.search-box-container > .form-control').type('1wiz').then(() => {
cy.wait('@search-1wiz');
cy.get('ngb-typeahead-window button.dropdown-item').should('have.length', 10);
cy.get('app-search-results button.dropdown-item').should('have.length', 10);
});

cy.get('.search-box-container > .form-control').type('S').then(() => {
cy.wait('@search-1wizS');
cy.get('ngb-typeahead-window button.dropdown-item').should('have.length', 5);
cy.get('app-search-results button.dropdown-item').should('have.length', 5);
});

cy.get('.search-box-container > .form-control').type('A').then(() => {
cy.wait('@search-1wizSA');
cy.get('ngb-typeahead-window button.dropdown-item').should('have.length', 1)
cy.get('app-search-results button.dropdown-item').should('have.length', 1)
});

cy.get('ngb-typeahead-window button.dropdown-item.active').click().then(() => {
cy.get('app-search-results button.dropdown-item.active').click().then(() => {
cy.url().should('include', '/address/1wizSAYSbuyXbt9d8JV8ytm5acqq2TorC');
cy.waitForSkeletonGone();
cy.get('.text-center').should('not.have.text', 'Invalid Bitcoin address');
@ -145,8 +147,8 @@ describe('Mainnet', () => {
it(`allows searching for partial case insensitive bech32m addresses: ${searchTerm}`, () => {
cy.visit('/');
cy.get('.search-box-container > .form-control').type(searchTerm).then(() => {
cy.get('ngb-typeahead-window button.dropdown-item').should('have.length', 1);
cy.get('ngb-typeahead-window button.dropdown-item.active').click().then(() => {
cy.get('app-search-results button.dropdown-item').should('have.length', 1);
cy.get('app-search-results button.dropdown-item.active').click().then(() => {
cy.url().should('include', '/address/bc1pqyqszqgpqyqszqgpqyqszqgpqyqszqgpqyqszqgpqyqszqgpqyqsyjer9e');
cy.waitForSkeletonGone();
cy.get('.text-center').should('not.have.text', 'Invalid Bitcoin address');
@ -159,8 +161,8 @@ describe('Mainnet', () => {
it(`allows searching for partial case insensitive bech32 addresses: ${searchTerm}`, () => {
cy.visit('/');
cy.get('.search-box-container > .form-control').type(searchTerm).then(() => {
cy.get('ngb-typeahead-window button.dropdown-item').should('have.length', 1);
cy.get('ngb-typeahead-window button.dropdown-item.active').click().then(() => {
cy.get('app-search-results button.dropdown-item').should('have.length', 1);
cy.get('app-search-results button.dropdown-item.active').click().then(() => {
cy.url().should('include', '/address/bc1q000375vxcuf5v04lmwy22vy2thvhqkxghgq7dy');
cy.waitForSkeletonGone();
cy.get('.text-center').should('not.have.text', 'Invalid Bitcoin address');
@ -409,7 +411,7 @@ describe('Mainnet', () => {

it('loads the tv screen - desktop', () => {
cy.viewport('macbook-16');
cy.visit('/');
cy.visit('/graphs/mempool');
cy.waitForSkeletonGone();
cy.get('#btn-tv').click().then(() => {
cy.viewport('macbook-16');
@ -60,10 +60,10 @@ describe('Signet', () => {
});
});

describe('tv mode', () => {
describe.skip('tv mode', () => {
it('loads the tv screen - desktop', () => {
cy.viewport('macbook-16');
cy.visit('/signet');
cy.visit('/signet/graphs');
cy.waitForSkeletonGone();
cy.get('#btn-tv').click().then(() => {
cy.get('.chart-holder').should('be.visible');
@ -73,19 +73,17 @@ describe('Signet', () => {
});

it('loads the tv screen - mobile', () => {
cy.visit('/signet');
cy.visit('/signet/graphs');
cy.waitForSkeletonGone();
cy.get('#btn-tv').click().then(() => {
cy.viewport('iphone-8');
cy.get('.chart-holder').should('be.visible');
cy.get('.tv-only').should('not.exist');
//TODO: Remove comment when the bug is fixed
//cy.get('#mempool-block-0').should('be.visible');
cy.get('#mempool-block-0').should('be.visible');
});
});
});


it('loads the api screen', () => {
cy.visit('/signet');
cy.waitForSkeletonGone();

@ -63,18 +63,17 @@ describe('Testnet', () => {
describe('tv mode', () => {
it('loads the tv screen - desktop', () => {
cy.viewport('macbook-16');
cy.visit('/testnet');
cy.visit('/testnet/graphs');
cy.waitForSkeletonGone();
cy.get('#btn-tv').click().then(() => {
cy.wait(1000);
cy.get('.tv-only').should('not.exist');
//TODO: Remove comment when the bug is fixed
//cy.get('#mempool-block-0').should('be.visible');
cy.get('#mempool-block-0').should('be.visible');
});
});

it('loads the tv screen - mobile', () => {
cy.visit('/testnet');
cy.visit('/testnet/graphs');
cy.waitForSkeletonGone();
cy.get('#btn-tv').click().then(() => {
cy.viewport('iphone-6');

@ -16,5 +16,6 @@
"MEMPOOL_WEBSITE_URL": "https://mempool.space",
"LIQUID_WEBSITE_URL": "https://liquid.network",
"BISQ_WEBSITE_URL": "https://bisq.markets",
"MINING_DASHBOARD": true
"MINING_DASHBOARD": true,
"LIGHTNING": false
}
72
frontend/package-lock.json
generated
@ -1,12 +1,12 @@
|
||||
{
|
||||
"name": "mempool-frontend",
|
||||
"version": "2.4.1-dev",
|
||||
"version": "2.5.0-dev",
|
||||
"lockfileVersion": 2,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "mempool-frontend",
|
||||
"version": "2.4.1-dev",
|
||||
"version": "2.5.0-dev",
|
||||
"license": "GNU Affero General Public License v3.0",
|
||||
"dependencies": {
|
||||
"@angular-devkit/build-angular": "~13.3.7",
|
||||
@ -34,6 +34,7 @@
|
||||
"clipboard": "^2.0.10",
|
||||
"domino": "^2.1.6",
|
||||
"echarts": "~5.3.2",
|
||||
"echarts-gl": "^2.0.9",
|
||||
"express": "^4.17.1",
|
||||
"lightweight-charts": "~3.8.0",
|
||||
"ngx-echarts": "8.0.1",
|
||||
@ -6396,6 +6397,11 @@
|
||||
"webpack": ">=4.0.1"
|
||||
}
|
||||
},
|
||||
"node_modules/claygl": {
|
||||
"version": "1.3.0",
|
||||
"resolved": "https://registry.npmjs.org/claygl/-/claygl-1.3.0.tgz",
|
||||
"integrity": "sha512-+gGtJjT6SSHD2l2yC3MCubW/sCV40tZuSs5opdtn79vFSGUgp/lH139RNEQ6Jy078/L0aV8odCw8RSrUcMfLaQ=="
|
||||
},
|
||||
"node_modules/clean-stack": {
|
||||
"version": "2.2.0",
|
||||
"resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz",
|
||||
@ -6588,11 +6594,11 @@
|
||||
"integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ=="
|
||||
},
|
||||
"node_modules/common-shakeify": {
|
||||
"version": "0.6.2",
|
||||
"resolved": "https://registry.npmjs.org/common-shakeify/-/common-shakeify-0.6.2.tgz",
|
||||
"integrity": "sha512-vxlXr26fqxm8ZJ0jh8MlvpeN6IbyUKqsVmgb4rAjDM/0f4nKebiHaAXpF/Mm86W9ENR5iSI7UOnUTylpVyplUA==",
|
||||
"version": "1.1.1",
|
||||
"resolved": "https://registry.npmjs.org/common-shakeify/-/common-shakeify-1.1.1.tgz",
|
||||
"integrity": "sha512-M9hTU14RkpKvNggSU4zJIzgm89inwjnhipxvKxCNms/gM77R7keRqOqGYIM/Jr4BBhtbZB8ZF//raYqAbHk/DA==",
|
||||
"dependencies": {
|
||||
"@goto-bus-stop/common-shake": "^2.2.0",
|
||||
"@goto-bus-stop/common-shake": "^2.3.0",
|
||||
"convert-source-map": "^1.5.1",
|
||||
"through2": "^2.0.3",
|
||||
"transform-ast": "^2.4.3",
|
||||
@ -8107,6 +8113,18 @@
|
||||
"zrender": "5.3.1"
|
||||
}
|
||||
},
|
||||
"node_modules/echarts-gl": {
|
||||
"version": "2.0.9",
|
||||
"resolved": "https://registry.npmjs.org/echarts-gl/-/echarts-gl-2.0.9.tgz",
|
||||
"integrity": "sha512-oKeMdkkkpJGWOzjgZUsF41DOh6cMsyrGGXimbjK2l6Xeq/dBQu4ShG2w2Dzrs/1bD27b2pLTGSaUzouY191gzA==",
|
||||
"dependencies": {
|
||||
"claygl": "^1.2.1",
|
||||
"zrender": "^5.1.1"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"echarts": "^5.1.2"
|
||||
}
|
||||
},
|
||||
"node_modules/echarts/node_modules/tslib": {
|
||||
"version": "2.3.0",
|
||||
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.3.0.tgz",
|
||||
@ -8603,7 +8621,7 @@
|
||||
"node_modules/escope": {
|
||||
"version": "3.6.0",
|
||||
"resolved": "https://registry.npmjs.org/escope/-/escope-3.6.0.tgz",
|
||||
"integrity": "sha1-4Bl16BJ4GhY6ba392AOY3GTIicM=",
|
||||
"integrity": "sha512-75IUQsusDdalQEW/G/2esa87J7raqdJF+Ca0/Xm5C3Q58Nr4yVYjZGp/P1+2xiEVgXRrA39dpRb8LcshajbqDQ==",
|
||||
"dependencies": {
|
||||
"es6-map": "^0.1.3",
|
||||
"es6-weak-map": "^2.0.1",
|
||||
@ -16304,15 +16322,15 @@
|
||||
"integrity": "sha512-NB6Dk1A9xgQPMoGqC5CVXn123gWyte215ONT5Pp5a0yt4nlEoO1ZWeCwpncaekPHXO60i47ihFnZPiRPjRMq4Q=="
|
||||
},
|
||||
"node_modules/tinyify": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/tinyify/-/tinyify-3.0.0.tgz",
|
||||
"integrity": "sha512-RtjVjC1xwwxt8AMVfxEmo+FzRJB6p5sAOtFaJj8vMrkWShtArsM4dLVRWhx2Vc07Me3NWgmP7pi9UPm/a2XNNA==",
|
||||
"version": "3.1.0",
|
||||
"resolved": "https://registry.npmjs.org/tinyify/-/tinyify-3.1.0.tgz",
|
||||
"integrity": "sha512-r4tHoDkWhhoItWbxJ3KCHXask3hJN7gCUkR5PLfnQzQagTA6oDkzhCbiEDHkMqo7Ck7vVSA1pTP1gDc9p1AC1w==",
|
||||
"dependencies": {
|
||||
"@goto-bus-stop/envify": "^5.0.0",
|
||||
"acorn-node": "^1.8.2",
|
||||
"browser-pack-flat": "^3.0.9",
|
||||
"bundle-collapser": "^1.3.0",
|
||||
"common-shakeify": "^0.6.0",
|
||||
"common-shakeify": "^1.1.1",
|
||||
"dash-ast": "^1.0.0",
|
||||
"minify-stream": "^2.0.1",
|
||||
"multisplice": "^1.0.0",
|
||||
@ -22520,6 +22538,11 @@
|
||||
"integrity": "sha512-g38K9Cm5WRwlaH6g03B9OEz/0qRizI+2I7n+Gz+L5DxXJAPAiWQvwlYNm1V1jkdpUv95bOe/ASm2vfi/G560jQ==",
|
||||
"requires": {}
|
||||
},
|
||||
"claygl": {
|
||||
"version": "1.3.0",
|
||||
"resolved": "https://registry.npmjs.org/claygl/-/claygl-1.3.0.tgz",
|
||||
"integrity": "sha512-+gGtJjT6SSHD2l2yC3MCubW/sCV40tZuSs5opdtn79vFSGUgp/lH139RNEQ6Jy078/L0aV8odCw8RSrUcMfLaQ=="
|
||||
},
|
||||
"clean-stack": {
|
||||
"version": "2.2.0",
|
||||
"resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz",
|
||||
@ -22670,11 +22693,11 @@
|
||||
"integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ=="
|
||||
},
|
||||
"common-shakeify": {
|
||||
"version": "0.6.2",
|
||||
"resolved": "https://registry.npmjs.org/common-shakeify/-/common-shakeify-0.6.2.tgz",
|
||||
"integrity": "sha512-vxlXr26fqxm8ZJ0jh8MlvpeN6IbyUKqsVmgb4rAjDM/0f4nKebiHaAXpF/Mm86W9ENR5iSI7UOnUTylpVyplUA==",
|
||||
"version": "1.1.1",
|
||||
"resolved": "https://registry.npmjs.org/common-shakeify/-/common-shakeify-1.1.1.tgz",
|
||||
"integrity": "sha512-M9hTU14RkpKvNggSU4zJIzgm89inwjnhipxvKxCNms/gM77R7keRqOqGYIM/Jr4BBhtbZB8ZF//raYqAbHk/DA==",
|
||||
"requires": {
|
||||
"@goto-bus-stop/common-shake": "^2.2.0",
|
||||
"@goto-bus-stop/common-shake": "^2.3.0",
|
||||
"convert-source-map": "^1.5.1",
|
||||
"through2": "^2.0.3",
|
||||
"transform-ast": "^2.4.3",
|
||||
@ -23866,6 +23889,15 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"echarts-gl": {
|
||||
"version": "2.0.9",
|
||||
"resolved": "https://registry.npmjs.org/echarts-gl/-/echarts-gl-2.0.9.tgz",
|
||||
"integrity": "sha512-oKeMdkkkpJGWOzjgZUsF41DOh6cMsyrGGXimbjK2l6Xeq/dBQu4ShG2w2Dzrs/1bD27b2pLTGSaUzouY191gzA==",
|
||||
"requires": {
|
||||
"claygl": "^1.2.1",
|
||||
"zrender": "^5.1.1"
|
||||
}
|
||||
},
|
||||
"ee-first": {
|
||||
"version": "1.1.1",
|
||||
"resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz",
|
||||
@ -24256,7 +24288,7 @@
|
||||
"escope": {
|
||||
"version": "3.6.0",
|
||||
"resolved": "https://registry.npmjs.org/escope/-/escope-3.6.0.tgz",
|
||||
"integrity": "sha1-4Bl16BJ4GhY6ba392AOY3GTIicM=",
|
||||
"integrity": "sha512-75IUQsusDdalQEW/G/2esa87J7raqdJF+Ca0/Xm5C3Q58Nr4yVYjZGp/P1+2xiEVgXRrA39dpRb8LcshajbqDQ==",
|
||||
"requires": {
|
||||
"es6-map": "^0.1.3",
|
||||
"es6-weak-map": "^2.0.1",
|
||||
@ -30040,15 +30072,15 @@
|
||||
"integrity": "sha512-NB6Dk1A9xgQPMoGqC5CVXn123gWyte215ONT5Pp5a0yt4nlEoO1ZWeCwpncaekPHXO60i47ihFnZPiRPjRMq4Q=="
|
||||
},
|
||||
"tinyify": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/tinyify/-/tinyify-3.0.0.tgz",
|
||||
"integrity": "sha512-RtjVjC1xwwxt8AMVfxEmo+FzRJB6p5sAOtFaJj8vMrkWShtArsM4dLVRWhx2Vc07Me3NWgmP7pi9UPm/a2XNNA==",
|
||||
"version": "3.1.0",
|
||||
"resolved": "https://registry.npmjs.org/tinyify/-/tinyify-3.1.0.tgz",
|
||||
"integrity": "sha512-r4tHoDkWhhoItWbxJ3KCHXask3hJN7gCUkR5PLfnQzQagTA6oDkzhCbiEDHkMqo7Ck7vVSA1pTP1gDc9p1AC1w==",
|
||||
"requires": {
|
||||
"@goto-bus-stop/envify": "^5.0.0",
|
||||
"acorn-node": "^1.8.2",
|
||||
"browser-pack-flat": "^3.0.9",
|
||||
"bundle-collapser": "^1.3.0",
|
||||
"common-shakeify": "^0.6.0",
|
||||
"common-shakeify": "^1.1.1",
|
||||
"dash-ast": "^1.0.0",
|
||||
"minify-stream": "^2.0.1",
|
||||
"multisplice": "^1.0.0",
|
||||
|
@ -1,6 +1,6 @@
{
"name": "mempool-frontend",
"version": "2.4.1-dev",
"version": "2.5.0-dev",
"description": "Bitcoin mempool visualizer and blockchain explorer backend",
"license": "GNU Affero General Public License v3.0",
"homepage": "https://mempool.space",
@ -88,6 +88,7 @@
"clipboard": "^2.0.10",
"domino": "^2.1.6",
"echarts": "~5.3.2",
"echarts-gl": "^2.0.9",
"express": "^4.17.1",
"lightweight-charts": "~3.8.0",
"ngx-echarts": "8.0.1",

@ -85,7 +85,7 @@ if (configContent && configContent.BASE_MODULE == "liquid") {
});
} else {
PROXY_CONFIG.push({
context: ['/resources/pools.json', '/resources/assets.json', '/resources/assets.minimal.json'],
context: ['/resources/pools.json', '/resources/assets.json', '/resources/assets.minimal.json', '/resources/worldmap.json'],
target: "https://mempool.space",
secure: false,
changeOrigin: true,
@ -102,6 +102,16 @@ if (configContent && configContent.BASE_MODULE === 'bisq') {
}

PROXY_CONFIG.push(...[
{
context: ['/testnet/api/v1/lightning/**'],
target: `http://localhost:8999`,
secure: false,
changeOrigin: true,
proxyTimeout: 30000,
pathRewrite: {
"^/testnet": ""
},
},
{
context: ['/api/v1/**'],
target: `http://localhost:8999`,
@ -3,8 +3,12 @@ import { Routes, RouterModule, PreloadAllModules } from '@angular/router';
|
||||
import { StartComponent } from './components/start/start.component';
|
||||
import { TransactionComponent } from './components/transaction/transaction.component';
|
||||
import { BlockComponent } from './components/block/block.component';
|
||||
import { BlockAuditComponent } from './components/block-audit/block-audit.component';
|
||||
import { BlockPreviewComponent } from './components/block/block-preview.component';
|
||||
import { AddressComponent } from './components/address/address.component';
|
||||
import { AddressPreviewComponent } from './components/address/address-preview.component';
|
||||
import { MasterPageComponent } from './components/master-page/master-page.component';
|
||||
import { MasterPagePreviewComponent } from './components/master-page-preview/master-page-preview.component';
|
||||
import { AboutComponent } from './components/about/about.component';
|
||||
import { StatusViewComponent } from './components/status-view/status-view.component';
|
||||
import { TermsOfServiceComponent } from './components/terms-of-service/terms-of-service.component';
|
||||
@ -22,7 +26,7 @@ import { AssetComponent } from './components/asset/asset.component';
|
||||
import { AssetsNavComponent } from './components/assets/assets-nav/assets-nav.component';
|
||||
|
||||
let routes: Routes = [
|
||||
{
|
||||
{
|
||||
path: 'testnet',
|
||||
children: [
|
||||
{
|
||||
@ -66,7 +70,10 @@ let routes: Routes = [
|
||||
{
|
||||
path: 'address/:id',
|
||||
children: [],
|
||||
component: AddressComponent
|
||||
component: AddressComponent,
|
||||
data: {
|
||||
ogImage: true
|
||||
}
|
||||
},
|
||||
{
|
||||
path: 'tx',
|
||||
@ -84,7 +91,19 @@ let routes: Routes = [
|
||||
children: [
|
||||
{
|
||||
path: ':id',
|
||||
component: BlockComponent
|
||||
component: BlockComponent,
|
||||
data: {
|
||||
ogImage: true
|
||||
}
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
path: 'block-audit',
|
||||
children: [
|
||||
{
|
||||
path: ':id',
|
||||
component: BlockAuditComponent,
|
||||
},
|
||||
],
|
||||
},
|
||||
@ -96,6 +115,10 @@ let routes: Routes = [
|
||||
path: 'api',
|
||||
loadChildren: () => import('./docs/docs.module').then(m => m.DocsModule)
|
||||
},
|
||||
{
|
||||
path: 'lightning',
|
||||
loadChildren: () => import('./lightning/lightning.module').then(m => m.LightningModule)
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
@ -156,7 +179,10 @@ let routes: Routes = [
|
||||
{
|
||||
path: 'address/:id',
|
||||
children: [],
|
||||
component: AddressComponent
|
||||
component: AddressComponent,
|
||||
data: {
|
||||
ogImage: true
|
||||
}
|
||||
},
|
||||
{
|
||||
path: 'tx',
|
||||
@ -174,7 +200,19 @@ let routes: Routes = [
|
||||
children: [
|
||||
{
|
||||
path: ':id',
|
||||
component: BlockComponent
|
||||
component: BlockComponent,
|
||||
data: {
|
||||
ogImage: true
|
||||
}
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
path: 'block-audit',
|
||||
children: [
|
||||
{
|
||||
path: ':id',
|
||||
component: BlockAuditComponent,
|
||||
},
|
||||
],
|
||||
},
|
||||
@ -186,6 +224,10 @@ let routes: Routes = [
|
||||
path: 'api',
|
||||
loadChildren: () => import('./docs/docs.module').then(m => m.DocsModule)
|
||||
},
|
||||
{
|
||||
path: 'lightning',
|
||||
loadChildren: () => import('./lightning/lightning.module').then(m => m.LightningModule)
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
@ -243,7 +285,10 @@ let routes: Routes = [
|
||||
{
|
||||
path: 'address/:id',
|
||||
children: [],
|
||||
component: AddressComponent
|
||||
component: AddressComponent,
|
||||
data: {
|
||||
ogImage: true
|
||||
}
|
||||
},
|
||||
{
|
||||
path: 'tx',
|
||||
@ -261,7 +306,19 @@ let routes: Routes = [
|
||||
children: [
|
||||
{
|
||||
path: ':id',
|
||||
component: BlockComponent
|
||||
component: BlockComponent,
|
||||
data: {
|
||||
ogImage: true
|
||||
}
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
path: 'block-audit',
|
||||
children: [
|
||||
{
|
||||
path: ':id',
|
||||
component: BlockAuditComponent
|
||||
},
|
||||
],
|
||||
},
|
||||
@ -273,6 +330,43 @@ let routes: Routes = [
|
||||
path: 'api',
|
||||
loadChildren: () => import('./docs/docs.module').then(m => m.DocsModule)
|
||||
},
|
||||
{
|
||||
path: 'lightning',
|
||||
loadChildren: () => import('./lightning/lightning.module').then(m => m.LightningModule)
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
path: 'preview',
|
||||
component: MasterPagePreviewComponent,
|
||||
children: [
|
||||
{
|
||||
path: 'block/:id',
|
||||
component: BlockPreviewComponent
|
||||
},
|
||||
{
|
||||
path: 'testnet/block/:id',
|
||||
component: BlockPreviewComponent
|
||||
},
|
||||
{
|
||||
path: 'signet/block/:id',
|
||||
component: BlockPreviewComponent
|
||||
},
|
||||
{
|
||||
path: 'address/:id',
|
||||
children: [],
|
||||
component: AddressPreviewComponent
|
||||
},
|
||||
{
|
||||
path: 'testnet/address/:id',
|
||||
children: [],
|
||||
component: AddressPreviewComponent
|
||||
},
|
||||
{
|
||||
path: 'signet/address/:id',
|
||||
children: [],
|
||||
component: AddressPreviewComponent
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
@ -346,7 +440,10 @@ if (browserWindowEnv && browserWindowEnv.BASE_MODULE === 'liquid') {
|
||||
{
|
||||
path: 'address/:id',
|
||||
children: [],
|
||||
component: AddressComponent
|
||||
component: AddressComponent,
|
||||
data: {
|
||||
ogImage: true
|
||||
}
|
||||
},
|
||||
{
|
||||
path: 'tx',
|
||||
@ -364,7 +461,10 @@ if (browserWindowEnv && browserWindowEnv.BASE_MODULE === 'liquid') {
|
||||
children: [
|
||||
{
|
||||
path: ':id',
|
||||
component: BlockComponent
|
||||
component: BlockComponent,
|
||||
data: {
|
||||
ogImage: true
|
||||
}
|
||||
},
|
||||
],
|
||||
},
|
||||
@ -450,7 +550,10 @@ if (browserWindowEnv && browserWindowEnv.BASE_MODULE === 'liquid') {
|
||||
{
|
||||
path: 'address/:id',
|
||||
children: [],
|
||||
component: AddressComponent
|
||||
component: AddressComponent,
|
||||
data: {
|
||||
ogImage: true
|
||||
}
|
||||
},
|
||||
{
|
||||
path: 'tx',
|
||||
@ -468,7 +571,10 @@ if (browserWindowEnv && browserWindowEnv.BASE_MODULE === 'liquid') {
|
||||
children: [
|
||||
{
|
||||
path: ':id',
|
||||
component: BlockComponent
|
||||
component: BlockComponent,
|
||||
data: {
|
||||
ogImage: true
|
||||
}
|
||||
},
|
||||
],
|
||||
},
|
||||
@ -508,6 +614,30 @@ if (browserWindowEnv && browserWindowEnv.BASE_MODULE === 'liquid') {
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
path: 'preview',
|
||||
component: MasterPagePreviewComponent,
|
||||
children: [
|
||||
{
|
||||
path: 'block/:id',
|
||||
component: BlockPreviewComponent
|
||||
},
|
||||
{
|
||||
path: 'testnet/block/:id',
|
||||
component: BlockPreviewComponent
|
||||
},
|
||||
{
|
||||
path: 'address/:id',
|
||||
children: [],
|
||||
component: AddressPreviewComponent
|
||||
},
|
||||
{
|
||||
path: 'testnet/address/:id',
|
||||
children: [],
|
||||
component: AddressPreviewComponent
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
path: 'status',
|
||||
component: StatusViewComponent
|
||||
@ -536,4 +666,3 @@ if (browserWindowEnv && browserWindowEnv.BASE_MODULE === 'liquid') {
|
||||
})],
|
||||
})
|
||||
export class AppRoutingModule { }
|
||||
|
||||
|
@ -6,13 +6,16 @@ import { AppRoutingModule } from './app-routing.module';
import { AppComponent } from './components/app/app.component';
import { ElectrsApiService } from './services/electrs-api.service';
import { StateService } from './services/state.service';
import { EnterpriseService } from './services/enterprise.service';
import { WebsocketService } from './services/websocket.service';
import { AudioService } from './services/audio.service';
import { SeoService } from './services/seo.service';
import { OpenGraphService } from './services/opengraph.service';
import { SharedModule } from './shared/shared.module';
import { StorageService } from './services/storage.service';
import { HttpCacheInterceptor } from './services/http-cache.interceptor';
import { LanguageService } from './services/language.service';
import { FiatShortenerPipe } from './shared/pipes/fiat-shortener.pipe';
import { ShortenStringPipe } from './shared/pipes/shorten-string-pipe/shorten-string.pipe';
import { CapAddressPipe } from './shared/pipes/cap-address-pipe/cap-address-pipe';

@ -34,9 +37,12 @@ import { CapAddressPipe } from './shared/pipes/cap-address-pipe/cap-address-pipe
WebsocketService,
AudioService,
SeoService,
OpenGraphService,
StorageService,
EnterpriseService,
LanguageService,
ShortenStringPipe,
FiatShortenerPipe,
CapAddressPipe,
{ provide: HTTP_INTERCEPTORS, useClass: HttpCacheInterceptor, multi: true }
],
@ -5,64 +5,157 @@ const P2SH_P2WSH_COST = 35 * 4; // the WU cost for the non-witness part of P2SH
|
||||
|
||||
export function calcSegwitFeeGains(tx: Transaction) {
|
||||
// calculated in weight units
|
||||
let realizedGains = 0;
|
||||
let potentialBech32Gains = 0;
|
||||
let potentialP2shGains = 0;
|
||||
let realizedSegwitGains = 0;
|
||||
let potentialSegwitGains = 0;
|
||||
let potentialP2shSegwitGains = 0;
|
||||
let potentialTaprootGains = 0;
|
||||
let realizedTaprootGains = 0;
|
||||
|
||||
for (const vin of tx.vin) {
|
||||
if (!vin.prevout) { continue; }
|
||||
|
||||
const isP2pkh = vin.prevout.scriptpubkey_type === 'p2pkh';
|
||||
const isP2sh = vin.prevout.scriptpubkey_type === 'p2sh';
|
||||
const isP2wsh = vin.prevout.scriptpubkey_type === 'v0_p2wsh';
|
||||
const isP2wpkh = vin.prevout.scriptpubkey_type === 'v0_p2wpkh';
|
||||
const isP2tr = vin.prevout.scriptpubkey_type === 'v1_p2tr';
|
||||
const isP2pk = vin.prevout.scriptpubkey_type === 'p2pk';
|
||||
// const isBareMultisig = vin.prevout.scriptpubkey_type === 'multisig'; // type will be unknown, so use the multisig helper from the address labels
|
||||
const isBareMultisig = !!parseMultisigScript(vin.prevout.scriptpubkey_asm);
|
||||
const isP2pkh = vin.prevout.scriptpubkey_type === 'p2pkh';
|
||||
const isP2sh = vin.prevout.scriptpubkey_type === 'p2sh';
|
||||
const isP2wsh = vin.prevout.scriptpubkey_type === 'v0_p2wsh';
|
||||
const isP2wpkh = vin.prevout.scriptpubkey_type === 'v0_p2wpkh';
|
||||
const isP2tr = vin.prevout.scriptpubkey_type === 'v1_p2tr';
|
||||
|
||||
const op = vin.scriptsig ? vin.scriptsig_asm.split(' ')[0] : null;
|
||||
const isP2sh2Wpkh = isP2sh && !!vin.witness && op === 'OP_PUSHBYTES_22';
|
||||
const isP2sh2Wsh = isP2sh && !!vin.witness && op === 'OP_PUSHBYTES_34';
|
||||
const isP2shP2Wpkh = isP2sh && !!vin.witness && op === 'OP_PUSHBYTES_22';
|
||||
const isP2shP2Wsh = isP2sh && !!vin.witness && op === 'OP_PUSHBYTES_34';
|
||||
|
||||
switch (true) {
|
||||
// Native Segwit - P2WPKH/P2WSH (Bech32)
|
||||
// Native Segwit - P2WPKH/P2WSH/P2TR
|
||||
case isP2wpkh:
|
||||
case isP2wsh:
|
||||
case isP2tr:
|
||||
// maximal gains: the scriptSig is moved entirely to the witness part
|
||||
realizedGains += witnessSize(vin) * 3;
|
||||
// if taproot is used savings are 42 WU higher because it produces smaller signatures and doesn't require a pubkey in the witness
|
||||
// this number is explained above `realizedTaprootGains += 42;`
|
||||
realizedSegwitGains += (witnessSize(vin) + (isP2tr ? 42 : 0)) * 3;
|
||||
// XXX P2WSH output creation is more expensive, should we take this into consideration?
|
||||
break;
|
||||
|
||||
// Backward compatible Segwit - P2SH-P2WPKH
|
||||
case isP2sh2Wpkh:
|
||||
// the scriptSig is moved to the witness, but we have extra 21 extra non-witness bytes (48 WU)
|
||||
realizedGains += witnessSize(vin) * 3 - P2SH_P2WPKH_COST;
|
||||
potentialBech32Gains += P2SH_P2WPKH_COST;
|
||||
case isP2shP2Wpkh:
|
||||
// the scriptSig is moved to the witness, but we have extra 21 extra non-witness bytes (84 WU)
|
||||
realizedSegwitGains += witnessSize(vin) * 3 - P2SH_P2WPKH_COST;
|
||||
potentialSegwitGains += P2SH_P2WPKH_COST;
|
||||
break;
|
||||
|
||||
// Backward compatible Segwit - P2SH-P2WSH
|
||||
case isP2sh2Wsh:
|
||||
// the scriptSig is moved to the witness, but we have extra 35 extra non-witness bytes
|
||||
realizedGains += witnessSize(vin) * 3 - P2SH_P2WSH_COST;
|
||||
potentialBech32Gains += P2SH_P2WSH_COST;
|
||||
case isP2shP2Wsh:
|
||||
// the scriptSig is moved to the witness, but we have extra 35 extra non-witness bytes (140 WU)
|
||||
realizedSegwitGains += witnessSize(vin) * 3 - P2SH_P2WSH_COST;
|
||||
potentialSegwitGains += P2SH_P2WSH_COST;
|
||||
break;
|
||||
|
||||
// Non-segwit P2PKH/P2SH
|
||||
// Non-segwit P2PKH/P2SH/P2PK/bare multisig
|
||||
case isP2pkh:
|
||||
case isP2sh:
|
||||
const fullGains = scriptSigSize(vin) * 3;
|
||||
potentialBech32Gains += fullGains;
|
||||
potentialP2shGains += fullGains - (isP2pkh ? P2SH_P2WPKH_COST : P2SH_P2WSH_COST);
|
||||
case isP2pk:
|
||||
case isBareMultisig: {
|
||||
let fullGains = scriptSigSize(vin) * 3;
|
||||
if (isBareMultisig) {
|
||||
// a _bare_ multisig has the keys in the output script, but P2SH and P2WSH require them to be in the scriptSig/scriptWitness
|
||||
fullGains -= vin.prevout.scriptpubkey.length / 2;
|
||||
}
|
||||
potentialSegwitGains += fullGains;
|
||||
potentialP2shSegwitGains += fullGains - (isP2pkh ? P2SH_P2WPKH_COST : P2SH_P2WSH_COST);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: should we also consider P2PK and pay-to-bare-script (non-p2sh-wrapped) as upgradable to P2WPKH and P2WSH?
|
||||
if (isP2tr) {
|
||||
if (vin.witness.length === 1) {
|
||||
// key path spend
|
||||
// we don't know if this was a multisig or single sig (the goal of taproot :)),
|
||||
// so calculate fee savings by comparing to the cheapest single sig input type: P2WPKH and say "saved at least ...%"
|
||||
// the witness size of P2WPKH is 1 (stack size) + 1 (size) + 72 (low s signature) + 1 (size) + 33 (pubkey) = 108 WU
|
||||
// the witness size of key path P2TR is 1 (stack size) + 1 (size) + 64 (signature) = 66 WU
|
||||
realizedTaprootGains += 42;
|
||||
} else {
|
||||
// script path spend
|
||||
// complex scripts with multiple spending paths can often be made around 2x to 3x smaller with the Taproot script tree
|
||||
// because only the hash of the alternative spending path has the be in the witness data, not the entire script,
|
||||
// but only assumptions can be made because the scripts themselves are unknown (again, the goal of taproot :))
|
||||
// TODO maybe add some complex scripts that are specified somewhere, so that size is known, such as lightning scripts
|
||||
}
|
||||
} else {
|
||||
const script = isP2shP2Wsh || isP2wsh ? vin.inner_witnessscript_asm : vin.inner_redeemscript_asm;
|
||||
let replacementSize: number;
|
||||
if (
|
||||
// single sig
|
||||
isP2pk || isP2pkh || isP2wpkh || isP2shP2Wpkh ||
|
||||
// multisig
|
||||
isBareMultisig || parseMultisigScript(script)
|
||||
) {
|
||||
// the scriptSig and scriptWitness can all be replaced by a 66 witness WU with taproot
|
||||
replacementSize = 66;
|
||||
} else if (script) {
|
||||
// not single sig, not multisig: the complex scripts
|
||||
// rough calculations on spending paths
|
||||
// every OP_IF and OP_NOTIF indicates an _extra_ spending path, so add 1
|
||||
const spendingPaths = script.split(' ').filter(op => /^(OP_IF|OP_NOTIF)$/g.test(op)).length + 1;
|
||||
// now assume the script could have been split in ${spendingPaths} equal tapleaves
|
||||
replacementSize = script.length / 2 / spendingPaths +
|
||||
// but account for the leaf and branch hashes and internal key in the control block
|
||||
32 * Math.log2((spendingPaths - 1) || 1) + 33;
|
||||
}
|
||||
potentialTaprootGains += witnessSize(vin) + scriptSigSize(vin) * 4 - replacementSize;
|
||||
}
|
||||
}
|
||||
|
||||
// returned as percentage of the total tx weight
|
||||
return { realizedGains: realizedGains / (tx.weight + realizedGains) // percent of the pre-segwit tx size
|
||||
, potentialBech32Gains: potentialBech32Gains / tx.weight
|
||||
, potentialP2shGains: potentialP2shGains / tx.weight
|
||||
};
|
||||
return {
|
||||
realizedSegwitGains: realizedSegwitGains / (tx.weight + realizedSegwitGains), // percent of the pre-segwit tx size
|
||||
potentialSegwitGains: potentialSegwitGains / tx.weight,
|
||||
potentialP2shSegwitGains: potentialP2shSegwitGains / tx.weight,
|
||||
potentialTaprootGains: potentialTaprootGains / tx.weight,
|
||||
realizedTaprootGains: realizedTaprootGains / (tx.weight + realizedTaprootGains)
|
||||
};
|
||||
}
|
||||
|
||||
/** extracts m and n from a multisig script (asm), returns nothing if it is not a multisig script */
|
||||
export function parseMultisigScript(script: string): void | { m: number, n: number } {
|
||||
if (!script) {
|
||||
return;
|
||||
}
|
||||
const ops = script.split(' ');
|
||||
if (ops.length < 3 || ops.pop() !== 'OP_CHECKMULTISIG') {
|
||||
return;
|
||||
}
|
||||
const opN = ops.pop();
|
||||
if (!opN.startsWith('OP_PUSHNUM_')) {
|
||||
return;
|
||||
}
|
||||
const n = parseInt(opN.match(/[0-9]+/)[0], 10);
|
||||
if (ops.length < n * 2 + 1) {
|
||||
return;
|
||||
}
|
||||
// pop n public keys
|
||||
for (let i = 0; i < n; i++) {
|
||||
if (!/^0((2|3)\w{64}|4\w{128})$/.test(ops.pop())) {
|
||||
return;
|
||||
}
|
||||
if (!/^OP_PUSHBYTES_(33|65)$/.test(ops.pop())) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
const opM = ops.pop();
|
||||
if (!opM.startsWith('OP_PUSHNUM_')) {
|
||||
return;
|
||||
}
|
||||
const m = parseInt(opM.match(/[0-9]+/)[0], 10);
|
||||
|
||||
if (ops.length) {
|
||||
return;
|
||||
}
|
||||
|
||||
return { m, n };
|
||||
}
|
||||
|
||||
// https://github.com/shesek/move-decimal-point
|
||||
@ -101,12 +194,12 @@ export function moveDec(num: number, n: number) {
|
||||
return neg + (int || '0') + (frac.length ? '.' + frac : '');
|
||||
}
|
||||
|
||||
function zeros(n) {
|
||||
function zeros(n: number) {
|
||||
return new Array(n + 1).join('0');
|
||||
}
|
||||
|
||||
// Formats a number for display. Treats the number as a string to avoid rounding errors.
|
||||
export const formatNumber = (s, precision = null) => {
|
||||
export const formatNumber = (s: number | string, precision: number | null = null) => {
|
||||
let [ whole, dec ] = s.toString().split('.');
|
||||
|
||||
// divide numbers into groups of three separated with a thin space (U+202F, "NARROW NO-BREAK SPACE"),
|
||||
@ -128,31 +221,31 @@ export const formatNumber = (s, precision = null) => {
|
||||
};
|
||||
|
||||
// Utilities for segwitFeeGains
|
||||
const witnessSize = (vin: Vin) => vin.witness.reduce((S, w) => S + (w.length / 2), 0);
|
||||
const witnessSize = (vin: Vin) => vin.witness ? vin.witness.reduce((S, w) => S + (w.length / 2), 0) : 0;
|
||||
const scriptSigSize = (vin: Vin) => vin.scriptsig ? vin.scriptsig.length / 2 : 0;
|
||||
|
||||
// Power of ten wrapper
|
||||
export function selectPowerOfTen(val: number) {
|
||||
export function selectPowerOfTen(val: number): { divider: number, unit: string } {
|
||||
const powerOfTen = {
|
||||
exa: Math.pow(10, 18),
|
||||
peta: Math.pow(10, 15),
|
||||
terra: Math.pow(10, 12),
|
||||
tera: Math.pow(10, 12),
|
||||
giga: Math.pow(10, 9),
|
||||
mega: Math.pow(10, 6),
|
||||
kilo: Math.pow(10, 3),
|
||||
};
|
||||
|
||||
let selectedPowerOfTen;
|
||||
let selectedPowerOfTen: { divider: number, unit: string };
|
||||
if (val < powerOfTen.kilo) {
|
||||
selectedPowerOfTen = { divider: 1, unit: '' }; // no scaling
|
||||
} else if (val < powerOfTen.mega) {
|
||||
selectedPowerOfTen = { divider: powerOfTen.kilo, unit: 'k' };
|
||||
} else if (val < powerOfTen.giga) {
|
||||
selectedPowerOfTen = { divider: powerOfTen.mega, unit: 'M' };
|
||||
} else if (val < powerOfTen.terra) {
|
||||
} else if (val < powerOfTen.tera) {
|
||||
selectedPowerOfTen = { divider: powerOfTen.giga, unit: 'G' };
|
||||
} else if (val < powerOfTen.peta) {
|
||||
selectedPowerOfTen = { divider: powerOfTen.terra, unit: 'T' };
|
||||
selectedPowerOfTen = { divider: powerOfTen.tera, unit: 'T' };
|
||||
} else if (val < powerOfTen.exa) {
|
||||
selectedPowerOfTen = { divider: powerOfTen.peta, unit: 'P' };
|
||||
} else {
|
||||
@ -160,4 +253,4 @@ export function selectPowerOfTen(val: number) {
|
||||
}
|
||||
|
||||
return selectedPowerOfTen;
|
||||
}
|
||||
}
|
||||
|
@ -2,7 +2,7 @@

<div class="intro">
<span style="margin-left: auto; margin-right: -20px; margin-bottom: -20px">™</span>
<img class="logo" src="./resources/mempool-logo-bigger.png" />
<img class="logo" src="/resources/mempool-logo-bigger.png" />
<div class="version">
v{{ packetJsonVersion }} [<a href="https://github.com/mempool/mempool/commit/{{ frontendGitCommitHash }}">{{ frontendGitCommitHash }}</a>]
</div>

@ -1,4 +1,13 @@
<span
*ngIf="label"
class="badge badge-pill badge-warning"
>{{ label }}</span>
<a *ngIf="channel; else default" [routerLink]="['/lightning/channel' | relativeUrl, channel.id]">
<span
*ngIf="label"
class="badge badge-pill badge-warning"
>{{ label }}</span>
</a>

<ng-template #default>
<span
*ngIf="label"
class="badge badge-pill badge-warning"
>{{ label }}</span>
</ng-template>
@ -1,6 +1,7 @@
import { Component, OnInit, ChangeDetectionStrategy, Input } from '@angular/core';
import { Component, ChangeDetectionStrategy, Input, OnChanges } from '@angular/core';
import { Vin, Vout } from '../../interfaces/electrs.interface';
import { StateService } from 'src/app/services/state.service';
import { parseMultisigScript } from 'src/app/bitcoin.utils';

@Component({
selector: 'app-address-labels',
@ -8,11 +9,12 @@ import { StateService } from 'src/app/services/state.service';
styleUrls: ['./address-labels.component.scss'],
changeDetection: ChangeDetectionStrategy.OnPush,
})
export class AddressLabelsComponent implements OnInit {
export class AddressLabelsComponent implements OnChanges {
network = '';

@Input() vin: Vin;
@Input() vout: Vout;
@Input() channel: any;

label?: string;

@ -22,14 +24,21 @@ export class AddressLabelsComponent implements OnInit {
this.network = stateService.network;
}

ngOnInit() {
if (this.vin) {
ngOnChanges() {
if (this.channel) {
this.handleChannel();
} else if (this.vin) {
this.handleVin();
} else if (this.vout) {
this.handleVout();
}
}

handleChannel() {
const type = this.vout ? 'open' : 'close';
this.label = `Channel ${type}: ${this.channel.node_left.alias} <> ${this.channel.node_right.alias}`;
}

handleVin() {
if (this.vin.inner_witnessscript_asm) {
if (this.vin.inner_witnessscript_asm.indexOf('OP_DEPTH OP_PUSHNUM_12 OP_EQUAL OP_IF OP_PUSHNUM_11') === 0) {
@ -90,41 +99,11 @@ export class AddressLabelsComponent implements OnInit {
}

detectMultisig(script: string) {
if (!script) {
return;
}
const ops = script.split(' ');
if (ops.length < 3 || ops.pop() !== 'OP_CHECKMULTISIG') {
return;
}
const opN = ops.pop();
if (!opN.startsWith('OP_PUSHNUM_')) {
return;
}
const n = parseInt(opN.match(/[0-9]+/)[0], 10);
if (ops.length < n * 2 + 1) {
return;
}
// pop n public keys
for (let i = 0; i < n; i++) {
if (!/^0((2|3)\w{64}|4\w{128})$/.test(ops.pop())) {
return;
}
if (!/^OP_PUSHBYTES_(33|65)$/.test(ops.pop())) {
return;
}
}
const opM = ops.pop();
if (!opM.startsWith('OP_PUSHNUM_')) {
return;
}
const m = parseInt(opM.match(/[0-9]+/)[0], 10);
const ms = parseMultisigScript(script);

if (ops.length) {
return;
if (ms) {
this.label = $localize`:@@address-label.multisig:Multisig ${ms.m}:multisigM: of ${ms.n}:multisigN:`;
}

this.label = $localize`:@@address-label.multisig:Multisig ${m}:multisigM: of ${n}:multisigN:`;
}

handleVout() {
@ -0,0 +1,55 @@
|
||||
<div class="box preview-box" *ngIf="address && !error">
|
||||
<div class="row">
|
||||
<div class="col-md">
|
||||
<div class="title-address">
|
||||
<h1 i18n="shared.address">Address</h1>
|
||||
</div>
|
||||
<a [routerLink]="['/address/' | relativeUrl, addressString]" class="address-link" >
|
||||
<span class="truncated-address">{{addressString.slice(0,-4)}}</span><span class="last-four">{{addressString.slice(-4)}}</span>
|
||||
</a>
|
||||
<table class="table table-borderless table-striped">
|
||||
<tbody>
|
||||
<tr *ngIf="addressInfo && addressInfo.unconfidential">
|
||||
<td i18n="address.unconfidential">Unconfidential</td>
|
||||
<td><a [routerLink]="['/address/' | relativeUrl, addressInfo.unconfidential]">
|
||||
<span class="d-inline d-lg-none">{{ addressInfo.unconfidential | shortenString : 14 }}</span>
|
||||
<span class="d-none d-lg-inline">{{ addressInfo.unconfidential }}</span>
|
||||
</a> <app-clipboard [text]="addressInfo.unconfidential"></app-clipboard></td>
|
||||
</tr>
|
||||
<ng-template [ngIf]="!address.electrum">
|
||||
<tr>
|
||||
<td i18n="address.total-received">Total received</td>
|
||||
<td *ngIf="address.chain_stats.funded_txo_sum !== undefined; else confidentialTd"><app-amount [satoshis]="received" [noFiat]="true"></app-amount></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td i18n="address.total-sent">Total sent</td>
|
||||
<td *ngIf="address.chain_stats.spent_txo_sum !== undefined; else confidentialTd"><app-amount [satoshis]="sent" [noFiat]="true"></app-amount></td>
|
||||
</tr>
|
||||
</ng-template>
|
||||
<tr>
|
||||
<td i18n="address.balance">Balance</td>
|
||||
<td *ngIf="address.chain_stats.funded_txo_sum !== undefined; else confidentialTd"><app-amount [satoshis]="received - sent" [noFiat]="true"></app-amount></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td i18n="address.transactions">Transactions</td>
|
||||
<td>{{ txCount | number }}</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td i18n="address.unspent_txos">Unspent TXOs</td>
|
||||
<td>{{ totalUnspent | number }}</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
<div class="w-100 d-block d-md-none"></div>
|
||||
<div class="col-md qrcode-col">
|
||||
<div class="qr-wrapper">
|
||||
<app-qrcode [data]="address.address" [size]="370"></app-qrcode>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<ng-template #confidentialTd>
|
||||
<td i18n="shared.confidential">Confidential</td>
|
||||
</ng-template>
|
@ -0,0 +1,46 @@
h1 {
font-size: 42px;
margin: 0;
}

.qr-wrapper {
background-color: #FFF;
padding: 10px;
padding-bottom: 5px;
display: inline-block;
}

.qrcode-col {
width: 420px;
min-width: 420px;
flex-grow: 0;
flex-shrink: 0;
text-align: center;
}

.table {
font-size: 24px;

::ng-deep .symbol {
font-size: 18px;
}
}

.address-link {
font-size: 20px;
margin-bottom: 0.5em;
display: flex;
flex-direction: row;
align-items: baseline;
.truncated-address {
text-overflow: ellipsis;
overflow: hidden;
max-width: calc(505px - 4em);
display: inline-block;
white-space: nowrap;
}
.last-four {
display: inline-block;
white-space: nowrap;
}
}
116
frontend/src/app/components/address/address-preview.component.ts
Normal file
@ -0,0 +1,116 @@
|
||||
import { Component, OnInit, OnDestroy } from '@angular/core';
|
||||
import { ActivatedRoute, ParamMap } from '@angular/router';
|
||||
import { ElectrsApiService } from '../../services/electrs-api.service';
|
||||
import { switchMap, filter, catchError, map, tap } from 'rxjs/operators';
|
||||
import { Address, Transaction } from '../../interfaces/electrs.interface';
|
||||
import { StateService } from 'src/app/services/state.service';
|
||||
import { OpenGraphService } from 'src/app/services/opengraph.service';
|
||||
import { AudioService } from 'src/app/services/audio.service';
|
||||
import { ApiService } from 'src/app/services/api.service';
|
||||
import { of, merge, Subscription, Observable } from 'rxjs';
|
||||
import { SeoService } from 'src/app/services/seo.service';
|
||||
import { AddressInformation } from 'src/app/interfaces/node-api.interface';
|
||||
|
||||
@Component({
|
||||
selector: 'app-address-preview',
|
||||
templateUrl: './address-preview.component.html',
|
||||
styleUrls: ['./address-preview.component.scss']
|
||||
})
|
||||
export class AddressPreviewComponent implements OnInit, OnDestroy {
|
||||
network = '';
|
||||
|
||||
address: Address;
|
||||
addressString: string;
|
||||
isLoadingAddress = true;
|
||||
error: any;
|
||||
mainSubscription: Subscription;
|
||||
addressLoadingStatus$: Observable<number>;
|
||||
addressInfo: null | AddressInformation = null;
|
||||
|
||||
totalConfirmedTxCount = 0;
|
||||
loadedConfirmedTxCount = 0;
|
||||
txCount = 0;
|
||||
received = 0;
|
||||
sent = 0;
|
||||
totalUnspent = 0;
|
||||
|
||||
constructor(
|
||||
private route: ActivatedRoute,
|
||||
private electrsApiService: ElectrsApiService,
|
||||
private stateService: StateService,
|
||||
private apiService: ApiService,
|
||||
private seoService: SeoService,
|
||||
private openGraphService: OpenGraphService,
|
||||
) { }
|
||||
|
||||
ngOnInit() {
|
||||
this.openGraphService.setPreviewLoading();
|
||||
this.stateService.networkChanged$.subscribe((network) => this.network = network);
|
||||
|
||||
this.addressLoadingStatus$ = this.route.paramMap
|
||||
.pipe(
|
||||
switchMap(() => this.stateService.loadingIndicators$),
|
||||
map((indicators) => indicators['address-' + this.addressString] !== undefined ? indicators['address-' + this.addressString] : 0)
|
||||
);
|
||||
|
||||
this.mainSubscription = this.route.paramMap
|
||||
.pipe(
|
||||
switchMap((params: ParamMap) => {
|
||||
this.error = undefined;
|
||||
this.isLoadingAddress = true;
|
||||
this.loadedConfirmedTxCount = 0;
|
||||
this.address = null;
|
||||
this.addressInfo = null;
|
||||
this.addressString = params.get('id') || '';
|
||||
if (/^[A-Z]{2,5}1[AC-HJ-NP-Z02-9]{8,100}$/.test(this.addressString)) {
|
||||
this.addressString = this.addressString.toLowerCase();
|
||||
}
|
||||
this.seoService.setTitle($localize`:@@address.component.browser-title:Address: ${this.addressString}:INTERPOLATION:`);
|
||||
|
||||
return this.electrsApiService.getAddress$(this.addressString)
|
||||
.pipe(
|
||||
catchError((err) => {
|
||||
this.isLoadingAddress = false;
|
||||
this.error = err;
|
||||
console.log(err);
|
||||
return of(null);
|
||||
})
|
||||
);
|
||||
})
|
||||
)
|
||||
.pipe(
|
||||
filter((address) => !!address),
|
||||
tap((address: Address) => {
|
||||
if ((this.stateService.network === 'liquid' || this.stateService.network === 'liquidtestnet') && /^([m-zA-HJ-NP-Z1-9]{26,35}|[a-z]{2,5}1[ac-hj-np-z02-9]{8,100}|[a-km-zA-HJ-NP-Z1-9]{80})$/.test(address.address)) {
|
||||
this.apiService.validateAddress$(address.address)
|
||||
.subscribe((addressInfo) => {
|
||||
this.addressInfo = addressInfo;
|
||||
});
|
||||
}
|
||||
this.address = address;
|
||||
this.updateChainStats();
|
||||
this.isLoadingAddress = false;
|
||||
this.openGraphService.setPreviewReady();
|
||||
})
|
||||
)
|
||||
.subscribe(() => {},
|
||||
(error) => {
|
||||
console.log(error);
|
||||
this.error = error;
|
||||
this.isLoadingAddress = false;
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
updateChainStats() {
|
||||
this.received = this.address.chain_stats.funded_txo_sum + this.address.mempool_stats.funded_txo_sum;
|
||||
this.sent = this.address.chain_stats.spent_txo_sum + this.address.mempool_stats.spent_txo_sum;
|
||||
this.txCount = this.address.chain_stats.tx_count + this.address.mempool_stats.tx_count;
|
||||
this.totalConfirmedTxCount = this.address.chain_stats.tx_count;
|
||||
this.totalUnspent = this.address.chain_stats.funded_txo_count - this.address.chain_stats.spent_txo_count;
|
||||
}
|
||||
|
||||
ngOnDestroy() {
|
||||
this.mainSubscription.unsubscribe();
|
||||
}
|
||||
}
|
@ -2,6 +2,7 @@ import { Location } from '@angular/common';
|
||||
import { Component, HostListener, OnInit, Inject, LOCALE_ID, HostBinding } from '@angular/core';
|
||||
import { Router, NavigationEnd } from '@angular/router';
|
||||
import { StateService } from 'src/app/services/state.service';
|
||||
import { OpenGraphService } from 'src/app/services/opengraph.service';
|
||||
import { NgbTooltipConfig } from '@ng-bootstrap/ng-bootstrap';
|
||||
|
||||
@Component({
|
||||
@ -16,6 +17,7 @@ export class AppComponent implements OnInit {
|
||||
constructor(
|
||||
public router: Router,
|
||||
private stateService: StateService,
|
||||
private openGraphService: OpenGraphService,
|
||||
private location: Location,
|
||||
tooltipConfig: NgbTooltipConfig,
|
||||
@Inject(LOCALE_ID) private locale: string,
|
||||
|
@ -2,7 +2,7 @@
|
||||
<nav class="navbar navbar-expand-md navbar-dark bg-dark">
|
||||
<a class="navbar-brand" [routerLink]="['/' | relativeUrl]" style="position: relative;">
|
||||
<ng-container *ngIf="{ val: connectionState$ | async } as connectionState">
|
||||
<img src="./resources/bisq/bisq-markets-logo.png" height="35" width="140" class="logo" [ngStyle]="{'opacity': connectionState.val === 2 ? 1 : 0.5 }">
|
||||
<img src="/resources/bisq/bisq-markets-logo.png" height="35" width="140" class="logo" [ngStyle]="{'opacity': connectionState.val === 2 ? 1 : 0.5 }">
|
||||
<div class="connection-badge">
|
||||
<div class="badge badge-warning" *ngIf="connectionState.val === 0" i18n="master-page.offline">Offline</div>
|
||||
<div class="badge badge-warning" *ngIf="connectionState.val === 1" i18n="master-page.reconnecting">Reconnecting...</div>
|
||||
@ -12,16 +12,16 @@
|
||||
|
||||
<div ngbDropdown (window:resize)="onResize($event)" class="dropdown-container" *ngIf="env.TESTNET_ENABLED || env.SIGNET_ENABLED || env.LIQUID_ENABLED || env.BISQ_ENABLED || env.LIQUID_TESTNET_ENABLED">
|
||||
<button ngbDropdownToggle type="button" class="btn btn-secondary dropdown-toggle-split" aria-haspopup="true">
|
||||
<img src="./resources/bisq-logo.png" style="width: 25px; height: 25px;" class="mr-1">
|
||||
<img src="/resources/bisq-logo.png" style="width: 25px; height: 25px;" class="mr-1">
|
||||
</button>
|
||||
<div ngbDropdownMenu [ngClass]="{'dropdown-menu-right' : isMobile}">
|
||||
<a [href]="env.MEMPOOL_WEBSITE_URL + urlLanguage" ngbDropdownItem class="mainnet"><img src="./resources/bitcoin-logo.png" style="width: 30px;" class="mr-1"> Mainnet</a>
|
||||
<a [href]="env.MEMPOOL_WEBSITE_URL + urlLanguage + '/signet'" ngbDropdownItem *ngIf="env.SIGNET_ENABLED" class="signet"><img src="./resources/signet-logo.png" style="width: 30px;" class="mr-1"> Signet</a>
|
||||
<a [href]="env.MEMPOOL_WEBSITE_URL + urlLanguage + '/testnet'" ngbDropdownItem *ngIf="env.TESTNET_ENABLED" class="testnet"><img src="./resources/testnet-logo.png" style="width: 30px;" class="mr-1"> Testnet</a>
|
||||
<a [href]="env.MEMPOOL_WEBSITE_URL + urlLanguage" ngbDropdownItem class="mainnet"><img src="/resources/bitcoin-logo.png" style="width: 30px;" class="mr-1"> Mainnet</a>
|
||||
<a [href]="env.MEMPOOL_WEBSITE_URL + urlLanguage + '/signet'" ngbDropdownItem *ngIf="env.SIGNET_ENABLED" class="signet"><img src="/resources/signet-logo.png" style="width: 30px;" class="mr-1"> Signet</a>
|
||||
<a [href]="env.MEMPOOL_WEBSITE_URL + urlLanguage + '/testnet'" ngbDropdownItem *ngIf="env.TESTNET_ENABLED" class="testnet"><img src="/resources/testnet-logo.png" style="width: 30px;" class="mr-1"> Testnet</a>
|
||||
<h6 class="dropdown-header" i18n="master-page.layer2-networks-header">Layer 2 Networks</h6>
|
||||
<a ngbDropdownItem class="mainnet active" routerLink="/"><img src="./resources/bisq-logo.png" style="width: 30px;" class="mr-1"> Bisq</a>
|
||||
<a [href]="env.LIQUID_WEBSITE_URL + urlLanguage" ngbDropdownItem *ngIf="env.LIQUID_ENABLED" class="liquid"><img src="./resources/liquid-logo.png" style="width: 30px;" class="mr-1"> Liquid</a>
|
||||
<a [href]="env.LIQUID_WEBSITE_URL + urlLanguage + '/testnet'" ngbDropdownItem *ngIf="env.LIQUID_TESTNET_ENABLED" class="liquidtestnet"><img src="./resources/liquidtestnet-logo.png" style="width: 30px;" class="mr-1"> Liquid Testnet</a>
|
||||
<a ngbDropdownItem class="mainnet active" routerLink="/"><img src="/resources/bisq-logo.png" style="width: 30px;" class="mr-1"> Bisq</a>
|
||||
<a [href]="env.LIQUID_WEBSITE_URL + urlLanguage" ngbDropdownItem *ngIf="env.LIQUID_ENABLED" class="liquid"><img src="/resources/liquid-logo.png" style="width: 30px;" class="mr-1"> Liquid</a>
|
||||
<a [href]="env.LIQUID_WEBSITE_URL + urlLanguage + '/testnet'" ngbDropdownItem *ngIf="env.LIQUID_TESTNET_ENABLED" class="liquidtestnet"><img src="/resources/liquidtestnet-logo.png" style="width: 30px;" class="mr-1"> Liquid Testnet</a>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
|
@ -0,0 +1,111 @@
<div class="container-xl" (window:resize)="onResize($event)">

  <div *ngIf="(auditObservable$ | async) as blockAudit; else skeleton">
    <div class="title-block" id="block">
      <h1>
        <span class="next-previous-blocks">
          <span i18n="shared.block-title">Block </span>
          <a [routerLink]="['/block/' | relativeUrl, blockAudit.id]">{{ blockAudit.height }}</a>
          <span i18n="shared.template-vs-mined">Template vs Mined</span>
        </span>
      </h1>

      <div class="grow"></div>

      <button [routerLink]="['/' | relativeUrl]" class="btn btn-sm">✕</button>
    </div>

    <!-- OVERVIEW -->
    <div class="box mb-3">
      <div class="row">
        <!-- LEFT COLUMN -->
        <div class="col-sm">
          <table class="table table-borderless table-striped">
            <tbody>
              <tr>
                <td class="td-width" i18n="block.hash">Hash</td>
                <td><a [routerLink]="['/block/' | relativeUrl, blockAudit.id]" title="{{ blockAudit.id }}">{{ blockAudit.id | shortenString : 13 }}</a>
                  <app-clipboard class="d-none d-sm-inline-block" [text]="blockAudit.id"></app-clipboard>
                </td>
              </tr>
              <tr>
                <td i18n="blockAudit.timestamp">Timestamp</td>
                <td>
                  &lrm;{{ blockAudit.timestamp * 1000 | date:'yyyy-MM-dd HH:mm' }}
                  <div class="lg-inline">
                    <i class="symbol">(<app-time-since [time]="blockAudit.timestamp" [fastRender]="true">
                    </app-time-since>)</i>
                  </div>
                </td>
              </tr>
              <tr>
                <td i18n="blockAudit.size">Size</td>
                <td [innerHTML]="'&lrm;' + (blockAudit.size | bytes: 2)"></td>
              </tr>
              <tr>
                <td i18n="block.weight">Weight</td>
                <td [innerHTML]="'&lrm;' + (blockAudit.weight | wuBytes: 2)"></td>
              </tr>
            </tbody>
          </table>
        </div>

        <!-- RIGHT COLUMN -->
        <div class="col-sm" *ngIf="blockAudit">
          <table class="table table-borderless table-striped">
            <tbody>
              <tr>
                <td class="td-width" i18n="shared.transaction-count">Transactions</td>
                <td>{{ blockAudit.tx_count }}</td>
              </tr>
              <tr>
                <td i18n="block.match-rate">Match rate</td>
                <td>{{ blockAudit.matchRate }}%</td>
              </tr>
              <tr>
                <td i18n="block.missing-txs">Missing txs</td>
                <td>{{ blockAudit.missingTxs.length }}</td>
              </tr>
              <tr>
                <td i18n="block.added-txs">Added txs</td>
                <td>{{ blockAudit.addedTxs.length }}</td>
              </tr>
            </tbody>
          </table>
        </div>
      </div> <!-- row -->
    </div> <!-- box -->

    <!-- ADDED vs MISSING button -->
    <div class="d-flex justify-content-center menu mt-3" *ngIf="isMobile">
      <a routerLinkActive="active" class="btn btn-primary w-50 mr-1 ml-1 menu-button" i18n="block.missing-txs"
        fragment="missing" (click)="changeMode('missing')">Missing</a>
      <a routerLinkActive="active" class="btn btn-primary w-50 mr-1 ml-1 menu-button" i18n="block.added-txs"
        fragment="added" (click)="changeMode('added')">Added</a>
    </div>
  </div>

  <!-- VISUALIZATIONS -->
  <div class="box">
    <div class="row">
      <!-- MISSING TX RENDERING -->
      <div class="col-sm" *ngIf="webGlEnabled">
        <app-block-overview-graph #blockGraphTemplate [isLoading]="isLoading" [resolution]="75"
          [blockLimit]="stateService.blockVSize" [orientation]="'top'" [flip]="false"
          (txClickEvent)="onTxClick($event)"></app-block-overview-graph>
      </div>

      <!-- ADDED TX RENDERING -->
      <div class="col-sm" *ngIf="webGlEnabled && !isMobile">
        <app-block-overview-graph #blockGraphMined [isLoading]="isLoading" [resolution]="75"
          [blockLimit]="stateService.blockVSize" [orientation]="'top'" [flip]="false"
          (txClickEvent)="onTxClick($event)"></app-block-overview-graph>
      </div>
    </div> <!-- row -->
  </div> <!-- box -->

  <ng-template #skeleton></ng-template>

</div>
@ -0,0 +1,40 @@
.title-block {
  border-top: none;
}

.table {
  tr td {
    &:last-child {
      text-align: right;
      @media (min-width: 768px) {
        text-align: left;
      }
    }
  }
}

.block-tx-title {
  display: flex;
  justify-content: space-between;
  flex-direction: column;
  position: relative;
  @media (min-width: 550px) {
    flex-direction: row;
  }
  h2 {
    line-height: 1;
    margin: 0;
    position: relative;
    padding-bottom: 10px;
    @media (min-width: 550px) {
      padding-bottom: 0px;
      align-self: end;
    }
  }
}

.menu-button {
  @media (min-width: 768px) {
    max-width: 150px;
  }
}
120
frontend/src/app/components/block-audit/block-audit.component.ts
Normal file
@ -0,0 +1,120 @@
import { Component, OnDestroy, OnInit, ViewChild } from '@angular/core';
import { ActivatedRoute, ParamMap, Router } from '@angular/router';
import { Observable } from 'rxjs';
import { map, share, switchMap, tap } from 'rxjs/operators';
import { BlockAudit, TransactionStripped } from 'src/app/interfaces/node-api.interface';
import { ApiService } from 'src/app/services/api.service';
import { StateService } from 'src/app/services/state.service';
import { detectWebGL } from 'src/app/shared/graphs.utils';
import { RelativeUrlPipe } from 'src/app/shared/pipes/relative-url/relative-url.pipe';
import { BlockOverviewGraphComponent } from '../block-overview-graph/block-overview-graph.component';

@Component({
  selector: 'app-block-audit',
  templateUrl: './block-audit.component.html',
  styleUrls: ['./block-audit.component.scss'],
  styles: [`
    .loadingGraphs {
      position: absolute;
      top: 50%;
      left: calc(50% - 15px);
      z-index: 100;
    }
  `],
})
export class BlockAuditComponent implements OnInit, OnDestroy {
  blockAudit: BlockAudit = undefined;
  transactions: string[];
  auditObservable$: Observable<BlockAudit>;

  paginationMaxSize: number;
  page = 1;
  itemsPerPage: number;

  mode: 'missing' | 'added' = 'missing';
  isLoading = true;
  webGlEnabled = true;
  isMobile = window.innerWidth <= 767.98;

  @ViewChild('blockGraphTemplate') blockGraphTemplate: BlockOverviewGraphComponent;
  @ViewChild('blockGraphMined') blockGraphMined: BlockOverviewGraphComponent;

  constructor(
    private route: ActivatedRoute,
    public stateService: StateService,
    private router: Router,
    private apiService: ApiService
  ) {
    this.webGlEnabled = detectWebGL();
  }

  ngOnDestroy(): void {
  }

  ngOnInit(): void {
    this.paginationMaxSize = window.matchMedia('(max-width: 670px)').matches ? 3 : 5;
    this.itemsPerPage = this.stateService.env.ITEMS_PER_PAGE;

    this.auditObservable$ = this.route.paramMap.pipe(
      switchMap((params: ParamMap) => {
        const blockHash: string = params.get('id') || '';
        return this.apiService.getBlockAudit$(blockHash)
          .pipe(
            map((response) => {
              const blockAudit = response.body;
              for (let i = 0; i < blockAudit.template.length; ++i) {
                if (blockAudit.missingTxs.includes(blockAudit.template[i].txid)) {
                  blockAudit.template[i].status = 'missing';
                } else if (blockAudit.addedTxs.includes(blockAudit.template[i].txid)) {
                  blockAudit.template[i].status = 'added';
                } else {
                  blockAudit.template[i].status = 'found';
                }
              }
              for (let i = 0; i < blockAudit.transactions.length; ++i) {
                if (blockAudit.missingTxs.includes(blockAudit.transactions[i].txid)) {
                  blockAudit.transactions[i].status = 'missing';
                } else if (blockAudit.addedTxs.includes(blockAudit.transactions[i].txid)) {
                  blockAudit.transactions[i].status = 'added';
                } else {
                  blockAudit.transactions[i].status = 'found';
                }
              }
              return blockAudit;
            }),
            tap((blockAudit) => {
              this.changeMode(this.mode);
              if (this.blockGraphTemplate) {
                this.blockGraphTemplate.destroy();
                this.blockGraphTemplate.setup(blockAudit.template);
              }
              if (this.blockGraphMined) {
                this.blockGraphMined.destroy();
                this.blockGraphMined.setup(blockAudit.transactions);
              }
              this.isLoading = false;
            }),
          );
      }),
      share()
    );
  }

  onResize(event: any) {
    this.isMobile = event.target.innerWidth <= 767.98;
    this.paginationMaxSize = event.target.innerWidth < 670 ? 3 : 5;
  }

  changeMode(mode: 'missing' | 'added') {
    this.router.navigate([], { fragment: mode });
    this.mode = mode;
  }

  onTxClick(event: TransactionStripped): void {
    const url = new RelativeUrlPipe(this.stateService).transform(`/tx/${event.txid}`);
    this.router.navigate([url]);
  }

  pageChange(page: number, target: HTMLElement) {
  }
}
@ -2,10 +2,13 @@

<div class="full-container">
<div class="card-header mb-0 mb-md-4">
<span i18n="mining.block-fee-rates">Block Fee Rates</span>
<button class="btn" style="margin: 0 0 4px 0px" (click)="onSaveChart()">
<fa-icon [icon]="['fas', 'download']" [fixedWidth]="true"></fa-icon>
</button>
<div class="d-flex d-md-block align-items-baseline">
<span i18n="mining.block-fee-rates">Block Fee Rates</span>
<button class="btn p-0 pl-2" style="margin: 0 0 4px 0px" (click)="onSaveChart()">
<fa-icon [icon]="['fas', 'download']" [fixedWidth]="true"></fa-icon>
</button>
</div>

<form [formGroup]="radioGroupForm" class="formRadioGroup" *ngIf="(statsObservable$ | async) as stats">
<div class="btn-group btn-group-toggle" ngbRadioGroup name="radioBasic" formControlName="dateSpan">
<label ngbButtonLabel class="btn-primary btn-sm" *ngIf="stats.blockCount >= 144">
@ -180,8 +180,8 @@ export class BlockFeeRatesGraphComponent implements OnInit {
}
let tooltip = `<b style="color: white; margin-left: 2px">${formatterXAxis(this.locale, this.timespan, parseInt(data[0].axisValue, 10))}</b><br>`;

for (const pool of data.reverse()) {
tooltip += `${pool.marker} ${pool.seriesName}: ${pool.data[1]} sats/vByte<br>`;
for (const rate of data.reverse()) {
tooltip += `${rate.marker} ${rate.seriesName}: ${rate.data[1]} sats/vByte<br>`;
}

if (['24h', '3d'].includes(this.timespan)) {
@ -2,21 +2,15 @@

<div class="full-container">
<div class="card-header mb-0 mb-md-4">
<span i18n="mining.block-fees">Block Fees</span>
<button class="btn" style="margin: 0 0 4px 0px" (click)="onSaveChart()">
<fa-icon [icon]="['fas', 'download']" [fixedWidth]="true"></fa-icon>
</button>
<div class="d-flex d-md-block align-items-baseline">
<span i18n="mining.block-fees">Block Fees</span>
<button class="btn p-0 pl-2" style="margin: 0 0 4px 0px" (click)="onSaveChart()">
<fa-icon [icon]="['fas', 'download']" [fixedWidth]="true"></fa-icon>
</button>
</div>

<form [formGroup]="radioGroupForm" class="formRadioGroup" *ngIf="(statsObservable$ | async) as stats">
<div class="btn-group btn-group-toggle" ngbRadioGroup name="radioBasic" formControlName="dateSpan">
<label ngbButtonLabel class="btn-primary btn-sm" *ngIf="stats.blockCount >= 144">
<input ngbButton type="radio" [value]="'24h'" fragment="24h" [routerLink]="['/graphs/mining/block-fees' | relativeUrl]"> 24h
</label>
<label ngbButtonLabel class="btn-primary btn-sm" *ngIf="stats.blockCount >= 432">
<input ngbButton type="radio" [value]="'3d'" fragment="3d" [routerLink]="['/graphs/mining/block-fees' | relativeUrl]"> 3D
</label>
<label ngbButtonLabel class="btn-primary btn-sm" *ngIf="stats.blockCount >= 1008">
<input ngbButton type="radio" [value]="'1w'" fragment="1w" [routerLink]="['/graphs/mining/block-fees' | relativeUrl]"> 1W
</label>
<label ngbButtonLabel class="btn-primary btn-sm" *ngIf="stats.blockCount >= 4320">
<input ngbButton type="radio" [value]="'1m'" fragment="1m" [routerLink]="['/graphs/mining/block-fees' | relativeUrl]"> 1M
</label>
@ -4,12 +4,13 @@ import { Observable } from 'rxjs';
import { map, share, startWith, switchMap, tap } from 'rxjs/operators';
import { ApiService } from 'src/app/services/api.service';
import { SeoService } from 'src/app/services/seo.service';
import { formatNumber } from '@angular/common';
import { formatCurrency, formatNumber, getCurrencySymbol } from '@angular/common';
import { FormBuilder, FormGroup } from '@angular/forms';
import { download, formatterXAxis, formatterXAxisLabel } from 'src/app/shared/graphs.utils';
import { download, formatterXAxis, formatterXAxisLabel, formatterXAxisTimeCategory } from 'src/app/shared/graphs.utils';
import { StorageService } from 'src/app/services/storage.service';
import { MiningService } from 'src/app/services/mining.service';
import { ActivatedRoute } from '@angular/router';
import { FiatShortenerPipe } from 'src/app/shared/pipes/fiat-shortener.pipe';

@Component({
selector: 'app-block-fees-graph',
@ -51,6 +52,7 @@ export class BlockFeesGraphComponent implements OnInit {
private storageService: StorageService,
private miningService: MiningService,
private route: ActivatedRoute,
private fiatShortenerPipe: FiatShortenerPipe,
) {
this.radioGroupForm = this.formBuilder.group({ dateSpan: '1y' });
this.radioGroupForm.controls.dateSpan.setValue('1y');
@ -58,14 +60,14 @@

ngOnInit(): void {
this.seoService.setTitle($localize`:@@6c453b11fd7bd159ae30bc381f367bc736d86909:Block Fees`);
this.miningWindowPreference = this.miningService.getDefaultTimespan('24h');
this.miningWindowPreference = this.miningService.getDefaultTimespan('1m');
this.radioGroupForm = this.formBuilder.group({ dateSpan: this.miningWindowPreference });
this.radioGroupForm.controls.dateSpan.setValue(this.miningWindowPreference);

this.route
.fragment
.subscribe((fragment) => {
if (['24h', '3d', '1w', '1m', '3m', '6m', '1y', '2y', '3y', 'all'].indexOf(fragment) > -1) {
if (['1m', '3m', '6m', '1y', '2y', '3y', 'all'].indexOf(fragment) > -1) {
this.radioGroupForm.controls.dateSpan.setValue(fragment, { emitEvent: false });
}
});
@ -82,6 +84,7 @@ export class BlockFeesGraphComponent implements OnInit {
tap((response) => {
this.prepareChartOptions({
blockFees: response.body.map(val => [val.timestamp * 1000, val.avgFees / 100000000, val.avgHeight]),
blockFeesUSD: response.body.filter(val => val.USD > 0).map(val => [val.timestamp * 1000, val.avgFees / 100000000 * val.USD, val.avgHeight]),
});
this.isLoading = false;
}),
@ -97,17 +100,32 @@ export class BlockFeesGraphComponent implements OnInit {
}

prepareChartOptions(data) {
let title: object;
if (data.blockFees.length === 0) {
title = {
textStyle: {
color: 'grey',
fontSize: 15
},
text: $localize`:@@23555386d8af1ff73f297e89dd4af3f4689fb9dd:Indexing blocks`,
left: 'center',
top: 'center'
};
}

this.chartOptions = {
animation: false,
title: title,
color: [
new graphic.LinearGradient(0, 0, 0, 0.65, [
{ offset: 0, color: '#F4511E' },
{ offset: 0.25, color: '#FB8C00' },
{ offset: 0.5, color: '#FFB300' },
{ offset: 0.75, color: '#FDD835' },
{ offset: 1, color: '#7CB342' }
new graphic.LinearGradient(0, 0, 0, 1, [
{ offset: 0, color: '#FDD835' },
{ offset: 1, color: '#FB8C00' },
]),
new graphic.LinearGradient(0, 0, 0, 1, [
{ offset: 0, color: '#C0CA33' },
{ offset: 1, color: '#1B5E20' },
]),
],
animation: false,
grid: {
top: 30,
bottom: 80,
@ -128,30 +146,54 @@ export class BlockFeesGraphComponent implements OnInit {
align: 'left',
},
borderColor: '#000',
formatter: (ticks) => {
let tooltip = `<b style="color: white; margin-left: 2px">${formatterXAxis(this.locale, this.timespan, parseInt(ticks[0].axisValue, 10))}</b><br>`;
tooltip += `${ticks[0].marker} ${ticks[0].seriesName}: ${formatNumber(ticks[0].data[1], this.locale, '1.3-3')} BTC`;
tooltip += `<br>`;
formatter: function (data) {
if (data.length <= 0) {
return '';
}
let tooltip = `<b style="color: white; margin-left: 2px">
${formatterXAxis(this.locale, this.timespan, parseInt(data[0].axisValue, 10))}</b><br>`;

if (['24h', '3d'].includes(this.timespan)) {
tooltip += `<small>` + $localize`At block: ${ticks[0].data[2]}` + `</small>`;
} else {
tooltip += `<small>` + $localize`Around block: ${ticks[0].data[2]}` + `</small>`;
for (const tick of data) {
if (tick.seriesIndex === 0) {
tooltip += `${tick.marker} ${tick.seriesName}: ${formatNumber(tick.data[1], this.locale, '1.3-3')} BTC<br>`;
} else if (tick.seriesIndex === 1) {
tooltip += `${tick.marker} ${tick.seriesName}: ${formatCurrency(tick.data[1], this.locale, getCurrencySymbol('USD', 'narrow'), 'USD', '1.0-0')}<br>`;
}
}

tooltip += `<small>* On average around block ${data[0].data[2]}</small>`;
return tooltip;
}
}.bind(this)
},
xAxis: {
name: formatterXAxisLabel(this.locale, this.timespan),
nameLocation: 'middle',
nameTextStyle: {
padding: [10, 0, 0, 0],
},
xAxis: data.blockFees.length === 0 ? undefined :
{
type: 'time',
splitNumber: this.isMobile() ? 5 : 10,
axisLabel: {
hideOverlap: true,
}
},
yAxis: [
legend: data.blockFees.length === 0 ? undefined : {
data: [
{
name: 'Fees BTC',
inactiveColor: 'rgb(110, 112, 121)',
textStyle: {
color: 'white',
},
icon: 'roundRect',
},
{
name: 'Fees USD',
inactiveColor: 'rgb(110, 112, 121)',
textStyle: {
color: 'white',
},
icon: 'roundRect',
},
],
},
yAxis: data.blockFees.length === 0 ? undefined : [
{
type: 'value',
axisLabel: {
@ -168,21 +210,51 @@ export class BlockFeesGraphComponent implements OnInit {
}
},
},
],
series: [
{
zlevel: 0,
name: $localize`:@@c20172223f84462032664d717d739297e5a9e2fe:Fees`,
showSymbol: false,
symbol: 'none',
data: data.blockFees,
type: 'line',
lineStyle: {
width: 2,
type: 'value',
position: 'right',
axisLabel: {
color: 'rgb(110, 112, 121)',
formatter: function(val) {
return this.fiatShortenerPipe.transform(val);
}.bind(this)
},
splitLine: {
show: false,
},
},
],
dataZoom: [{
series: data.blockFees.length === 0 ? undefined : [
{
legendHoverLink: false,
zlevel: 0,
yAxisIndex: 0,
name: 'Fees BTC',
data: data.blockFees,
type: 'line',
smooth: 0.25,
symbol: 'none',
lineStyle: {
width: 1,
opacity: 1,
}
},
{
legendHoverLink: false,
zlevel: 1,
yAxisIndex: 1,
name: 'Fees USD',
data: data.blockFeesUSD,
type: 'line',
smooth: 0.25,
symbol: 'none',
lineStyle: {
width: 2,
opacity: 1,
}
},
],
dataZoom: data.blockFees.length === 0 ? undefined : [{
type: 'inside',
realtime: true,
zoomLock: true,
@ -1,6 +1,5 @@
import { Component, ElementRef, ViewChild, HostListener, Input, Output, EventEmitter, NgZone, AfterViewInit, OnDestroy } from '@angular/core';
import { MempoolBlockDelta, TransactionStripped } from 'src/app/interfaces/websocket.interface';
import { WebsocketService } from 'src/app/services/websocket.service';
import { TransactionStripped } from 'src/app/interfaces/websocket.interface';
import { FastVertexArray } from './fast-vertex-array';
import BlockScene from './block-scene';
import TxSprite from './tx-sprite';
@ -25,6 +25,7 @@ export default class TxView implements TransactionStripped {
vsize: number;
value: number;
feerate: number;
status?: 'found' | 'missing' | 'added';

initialised: boolean;
vertexArray: FastVertexArray;
@ -43,6 +44,7 @@ export default class TxView implements TransactionStripped {
this.vsize = tx.vsize;
this.value = tx.value;
this.feerate = tx.fee / tx.vsize;
this.status = tx.status;
this.initialised = false;
this.vertexArray = vertexArray;
@ -140,6 +142,14 @@ export default class TxView implements TransactionStripped {
}

getColor(): Color {
// Block audit
if (this.status === 'missing') {
return hexToColor('039BE5');
} else if (this.status === 'added') {
return hexToColor('D81B60');
}

// Block component
const feeLevelIndex = feeLevels.findIndex((feeLvl) => Math.max(1, this.feerate) < feeLvl) - 1;
return hexToColor(mempoolFeeColors[feeLevelIndex] || mempoolFeeColors[mempoolFeeColors.length - 1]);
}
@ -2,10 +2,13 @@

<div class="full-container">
<div class="card-header mb-0 mb-md-4">
<span i18n="mining.block-prediction-accuracy">Block Prediction Accuracy</span>
<button class="btn" style="margin: 0 0 4px 0px" (click)="onSaveChart()">
<fa-icon [icon]="['fas', 'download']" [fixedWidth]="true"></fa-icon>
</button>
<div class="d-flex d-md-block align-items-baseline">
<span i18n="mining.block-prediction-accuracy">Block Prediction Accuracy</span>
<button class="btn p-0 pl-2" style="margin: 0 0 4px 0px" (click)="onSaveChart()">
<fa-icon [icon]="['fas', 'download']" [fixedWidth]="true"></fa-icon>
</button>
</div>

<form [formGroup]="radioGroupForm" class="formRadioGroup" *ngIf="(statsObservable$ | async) as stats">
<div class="btn-group btn-group-toggle" ngbRadioGroup name="radioBasic" formControlName="dateSpan">
<label ngbButtonLabel class="btn-primary btn-sm" *ngIf="stats.blockCount >= 144">
@ -98,7 +98,21 @@ export class BlockPredictionGraphComponent implements OnInit {
}

prepareChartOptions(data) {
let title: object;
if (data.length === 0) {
title = {
textStyle: {
color: 'grey',
fontSize: 15
},
text: $localize`No data to display yet. Try again later.`,
left: 'center',
top: 'center'
};
}

this.chartOptions = {
title: data.length === 0 ? title : undefined,
animation: false,
grid: {
top: 30,
@ -133,17 +147,16 @@ export class BlockPredictionGraphComponent implements OnInit {
return tooltip;
}
},
xAxis: {
xAxis: data.length === 0 ? undefined : {
name: formatterXAxisLabel(this.locale, this.timespan),
nameLocation: 'middle',
nameTextStyle: {
padding: [10, 0, 0, 0],
},
type: 'category',
boundaryGap: false,
axisLine: { onZero: true },
axisLabel: {
formatter: val => formatterXAxisTimeCategory(this.locale, this.timespan, parseInt(val, 10)),
formatter: val => formatterXAxisTimeCategory(this.locale, this.timespan, parseInt(val, 10) * 1000),
align: 'center',
fontSize: 11,
lineHeight: 12,
@ -152,7 +165,7 @@ export class BlockPredictionGraphComponent implements OnInit {
},
data: data.map(prediction => prediction[0])
},
yAxis: [
yAxis: data.length === 0 ? undefined : [
{
type: 'value',
axisLabel: {
@ -170,7 +183,7 @@ export class BlockPredictionGraphComponent implements OnInit {
},
},
],
series: [
series: data.length === 0 ? undefined : [
{
zlevel: 0,
name: $localize`Match rate`,
@ -183,9 +196,10 @@ export class BlockPredictionGraphComponent implements OnInit {
})),
type: 'bar',
barWidth: '90%',
barMaxWidth: 50,
},
],
dataZoom: [{
dataZoom: data.length === 0 ? undefined : [{
type: 'inside',
realtime: true,
zoomLock: true,
@ -3,21 +3,15 @@
<div class="full-container">

<div class="card-header mb-0 mb-md-4">
<span i18n="mining.block-rewards">Block Rewards</span>
<button class="btn" style="margin: 0 0 4px 0px" (click)="onSaveChart()">
<fa-icon [icon]="['fas', 'download']" [fixedWidth]="true"></fa-icon>
</button>
<div class="d-flex d-md-block align-items-baseline">
<span i18n="mining.block-rewards">Block Rewards</span>
<button class="btn p-0 pl-2" style="margin: 0 0 4px 0px" (click)="onSaveChart()">
<fa-icon [icon]="['fas', 'download']" [fixedWidth]="true"></fa-icon>
</button>
</div>

<form [formGroup]="radioGroupForm" class="formRadioGroup" *ngIf="(statsObservable$ | async) as stats">
<div class="btn-group btn-group-toggle" ngbRadioGroup name="radioBasic" formControlName="dateSpan">
<label ngbButtonLabel class="btn-primary btn-sm" *ngIf="stats.blockCount >= 144">
<input ngbButton type="radio" [value]="'24h'" fragment="24h" [routerLink]="['/graphs/mining/block-rewards' | relativeUrl]"> 24h
</label>
<label ngbButtonLabel class="btn-primary btn-sm" *ngIf="stats.blockCount >= 432">
<input ngbButton type="radio" [value]="'3d'" fragment="3d" [routerLink]="['/graphs/mining/block-rewards' | relativeUrl]"> 3D
</label>
<label ngbButtonLabel class="btn-primary btn-sm" *ngIf="stats.blockCount >= 1008">
<input ngbButton type="radio" [value]="'1w'" fragment="1w" [routerLink]="['/graphs/mining/block-rewards' | relativeUrl]"> 1W
</label>
<label ngbButtonLabel class="btn-primary btn-sm" *ngIf="stats.blockCount >= 4320">
<input ngbButton type="radio" [value]="'1m'" fragment="1m" [routerLink]="['/graphs/mining/block-rewards' | relativeUrl]"> 1M
</label>
@ -4,12 +4,13 @@ import { Observable } from 'rxjs';
import { map, share, startWith, switchMap, tap } from 'rxjs/operators';
import { ApiService } from 'src/app/services/api.service';
import { SeoService } from 'src/app/services/seo.service';
import { formatNumber } from '@angular/common';
import { formatCurrency, formatNumber, getCurrencySymbol } from '@angular/common';
import { FormBuilder, FormGroup } from '@angular/forms';
import { download, formatterXAxis, formatterXAxisLabel } from 'src/app/shared/graphs.utils';
import { download, formatterXAxis, formatterXAxisLabel, formatterXAxisTimeCategory } from 'src/app/shared/graphs.utils';
import { MiningService } from 'src/app/services/mining.service';
import { StorageService } from 'src/app/services/storage.service';
import { ActivatedRoute } from '@angular/router';
import { FiatShortenerPipe } from 'src/app/shared/pipes/fiat-shortener.pipe';

@Component({
selector: 'app-block-rewards-graph',
@ -51,19 +52,20 @@ export class BlockRewardsGraphComponent implements OnInit {
private miningService: MiningService,
private storageService: StorageService,
private route: ActivatedRoute,
private fiatShortenerPipe: FiatShortenerPipe,
) {
}

ngOnInit(): void {
this.seoService.setTitle($localize`:@@8ba8fe810458280a83df7fdf4c614dfc1a826445:Block Rewards`);
this.miningWindowPreference = this.miningService.getDefaultTimespan('24h');
this.miningWindowPreference = this.miningService.getDefaultTimespan('3m');
this.radioGroupForm = this.formBuilder.group({ dateSpan: this.miningWindowPreference });
this.radioGroupForm.controls.dateSpan.setValue(this.miningWindowPreference);

this.route
.fragment
.subscribe((fragment) => {
if (['24h', '3d', '1w', '1m', '3m', '6m', '1y', '2y', '3y', 'all'].indexOf(fragment) > -1) {
if (['3m', '6m', '1y', '2y', '3y', 'all'].indexOf(fragment) > -1) {
this.radioGroupForm.controls.dateSpan.setValue(fragment, { emitEvent: false });
}
});
@ -80,6 +82,7 @@ export class BlockRewardsGraphComponent implements OnInit {
tap((response) => {
this.prepareChartOptions({
blockRewards: response.body.map(val => [val.timestamp * 1000, val.avgRewards / 100000000, val.avgHeight]),
blockRewardsUSD: response.body.filter(val => val.USD > 0).map(val => [val.timestamp * 1000, val.avgRewards / 100000000 * val.USD, val.avgHeight]),
});
this.isLoading = false;
}),
@ -95,15 +98,32 @@ export class BlockRewardsGraphComponent implements OnInit {
}

prepareChartOptions(data) {
let title: object;
if (data.blockRewards.length === 0) {
title = {
textStyle: {
color: 'grey',
fontSize: 15
},
text: $localize`:@@23555386d8af1ff73f297e89dd4af3f4689fb9dd:Indexing blocks`,
left: 'center',
top: 'center'
};
}

const scaleFactor = 0.1;

this.chartOptions = {
title: title,
animation: false,
color: [
new graphic.LinearGradient(0, 0, 0, 0.65, [
{ offset: 0, color: '#F4511E' },
{ offset: 0.25, color: '#FB8C00' },
{ offset: 0.5, color: '#FFB300' },
{ offset: 0.75, color: '#FDD835' },
{ offset: 1, color: '#7CB342' }
new graphic.LinearGradient(0, 0, 0, 1, [
{ offset: 0, color: '#FDD835' },
{ offset: 1, color: '#FB8C00' },
]),
new graphic.LinearGradient(0, 0, 0, 1, [
{ offset: 0, color: '#C0CA33' },
{ offset: 1, color: '#1B5E20' },
]),
],
grid: {
@ -126,33 +146,55 @@ export class BlockRewardsGraphComponent implements OnInit {
align: 'left',
},
borderColor: '#000',
formatter: (ticks) => {
let tooltip = `<b style="color: white; margin-left: 2px">${formatterXAxis(this.locale, this.timespan, parseInt(ticks[0].axisValue, 10))}</b><br>`;
tooltip += `${ticks[0].marker} ${ticks[0].seriesName}: ${formatNumber(ticks[0].data[1], this.locale, '1.3-3')} BTC`;
tooltip += `<br>`;
formatter: function (data) {
if (data.length <= 0) {
return '';
}
let tooltip = `<b style="color: white; margin-left: 2px">
${formatterXAxis(this.locale, this.timespan, parseInt(data[0].axisValue, 10))}</b><br>`;

if (['24h', '3d'].includes(this.timespan)) {
tooltip += `<small>` + $localize`At block: ${ticks[0].data[2]}` + `</small>`;
} else {
tooltip += `<small>` + $localize`Around block: ${ticks[0].data[2]}` + `</small>`;
for (const tick of data) {
if (tick.seriesIndex === 0) {
tooltip += `${tick.marker} ${tick.seriesName}: ${formatNumber(tick.data[1], this.locale, '1.3-3')} BTC<br>`;
} else if (tick.seriesIndex === 1) {
tooltip += `${tick.marker} ${tick.seriesName}: ${formatCurrency(tick.data[1], this.locale, getCurrencySymbol('USD', 'narrow'), 'USD', '1.0-0')}<br>`;
}
}

tooltip += `<small>* On average around block ${data[0].data[2]}</small>`;
return tooltip;
}
}.bind(this)
},
xAxis: {
name: formatterXAxisLabel(this.locale, this.timespan),
nameLocation: 'middle',
nameTextStyle: {
padding: [10, 0, 0, 0],
},
xAxis: data.blockRewards.length === 0 ? undefined :
{
type: 'time',
splitNumber: this.isMobile() ? 5 : 10,
axisLabel: {
hideOverlap: true,
}
},
yAxis: [
legend: data.blockRewards.length === 0 ? undefined : {
data: [
{
name: 'Rewards BTC',
inactiveColor: 'rgb(110, 112, 121)',
textStyle: {
color: 'white',
},
icon: 'roundRect',
},
{
name: 'Rewards USD',
inactiveColor: 'rgb(110, 112, 121)',
textStyle: {
color: 'white',
},
icon: 'roundRect',
},
],
},
yAxis: data.blockRewards.length === 0 ? undefined : [
{
min: value => Math.round(10 * value.min * 0.99) / 10,
max: value => Math.round(10 * value.max * 1.01) / 10,
type: 'value',
axisLabel: {
color: 'rgb(110, 112, 121)',
@ -160,6 +202,12 @@ export class BlockRewardsGraphComponent implements OnInit {
return `${val} BTC`;
}
},
min: (value) => {
return Math.round(value.min * (1.0 - scaleFactor) * 10) / 10;
},
max: (value) => {
return Math.round(value.max * (1.0 + scaleFactor) * 10) / 10;
},
splitLine: {
lineStyle: {
type: 'dotted',
@ -168,21 +216,56 @@ export class BlockRewardsGraphComponent implements OnInit {
}
},
},
],
series: [
{
zlevel: 0,
name: $localize`:@@12f86e6747a5ad39e62d3480ddc472b1aeab5b76:Reward`,
showSymbol: false,
symbol: 'none',
data: data.blockRewards,
type: 'line',
lineStyle: {
width: 2,
min: (value) => {
return Math.round(value.min * (1.0 - scaleFactor) * 10) / 10;
},
max: (value) => {
return Math.round(value.max * (1.0 + scaleFactor) * 10) / 10;
},
type: 'value',
position: 'right',
axisLabel: {
color: 'rgb(110, 112, 121)',
formatter: function(val) {
return this.fiatShortenerPipe.transform(val);
}.bind(this)
},
splitLine: {
show: false,
},
},
],
dataZoom: [{
series: data.blockRewards.length === 0 ? undefined : [
{
legendHoverLink: false,
zlevel: 0,
yAxisIndex: 0,
name: 'Rewards BTC',
data: data.blockRewards,
type: 'line',
smooth: 0.25,
symbol: 'none',
},
{
legendHoverLink: false,
zlevel: 1,
yAxisIndex: 1,
name: 'Rewards USD',
data: data.blockRewardsUSD,
type: 'line',
smooth: 0.25,
symbol: 'none',
lineStyle: {
width: 2,
opacity: 0.75,
},
areaStyle: {
opacity: 0.05,
}
},
],
dataZoom: data.blockRewards.length === 0 ? undefined : [{
type: 'inside',
realtime: true,
zoomLock: true,
@ -1,10 +1,12 @@
<div class="full-container">

<div class="card-header mb-0 mb-md-4">
<span i18n="mining.block-sizes-weights">Block Sizes and Weights</span>
<button class="btn" style="margin: 0 0 4px 0px" (click)="onSaveChart()">
<fa-icon [icon]="['fas', 'download']" [fixedWidth]="true"></fa-icon>
</button>
<div class="d-flex d-md-block align-items-baseline">
<span i18n="mining.block-sizes-weights">Block Sizes and Weights</span>
<button class="btn p-0 pl-2" style="margin: 0 0 4px 0px" (click)="onSaveChart()">
<fa-icon [icon]="['fas', 'download']" [fixedWidth]="true"></fa-icon>
</button>
</div>

<form [formGroup]="radioGroupForm" class="formRadioGroup" *ngIf="(blockSizesWeightsObservable$ | async) as stats">
<div class="btn-group btn-group-toggle" ngbRadioGroup name="radioBasic" formControlName="dateSpan">
@ -0,0 +1,82 @@
<div class="box preview-box" *ngIf="!error">
  <div class="row">
    <div class="col-sm">
      <h1 class="block-title">
        <ng-template [ngIf]="blockHeight === 0"><ng-container i18n="@@2303359202781425764">Genesis</ng-container>
          <span class="next-previous-blocks">
            <a [routerLink]="['/block/' | relativeUrl, blockHash]">{{ blockHeight }}</a>
          </span>
        </ng-template>
        <ng-template [ngIf]="blockHeight" i18n="shared.block-title">Block <ng-container *ngTemplateOutlet="blockTemplateContent"></ng-container></ng-template>
        <ng-template #blockTemplateContent>
          <span class="next-previous-blocks">
            <a [routerLink]="['/block/' | relativeUrl, blockHash]">{{ blockHeight }}</a>
          </span>
        </ng-template>
      </h1>
      <table class="table table-borderless table-striped">
        <tbody>
          <!-- <tr>
            <td class="td-width" i18n="block.hash">Hash</td>
            <td>&lrm;<a [routerLink]="['/block/' | relativeUrl, block?.id]" title="{{ block?.id }}">{{ block?.id | shortenString : 13 }}</a></td>
          </tr> -->
          <tr>
            <td i18n="block.timestamp">Timestamp</td>
            <td>
              {{ block?.timestamp * 1000 | date:'yyyy-MM-dd HH:mm' }}
            </td>
          </tr>
          <tr>
            <td i18n="block.weight">Weight</td>
            <td [innerHTML]="'&lrm;' + (block?.weight | wuBytes: 2)"></td>
          </tr>
          <ng-template [ngIf]="webGlEnabled">
            <tr *ngIf="block?.extras?.medianFee != undefined">
              <td class="td-width" i18n="block.median-fee">Median fee</td>
              <td>~{{ block?.extras?.medianFee | number:'1.0-0' }} <span class="symbol" i18n="shared.sat-vbyte|sat/vB">sat/vB</span></td>
            </tr>
            <ng-template [ngIf]="fees !== undefined">
              <tr>
                <td i18n="block.total-fees|Total fees in a block">Total fees</td>
                <td *ngIf="network !== 'liquid' && network !== 'liquidtestnet'; else liquidTotalFees">
                  <app-amount [satoshis]="block?.extras.totalFees" digitsInfo="1.2-3" [noFiat]="true"></app-amount>
                </td>
                <ng-template #liquidTotalFees>
                  <td>
                    <app-amount [satoshis]="fees * 100000000" digitsInfo="1.2-2" [noFiat]="true"></app-amount>
                  </td>
                </ng-template>
              </tr>
            </ng-template>
            <tr *ngIf="network !== 'liquid' && network !== 'liquidtestnet'">
              <td i18n="block.miner">Miner</td>
              <td *ngIf="stateService.env.MINING_DASHBOARD">
                <a [attr.data-cy]="'block-details-miner-badge'" placement="bottom" [routerLink]="['/mining/pool' | relativeUrl, block?.extras.pool.slug]" class="badge"
                  [class]="block?.extras.pool.name === 'Unknown' ? 'badge-secondary' : 'badge-primary'">
                  {{ block?.extras.pool.name }}
                </a>
              </td>
              <td *ngIf="!stateService.env.MINING_DASHBOARD && stateService.env.BASE_MODULE === 'mempool'">
                <span [attr.data-cy]="'block-details-miner-badge'" placement="bottom" class="badge"
                  [class]="block?.extras.pool.name === 'Unknown' ? 'badge-secondary' : 'badge-primary'">
                  {{ block?.extras.pool.name }}
                </span>
              </td>
            </tr>
          </ng-template>
        </tbody>
      </table>
    </div>
    <div class="col-sm chart-container" *ngIf="webGlEnabled">
      <app-block-overview-graph
        #blockGraph
        [isLoading]="false"
        [resolution]="75"
        [blockLimit]="stateService.blockVSize"
        [orientation]="'top'"
        [flip]="false"
        (txClickEvent)="onTxClick($event)"
      ></app-block-overview-graph>
    </div>
  </div>
</div>
@ -0,0 +1,23 @@
.block-title {
  margin-bottom: 0.75em;
  font-size: 42px;

  ::ng-deep .next-previous-blocks {
    font-size: 42px;
  }
}

.table {
  font-size: 24px;
}

.chart-container {
  flex-grow: 0;
  flex-shrink: 0;
  width: 420px;
  min-width: 420px;
}

::ng-deep .symbol {
  font-size: 18px;
}
11
frontend/src/app/components/block/block-preview.component.ts
Normal file
@ -0,0 +1,11 @@
import { Component } from '@angular/core';
import { BlockComponent } from './block.component';

@Component({
  selector: 'app-block-preview',
  templateUrl: './block-preview.component.html',
  styleUrls: ['./block.component.scss', './block-preview.component.scss']
})
export class BlockPreviewComponent extends BlockComponent {

}