Merge branch 'master' into mononaut/optimize-mempool-block-7
@@ -160,7 +160,7 @@ npm install -g ts-node nodemon
Then, run the watcher:

```
nodemon src/index.ts --ignore cache/ --ignore pools.json
nodemon src/index.ts --ignore cache/
```

`nodemon` should be in npm's global binary folder. If needed, you can determine where that is with `npm -g bin`.

@@ -219,6 +219,16 @@ Generate block at regular interval (every 10 seconds in this example):

watch -n 10 "./src/bitcoin-cli -regtest -rpcport=8332 generatetoaddress 1 $address"
```

### Mining pools update

By default, mining pools are not automatically updated on a regular basis (`config.MEMPOOL.AUTOMATIC_BLOCK_REINDEXING` is set to `false`).

To manually update your mining pools, you can use the `--update-pools` command line flag when you run the nodejs backend. For example, `npm run start --update-pools`. This will trigger the mining pools update and automatically re-index the appropriate blocks.

You can enable the automatic mining pools update by setting `config.MEMPOOL.AUTOMATIC_BLOCK_REINDEXING` to `true` in your `mempool-config.json`.
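
For reference, a minimal sketch of the relevant keys in `mempool-config.json` (assuming they live under the `MEMPOOL` section, as the config template elsewhere in this diff suggests; all other keys omitted):

```
{
  "MEMPOOL": {
    "AUTOMATIC_BLOCK_REINDEXING": true,
    "POOLS_JSON_URL": "https://raw.githubusercontent.com/mempool/mining-pools/master/pools-v2.json",
    "POOLS_JSON_TREE_URL": "https://api.github.com/repos/mempool/mining-pools/git/trees/master"
  }
}
```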

When a `coinbase tag` or `coinbase address` change is detected, all blocks tagged to the `unknown` mining pool (starting from height 130635) will be deleted from the `blocks` table. Additionally, all blocks which were tagged to the pool that was updated will also be deleted from the `blocks` table. Of course, those blocks will be automatically re-indexed.

### Re-index tables

You can manually force the nodejs backend to drop all data from a specified set of tables for future re-index. This is mostly useful for the mining dashboard and the lightning explorer.
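
A hedged example invocation (the `--reindex` flag name and its table-list syntax are assumptions here, not shown in this excerpt; check the backend README for the exact syntax):

```
npm run start --reindex=blocks,hashrates
```

The log excerpt in the next hunk shows the kind of output to expect when indexed data is erased and rebuilt.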

@@ -235,4 +245,4 @@ Feb 13 14:55:27 [63246] WARN: <lightning> Indexed data for "hashrates" tables wi

Feb 13 14:55:32 [63246] NOTICE: <lightning> Table hashrates has been truncated
```

Reference: https://github.com/mempool/mempool/pull/1269
Reference: https://github.com/mempool/mempool/pull/1269

@@ -22,7 +22,7 @@
"USER_AGENT": "mempool",
"STDOUT_LOG_MIN_PRIORITY": "debug",
"AUTOMATIC_BLOCK_REINDEXING": false,
"POOLS_JSON_URL": "https://raw.githubusercontent.com/mempool/mining-pools/master/pools.json",
"POOLS_JSON_URL": "https://raw.githubusercontent.com/mempool/mining-pools/master/pools-v2.json",
"POOLS_JSON_TREE_URL": "https://api.github.com/repos/mempool/mining-pools/git/trees/master",
"AUDIT": false,
"ADVANCED_GBT_AUDIT": false,

@@ -27,7 +27,7 @@
"package": "npm run build && rm -rf package && mv dist package && mv node_modules package && npm run package-rm-build-deps",
"package-rm-build-deps": "(cd package/node_modules; rm -r typescript @typescript-eslint)",
"start": "node --max-old-space-size=2048 dist/index.js",
"start-production": "node --max-old-space-size=4096 dist/index.js",
"start-production": "node --max-old-space-size=16384 dist/index.js",
"test": "./node_modules/.bin/jest --coverage",
"lint": "./node_modules/.bin/eslint . --ext .ts",
"lint:fix": "./node_modules/.bin/eslint . --ext .ts --fix",

@@ -3,12 +3,11 @@
"ENABLED": true,
"NETWORK": "__MEMPOOL_NETWORK__",
"BACKEND": "__MEMPOOL_BACKEND__",
"ENABLED": true,
"BLOCKS_SUMMARIES_INDEXING": true,
"HTTP_PORT": 1,
"SPAWN_CLUSTER_PROCS": 2,
"API_URL_PREFIX": "__MEMPOOL_API_URL_PREFIX__",
"AUTOMATIC_BLOCK_REINDEXING": true,
"AUTOMATIC_BLOCK_REINDEXING": false,
"POLL_RATE_MS": 3,
"CACHE_DIR": "__MEMPOOL_CACHE_DIR__",
"CLEAR_PROTECTION_MINUTES": 4,

@@ -28,7 +27,8 @@
"AUDIT": "__MEMPOOL_AUDIT__",
"ADVANCED_GBT_AUDIT": "__MEMPOOL_ADVANCED_GBT_AUDIT__",
"ADVANCED_GBT_MEMPOOL": "__MEMPOOL_ADVANCED_GBT_MEMPOOL__",
"CPFP_INDEXING": "__MEMPOOL_CPFP_INDEXING__"
"CPFP_INDEXING": "__MEMPOOL_CPFP_INDEXING__",
"MAX_BLOCKS_BULK_QUERY": "__MEMPOOL_MAX_BLOCKS_BULK_QUERY__"
},
"CORE_RPC": {
"HOST": "__CORE_RPC_HOST__",

@@ -36,11 +36,12 @@ describe('Mempool Backend Config', () => {
USER_AGENT: 'mempool',
STDOUT_LOG_MIN_PRIORITY: 'debug',
POOLS_JSON_TREE_URL: 'https://api.github.com/repos/mempool/mining-pools/git/trees/master',
POOLS_JSON_URL: 'https://raw.githubusercontent.com/mempool/mining-pools/master/pools.json',
POOLS_JSON_URL: 'https://raw.githubusercontent.com/mempool/mining-pools/master/pools-v2.json',
AUDIT: false,
ADVANCED_GBT_AUDIT: false,
ADVANCED_GBT_MEMPOOL: false,
CPFP_INDEXING: false,
MAX_BLOCKS_BULK_QUERY: 0,
});

expect(config.ELECTRUM).toStrictEqual({ HOST: '127.0.0.1', PORT: 3306, TLS_ENABLED: true });

@@ -119,7 +119,8 @@ class Audit {
|
||||
}
|
||||
|
||||
const numCensored = Object.keys(isCensored).length;
|
||||
const score = matches.length > 0 ? (matches.length / (matches.length + numCensored)) : 0;
|
||||
const numMatches = matches.length - 1; // adjust for coinbase tx
|
||||
const score = numMatches > 0 ? (numMatches / (numMatches + numCensored)) : 0;
|
||||
|
||||
return {
|
||||
censored: Object.keys(isCensored),
|
||||
|
||||
@@ -172,4 +172,35 @@ export namespace IBitcoinApi {
|
||||
}
|
||||
}
|
||||
|
||||
export interface BlockStats {
|
||||
"avgfee": number;
|
||||
"avgfeerate": number;
|
||||
"avgtxsize": number;
|
||||
"blockhash": string;
|
||||
"feerate_percentiles": [number, number, number, number, number];
|
||||
"height": number;
|
||||
"ins": number;
|
||||
"maxfee": number;
|
||||
"maxfeerate": number;
|
||||
"maxtxsize": number;
|
||||
"medianfee": number;
|
||||
"mediantime": number;
|
||||
"mediantxsize": number;
|
||||
"minfee": number;
|
||||
"minfeerate": number;
|
||||
"mintxsize": number;
|
||||
"outs": number;
|
||||
"subsidy": number;
|
||||
"swtotal_size": number;
|
||||
"swtotal_weight": number;
|
||||
"swtxs": number;
|
||||
"time": number;
|
||||
"total_out": number;
|
||||
"total_size": number;
|
||||
"total_weight": number;
|
||||
"totalfee": number;
|
||||
"txs": number;
|
||||
"utxo_increase": number;
|
||||
"utxo_size_inc": number;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -28,6 +28,7 @@ class BitcoinApi implements AbstractBitcoinApi {
|
||||
size: block.size,
|
||||
weight: block.weight,
|
||||
previousblockhash: block.previousblockhash,
|
||||
mediantime: block.mediantime,
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
@@ -95,6 +95,8 @@ class BitcoinRoutes {
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash/summary', this.getStrippedBlockTransactions)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash/audit-summary', this.getBlockAuditSummary)
|
||||
.post(config.MEMPOOL.API_URL_PREFIX + 'psbt/addparents', this.postPsbtCompletion)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'blocks-bulk/:from', this.getBlocksByBulk.bind(this))
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'blocks-bulk/:from/:to', this.getBlocksByBulk.bind(this))
|
||||
;
|
||||
|
||||
if (config.MEMPOOL.BACKEND !== 'esplora') {
|
||||
@@ -215,7 +217,15 @@ class BitcoinRoutes {
|
||||
res.json(cpfpInfo);
|
||||
return;
|
||||
} else {
|
||||
const cpfpInfo = await transactionRepository.$getCpfpInfo(req.params.txId);
|
||||
let cpfpInfo;
|
||||
if (config.DATABASE.ENABLED) {
|
||||
cpfpInfo = await transactionRepository.$getCpfpInfo(req.params.txId);
|
||||
} else {
|
||||
res.json({
|
||||
ancestors: []
|
||||
});
|
||||
return;
|
||||
}
|
||||
if (cpfpInfo) {
|
||||
res.json(cpfpInfo);
|
||||
return;
|
||||
@@ -402,6 +412,41 @@ class BitcoinRoutes {
|
||||
}
|
||||
}
|
||||
|
||||
private async getBlocksByBulk(req: Request, res: Response) {
|
||||
try {
|
||||
if (['mainnet', 'testnet', 'signet'].includes(config.MEMPOOL.NETWORK) === false) { // Liquid, Bisq - Not implemented
|
||||
return res.status(404).send(`This API is only available for Bitcoin networks`);
|
||||
}
|
||||
if (config.MEMPOOL.MAX_BLOCKS_BULK_QUERY <= 0) {
|
||||
return res.status(404).send(`This API is disabled. Set config.MEMPOOL.MAX_BLOCKS_BULK_QUERY to a positive number to enable it.`);
|
||||
}
|
||||
if (!Common.indexingEnabled()) {
|
||||
return res.status(404).send(`Indexing is required for this API`);
|
||||
}
|
||||
|
||||
const from = parseInt(req.params.from, 10);
|
||||
if (!req.params.from || from < 0) {
|
||||
return res.status(400).send(`Parameter 'from' must be a block height (integer)`);
|
||||
}
|
||||
const to = req.params.to === undefined ? await bitcoinApi.$getBlockHeightTip() : parseInt(req.params.to, 10);
|
||||
if (to < 0) {
|
||||
return res.status(400).send(`Parameter 'to' must be a block height (integer)`);
|
||||
}
|
||||
if (from > to) {
|
||||
return res.status(400).send(`Parameter 'to' must be a higher block height than 'from'`);
|
||||
}
|
||||
if ((to - from + 1) > config.MEMPOOL.MAX_BLOCKS_BULK_QUERY) {
|
||||
return res.status(400).send(`You can only query ${config.MEMPOOL.MAX_BLOCKS_BULK_QUERY} blocks at once.`);
|
||||
}
|
||||
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
|
||||
res.json(await blocks.$getBlocksBetweenHeight(from, to));
|
||||
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async getLegacyBlocks(req: Request, res: Response) {
|
||||
try {
|
||||
const returnBlocks: IEsploraApi.Block[] = [];
|
||||
|
||||
@@ -88,6 +88,7 @@ export namespace IEsploraApi {
|
||||
size: number;
|
||||
weight: number;
|
||||
previousblockhash: string;
|
||||
mediantime: number;
|
||||
}
|
||||
|
||||
export interface Address {
|
||||
|
||||
@@ -2,7 +2,7 @@ import config from '../config';
|
||||
import bitcoinApi from './bitcoin/bitcoin-api-factory';
|
||||
import logger from '../logger';
|
||||
import memPool from './mempool';
|
||||
import { BlockExtended, BlockSummary, PoolTag, TransactionExtended, TransactionStripped, TransactionMinerInfo } from '../mempool.interfaces';
|
||||
import { BlockExtended, BlockExtension, BlockSummary, PoolTag, TransactionExtended, TransactionStripped, TransactionMinerInfo } from '../mempool.interfaces';
|
||||
import { Common } from './common';
|
||||
import diskCache from './disk-cache';
|
||||
import transactionUtils from './transaction-utils';
|
||||
@@ -13,7 +13,6 @@ import poolsRepository from '../repositories/PoolsRepository';
|
||||
import blocksRepository from '../repositories/BlocksRepository';
|
||||
import loadingIndicators from './loading-indicators';
|
||||
import BitcoinApi from './bitcoin/bitcoin-api';
|
||||
import { prepareBlock } from '../utils/blocks-utils';
|
||||
import BlocksRepository from '../repositories/BlocksRepository';
|
||||
import HashratesRepository from '../repositories/HashratesRepository';
|
||||
import indexer from '../indexer';
|
||||
@@ -25,6 +24,7 @@ import mining from './mining/mining';
|
||||
import DifficultyAdjustmentsRepository from '../repositories/DifficultyAdjustmentsRepository';
|
||||
import PricesRepository from '../repositories/PricesRepository';
|
||||
import priceUpdater from '../tasks/price-updater';
|
||||
import chainTips from './chain-tips';
|
||||
|
||||
class Blocks {
|
||||
private blocks: BlockExtended[] = [];
|
||||
@@ -142,7 +142,7 @@ class Blocks {
|
||||
* @param block
|
||||
* @returns BlockSummary
|
||||
*/
|
||||
private summarizeBlock(block: IBitcoinApi.VerboseBlock): BlockSummary {
|
||||
public summarizeBlock(block: IBitcoinApi.VerboseBlock): BlockSummary {
|
||||
const stripped = block.tx.map((tx) => {
|
||||
return {
|
||||
txid: tx.txid,
|
||||
@@ -165,33 +165,81 @@ class Blocks {
|
||||
* @returns BlockExtended
|
||||
*/
|
||||
private async $getBlockExtended(block: IEsploraApi.Block, transactions: TransactionExtended[]): Promise<BlockExtended> {
|
||||
const blockExtended: BlockExtended = Object.assign({ extras: {} }, block);
|
||||
blockExtended.extras.reward = transactions[0].vout.reduce((acc, curr) => acc + curr.value, 0);
|
||||
blockExtended.extras.coinbaseTx = transactionUtils.stripCoinbaseTransaction(transactions[0]);
|
||||
blockExtended.extras.coinbaseRaw = blockExtended.extras.coinbaseTx.vin[0].scriptsig;
|
||||
blockExtended.extras.usd = priceUpdater.latestPrices.USD;
|
||||
const coinbaseTx = transactionUtils.stripCoinbaseTransaction(transactions[0]);
|
||||
|
||||
const blk: Partial<BlockExtended> = Object.assign({}, block);
|
||||
const extras: Partial<BlockExtension> = {};
|
||||
|
||||
extras.reward = transactions[0].vout.reduce((acc, curr) => acc + curr.value, 0);
|
||||
extras.coinbaseRaw = coinbaseTx.vin[0].scriptsig;
|
||||
extras.orphans = chainTips.getOrphanedBlocksAtHeight(blk.height);
|
||||
|
||||
if (block.height === 0) {
|
||||
blockExtended.extras.medianFee = 0; // 50th percentiles
|
||||
blockExtended.extras.feeRange = [0, 0, 0, 0, 0, 0, 0];
|
||||
blockExtended.extras.totalFees = 0;
|
||||
blockExtended.extras.avgFee = 0;
|
||||
blockExtended.extras.avgFeeRate = 0;
|
||||
extras.medianFee = 0; // 50th percentiles
|
||||
extras.feeRange = [0, 0, 0, 0, 0, 0, 0];
|
||||
extras.totalFees = 0;
|
||||
extras.avgFee = 0;
|
||||
extras.avgFeeRate = 0;
|
||||
extras.utxoSetChange = 0;
|
||||
extras.avgTxSize = 0;
|
||||
extras.totalInputs = 0;
|
||||
extras.totalOutputs = 1;
|
||||
extras.totalOutputAmt = 0;
|
||||
extras.segwitTotalTxs = 0;
|
||||
extras.segwitTotalSize = 0;
|
||||
extras.segwitTotalWeight = 0;
|
||||
} else {
|
||||
const stats = await bitcoinClient.getBlockStats(block.id, [
|
||||
'feerate_percentiles', 'minfeerate', 'maxfeerate', 'totalfee', 'avgfee', 'avgfeerate'
|
||||
]);
|
||||
blockExtended.extras.medianFee = stats.feerate_percentiles[2]; // 50th percentiles
|
||||
blockExtended.extras.feeRange = [stats.minfeerate, stats.feerate_percentiles, stats.maxfeerate].flat();
|
||||
blockExtended.extras.totalFees = stats.totalfee;
|
||||
blockExtended.extras.avgFee = stats.avgfee;
|
||||
blockExtended.extras.avgFeeRate = stats.avgfeerate;
|
||||
const stats: IBitcoinApi.BlockStats = await bitcoinClient.getBlockStats(block.id);
|
||||
extras.medianFee = stats.feerate_percentiles[2]; // 50th percentiles
|
||||
extras.feeRange = [stats.minfeerate, stats.feerate_percentiles, stats.maxfeerate].flat();
|
||||
extras.totalFees = stats.totalfee;
|
||||
extras.avgFee = stats.avgfee;
|
||||
extras.avgFeeRate = stats.avgfeerate;
|
||||
extras.utxoSetChange = stats.utxo_increase;
|
||||
extras.avgTxSize = Math.round(stats.total_size / stats.txs * 100) * 0.01;
|
||||
extras.totalInputs = stats.ins;
|
||||
extras.totalOutputs = stats.outs;
|
||||
extras.totalOutputAmt = stats.total_out;
|
||||
extras.segwitTotalTxs = stats.swtxs;
|
||||
extras.segwitTotalSize = stats.swtotal_size;
|
||||
extras.segwitTotalWeight = stats.swtotal_weight;
|
||||
}
|
||||
|
||||
if (Common.blocksSummariesIndexingEnabled()) {
|
||||
extras.feePercentiles = await BlocksSummariesRepository.$getFeePercentilesByBlockId(block.id);
|
||||
if (extras.feePercentiles !== null) {
|
||||
extras.medianFeeAmt = extras.feePercentiles[3];
|
||||
}
|
||||
}
|
||||
|
||||
extras.virtualSize = block.weight / 4.0;
|
||||
if (coinbaseTx?.vout.length > 0) {
|
||||
extras.coinbaseAddress = coinbaseTx.vout[0].scriptpubkey_address ?? null;
|
||||
extras.coinbaseSignature = coinbaseTx.vout[0].scriptpubkey_asm ?? null;
|
||||
extras.coinbaseSignatureAscii = transactionUtils.hex2ascii(coinbaseTx.vin[0].scriptsig) ?? null;
|
||||
} else {
|
||||
extras.coinbaseAddress = null;
|
||||
extras.coinbaseSignature = null;
|
||||
extras.coinbaseSignatureAscii = null;
|
||||
}
|
||||
|
||||
const header = await bitcoinClient.getBlockHeader(block.id, false);
|
||||
extras.header = header;
|
||||
|
||||
const coinStatsIndex = indexer.isCoreIndexReady('coinstatsindex');
|
||||
if (coinStatsIndex !== null && coinStatsIndex.best_block_height >= block.height) {
|
||||
const txoutset = await bitcoinClient.getTxoutSetinfo('none', block.height);
|
||||
extras.utxoSetSize = txoutset.txouts,
|
||||
extras.totalInputAmt = Math.round(txoutset.block_info.prevout_spent * 100000000);
|
||||
} else {
|
||||
extras.utxoSetSize = null;
|
||||
extras.totalInputAmt = null;
|
||||
}
|
||||
|
||||
if (['mainnet', 'testnet', 'signet'].includes(config.MEMPOOL.NETWORK)) {
|
||||
let pool: PoolTag;
|
||||
if (blockExtended.extras?.coinbaseTx !== undefined) {
|
||||
pool = await this.$findBlockMiner(blockExtended.extras?.coinbaseTx);
|
||||
if (coinbaseTx !== undefined) {
|
||||
pool = await this.$findBlockMiner(coinbaseTx);
|
||||
} else {
|
||||
if (config.DATABASE.ENABLED === true) {
|
||||
pool = await poolsRepository.$getUnknownPool();
|
||||
@@ -201,25 +249,27 @@ class Blocks {
|
||||
}
|
||||
|
||||
if (!pool) { // We should never have this situation in practise
|
||||
logger.warn(`Cannot assign pool to block ${blockExtended.height} and 'unknown' pool does not exist. ` +
|
||||
logger.warn(`Cannot assign pool to block ${blk.height} and 'unknown' pool does not exist. ` +
|
||||
`Check your "pools" table entries`);
|
||||
} else {
|
||||
blockExtended.extras.pool = {
|
||||
id: pool.id,
|
||||
extras.pool = {
|
||||
id: pool.uniqueId,
|
||||
name: pool.name,
|
||||
slug: pool.slug,
|
||||
};
|
||||
}
|
||||
|
||||
extras.matchRate = null;
|
||||
if (config.MEMPOOL.AUDIT) {
|
||||
const auditScore = await BlocksAuditsRepository.$getBlockAuditScore(block.id);
|
||||
if (auditScore != null) {
|
||||
blockExtended.extras.matchRate = auditScore.matchRate;
|
||||
extras.matchRate = auditScore.matchRate;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return blockExtended;
|
||||
blk.extras = <BlockExtension>extras;
|
||||
return <BlockExtended>blk;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -245,15 +295,18 @@ class Blocks {
|
||||
} else {
|
||||
pools = poolsParser.miningPools;
|
||||
}
|
||||
|
||||
for (let i = 0; i < pools.length; ++i) {
|
||||
if (address !== undefined) {
|
||||
const addresses: string[] = JSON.parse(pools[i].addresses);
|
||||
const addresses: string[] = typeof pools[i].addresses === 'string' ?
|
||||
JSON.parse(pools[i].addresses) : pools[i].addresses;
|
||||
if (addresses.indexOf(address) !== -1) {
|
||||
return pools[i];
|
||||
}
|
||||
}
|
||||
|
||||
const regexes: string[] = JSON.parse(pools[i].regexes);
|
||||
const regexes: string[] = typeof pools[i].regexes === 'string' ?
|
||||
JSON.parse(pools[i].regexes) : pools[i].regexes;
|
||||
for (let y = 0; y < regexes.length; ++y) {
|
||||
const regex = new RegExp(regexes[y], 'i');
|
||||
const match = asciiScriptSig.match(regex);
|
||||
@@ -431,7 +484,7 @@ class Blocks {
|
||||
loadingIndicators.setProgress('block-indexing', progress, false);
|
||||
}
|
||||
const blockHash = await bitcoinApi.$getBlockHash(blockHeight);
|
||||
const block = BitcoinApi.convertBlock(await bitcoinClient.getBlock(blockHash));
|
||||
const block: IEsploraApi.Block = await bitcoinApi.$getBlock(blockHash);
|
||||
const transactions = await this.$getTransactionsExtended(blockHash, block.height, true, true);
|
||||
const blockExtended = await this.$getBlockExtended(block, transactions);
|
||||
|
||||
@@ -479,13 +532,13 @@ class Blocks {
|
||||
if (blockchainInfo.blocks === blockchainInfo.headers) {
|
||||
const heightDiff = blockHeightTip % 2016;
|
||||
const blockHash = await bitcoinApi.$getBlockHash(blockHeightTip - heightDiff);
|
||||
const block = BitcoinApi.convertBlock(await bitcoinClient.getBlock(blockHash));
|
||||
const block: IEsploraApi.Block = await bitcoinApi.$getBlock(blockHash);
|
||||
this.lastDifficultyAdjustmentTime = block.timestamp;
|
||||
this.currentDifficulty = block.difficulty;
|
||||
|
||||
if (blockHeightTip >= 2016) {
|
||||
const previousPeriodBlockHash = await bitcoinApi.$getBlockHash(blockHeightTip - heightDiff - 2016);
|
||||
const previousPeriodBlock = await bitcoinClient.getBlock(previousPeriodBlockHash)
|
||||
const previousPeriodBlock: IEsploraApi.Block = await bitcoinApi.$getBlock(previousPeriodBlockHash);
|
||||
this.previousDifficultyRetarget = (block.difficulty - previousPeriodBlock.difficulty) / previousPeriodBlock.difficulty * 100;
|
||||
logger.debug(`Initial difficulty adjustment data set.`);
|
||||
}
|
||||
@@ -500,6 +553,7 @@ class Blocks {
|
||||
} else {
|
||||
this.currentBlockHeight++;
|
||||
logger.debug(`New block found (#${this.currentBlockHeight})!`);
|
||||
await chainTips.updateOrphanedBlocks();
|
||||
}
|
||||
|
||||
const blockHash = await bitcoinApi.$getBlockHash(this.currentBlockHeight);
|
||||
@@ -516,18 +570,18 @@ class Blocks {
|
||||
if (Common.indexingEnabled()) {
|
||||
if (!fastForwarded) {
|
||||
const lastBlock = await blocksRepository.$getBlockByHeight(blockExtended.height - 1);
|
||||
if (lastBlock !== null && blockExtended.previousblockhash !== lastBlock['hash']) {
|
||||
logger.warn(`Chain divergence detected at block ${lastBlock['height']}, re-indexing most recent data`);
|
||||
if (lastBlock !== null && blockExtended.previousblockhash !== lastBlock.id) {
|
||||
logger.warn(`Chain divergence detected at block ${lastBlock.height}, re-indexing most recent data`);
|
||||
// We assume there won't be a reorg with more than 10 block depth
|
||||
await BlocksRepository.$deleteBlocksFrom(lastBlock['height'] - 10);
|
||||
await BlocksRepository.$deleteBlocksFrom(lastBlock.height - 10);
|
||||
await HashratesRepository.$deleteLastEntries();
|
||||
await BlocksSummariesRepository.$deleteBlocksFrom(lastBlock['height'] - 10);
|
||||
await cpfpRepository.$deleteClustersFrom(lastBlock['height'] - 10);
|
||||
await BlocksSummariesRepository.$deleteBlocksFrom(lastBlock.height - 10);
|
||||
await cpfpRepository.$deleteClustersFrom(lastBlock.height - 10);
|
||||
for (let i = 10; i >= 0; --i) {
|
||||
const newBlock = await this.$indexBlock(lastBlock['height'] - i);
|
||||
const newBlock = await this.$indexBlock(lastBlock.height - i);
|
||||
await this.$getStrippedBlockTransactions(newBlock.id, true, true);
|
||||
if (config.MEMPOOL.CPFP_INDEXING) {
|
||||
await this.$indexCPFP(newBlock.id, lastBlock['height'] - i);
|
||||
await this.$indexCPFP(newBlock.id, lastBlock.height - i);
|
||||
}
|
||||
}
|
||||
await mining.$indexDifficultyAdjustments();
|
||||
@@ -603,12 +657,12 @@ class Blocks {
|
||||
if (Common.indexingEnabled()) {
|
||||
const dbBlock = await blocksRepository.$getBlockByHeight(height);
|
||||
if (dbBlock !== null) {
|
||||
return prepareBlock(dbBlock);
|
||||
return dbBlock;
|
||||
}
|
||||
}
|
||||
|
||||
const blockHash = await bitcoinApi.$getBlockHash(height);
|
||||
const block = BitcoinApi.convertBlock(await bitcoinClient.getBlock(blockHash));
|
||||
const block: IEsploraApi.Block = await bitcoinApi.$getBlock(blockHash);
|
||||
const transactions = await this.$getTransactionsExtended(blockHash, block.height, true);
|
||||
const blockExtended = await this.$getBlockExtended(block, transactions);
|
||||
|
||||
@@ -616,11 +670,11 @@ class Blocks {
|
||||
await blocksRepository.$saveBlockInDatabase(blockExtended);
|
||||
}
|
||||
|
||||
return prepareBlock(blockExtended);
|
||||
return blockExtended;
|
||||
}
|
||||
|
||||
/**
|
||||
* Index a block by hash if it's missing from the database. Returns the block after indexing
|
||||
* Get one block by its hash
|
||||
*/
|
||||
public async $getBlock(hash: string): Promise<BlockExtended | IEsploraApi.Block> {
|
||||
// Check the memory cache
|
||||
@@ -629,31 +683,14 @@ class Blocks {
|
||||
return blockByHash;
|
||||
}
|
||||
|
||||
// Block has already been indexed
|
||||
if (Common.indexingEnabled()) {
|
||||
const dbBlock = await blocksRepository.$getBlockByHash(hash);
|
||||
if (dbBlock != null) {
|
||||
return prepareBlock(dbBlock);
|
||||
}
|
||||
}
|
||||
|
||||
// Not Bitcoin network, return the block as it
|
||||
// Not Bitcoin network, return the block as it from the bitcoin backend
|
||||
if (['mainnet', 'testnet', 'signet'].includes(config.MEMPOOL.NETWORK) === false) {
|
||||
return await bitcoinApi.$getBlock(hash);
|
||||
}
|
||||
|
||||
let block = await bitcoinClient.getBlock(hash);
|
||||
block = prepareBlock(block);
|
||||
|
||||
// Bitcoin network, add our custom data on top
|
||||
const transactions = await this.$getTransactionsExtended(hash, block.height, true);
|
||||
const blockExtended = await this.$getBlockExtended(block, transactions);
|
||||
if (Common.indexingEnabled()) {
|
||||
delete(blockExtended['coinbaseTx']);
|
||||
await blocksRepository.$saveBlockInDatabase(blockExtended);
|
||||
}
|
||||
|
||||
return blockExtended;
|
||||
const block: IEsploraApi.Block = await bitcoinApi.$getBlock(hash);
|
||||
return await this.$indexBlock(block.height);
|
||||
}
|
||||
|
||||
public async $getStrippedBlockTransactions(hash: string, skipMemoryCache = false,
|
||||
@@ -687,8 +724,19 @@ class Blocks {
|
||||
return summary.transactions;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get 15 blocks
|
||||
*
|
||||
* Internally this function uses two methods to get the blocks, and
|
||||
* the method is automatically selected:
|
||||
* - Using previous block hash links
|
||||
* - Using block height
|
||||
*
|
||||
* @param fromHeight
|
||||
* @param limit
|
||||
* @returns
|
||||
*/
|
||||
public async $getBlocks(fromHeight?: number, limit: number = 15): Promise<BlockExtended[]> {
|
||||
|
||||
let currentHeight = fromHeight !== undefined ? fromHeight : this.currentBlockHeight;
|
||||
if (currentHeight > this.currentBlockHeight) {
|
||||
limit -= currentHeight - this.currentBlockHeight;
|
||||
@@ -700,27 +748,15 @@ class Blocks {
|
||||
return returnBlocks;
|
||||
}
|
||||
|
||||
// Check if block height exist in local cache to skip the hash lookup
|
||||
const blockByHeight = this.getBlocks().find((b) => b.height === currentHeight);
|
||||
let startFromHash: string | null = null;
|
||||
if (blockByHeight) {
|
||||
startFromHash = blockByHeight.id;
|
||||
} else if (!Common.indexingEnabled()) {
|
||||
startFromHash = await bitcoinApi.$getBlockHash(currentHeight);
|
||||
}
|
||||
|
||||
let nextHash = startFromHash;
|
||||
for (let i = 0; i < limit && currentHeight >= 0; i++) {
|
||||
let block = this.getBlocks().find((b) => b.height === currentHeight);
|
||||
if (block) {
|
||||
// Using the memory cache (find by height)
|
||||
returnBlocks.push(block);
|
||||
} else if (Common.indexingEnabled()) {
|
||||
} else {
|
||||
// Using indexing (find by height, index on the fly, save in database)
|
||||
block = await this.$indexBlock(currentHeight);
|
||||
returnBlocks.push(block);
|
||||
} else if (nextHash != null) {
|
||||
block = await this.$indexBlock(currentHeight);
|
||||
nextHash = block.previousblockhash;
|
||||
returnBlocks.push(block);
|
||||
}
|
||||
currentHeight--;
|
||||
}
|
||||
@@ -728,6 +764,114 @@ class Blocks {
|
||||
return returnBlocks;
|
||||
}
|
||||
|
||||
/**
|
||||
* Used for bulk block data query
|
||||
*
|
||||
* @param fromHeight
|
||||
* @param toHeight
|
||||
*/
|
||||
public async $getBlocksBetweenHeight(fromHeight: number, toHeight: number): Promise<any> {
|
||||
if (!Common.indexingEnabled()) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const blocks: any[] = [];
|
||||
|
||||
while (fromHeight <= toHeight) {
|
||||
let block: BlockExtended | null = await blocksRepository.$getBlockByHeight(fromHeight);
|
||||
if (!block) {
|
||||
await this.$indexBlock(fromHeight);
|
||||
block = await blocksRepository.$getBlockByHeight(fromHeight);
|
||||
if (!block) {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
// Cleanup fields before sending the response
|
||||
const cleanBlock: any = {
|
||||
height: block.height ?? null,
|
||||
hash: block.id ?? null,
|
||||
timestamp: block.timestamp ?? null,
|
||||
median_timestamp: block.mediantime ?? null,
|
||||
previous_block_hash: block.previousblockhash ?? null,
|
||||
difficulty: block.difficulty ?? null,
|
||||
header: block.extras.header ?? null,
|
||||
version: block.version ?? null,
|
||||
bits: block.bits ?? null,
|
||||
nonce: block.nonce ?? null,
|
||||
size: block.size ?? null,
|
||||
weight: block.weight ?? null,
|
||||
tx_count: block.tx_count ?? null,
|
||||
merkle_root: block.merkle_root ?? null,
|
||||
reward: block.extras.reward ?? null,
|
||||
total_fee_amt: block.extras.totalFees ?? null,
|
||||
avg_fee_amt: block.extras.avgFee ?? null,
|
||||
median_fee_amt: block.extras.medianFeeAmt ?? null,
|
||||
fee_amt_percentiles: block.extras.feePercentiles ?? null,
|
||||
avg_fee_rate: block.extras.avgFeeRate ?? null,
|
||||
median_fee_rate: block.extras.medianFee ?? null,
|
||||
fee_rate_percentiles: block.extras.feeRange ?? null,
|
||||
total_inputs: block.extras.totalInputs ?? null,
|
||||
total_input_amt: block.extras.totalInputAmt ?? null,
|
||||
total_outputs: block.extras.totalOutputs ?? null,
|
||||
total_output_amt: block.extras.totalOutputAmt ?? null,
|
||||
segwit_total_txs: block.extras.segwitTotalTxs ?? null,
|
||||
segwit_total_size: block.extras.segwitTotalSize ?? null,
|
||||
segwit_total_weight: block.extras.segwitTotalWeight ?? null,
|
||||
avg_tx_size: block.extras.avgTxSize ?? null,
|
||||
utxoset_change: block.extras.utxoSetChange ?? null,
|
||||
utxoset_size: block.extras.utxoSetSize ?? null,
|
||||
coinbase_raw: block.extras.coinbaseRaw ?? null,
|
||||
coinbase_address: block.extras.coinbaseAddress ?? null,
|
||||
coinbase_signature: block.extras.coinbaseSignature ?? null,
|
||||
coinbase_signature_ascii: block.extras.coinbaseSignatureAscii ?? null,
|
||||
pool_slug: block.extras.pool.slug ?? null,
|
||||
pool_id: block.extras.pool.id ?? null,
|
||||
};
|
||||
|
||||
if (Common.blocksSummariesIndexingEnabled() && cleanBlock.fee_amt_percentiles === null) {
|
||||
cleanBlock.fee_amt_percentiles = await BlocksSummariesRepository.$getFeePercentilesByBlockId(cleanBlock.hash);
|
||||
if (cleanBlock.fee_amt_percentiles === null) {
|
||||
const block = await bitcoinClient.getBlock(cleanBlock.hash, 2);
|
||||
const summary = this.summarizeBlock(block);
|
||||
await BlocksSummariesRepository.$saveSummary({ height: block.height, mined: summary });
|
||||
cleanBlock.fee_amt_percentiles = await BlocksSummariesRepository.$getFeePercentilesByBlockId(cleanBlock.hash);
|
||||
}
|
||||
if (cleanBlock.fee_amt_percentiles !== null) {
|
||||
cleanBlock.median_fee_amt = cleanBlock.fee_amt_percentiles[3];
|
||||
}
|
||||
}
|
||||
|
||||
cleanBlock.fee_amt_percentiles = {
|
||||
'min': cleanBlock.fee_amt_percentiles[0],
|
||||
'perc_10': cleanBlock.fee_amt_percentiles[1],
|
||||
'perc_25': cleanBlock.fee_amt_percentiles[2],
|
||||
'perc_50': cleanBlock.fee_amt_percentiles[3],
|
||||
'perc_75': cleanBlock.fee_amt_percentiles[4],
|
||||
'perc_90': cleanBlock.fee_amt_percentiles[5],
|
||||
'max': cleanBlock.fee_amt_percentiles[6],
|
||||
};
|
||||
cleanBlock.fee_rate_percentiles = {
|
||||
'min': cleanBlock.fee_rate_percentiles[0],
|
||||
'perc_10': cleanBlock.fee_rate_percentiles[1],
|
||||
'perc_25': cleanBlock.fee_rate_percentiles[2],
|
||||
'perc_50': cleanBlock.fee_rate_percentiles[3],
|
||||
'perc_75': cleanBlock.fee_rate_percentiles[4],
|
||||
'perc_90': cleanBlock.fee_rate_percentiles[5],
|
||||
'max': cleanBlock.fee_rate_percentiles[6],
|
||||
};
|
||||
|
||||
// Re-org can happen after indexing so we need to always get the
|
||||
// latest state from core
|
||||
cleanBlock.orphans = chainTips.getOrphanedBlocksAtHeight(cleanBlock.height);
|
||||
|
||||
blocks.push(cleanBlock);
|
||||
fromHeight++;
|
||||
}
|
||||
|
||||
return blocks;
|
||||
}
|
||||
|
||||
public async $getBlockAuditSummary(hash: string): Promise<any> {
|
||||
let summary;
|
||||
if (['mainnet', 'testnet', 'signet'].includes(config.MEMPOOL.NETWORK)) {
|
||||
|
||||
backend/src/api/chain-tips.ts (new file, 61 lines)
@@ -0,0 +1,61 @@
|
||||
import logger from '../logger';
|
||||
import bitcoinClient from './bitcoin/bitcoin-client';
|
||||
|
||||
export interface ChainTip {
|
||||
height: number;
|
||||
hash: string;
|
||||
branchlen: number;
|
||||
status: 'invalid' | 'active' | 'valid-fork' | 'valid-headers' | 'headers-only';
|
||||
};
|
||||
|
||||
export interface OrphanedBlock {
|
||||
height: number;
|
||||
hash: string;
|
||||
status: 'valid-fork' | 'valid-headers' | 'headers-only';
|
||||
}
|
||||
|
||||
class ChainTips {
|
||||
private chainTips: ChainTip[] = [];
|
||||
private orphanedBlocks: OrphanedBlock[] = [];
|
||||
|
||||
public async updateOrphanedBlocks(): Promise<void> {
|
||||
try {
|
||||
this.chainTips = await bitcoinClient.getChainTips();
|
||||
this.orphanedBlocks = [];
|
||||
|
||||
for (const chain of this.chainTips) {
|
||||
if (chain.status === 'valid-fork' || chain.status === 'valid-headers') {
|
||||
let block = await bitcoinClient.getBlock(chain.hash);
|
||||
while (block && block.confirmations === -1) {
|
||||
this.orphanedBlocks.push({
|
||||
height: block.height,
|
||||
hash: block.hash,
|
||||
status: chain.status
|
||||
});
|
||||
block = await bitcoinClient.getBlock(block.previousblockhash);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
logger.debug(`Updated orphaned blocks cache. Found ${this.orphanedBlocks.length} orphaned blocks`);
|
||||
} catch (e) {
|
||||
logger.err(`Cannot get fetch orphaned blocks. Reason: ${e instanceof Error ? e.message : e}`);
|
||||
}
|
||||
}
|
||||
|
||||
public getOrphanedBlocksAtHeight(height: number | undefined): OrphanedBlock[] {
|
||||
if (height === undefined) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const orphans: OrphanedBlock[] = [];
|
||||
for (const block of this.orphanedBlocks) {
|
||||
if (block.height === height) {
|
||||
orphans.push(block);
|
||||
}
|
||||
}
|
||||
return orphans;
|
||||
}
|
||||
}
|
||||
|
||||
export default new ChainTips();
|
||||
@@ -237,14 +237,21 @@ export class Common {
|
||||
].join('x');
|
||||
}
|
||||
|
||||
static utcDateToMysql(date?: number): string {
|
||||
static utcDateToMysql(date?: number | null): string | null {
|
||||
if (date === null) {
|
||||
return null;
|
||||
}
|
||||
const d = new Date((date || 0) * 1000);
|
||||
return d.toISOString().split('T')[0] + ' ' + d.toTimeString().split(' ')[0];
|
||||
}
|
||||
|
||||
static findSocketNetwork(addr: string): {network: string | null, url: string} {
|
||||
let network: string | null = null;
|
||||
let url = addr.split('://')[1];
|
||||
let url: string = addr;
|
||||
|
||||
if (config.LIGHTNING.BACKEND === 'cln') {
|
||||
url = addr.split('://')[1];
|
||||
}
|
||||
|
||||
if (!url) {
|
||||
return {
|
||||
@@ -261,7 +268,7 @@ export class Common {
|
||||
}
|
||||
} else if (addr.indexOf('i2p') !== -1) {
|
||||
network = 'i2p';
|
||||
} else if (addr.indexOf('ipv4') !== -1) {
|
||||
} else if (addr.indexOf('ipv4') !== -1 || (config.LIGHTNING.BACKEND === 'lnd' && isIP(url.split(':')[0]) === 4)) {
|
||||
const ipv = isIP(url.split(':')[0]);
|
||||
if (ipv === 4) {
|
||||
network = 'ipv4';
|
||||
@@ -271,7 +278,7 @@ export class Common {
|
||||
url: addr,
|
||||
};
|
||||
}
|
||||
} else if (addr.indexOf('ipv6') !== -1) {
|
||||
} else if (addr.indexOf('ipv6') !== -1 || (config.LIGHTNING.BACKEND === 'lnd' && url.indexOf(']:'))) {
|
||||
url = url.split('[')[1].split(']')[0];
|
||||
const ipv = isIP(url);
|
||||
if (ipv === 6) {
|
||||
|
||||
@@ -7,7 +7,7 @@ import cpfpRepository from '../repositories/CpfpRepository';
|
||||
import { RowDataPacket } from 'mysql2';
|
||||
|
||||
class DatabaseMigration {
|
||||
private static currentVersion = 52;
|
||||
private static currentVersion = 57;
|
||||
private queryTimeout = 3600_000;
|
||||
private statisticsAddedIndexed = false;
|
||||
private uniqueLogs: string[] = [];
|
||||
@@ -62,8 +62,8 @@ class DatabaseMigration {
|
||||
|
||||
if (databaseSchemaVersion <= 2) {
|
||||
// Disable some spam logs when they're not relevant
|
||||
this.uniqueLogs.push(this.blocksTruncatedMessage);
|
||||
this.uniqueLogs.push(this.hashratesTruncatedMessage);
|
||||
this.uniqueLog(logger.notice, this.blocksTruncatedMessage);
|
||||
this.uniqueLog(logger.notice, this.hashratesTruncatedMessage);
|
||||
}
|
||||
|
||||
logger.debug('MIGRATIONS: Current state.schema_version ' + databaseSchemaVersion);
|
||||
@@ -86,7 +86,7 @@ class DatabaseMigration {
|
||||
try {
|
||||
await this.$migrateTableSchemaFromVersion(databaseSchemaVersion);
|
||||
if (databaseSchemaVersion === 0) {
|
||||
logger.notice(`MIGRATIONS: OK. Database schema has been properly initialized to version ${DatabaseMigration.currentVersion} (latest version)`);
|
||||
logger.notice(`MIGRATIONS: OK. Database schema has been properly initialized to version ${DatabaseMigration.currentVersion} (latest version)`);
|
||||
} else {
|
||||
logger.notice(`MIGRATIONS: OK. Database schema have been migrated from version ${databaseSchemaVersion} to ${DatabaseMigration.currentVersion} (latest version)`);
|
||||
}
|
||||
@@ -300,7 +300,7 @@ class DatabaseMigration {
|
||||
await this.$executeQuery('ALTER TABLE `lightning_stats` ADD med_base_fee_mtokens bigint(20) unsigned NOT NULL DEFAULT "0"');
|
||||
await this.updateToSchemaVersion(27);
|
||||
}
|
||||
|
||||
|
||||
if (databaseSchemaVersion < 28 && isBitcoin === true) {
|
||||
if (config.LIGHTNING.ENABLED) {
|
||||
this.uniqueLog(logger.notice, `'lightning_stats' and 'node_stats' tables have been truncated.`);
|
||||
@@ -464,10 +464,47 @@ class DatabaseMigration {
|
||||
await this.$executeQuery('DROP TABLE IF EXISTS `transactions`');
|
||||
await this.$executeQuery('DROP TABLE IF EXISTS `cpfp_clusters`');
|
||||
await this.updateToSchemaVersion(52);
|
||||
} catch(e) {
|
||||
} catch (e) {
|
||||
logger.warn('' + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 53) {
|
||||
await this.$executeQuery('ALTER TABLE statistics MODIFY mempool_byte_weight bigint(20) UNSIGNED NOT NULL');
|
||||
await this.updateToSchemaVersion(53);
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 54) {
|
||||
this.uniqueLog(logger.notice, `'prices' table has been truncated`);
|
||||
await this.$executeQuery(`TRUNCATE prices`);
|
||||
if (isBitcoin === true) {
|
||||
this.uniqueLog(logger.notice, `'blocks_prices' table has been truncated`);
|
||||
await this.$executeQuery(`TRUNCATE blocks_prices`);
|
||||
}
|
||||
await this.updateToSchemaVersion(54);
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 55) {
|
||||
await this.$executeQuery(this.getAdditionalBlocksDataQuery());
|
||||
this.uniqueLog(logger.notice, this.blocksTruncatedMessage);
|
||||
await this.$executeQuery('TRUNCATE blocks;'); // Need to re-index
|
||||
await this.updateToSchemaVersion(55);
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 56) {
|
||||
await this.$executeQuery('ALTER TABLE pools ADD unique_id int NOT NULL DEFAULT -1');
|
||||
await this.$executeQuery('TRUNCATE TABLE `blocks`');
|
||||
this.uniqueLog(logger.notice, this.blocksTruncatedMessage);
|
||||
await this.$executeQuery('DELETE FROM `pools`');
|
||||
await this.$executeQuery('ALTER TABLE pools AUTO_INCREMENT = 1');
|
||||
this.uniqueLog(logger.notice, '`pools` table has been truncated`');
|
||||
await this.updateToSchemaVersion(56);
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 57 && isBitcoin === true) {
|
||||
await this.$executeQuery(`ALTER TABLE nodes MODIFY updated_at datetime NULL`);
|
||||
await this.updateToSchemaVersion(57);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -591,7 +628,7 @@ class DatabaseMigration {
|
||||
queries.push(`INSERT INTO state(name, number, string) VALUES ('last_hashrates_indexing', 0, NULL)`);
|
||||
}
|
||||
|
||||
if (version < 9 && isBitcoin === true) {
|
||||
if (version < 9 && isBitcoin === true) {
|
||||
queries.push(`INSERT INTO state(name, number, string) VALUES ('last_weekly_hashrates_indexing', 0, NULL)`);
|
||||
}
|
||||
|
||||
@@ -741,6 +778,28 @@ class DatabaseMigration {
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
|
||||
}
|
||||
|
||||
private getAdditionalBlocksDataQuery(): string {
|
||||
return `ALTER TABLE blocks
|
||||
ADD median_timestamp timestamp NOT NULL,
|
||||
ADD coinbase_address varchar(100) NULL,
|
||||
ADD coinbase_signature varchar(500) NULL,
|
||||
ADD coinbase_signature_ascii varchar(500) NULL,
|
||||
ADD avg_tx_size double unsigned NOT NULL,
|
||||
ADD total_inputs int unsigned NOT NULL,
|
||||
ADD total_outputs int unsigned NOT NULL,
|
||||
ADD total_output_amt bigint unsigned NOT NULL,
|
||||
ADD fee_percentiles longtext NULL,
|
||||
ADD median_fee_amt int unsigned NULL,
|
||||
ADD segwit_total_txs int unsigned NOT NULL,
|
||||
ADD segwit_total_size int unsigned NOT NULL,
|
||||
ADD segwit_total_weight int unsigned NOT NULL,
|
||||
ADD header varchar(160) NOT NULL,
|
||||
ADD utxoset_change int NOT NULL,
|
||||
ADD utxoset_size int unsigned NULL,
|
||||
ADD total_input_amt bigint unsigned NULL
|
||||
`;
|
||||
}
|
||||
|
||||
private getCreateDailyStatsTableQuery(): string {
|
||||
return `CREATE TABLE IF NOT EXISTS hashrates (
|
||||
hashrate_timestamp timestamp NOT NULL,
|
||||
@@ -958,26 +1017,16 @@ class DatabaseMigration {
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
|
||||
}
|
||||
|
||||
public async $truncateIndexedData(tables: string[]) {
|
||||
const allowedTables = ['blocks', 'hashrates', 'prices'];
|
||||
public async $blocksReindexingTruncate(): Promise<void> {
|
||||
logger.warn(`Truncating pools, blocks and hashrates for re-indexing (using '--reindex-blocks'). You can cancel this command within 5 seconds`);
|
||||
await Common.sleep$(5000);
|
||||
|
||||
try {
|
||||
for (const table of tables) {
|
||||
if (!allowedTables.includes(table)) {
|
||||
logger.debug(`Table ${table} cannot to be re-indexed (not allowed)`);
|
||||
continue;
|
||||
}
|
||||
|
||||
await this.$executeQuery(`TRUNCATE ${table}`, true);
|
||||
if (table === 'hashrates') {
|
||||
await this.$executeQuery('UPDATE state set number = 0 where name = "last_hashrates_indexing"', true);
|
||||
}
|
||||
logger.notice(`Table ${table} has been truncated`);
|
||||
}
|
||||
} catch (e) {
|
||||
logger.warn(`Unable to erase indexed data`);
|
||||
}
|
||||
}
|
||||
await this.$executeQuery(`TRUNCATE blocks`);
|
||||
await this.$executeQuery(`TRUNCATE hashrates`);
|
||||
await this.$executeQuery('DELETE FROM `pools`');
|
||||
await this.$executeQuery('ALTER TABLE pools AUTO_INCREMENT = 1');
|
||||
await this.$executeQuery(`UPDATE state SET string = NULL WHERE name = 'pools_json_sha'`);
|
||||
}
|
||||
|
||||
private async $convertCompactCpfpTables(): Promise<void> {
|
||||
try {
|
||||
|
||||
@@ -9,7 +9,7 @@ import { TransactionExtended } from '../mempool.interfaces';
|
||||
import { Common } from './common';
|
||||
|
||||
class DiskCache {
|
||||
private cacheSchemaVersion = 1;
|
||||
private cacheSchemaVersion = 3;
|
||||
|
||||
private static FILE_NAME = config.MEMPOOL.CACHE_DIR + '/cache.json';
|
||||
private static FILE_NAMES = config.MEMPOOL.CACHE_DIR + '/cache{number}.json';
|
||||
@@ -62,9 +62,24 @@ class DiskCache {
|
||||
}
|
||||
|
||||
wipeCache() {
|
||||
fs.unlinkSync(DiskCache.FILE_NAME);
|
||||
logger.notice(`Wipping nodejs backend cache/cache*.json files`);
|
||||
try {
|
||||
fs.unlinkSync(DiskCache.FILE_NAME);
|
||||
} catch (e: any) {
|
||||
if (e?.code !== 'ENOENT') {
|
||||
logger.err(`Cannot wipe cache file ${DiskCache.FILE_NAME}. Exception ${JSON.stringify(e)}`);
|
||||
}
|
||||
}
|
||||
|
||||
for (let i = 1; i < DiskCache.CHUNK_FILES; i++) {
|
||||
fs.unlinkSync(DiskCache.FILE_NAMES.replace('{number}', i.toString()));
|
||||
const filename = DiskCache.FILE_NAMES.replace('{number}', i.toString());
|
||||
try {
|
||||
fs.unlinkSync(filename);
|
||||
} catch (e: any) {
|
||||
if (e?.code !== 'ENOENT') {
|
||||
logger.err(`Cannot wipe cache file ${filename}. Exception ${JSON.stringify(e)}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -559,6 +559,17 @@ class ChannelsApi {
|
||||
const policy1: Partial<ILightningApi.RoutingPolicy> = channel.node1_policy || {};
|
||||
const policy2: Partial<ILightningApi.RoutingPolicy> = channel.node2_policy || {};
|
||||
|
||||
// https://github.com/mempool/mempool/issues/3006
|
||||
if ((channel.last_update ?? 0) < 1514736061) { // January 1st 2018
|
||||
channel.last_update = null;
|
||||
}
|
||||
if ((policy1.last_update ?? 0) < 1514736061) { // January 1st 2018
|
||||
policy1.last_update = null;
|
||||
}
|
||||
if ((policy2.last_update ?? 0) < 1514736061) { // January 1st 2018
|
||||
policy2.last_update = null;
|
||||
}
|
||||
|
||||
const query = `INSERT INTO channels
|
||||
(
|
||||
id,
|
||||
|
||||
@@ -228,7 +228,7 @@ class NodesApi {
|
||||
nodes.capacity
|
||||
FROM nodes
|
||||
ORDER BY capacity DESC
|
||||
LIMIT 100
|
||||
LIMIT 6
|
||||
`;
|
||||
|
||||
[rows] = await DB.query(query);
|
||||
@@ -269,14 +269,26 @@ class NodesApi {
|
||||
let query: string;
|
||||
if (full === false) {
|
||||
query = `
|
||||
SELECT nodes.public_key as publicKey, IF(nodes.alias = '', SUBSTRING(nodes.public_key, 1, 20), alias) as alias,
|
||||
nodes.channels
|
||||
SELECT
|
||||
nodes.public_key as publicKey,
|
||||
IF(nodes.alias = '', SUBSTRING(nodes.public_key, 1, 20), alias) as alias,
|
||||
nodes.channels,
|
||||
geo_names_city.names as city, geo_names_country.names as country,
|
||||
geo_names_iso.names as iso_code, geo_names_subdivision.names as subdivision
|
||||
FROM nodes
|
||||
LEFT JOIN geo_names geo_names_country ON geo_names_country.id = nodes.country_id AND geo_names_country.type = 'country'
|
||||
LEFT JOIN geo_names geo_names_city ON geo_names_city.id = nodes.city_id AND geo_names_city.type = 'city'
|
||||
LEFT JOIN geo_names geo_names_iso ON geo_names_iso.id = nodes.country_id AND geo_names_iso.type = 'country_iso_code'
|
||||
LEFT JOIN geo_names geo_names_subdivision on geo_names_subdivision.id = nodes.subdivision_id AND geo_names_subdivision.type = 'division'
|
||||
ORDER BY channels DESC
|
||||
LIMIT 100;
|
||||
LIMIT 6;
|
||||
`;
|
||||
|
||||
[rows] = await DB.query(query);
|
||||
for (let i = 0; i < rows.length; ++i) {
|
||||
rows[i].country = JSON.parse(rows[i].country);
|
||||
rows[i].city = JSON.parse(rows[i].city);
|
||||
}
|
||||
} else {
|
||||
query = `
|
||||
SELECT nodes.public_key AS publicKey, IF(nodes.alias = '', SUBSTRING(nodes.public_key, 1, 20), alias) as alias,
|
||||
@@ -362,7 +374,13 @@ class NodesApi {
|
||||
public async $searchNodeByPublicKeyOrAlias(search: string) {
|
||||
try {
|
||||
const publicKeySearch = search.replace('%', '') + '%';
|
||||
const aliasSearch = search.replace(/[-_.]/g, ' ').replace(/[^a-zA-Z0-9 ]/g, '').split(' ').map((search) => '+' + search + '*').join(' ');
|
||||
const aliasSearch = search
|
||||
.replace(/[-_.]/g, ' ') // Replace all -_. characters with empty space. Eg: "ln.nicehash" becomes "ln nicehash".
|
||||
.replace(/[^a-zA-Z0-9 ]/g, '') // Remove all special characters and keep just A to Z, 0 to 9.
|
||||
.split(' ')
|
||||
.filter(key => key.length)
|
||||
.map((search) => '+' + search + '*').join(' ');
|
||||
// %keyword% is wildcard search and can't be indexed so it's slower as the node database grow. keyword% can be indexed but then you can't search for "Nicehash" and get result for ln.nicehash.com. So we use fulltext index for words "ln, nicehash, com" and nicehash* will find it instantly.
|
||||
const query = `SELECT public_key, alias, capacity, channels, status FROM nodes WHERE public_key LIKE ? OR MATCH alias_search AGAINST (? IN BOOLEAN MODE) ORDER BY capacity DESC LIMIT 10`;
|
||||
const [rows]: any = await DB.query(query, [publicKeySearch, aliasSearch]);
|
||||
return rows;
|
||||
@@ -624,6 +642,11 @@ class NodesApi {
|
||||
*/
|
||||
public async $saveNode(node: ILightningApi.Node): Promise<void> {
|
||||
try {
|
||||
// https://github.com/mempool/mempool/issues/3006
|
||||
if ((node.last_update ?? 0) < 1514736061) { // January 1st 2018
|
||||
node.last_update = null;
|
||||
}
|
||||
|
||||
const sockets = (node.addresses?.map(a => a.addr).join(',')) ?? '';
|
||||
const query = `INSERT INTO nodes(
|
||||
public_key,
|
||||
|
||||
@@ -21,7 +21,7 @@ export namespace ILightningApi {
|
||||
export interface Channel {
|
||||
channel_id: string;
|
||||
chan_point: string;
|
||||
last_update: number;
|
||||
last_update: number | null;
|
||||
node1_pub: string;
|
||||
node2_pub: string;
|
||||
capacity: string;
|
||||
@@ -36,11 +36,11 @@ export namespace ILightningApi {
|
||||
fee_rate_milli_msat: string;
|
||||
disabled: boolean;
|
||||
max_htlc_msat: string;
|
||||
last_update: number;
|
||||
last_update: number | null;
|
||||
}
|
||||
|
||||
export interface Node {
|
||||
last_update: number;
|
||||
last_update: number | null;
|
||||
pub_key: string;
|
||||
alias: string;
|
||||
addresses: {
|
||||
|
||||
@@ -1,13 +1,13 @@
|
||||
import { Application, Request, Response } from 'express';
|
||||
import config from "../../config";
|
||||
import logger from '../../logger';
|
||||
import audits from '../audit';
|
||||
import BlocksAuditsRepository from '../../repositories/BlocksAuditsRepository';
|
||||
import BlocksRepository from '../../repositories/BlocksRepository';
|
||||
import DifficultyAdjustmentsRepository from '../../repositories/DifficultyAdjustmentsRepository';
|
||||
import HashratesRepository from '../../repositories/HashratesRepository';
|
||||
import bitcoinClient from '../bitcoin/bitcoin-client';
|
||||
import mining from "./mining";
|
||||
import PricesRepository from '../../repositories/PricesRepository';
|
||||
|
||||
class MiningRoutes {
|
||||
public initRoutes(app: Application) {
|
||||
@@ -32,9 +32,27 @@ class MiningRoutes {
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/audit/score/:hash', this.$getBlockAuditScore)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/audit/:hash', this.$getBlockAudit)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/timestamp/:timestamp', this.$getHeightFromTimestamp)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'historical-price', this.$getHistoricalPrice)
|
||||
;
|
||||
}
|
||||
|
||||
private async $getHistoricalPrice(req: Request, res: Response): Promise<void> {
|
||||
try {
|
||||
res.header('Pragma', 'public');
|
||||
res.header('Cache-control', 'public');
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 300).toUTCString());
|
||||
if (req.query.timestamp) {
|
||||
res.status(200).send(await PricesRepository.$getNearestHistoricalPrice(
|
||||
parseInt(<string>req.query.timestamp ?? 0, 10)
|
||||
));
|
||||
} else {
|
||||
res.status(200).send(await PricesRepository.$getHistoricalPrices());
|
||||
}
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async $getPool(req: Request, res: Response): Promise<void> {
|
||||
try {
|
||||
const stats = await mining.$getPoolStat(req.params.slug);
|
||||
|
||||
@@ -11,6 +11,8 @@ import DifficultyAdjustmentsRepository from '../../repositories/DifficultyAdjust
|
||||
import config from '../../config';
|
||||
import BlocksAuditsRepository from '../../repositories/BlocksAuditsRepository';
|
||||
import PricesRepository from '../../repositories/PricesRepository';
|
||||
import bitcoinApiFactory from '../bitcoin/bitcoin-api-factory';
|
||||
import { IEsploraApi } from '../bitcoin/esplora-api.interface';
|
||||
|
||||
class Mining {
|
||||
blocksPriceIndexingRunning = false;
|
||||
@@ -100,6 +102,7 @@ class Mining {
|
||||
rank: rank++,
|
||||
emptyBlocks: emptyBlocksCount.length > 0 ? emptyBlocksCount[0]['count'] : 0,
|
||||
slug: poolInfo.slug,
|
||||
avgMatchRate: poolInfo.avgMatchRate !== null ? Math.round(100 * poolInfo.avgMatchRate) / 100 : null,
|
||||
};
|
||||
poolsStats.push(poolStat);
|
||||
});
|
||||
@@ -171,7 +174,7 @@ class Mining {
|
||||
}
|
||||
|
||||
/**
|
||||
* [INDEXING] Generate weekly mining pool hashrate history
|
||||
* Generate weekly mining pool hashrate history
|
||||
*/
|
||||
public async $generatePoolHashrateHistory(): Promise<void> {
|
||||
const now = new Date();
|
||||
@@ -188,8 +191,8 @@ class Mining {
|
||||
try {
|
||||
const oldestConsecutiveBlockTimestamp = 1000 * (await BlocksRepository.$getOldestConsecutiveBlock()).timestamp;
|
||||
|
||||
const genesisBlock = await bitcoinClient.getBlock(await bitcoinClient.getBlockHash(0));
|
||||
const genesisTimestamp = genesisBlock.time * 1000;
|
||||
const genesisBlock: IEsploraApi.Block = await bitcoinApiFactory.$getBlock(await bitcoinClient.getBlockHash(0));
|
||||
const genesisTimestamp = genesisBlock.timestamp * 1000;
|
||||
|
||||
const indexedTimestamp = await HashratesRepository.$getWeeklyHashrateTimestamps();
|
||||
const hashrates: any[] = [];
|
||||
@@ -278,7 +281,7 @@ class Mining {
|
||||
}
|
||||
|
||||
/**
|
||||
* [INDEXING] Generate daily hashrate data
|
||||
* Generate daily hashrate data
|
||||
*/
|
||||
public async $generateNetworkHashrateHistory(): Promise<void> {
|
||||
// We only run this once a day around midnight
|
||||
@@ -291,8 +294,8 @@ class Mining {
|
||||
const oldestConsecutiveBlockTimestamp = 1000 * (await BlocksRepository.$getOldestConsecutiveBlock()).timestamp;
|
||||
|
||||
try {
|
||||
const genesisBlock = await bitcoinClient.getBlock(await bitcoinClient.getBlockHash(0));
|
||||
const genesisTimestamp = genesisBlock.time * 1000;
|
||||
const genesisBlock: IEsploraApi.Block = await bitcoinApiFactory.$getBlock(await bitcoinClient.getBlockHash(0));
|
||||
const genesisTimestamp = genesisBlock.timestamp * 1000;
|
||||
const indexedTimestamp = (await HashratesRepository.$getRawNetworkDailyHashrate(null)).map(hashrate => hashrate.timestamp);
|
||||
const lastMidnight = this.getDateMidnight(new Date());
|
||||
let toTimestamp = Math.round(lastMidnight.getTime());
|
||||
@@ -393,13 +396,13 @@ class Mining {
|
||||
}
|
||||
|
||||
const blocks: any = await BlocksRepository.$getBlocksDifficulty();
|
||||
const genesisBlock = await bitcoinClient.getBlock(await bitcoinClient.getBlockHash(0));
|
||||
const genesisBlock: IEsploraApi.Block = await bitcoinApiFactory.$getBlock(await bitcoinClient.getBlockHash(0));
|
||||
let currentDifficulty = genesisBlock.difficulty;
|
||||
let totalIndexed = 0;
|
||||
|
||||
if (config.MEMPOOL.INDEXING_BLOCKS_AMOUNT === -1 && indexedHeights[0] !== true) {
|
||||
await DifficultyAdjustmentsRepository.$saveAdjustments({
|
||||
time: genesisBlock.time,
|
||||
time: genesisBlock.timestamp,
|
||||
height: 0,
|
||||
difficulty: currentDifficulty,
|
||||
adjustment: 0.0,
|
||||
@@ -458,7 +461,7 @@ class Mining {
|
||||
/**
|
||||
* Create a link between blocks and the latest price at when they were mined
|
||||
*/
|
||||
public async $indexBlockPrices() {
|
||||
public async $indexBlockPrices(): Promise<void> {
|
||||
if (this.blocksPriceIndexingRunning === true) {
|
||||
return;
|
||||
}
|
||||
@@ -519,6 +522,41 @@ class Mining {
|
||||
this.blocksPriceIndexingRunning = false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Index core coinstatsindex
|
||||
*/
|
||||
public async $indexCoinStatsIndex(): Promise<void> {
|
||||
let timer = new Date().getTime() / 1000;
|
||||
let totalIndexed = 0;
|
||||
|
||||
const blockchainInfo = await bitcoinClient.getBlockchainInfo();
|
||||
let currentBlockHeight = blockchainInfo.blocks;
|
||||
|
||||
while (currentBlockHeight > 0) {
|
||||
const indexedBlocks = await BlocksRepository.$getBlocksMissingCoinStatsIndex(
|
||||
currentBlockHeight, currentBlockHeight - 10000);
|
||||
|
||||
for (const block of indexedBlocks) {
|
||||
const txoutset = await bitcoinClient.getTxoutSetinfo('none', block.height);
|
||||
await BlocksRepository.$updateCoinStatsIndexData(block.hash, txoutset.txouts,
|
||||
Math.round(txoutset.block_info.prevout_spent * 100000000));
|
||||
++totalIndexed;
|
||||
|
||||
const elapsedSeconds = Math.max(1, new Date().getTime() / 1000 - timer);
|
||||
if (elapsedSeconds > 5) {
|
||||
logger.info(`Indexing coinstatsindex data for block #${block.height}. Indexed ${totalIndexed} blocks.`, logger.tags.mining);
|
||||
timer = new Date().getTime() / 1000;
|
||||
}
|
||||
}
|
||||
|
||||
currentBlockHeight -= 10000;
|
||||
}
|
||||
|
||||
if (totalIndexed) {
|
||||
logger.info(`Indexing missing coinstatsindex data completed`, logger.tags.mining);
|
||||
}
|
||||
}
|
||||
|
||||
private getDateMidnight(date: Date): Date {
|
||||
date.setUTCHours(0);
|
||||
date.setUTCMinutes(0);
|
||||
|
||||
@@ -1,289 +1,161 @@
import DB from '../database';
import logger from '../logger';
import config from '../config';
import BlocksRepository from '../repositories/BlocksRepository';

interface Pool {
name: string;
link: string;
regexes: string[];
addresses: string[];
slug: string;
}
import PoolsRepository from '../repositories/PoolsRepository';
import { PoolTag } from '../mempool.interfaces';
import diskCache from './disk-cache';

class PoolsParser {
miningPools: any[] = [];
unknownPool: any = {
'id': 0,
'name': 'Unknown',
'link': 'https://learnmeabitcoin.com/technical/coinbase-transaction',
'regexes': '[]',
'addresses': '[]',
'slug': 'unknown'
};
slugWarnFlag = false;
private uniqueLogs: string[] = [];

private uniqueLog(loggerFunction: any, msg: string): void {
if (this.uniqueLogs.includes(msg)) {
return;
}
this.uniqueLogs.push(msg);
loggerFunction(msg);
}

public setMiningPools(pools): void {
for (const pool of pools) {
pool.regexes = pool.tags;
pool.slug = pool.name.replace(/[^a-z0-9]/gi, '').toLowerCase();
delete(pool.tags);
}
this.miningPools = pools;
}

/**
* Parse the pools.json file, consolidate the data and dump it into the database
* Populate our db with updated mining pool definition
* @param pools
*/
public async migratePoolsJson(poolsJson: object): Promise<void> {
if (['mainnet', 'testnet', 'signet'].includes(config.MEMPOOL.NETWORK) === false) {
return;
}
public async migratePoolsJson(): Promise<void> {
|
||||
// We also need to wipe the backend cache to make sure we don't serve blocks with
|
||||
// the wrong mining pool (usually happen with unknown blocks)
|
||||
diskCache.wipeCache();
|
||||
|
||||
// First we save every entries without paying attention to pool duplication
|
||||
const poolsDuplicated: Pool[] = [];
|
||||
await this.$insertUnknownPool();
|
||||
|
||||
const coinbaseTags = Object.entries(poolsJson['coinbase_tags']);
|
||||
for (let i = 0; i < coinbaseTags.length; ++i) {
|
||||
poolsDuplicated.push({
|
||||
'name': (<Pool>coinbaseTags[i][1]).name,
|
||||
'link': (<Pool>coinbaseTags[i][1]).link,
|
||||
'regexes': [coinbaseTags[i][0]],
|
||||
'addresses': [],
|
||||
'slug': ''
|
||||
});
|
||||
}
|
||||
const addressesTags = Object.entries(poolsJson['payout_addresses']);
|
||||
for (let i = 0; i < addressesTags.length; ++i) {
|
||||
poolsDuplicated.push({
|
||||
'name': (<Pool>addressesTags[i][1]).name,
|
||||
'link': (<Pool>addressesTags[i][1]).link,
|
||||
'regexes': [],
|
||||
'addresses': [addressesTags[i][0]],
|
||||
'slug': ''
|
||||
});
|
||||
}
|
||||
|
||||
// Then, we find unique mining pool names
|
||||
const poolNames: string[] = [];
|
||||
for (let i = 0; i < poolsDuplicated.length; ++i) {
|
||||
if (poolNames.indexOf(poolsDuplicated[i].name) === -1) {
|
||||
poolNames.push(poolsDuplicated[i].name);
|
||||
for (const pool of this.miningPools) {
|
||||
if (!pool.id) {
|
||||
logger.info(`Mining pool ${pool.name} has no unique 'id' defined. Skipping.`);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
logger.debug(`Found ${poolNames.length} unique mining pools`, logger.tags.mining);
|
||||
|
||||
// Get existing pools from the db
|
||||
let existingPools;
|
||||
try {
|
||||
if (config.DATABASE.ENABLED === true) {
|
||||
[existingPools] = await DB.query({ sql: 'SELECT * FROM pools;', timeout: 120000 });
|
||||
const poolDB = await PoolsRepository.$getPoolByUniqueId(pool.id, false);
|
||||
if (!poolDB) {
|
||||
// New mining pool
|
||||
const slug = pool.name.replace(/[^a-z0-9]/gi, '').toLowerCase();
|
||||
logger.debug(`Inserting new mining pool ${pool.name}`);
|
||||
await PoolsRepository.$insertNewMiningPool(pool, slug);
|
||||
await this.$deleteUnknownBlocks();
|
||||
} else {
|
||||
existingPools = [];
|
||||
}
|
||||
} catch (e) {
|
||||
logger.err('Cannot get existing pools from the database, skipping pools.json import', logger.tags.mining);
|
||||
return;
|
||||
}
|
||||
|
||||
this.miningPools = [];
|
||||
|
||||
// Finally, we generate the final consolidated pools data
|
||||
const finalPoolDataAdd: Pool[] = [];
|
||||
const finalPoolDataUpdate: Pool[] = [];
|
||||
const finalPoolDataRename: Pool[] = [];
|
||||
for (let i = 0; i < poolNames.length; ++i) {
|
||||
let allAddresses: string[] = [];
|
||||
let allRegexes: string[] = [];
|
||||
const match = poolsDuplicated.filter((pool: Pool) => pool.name === poolNames[i]);
|
||||
|
||||
for (let y = 0; y < match.length; ++y) {
|
||||
allAddresses = allAddresses.concat(match[y].addresses);
|
||||
allRegexes = allRegexes.concat(match[y].regexes);
|
||||
}
|
||||
|
||||
const finalPoolName = poolNames[i].replace(`'`, `''`); // To support single quote in names when doing db queries
|
||||
|
||||
let slug: string | undefined;
|
||||
try {
|
||||
slug = poolsJson['slugs'][poolNames[i]];
|
||||
} catch (e) {
|
||||
if (this.slugWarnFlag === false) {
|
||||
logger.warn(`pools.json does not seem to contain the 'slugs' object`, logger.tags.mining);
|
||||
this.slugWarnFlag = true;
|
||||
if (poolDB.name !== pool.name) {
|
||||
// Pool has been renamed
|
||||
const newSlug = pool.name.replace(/[^a-z0-9]/gi, '').toLowerCase();
|
||||
logger.warn(`Renaming ${poolDB.name} mining pool to ${pool.name}. Slug has been updated. Maybe you want to make a redirection from 'https://mempool.space/mining/pool/${poolDB.slug}' to 'https://mempool.space/mining/pool/${newSlug}`);
|
||||
await PoolsRepository.$renameMiningPool(poolDB.id, newSlug, pool.name);
|
||||
}
|
||||
}
|
||||
|
||||
if (slug === undefined) {
|
||||
// Only keep alphanumerical
|
||||
slug = poolNames[i].replace(/[^a-z0-9]/gi, '').toLowerCase();
|
||||
logger.warn(`No slug found for '${poolNames[i]}', generating it => '${slug}'`, logger.tags.mining);
|
||||
}
|
||||
|
||||
const poolObj = {
|
||||
'name': finalPoolName,
|
||||
'link': match[0].link,
|
||||
'regexes': allRegexes,
|
||||
'addresses': allAddresses,
|
||||
'slug': slug
|
||||
};
|
||||
|
||||
const existingPool = existingPools.find((pool) => pool.name === poolNames[i]);
|
||||
if (existingPool !== undefined) {
|
||||
// Check if any data was actually updated
|
||||
const equals = (a, b) =>
|
||||
a.length === b.length &&
|
||||
a.every((v, i) => v === b[i]);
|
||||
if (!equals(JSON.parse(existingPool.addresses), poolObj.addresses) || !equals(JSON.parse(existingPool.regexes), poolObj.regexes)) {
|
||||
finalPoolDataUpdate.push(poolObj);
|
||||
if (poolDB.link !== pool.link) {
|
||||
// Pool link has changed
|
||||
logger.debug(`Updating link for ${pool.name} mining pool`);
|
||||
await PoolsRepository.$updateMiningPoolLink(poolDB.id, pool.link);
|
||||
}
|
||||
} else if (config.DATABASE.ENABLED) {
|
||||
// Double check that if we're not just renaming a pool (same address same regex)
|
||||
const [poolToRename]: any[] = await DB.query(`
|
||||
SELECT * FROM pools
|
||||
WHERE addresses = ? OR regexes = ?`,
|
||||
[JSON.stringify(poolObj.addresses), JSON.stringify(poolObj.regexes)]
|
||||
);
|
||||
if (poolToRename && poolToRename.length > 0) {
|
||||
// We're actually renaming an existing pool
|
||||
finalPoolDataRename.push({
|
||||
'name': poolObj.name,
|
||||
'link': poolObj.link,
|
||||
'regexes': allRegexes,
|
||||
'addresses': allAddresses,
|
||||
'slug': slug
|
||||
});
|
||||
logger.debug(`Rename '${poolToRename[0].name}' mining pool to ${poolObj.name}`, logger.tags.mining);
|
||||
} else {
|
||||
logger.debug(`Add '${finalPoolName}' mining pool`, logger.tags.mining);
|
||||
finalPoolDataAdd.push(poolObj);
|
||||
if (JSON.stringify(pool.addresses) !== poolDB.addresses ||
|
||||
JSON.stringify(pool.regexes) !== poolDB.regexes) {
|
||||
// Pool addresses changed or coinbase tags changed
|
||||
logger.notice(`Updating addresses and/or coinbase tags for ${pool.name} mining pool. If 'AUTOMATIC_BLOCK_REINDEXING' is enabled, we will re-index its blocks and 'unknown' blocks`);
|
||||
await PoolsRepository.$updateMiningPoolTags(poolDB.id, pool.addresses, pool.regexes);
|
||||
await this.$deleteBlocksForPool(poolDB);
|
||||
}
|
||||
}
|
||||
|
||||
this.miningPools.push({
|
||||
'name': finalPoolName,
|
||||
'link': match[0].link,
|
||||
'regexes': JSON.stringify(allRegexes),
|
||||
'addresses': JSON.stringify(allAddresses),
|
||||
'slug': slug
|
||||
});
|
||||
}
|
||||
|
||||
if (config.DATABASE.ENABLED === false) { // Don't run db operations
|
||||
logger.info('Mining pools.json import completed (no database)', logger.tags.mining);
|
||||
return;
|
||||
}
|
||||
|
||||
if (finalPoolDataAdd.length > 0 || finalPoolDataUpdate.length > 0 ||
|
||||
finalPoolDataRename.length > 0
|
||||
) {
|
||||
logger.debug(`Update pools table now`, logger.tags.mining);
|
||||
|
||||
// Add new mining pools into the database
|
||||
let queryAdd: string = 'INSERT INTO pools(name, link, regexes, addresses, slug) VALUES ';
|
||||
for (let i = 0; i < finalPoolDataAdd.length; ++i) {
|
||||
queryAdd += `('${finalPoolDataAdd[i].name}', '${finalPoolDataAdd[i].link}',
|
||||
'${JSON.stringify(finalPoolDataAdd[i].regexes)}', '${JSON.stringify(finalPoolDataAdd[i].addresses)}',
|
||||
${JSON.stringify(finalPoolDataAdd[i].slug)}),`;
|
||||
}
|
||||
queryAdd = queryAdd.slice(0, -1) + ';';
|
||||
|
||||
// Updated existing mining pools in the database
|
||||
const updateQueries: string[] = [];
|
||||
for (let i = 0; i < finalPoolDataUpdate.length; ++i) {
|
||||
updateQueries.push(`
|
||||
UPDATE pools
|
||||
SET name='${finalPoolDataUpdate[i].name}', link='${finalPoolDataUpdate[i].link}',
|
||||
regexes='${JSON.stringify(finalPoolDataUpdate[i].regexes)}', addresses='${JSON.stringify(finalPoolDataUpdate[i].addresses)}',
|
||||
slug='${finalPoolDataUpdate[i].slug}'
|
||||
WHERE name='${finalPoolDataUpdate[i].name}'
|
||||
;`);
|
||||
}
|
||||
|
||||
// Rename mining pools
|
||||
const renameQueries: string[] = [];
|
||||
for (let i = 0; i < finalPoolDataRename.length; ++i) {
|
||||
renameQueries.push(`
|
||||
UPDATE pools
|
||||
SET name='${finalPoolDataRename[i].name}', link='${finalPoolDataRename[i].link}',
|
||||
slug='${finalPoolDataRename[i].slug}'
|
||||
WHERE regexes='${JSON.stringify(finalPoolDataRename[i].regexes)}'
|
||||
AND addresses='${JSON.stringify(finalPoolDataRename[i].addresses)}'
|
||||
;`);
|
||||
}
|
||||
|
||||
try {
|
||||
if (finalPoolDataAdd.length > 0 || updateQueries.length > 0) {
|
||||
await this.$deleteBlocskToReindex(finalPoolDataUpdate);
|
||||
}
|
||||
|
||||
if (finalPoolDataAdd.length > 0) {
|
||||
await DB.query({ sql: queryAdd, timeout: 120000 });
|
||||
}
|
||||
for (const query of updateQueries) {
|
||||
await DB.query({ sql: query, timeout: 120000 });
|
||||
}
|
||||
for (const query of renameQueries) {
|
||||
await DB.query({ sql: query, timeout: 120000 });
|
||||
}
|
||||
await this.insertUnknownPool();
|
||||
logger.info('Mining pools.json import completed', logger.tags.mining);
|
||||
} catch (e) {
|
||||
logger.err(`Cannot import pools in the database`, logger.tags.mining);
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
await this.insertUnknownPool();
|
||||
} catch (e) {
|
||||
logger.err(`Cannot insert unknown pool in the database`, logger.tags.mining);
|
||||
throw e;
|
||||
}
|
||||
logger.info('Mining pools-v2.json import completed');
|
||||
}
|
||||
|
||||
/**
|
||||
* Manually add the 'unknown pool'
|
||||
*/
|
||||
private async insertUnknownPool() {
|
||||
public async $insertUnknownPool(): Promise<void> {
|
||||
if (!config.DATABASE.ENABLED) {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const [rows]: any[] = await DB.query({ sql: 'SELECT name from pools where name="Unknown"', timeout: 120000 });
|
||||
if (rows.length === 0) {
|
||||
await DB.query({
|
||||
sql: `INSERT INTO pools(name, link, regexes, addresses, slug)
|
||||
VALUES("Unknown", "https://learnmeabitcoin.com/technical/coinbase-transaction", "[]", "[]", "unknown");
|
||||
sql: `INSERT INTO pools(name, link, regexes, addresses, slug, unique_id)
|
||||
VALUES("${this.unknownPool.name}", "${this.unknownPool.link}", "[]", "[]", "${this.unknownPool.slug}", 0);
|
||||
`});
|
||||
} else {
|
||||
await DB.query(`UPDATE pools
|
||||
SET name='Unknown', link='https://learnmeabitcoin.com/technical/coinbase-transaction',
|
||||
SET name='${this.unknownPool.name}', link='${this.unknownPool.link}',
|
||||
regexes='[]', addresses='[]',
|
||||
slug='unknown'
|
||||
WHERE name='Unknown'
|
||||
slug='${this.unknownPool.slug}',
|
||||
unique_id=0
|
||||
WHERE slug='${this.unknownPool.slug}'
|
||||
`);
|
||||
}
|
||||
} catch (e) {
|
||||
logger.err('Unable to insert "Unknown" mining pool', logger.tags.mining);
|
||||
logger.err(`Unable to insert or update "Unknown" mining pool. Reason: ${e instanceof Error ? e.message : e}`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete blocks which needs to be reindexed
|
||||
* Delete indexed blocks for an updated mining pool
|
||||
*
|
||||
* @param pool
|
||||
*/
|
||||
private async $deleteBlocskToReindex(finalPoolDataUpdate: any[]) {
|
||||
private async $deleteBlocksForPool(pool: PoolTag): Promise<void> {
|
||||
if (config.MEMPOOL.AUTOMATIC_BLOCK_REINDEXING === false) {
|
||||
return;
|
||||
}
|
||||
|
||||
const blockCount = await BlocksRepository.$blockCount(null, null);
|
||||
if (blockCount === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
for (const updatedPool of finalPoolDataUpdate) {
|
||||
const [pool]: any[] = await DB.query(`SELECT id, name from pools where slug = "${updatedPool.slug}"`);
|
||||
if (pool.length > 0) {
|
||||
logger.notice(`Deleting blocks from ${pool[0].name} mining pool for future re-indexing`, logger.tags.mining);
|
||||
await DB.query(`DELETE FROM blocks WHERE pool_id = ${pool[0].id}`);
|
||||
}
|
||||
}
|
||||
|
||||
// Ignore early days of Bitcoin as there were not mining pool yet
|
||||
logger.notice(`Deleting blocks with unknown mining pool from height 130635 for future re-indexing`, logger.tags.mining);
|
||||
// Get oldest blocks mined by the pool and assume pools-v2.json updates only concern most recent years
|
||||
// Ignore early days of Bitcoin as there were no mining pool yet
|
||||
const [oldestPoolBlock]: any[] = await DB.query(`
|
||||
SELECT height
|
||||
FROM blocks
|
||||
WHERE pool_id = ?
|
||||
ORDER BY height
|
||||
LIMIT 1`,
|
||||
[pool.id]
|
||||
);
|
||||
const oldestBlockHeight = oldestPoolBlock.length ?? 0 > 0 ? oldestPoolBlock[0].height : 130635;
|
||||
const [unknownPool] = await DB.query(`SELECT id from pools where slug = "unknown"`);
|
||||
await DB.query(`DELETE FROM blocks WHERE pool_id = ${unknownPool[0].id} AND height > 130635`);
|
||||
this.uniqueLog(logger.notice, `Deleting blocks with unknown mining pool from height ${oldestBlockHeight} for re-indexing`);
|
||||
await DB.query(`
|
||||
DELETE FROM blocks
|
||||
WHERE pool_id = ? AND height >= ${oldestBlockHeight}`,
|
||||
[unknownPool[0].id]
|
||||
);
|
||||
logger.notice(`Deleting blocks from ${pool.name} mining pool for re-indexing`);
|
||||
await DB.query(`
|
||||
DELETE FROM blocks
|
||||
WHERE pool_id = ?`,
|
||||
[pool.id]
|
||||
);
|
||||
}
|
||||
|
||||
logger.notice(`Truncating hashrates for future re-indexing`, logger.tags.mining);
|
||||
await DB.query(`DELETE FROM hashrates`);
|
||||
private async $deleteUnknownBlocks(): Promise<void> {
|
||||
const [unknownPool] = await DB.query(`SELECT id from pools where slug = "unknown"`);
|
||||
this.uniqueLog(logger.notice, `Deleting blocks with unknown mining pool from height 130635 for re-indexing`);
|
||||
await DB.query(`
|
||||
DELETE FROM blocks
|
||||
WHERE pool_id = ? AND height >= 130635`,
|
||||
[unknownPool[0].id]
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -14,6 +14,7 @@ class TransactionUtils {
|
||||
vout: tx.vout
|
||||
.map((vout) => ({
|
||||
scriptpubkey_address: vout.scriptpubkey_address,
|
||||
scriptpubkey_asm: vout.scriptpubkey_asm,
|
||||
value: vout.value
|
||||
}))
|
||||
.filter((vout) => vout.value)
|
||||
|
||||
@@ -32,6 +32,7 @@ interface IConfig {
|
||||
ADVANCED_GBT_AUDIT: boolean;
|
||||
ADVANCED_GBT_MEMPOOL: boolean;
|
||||
CPFP_INDEXING: boolean;
|
||||
MAX_BLOCKS_BULK_QUERY: number;
|
||||
};
|
||||
ESPLORA: {
|
||||
REST_API_URL: string;
|
||||
@@ -147,12 +148,13 @@ const defaults: IConfig = {
|
||||
'USER_AGENT': 'mempool',
|
||||
'STDOUT_LOG_MIN_PRIORITY': 'debug',
|
||||
'AUTOMATIC_BLOCK_REINDEXING': false,
|
||||
'POOLS_JSON_URL': 'https://raw.githubusercontent.com/mempool/mining-pools/master/pools.json',
|
||||
'POOLS_JSON_URL': 'https://raw.githubusercontent.com/mempool/mining-pools/master/pools-v2.json',
|
||||
'POOLS_JSON_TREE_URL': 'https://api.github.com/repos/mempool/mining-pools/git/trees/master',
|
||||
'AUDIT': false,
|
||||
'ADVANCED_GBT_AUDIT': false,
|
||||
'ADVANCED_GBT_MEMPOOL': false,
|
||||
'CPFP_INDEXING': false,
|
||||
'MAX_BLOCKS_BULK_QUERY': 0,
|
||||
},
|
||||
'ESPLORA': {
|
||||
'REST_API_URL': 'http://127.0.0.1:3000',
|
||||
|
||||
@@ -24,7 +24,8 @@ import { FieldPacket, OkPacket, PoolOptions, ResultSetHeader, RowDataPacket } fr
|
||||
|
||||
private checkDBFlag() {
|
||||
if (config.DATABASE.ENABLED === false) {
|
||||
logger.err('Trying to use DB feature but config.DATABASE.ENABLED is set to false, please open an issue');
|
||||
const stack = new Error().stack;
|
||||
logger.err(`Trying to use DB feature but config.DATABASE.ENABLED is set to false, please open an issue.\nStack trace: ${stack}}`);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -36,6 +36,7 @@ import bitcoinRoutes from './api/bitcoin/bitcoin.routes';
|
||||
import fundingTxFetcher from './tasks/lightning/sync-tasks/funding-tx-fetcher';
|
||||
import forensicsService from './tasks/lightning/forensics.service';
|
||||
import priceUpdater from './tasks/price-updater';
|
||||
import chainTips from './api/chain-tips';
|
||||
import { AxiosError } from 'axios';
|
||||
|
||||
class Server {
|
||||
@@ -82,11 +83,8 @@ class Server {
|
||||
if (config.DATABASE.ENABLED) {
|
||||
await DB.checkDbConnection();
|
||||
try {
|
||||
if (process.env.npm_config_reindex !== undefined) { // Re-index requests
|
||||
const tables = process.env.npm_config_reindex.split(',');
|
||||
logger.warn(`Indexed data for "${process.env.npm_config_reindex}" tables will be erased in 5 seconds (using '--reindex')`);
|
||||
await Common.sleep$(5000);
|
||||
await databaseMigration.$truncateIndexedData(tables);
|
||||
if (process.env.npm_config_reindex_blocks === 'true') { // Re-index requests
|
||||
await databaseMigration.$blocksReindexingTruncate();
|
||||
}
|
||||
await databaseMigration.$initializeOrMigrateDatabase();
|
||||
if (Common.indexingEnabled()) {
|
||||
@@ -115,6 +113,7 @@ class Server {
|
||||
|
||||
this.setUpWebsocketHandling();
|
||||
|
||||
await poolsUpdater.updatePoolsJson(); // Needs to be done before loading the disk cache because we sometimes wipe it
|
||||
await syncAssets.syncAssets$();
|
||||
if (config.MEMPOOL.ENABLED) {
|
||||
diskCache.loadMempoolCache();
|
||||
@@ -133,6 +132,7 @@ class Server {
|
||||
}
|
||||
|
||||
priceUpdater.$run();
|
||||
await chainTips.updateOrphanedBlocks();
|
||||
|
||||
this.setUpHttpApiRoutes();
|
||||
|
||||
@@ -172,7 +172,6 @@ class Server {
|
||||
logger.debug(msg);
|
||||
}
|
||||
}
|
||||
await poolsUpdater.updatePoolsJson();
|
||||
await blocks.$updateBlocks();
|
||||
await memPool.$updateMempool();
|
||||
indexer.$run();
|
||||
@@ -180,7 +179,14 @@ class Server {
|
||||
setTimeout(this.runMainUpdateLoop.bind(this), config.MEMPOOL.POLL_RATE_MS);
|
||||
this.currentBackendRetryInterval = 5;
|
||||
} catch (e: any) {
|
||||
const loggerMsg = `runMainLoop error: ${(e instanceof Error ? e.message : e)}. Retrying in ${this.currentBackendRetryInterval} sec.`;
|
||||
let loggerMsg = `Exception in runMainUpdateLoop(). Retrying in ${this.currentBackendRetryInterval} sec.`;
|
||||
loggerMsg += ` Reason: ${(e instanceof Error ? e.message : e)}.`;
|
||||
if (e?.stack) {
|
||||
loggerMsg += ` Stack trace: ${e.stack}`;
|
||||
}
|
||||
// When we get a first Exception, only `logger.debug` it and retry after 5 seconds
|
||||
// From the second Exception, `logger.warn` the Exception and increase the retry delay
|
||||
// Maximum retry delay is 60 seconds
|
||||
if (this.currentBackendRetryInterval > 5) {
|
||||
logger.warn(loggerMsg);
|
||||
mempool.setOutOfSync();
|
||||
@@ -200,8 +206,8 @@ class Server {
|
||||
try {
|
||||
await fundingTxFetcher.$init();
|
||||
await networkSyncService.$startService();
|
||||
await forensicsService.$startService();
|
||||
await lightningStatsUpdater.$startService();
|
||||
await forensicsService.$startService();
|
||||
} catch(e) {
|
||||
logger.err(`Nodejs lightning backend crashed. Restarting in 1 minute. Reason: ${(e instanceof Error ? e.message : e)}`);
|
||||
await Common.sleep$(1000 * 60);
|
||||
|
||||
@@ -8,18 +8,67 @@ import bitcoinClient from './api/bitcoin/bitcoin-client';
|
||||
import priceUpdater from './tasks/price-updater';
|
||||
import PricesRepository from './repositories/PricesRepository';
|
||||
|
||||
export interface CoreIndex {
|
||||
name: string;
|
||||
synced: boolean;
|
||||
best_block_height: number;
|
||||
}
|
||||
|
||||
class Indexer {
|
||||
runIndexer = true;
|
||||
indexerRunning = false;
|
||||
tasksRunning: string[] = [];
|
||||
coreIndexes: CoreIndex[] = [];
|
||||
|
||||
public reindex() {
|
||||
/**
|
||||
* Check which core index is available for indexing
|
||||
*/
|
||||
public async checkAvailableCoreIndexes(): Promise<void> {
|
||||
const updatedCoreIndexes: CoreIndex[] = [];
|
||||
|
||||
const indexes: any = await bitcoinClient.getIndexInfo();
|
||||
for (const indexName in indexes) {
|
||||
const newState = {
|
||||
name: indexName,
|
||||
synced: indexes[indexName].synced,
|
||||
best_block_height: indexes[indexName].best_block_height,
|
||||
};
|
||||
logger.info(`Core index '${indexName}' is ${indexes[indexName].synced ? 'synced' : 'not synced'}. Best block height is ${indexes[indexName].best_block_height}`);
|
||||
updatedCoreIndexes.push(newState);
|
||||
|
||||
if (indexName === 'coinstatsindex' && newState.synced === true) {
|
||||
const previousState = this.isCoreIndexReady('coinstatsindex');
|
||||
// if (!previousState || previousState.synced === false) {
|
||||
this.runSingleTask('coinStatsIndex');
|
||||
// }
|
||||
}
|
||||
}
|
||||
|
||||
this.coreIndexes = updatedCoreIndexes;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the best block height if a core index is available, or 0 if not
|
||||
*
|
||||
* @param name
|
||||
* @returns
|
||||
*/
|
||||
public isCoreIndexReady(name: string): CoreIndex | null {
|
||||
for (const index of this.coreIndexes) {
|
||||
if (index.name === name && index.synced === true) {
|
||||
return index;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
public reindex(): void {
|
||||
if (Common.indexingEnabled()) {
|
||||
this.runIndexer = true;
|
||||
}
|
||||
}
|
||||
|
||||
public async runSingleTask(task: 'blocksPrices') {
|
||||
public async runSingleTask(task: 'blocksPrices' | 'coinStatsIndex'): Promise<void> {
|
||||
if (!Common.indexingEnabled()) {
|
||||
return;
|
||||
}
|
||||
@@ -28,20 +77,27 @@ class Indexer {
|
||||
this.tasksRunning.push(task);
|
||||
const lastestPriceId = await PricesRepository.$getLatestPriceId();
|
||||
if (priceUpdater.historyInserted === false || lastestPriceId === null) {
|
||||
logger.debug(`Blocks prices indexer is waiting for the price updater to complete`)
|
||||
logger.debug(`Blocks prices indexer is waiting for the price updater to complete`);
|
||||
setTimeout(() => {
|
||||
this.tasksRunning = this.tasksRunning.filter(runningTask => runningTask != task)
|
||||
this.tasksRunning = this.tasksRunning.filter(runningTask => runningTask !== task);
|
||||
this.runSingleTask('blocksPrices');
|
||||
}, 10000);
|
||||
} else {
|
||||
logger.debug(`Blocks prices indexer will run now`)
|
||||
logger.debug(`Blocks prices indexer will run now`);
|
||||
await mining.$indexBlockPrices();
|
||||
this.tasksRunning = this.tasksRunning.filter(runningTask => runningTask != task)
|
||||
this.tasksRunning = this.tasksRunning.filter(runningTask => runningTask !== task);
|
||||
}
|
||||
}
|
||||
|
||||
if (task === 'coinStatsIndex' && !this.tasksRunning.includes(task)) {
|
||||
this.tasksRunning.push(task);
|
||||
logger.debug(`Indexing coinStatsIndex now`);
|
||||
await mining.$indexCoinStatsIndex();
|
||||
this.tasksRunning = this.tasksRunning.filter(runningTask => runningTask !== task);
|
||||
}
|
||||
}
|
||||
|
||||
public async $run() {
|
||||
public async $run(): Promise<void> {
|
||||
if (!Common.indexingEnabled() || this.runIndexer === false ||
|
||||
this.indexerRunning === true || mempool.hasPriority()
|
||||
) {
|
||||
@@ -57,7 +113,9 @@ class Indexer {
|
||||
this.runIndexer = false;
|
||||
this.indexerRunning = true;
|
||||
|
||||
logger.debug(`Running mining indexer`);
|
||||
logger.info(`Running mining indexer`);
|
||||
|
||||
await this.checkAvailableCoreIndexes();
|
||||
|
||||
try {
|
||||
await priceUpdater.$run();
|
||||
@@ -93,7 +151,7 @@ class Indexer {
|
||||
setTimeout(() => this.reindex(), runEvery);
|
||||
}
|
||||
|
||||
async $resetHashratesIndexingState() {
|
||||
async $resetHashratesIndexingState(): Promise<void> {
|
||||
try {
|
||||
await HashratesRepository.$setLatestRun('last_hashrates_indexing', 0);
|
||||
await HashratesRepository.$setLatestRun('last_weekly_hashrates_indexing', 0);
|
||||
|
||||
@@ -1,8 +1,10 @@
import { IEsploraApi } from './api/bitcoin/esplora-api.interface';
import { HeapNode } from "./utils/pairing-heap";
import { OrphanedBlock } from './api/chain-tips';
import { HeapNode } from './utils/pairing-heap';

export interface PoolTag {
id: number; // mysql row id
id: number;
uniqueId: number;
name: string;
link: string;
regexes: string; // JSON array
@@ -16,6 +18,7 @@ export interface PoolInfo {
link: string;
blockCount: number;
slug: string;
avgMatchRate: number | null;
}

export interface PoolStats extends PoolInfo {
@@ -63,6 +66,7 @@ interface VinStrippedToScriptsig {

interface VoutStrippedToScriptPubkey {
scriptpubkey_address: string | undefined;
scriptpubkey_asm: string | undefined;
value: number;
}

@@ -144,23 +148,44 @@ export interface TransactionStripped {
}

export interface BlockExtension {
totalFees?: number;
medianFee?: number;
feeRange?: number[];
reward?: number;
coinbaseTx?: TransactionMinerInfo;
matchRate?: number;
pool?: {
id: number;
totalFees: number;
medianFee: number; // median fee rate
feeRange: number[]; // fee rate percentiles
reward: number;
matchRate: number | null;
pool: {
id: number; // Note - This is the `unique_id`, not to mix with the auto increment `id`
name: string;
slug: string;
};
avgFee?: number;
avgFeeRate?: number;
coinbaseRaw?: string;
usd?: number | null;
avgFee: number;
avgFeeRate: number;
coinbaseRaw: string;
orphans: OrphanedBlock[] | null;
coinbaseAddress: string | null;
coinbaseSignature: string | null;
coinbaseSignatureAscii: string | null;
virtualSize: number;
avgTxSize: number;
totalInputs: number;
totalOutputs: number;
totalOutputAmt: number;
medianFeeAmt: number | null; // median fee in sats
feePercentiles: number[] | null, // fee percentiles in sats
segwitTotalTxs: number;
segwitTotalSize: number;
segwitTotalWeight: number;
header: string;
utxoSetChange: number;
// Requires coinstatsindex, will be set to NULL otherwise
utxoSetSize: number | null;
totalInputAmt: number | null;
}

/**
* Note: Everything that is added in here will be automatically returned through
* /api/v1/block and /api/v1/blocks APIs
*/
export interface BlockExtended extends IEsploraApi.Block {
extras: BlockExtension;
}

@@ -1,8 +1,7 @@
|
||||
import { BlockExtended, BlockPrice } from '../mempool.interfaces';
|
||||
import { BlockExtended, BlockExtension, BlockPrice } from '../mempool.interfaces';
|
||||
import DB from '../database';
|
||||
import logger from '../logger';
|
||||
import { Common } from '../api/common';
|
||||
import { prepareBlock } from '../utils/blocks-utils';
|
||||
import PoolsRepository from './PoolsRepository';
|
||||
import HashratesRepository from './HashratesRepository';
|
||||
import { escape } from 'mysql2';
|
||||
@@ -10,27 +9,90 @@ import BlocksSummariesRepository from './BlocksSummariesRepository';
|
||||
import DifficultyAdjustmentsRepository from './DifficultyAdjustmentsRepository';
|
||||
import bitcoinClient from '../api/bitcoin/bitcoin-client';
|
||||
import config from '../config';
|
||||
import chainTips from '../api/chain-tips';
|
||||
import blocks from '../api/blocks';
|
||||
import BlocksAuditsRepository from './BlocksAuditsRepository';
|
||||
|
||||
const BLOCK_DB_FIELDS = `
|
||||
blocks.hash AS id,
|
||||
blocks.height,
|
||||
blocks.version,
|
||||
UNIX_TIMESTAMP(blocks.blockTimestamp) AS timestamp,
|
||||
blocks.bits,
|
||||
blocks.nonce,
|
||||
blocks.difficulty,
|
||||
blocks.merkle_root,
|
||||
blocks.tx_count,
|
||||
blocks.size,
|
||||
blocks.weight,
|
||||
blocks.previous_block_hash AS previousblockhash,
|
||||
UNIX_TIMESTAMP(blocks.median_timestamp) AS mediantime,
|
||||
blocks.fees AS totalFees,
|
||||
blocks.median_fee AS medianFee,
|
||||
blocks.fee_span AS feeRange,
|
||||
blocks.reward,
|
||||
pools.unique_id AS poolId,
|
||||
pools.name AS poolName,
|
||||
pools.slug AS poolSlug,
|
||||
blocks.avg_fee AS avgFee,
|
||||
blocks.avg_fee_rate AS avgFeeRate,
|
||||
blocks.coinbase_raw AS coinbaseRaw,
|
||||
blocks.coinbase_address AS coinbaseAddress,
|
||||
blocks.coinbase_signature AS coinbaseSignature,
|
||||
blocks.coinbase_signature_ascii AS coinbaseSignatureAscii,
|
||||
blocks.avg_tx_size AS avgTxSize,
|
||||
blocks.total_inputs AS totalInputs,
|
||||
blocks.total_outputs AS totalOutputs,
|
||||
blocks.total_output_amt AS totalOutputAmt,
|
||||
blocks.median_fee_amt AS medianFeeAmt,
|
||||
blocks.fee_percentiles AS feePercentiles,
|
||||
blocks.segwit_total_txs AS segwitTotalTxs,
|
||||
blocks.segwit_total_size AS segwitTotalSize,
|
||||
blocks.segwit_total_weight AS segwitTotalWeight,
|
||||
blocks.header,
|
||||
blocks.utxoset_change AS utxoSetChange,
|
||||
blocks.utxoset_size AS utxoSetSize,
|
||||
blocks.total_input_amt AS totalInputAmts
|
||||
`;
|
||||
|
||||
class BlocksRepository {
|
||||
/**
|
||||
* Save indexed block data in the database
|
||||
*/
|
||||
public async $saveBlockInDatabase(block: BlockExtended) {
|
||||
const truncatedCoinbaseSignature = block?.extras?.coinbaseSignature?.substring(0, 500);
|
||||
const truncatedCoinbaseSignatureAscii = block?.extras?.coinbaseSignatureAscii?.substring(0, 500);
|
||||
|
||||
try {
|
||||
const query = `INSERT INTO blocks(
|
||||
height, hash, blockTimestamp, size,
|
||||
weight, tx_count, coinbase_raw, difficulty,
|
||||
pool_id, fees, fee_span, median_fee,
|
||||
reward, version, bits, nonce,
|
||||
merkle_root, previous_block_hash, avg_fee, avg_fee_rate
|
||||
height, hash, blockTimestamp, size,
|
||||
weight, tx_count, coinbase_raw, difficulty,
|
||||
pool_id, fees, fee_span, median_fee,
|
||||
reward, version, bits, nonce,
|
||||
merkle_root, previous_block_hash, avg_fee, avg_fee_rate,
|
||||
median_timestamp, header, coinbase_address,
|
||||
coinbase_signature, utxoset_size, utxoset_change, avg_tx_size,
|
||||
total_inputs, total_outputs, total_input_amt, total_output_amt,
|
||||
fee_percentiles, segwit_total_txs, segwit_total_size, segwit_total_weight,
|
||||
median_fee_amt, coinbase_signature_ascii
|
||||
) VALUE (
|
||||
?, ?, FROM_UNIXTIME(?), ?,
|
||||
?, ?, ?, ?,
|
||||
?, ?, ?, ?,
|
||||
?, ?, ?, ?,
|
||||
?, ?, ?, ?
|
||||
?, ?, ?, ?,
|
||||
FROM_UNIXTIME(?), ?, ?,
|
||||
?, ?, ?, ?,
|
||||
?, ?, ?, ?,
|
||||
?, ?, ?, ?,
|
||||
?, ?
|
||||
)`;
|
||||
|
||||
const poolDbId = await PoolsRepository.$getPoolByUniqueId(block.extras.pool.id);
|
||||
if (!poolDbId) {
|
||||
throw Error(`Could not find a mining pool with the unique_id = ${block.extras.pool.id}. This error should never be printed.`);
|
||||
}
|
||||
|
||||
const params: any[] = [
|
||||
block.height,
|
||||
block.id,
|
||||
@@ -40,7 +102,7 @@ class BlocksRepository {
|
||||
block.tx_count,
|
||||
block.extras.coinbaseRaw,
|
||||
block.difficulty,
|
||||
block.extras.pool?.id, // Should always be set to something
|
||||
poolDbId.id,
|
||||
block.extras.totalFees,
|
||||
JSON.stringify(block.extras.feeRange),
|
||||
block.extras.medianFee,
|
||||
@@ -52,19 +114,63 @@ class BlocksRepository {
|
||||
block.previousblockhash,
|
||||
block.extras.avgFee,
|
||||
block.extras.avgFeeRate,
|
||||
block.mediantime,
|
||||
block.extras.header,
|
||||
block.extras.coinbaseAddress,
|
||||
truncatedCoinbaseSignature,
|
||||
block.extras.utxoSetSize,
|
||||
block.extras.utxoSetChange,
|
||||
block.extras.avgTxSize,
|
||||
block.extras.totalInputs,
|
||||
block.extras.totalOutputs,
|
||||
block.extras.totalInputAmt,
|
||||
block.extras.totalOutputAmt,
|
||||
block.extras.feePercentiles ? JSON.stringify(block.extras.feePercentiles) : null,
|
||||
block.extras.segwitTotalTxs,
|
||||
block.extras.segwitTotalSize,
|
||||
block.extras.segwitTotalWeight,
|
||||
block.extras.medianFeeAmt,
|
||||
truncatedCoinbaseSignatureAscii,
|
||||
];
|
||||
|
||||
await DB.query(query, params);
|
||||
} catch (e: any) {
|
||||
if (e.errno === 1062) { // ER_DUP_ENTRY - This scenario is possible upon node backend restart
|
||||
logger.debug(`$saveBlockInDatabase() - Block ${block.height} has already been indexed, ignoring`);
|
||||
logger.debug(`$saveBlockInDatabase() - Block ${block.height} has already been indexed, ignoring`, logger.tags.mining);
|
||||
} else {
|
||||
logger.err('Cannot save indexed block into db. Reason: ' + (e instanceof Error ? e.message : e));
|
||||
logger.err('Cannot save indexed block into db. Reason: ' + (e instanceof Error ? e.message : e), logger.tags.mining);
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Save newly indexed data from core coinstatsindex
|
||||
*
|
||||
* @param utxoSetSize
|
||||
* @param totalInputAmt
|
||||
*/
|
||||
public async $updateCoinStatsIndexData(blockHash: string, utxoSetSize: number,
|
||||
totalInputAmt: number
|
||||
) : Promise<void> {
|
||||
try {
|
||||
const query = `
|
||||
UPDATE blocks
|
||||
SET utxoset_size = ?, total_input_amt = ?
|
||||
WHERE hash = ?
|
||||
`;
|
||||
const params: any[] = [
|
||||
utxoSetSize,
|
||||
totalInputAmt,
|
||||
blockHash
|
||||
];
|
||||
await DB.query(query, params);
|
||||
} catch (e: any) {
|
||||
logger.err('Cannot update indexed block coinstatsindex. Reason: ' + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all block height that have not been indexed between [startHeight, endHeight]
|
||||
*/
|
||||
@@ -250,34 +356,17 @@ class BlocksRepository {
|
||||
/**
|
||||
* Get blocks mined by a specific mining pool
|
||||
*/
|
||||
public async $getBlocksByPool(slug: string, startHeight?: number): Promise<object[]> {
|
||||
public async $getBlocksByPool(slug: string, startHeight?: number): Promise<BlockExtended[]> {
|
||||
const pool = await PoolsRepository.$getPool(slug);
|
||||
if (!pool) {
|
||||
throw new Error('This mining pool does not exist ' + escape(slug));
|
||||
}
|
||||
|
||||
const params: any[] = [];
|
||||
let query = ` SELECT
|
||||
blocks.height,
|
||||
hash as id,
|
||||
UNIX_TIMESTAMP(blocks.blockTimestamp) as blockTimestamp,
|
||||
size,
|
||||
weight,
|
||||
tx_count,
|
||||
coinbase_raw,
|
||||
difficulty,
|
||||
fees,
|
||||
fee_span,
|
||||
median_fee,
|
||||
reward,
|
||||
version,
|
||||
bits,
|
||||
nonce,
|
||||
merkle_root,
|
||||
previous_block_hash as previousblockhash,
|
||||
avg_fee,
|
||||
avg_fee_rate
|
||||
let query = `
|
||||
SELECT ${BLOCK_DB_FIELDS}
|
||||
FROM blocks
|
||||
JOIN pools ON blocks.pool_id = pools.id
|
||||
WHERE pool_id = ?`;
|
||||
params.push(pool.id);
|
||||
|
||||
@@ -290,11 +379,11 @@ class BlocksRepository {
|
||||
LIMIT 10`;
|
||||
|
||||
try {
|
||||
const [rows] = await DB.query(query, params);
|
||||
const [rows]: any[] = await DB.query(query, params);
|
||||
|
||||
const blocks: BlockExtended[] = [];
|
||||
for (const block of <object[]>rows) {
|
||||
blocks.push(prepareBlock(block));
|
||||
for (const block of rows) {
|
||||
blocks.push(await this.formatDbBlockIntoExtendedBlock(block));
|
||||
}
|
||||
|
||||
return blocks;
|
||||
@@ -307,46 +396,21 @@ class BlocksRepository {
|
||||
/**
|
||||
* Get one block by height
|
||||
*/
|
||||
public async $getBlockByHeight(height: number): Promise<object | null> {
|
||||
public async $getBlockByHeight(height: number): Promise<BlockExtended | null> {
|
||||
try {
|
||||
const [rows]: any[] = await DB.query(`SELECT
|
||||
blocks.height,
|
||||
hash,
|
||||
hash as id,
|
||||
UNIX_TIMESTAMP(blocks.blockTimestamp) as blockTimestamp,
|
||||
size,
|
||||
weight,
|
||||
tx_count,
|
||||
coinbase_raw,
|
||||
difficulty,
|
||||
pools.id as pool_id,
|
||||
pools.name as pool_name,
|
||||
pools.link as pool_link,
|
||||
pools.slug as pool_slug,
|
||||
pools.addresses as pool_addresses,
|
||||
pools.regexes as pool_regexes,
|
||||
fees,
|
||||
fee_span,
|
||||
median_fee,
|
||||
reward,
|
||||
version,
|
||||
bits,
|
||||
nonce,
|
||||
merkle_root,
|
||||
previous_block_hash as previousblockhash,
|
||||
avg_fee,
|
||||
avg_fee_rate
|
||||
const [rows]: any[] = await DB.query(`
|
||||
SELECT ${BLOCK_DB_FIELDS}
|
||||
FROM blocks
|
||||
JOIN pools ON blocks.pool_id = pools.id
|
||||
WHERE blocks.height = ${height}
|
||||
`);
|
||||
WHERE blocks.height = ?`,
|
||||
[height]
|
||||
);
|
||||
|
||||
if (rows.length <= 0) {
|
||||
return null;
|
||||
}
|
||||
|
||||
rows[0].fee_span = JSON.parse(rows[0].fee_span);
|
||||
return rows[0];
|
||||
return await this.formatDbBlockIntoExtendedBlock(rows[0]);
|
||||
} catch (e) {
|
||||
logger.err(`Cannot get indexed block ${height}. Reason: ` + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
@@ -359,10 +423,7 @@ class BlocksRepository {
|
||||
public async $getBlockByHash(hash: string): Promise<object | null> {
|
||||
try {
|
||||
const query = `
|
||||
SELECT *, blocks.height, UNIX_TIMESTAMP(blocks.blockTimestamp) as blockTimestamp, hash as id,
|
||||
pools.id as pool_id, pools.name as pool_name, pools.link as pool_link, pools.slug as pool_slug,
|
||||
pools.addresses as pool_addresses, pools.regexes as pool_regexes,
|
||||
previous_block_hash as previousblockhash
|
||||
SELECT ${BLOCK_DB_FIELDS}
|
||||
FROM blocks
|
||||
JOIN pools ON blocks.pool_id = pools.id
|
||||
WHERE hash = ?;
|
||||
@@ -372,9 +433,8 @@ class BlocksRepository {
|
||||
if (rows.length <= 0) {
|
||||
return null;
|
||||
}
|
||||
|
||||
rows[0].fee_span = JSON.parse(rows[0].fee_span);
|
||||
return rows[0];
|
||||
|
||||
return await this.formatDbBlockIntoExtendedBlock(rows[0]);
|
||||
} catch (e) {
|
||||
logger.err(`Cannot get indexed block ${hash}. Reason: ` + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
@@ -465,8 +525,15 @@ class BlocksRepository {
|
||||
public async $validateChain(): Promise<boolean> {
|
||||
try {
|
||||
const start = new Date().getTime();
|
||||
const [blocks]: any[] = await DB.query(`SELECT height, hash, previous_block_hash,
|
||||
UNIX_TIMESTAMP(blockTimestamp) as timestamp FROM blocks ORDER BY height`);
|
||||
const [blocks]: any[] = await DB.query(`
|
||||
SELECT
|
||||
height,
|
||||
hash,
|
||||
previous_block_hash,
|
||||
UNIX_TIMESTAMP(blockTimestamp) AS timestamp
|
||||
FROM blocks
|
||||
ORDER BY height
|
||||
`);
|
||||
|
||||
let partialMsg = false;
|
||||
let idx = 1;
|
||||
@@ -521,7 +588,7 @@ class BlocksRepository {
|
||||
CAST(AVG(blocks.height) as INT) as avgHeight,
|
||||
CAST(AVG(UNIX_TIMESTAMP(blockTimestamp)) as INT) as timestamp,
|
||||
CAST(AVG(fees) as INT) as avgFees,
|
||||
prices.*
|
||||
prices.USD
|
||||
FROM blocks
|
||||
JOIN blocks_prices on blocks_prices.height = blocks.height
|
||||
JOIN prices on prices.id = blocks_prices.price_id
|
||||
@@ -550,7 +617,7 @@ class BlocksRepository {
|
||||
CAST(AVG(blocks.height) as INT) as avgHeight,
|
||||
CAST(AVG(UNIX_TIMESTAMP(blockTimestamp)) as INT) as timestamp,
|
||||
CAST(AVG(reward) as INT) as avgRewards,
|
||||
prices.*
|
||||
prices.USD
|
||||
FROM blocks
|
||||
JOIN blocks_prices on blocks_prices.height = blocks.height
|
||||
JOIN prices on prices.id = blocks_prices.price_id
|
||||
@@ -694,7 +761,6 @@ class BlocksRepository {
|
||||
logger.err('Cannot fetch CPFP unindexed blocks. Reason: ' + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
return [];
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -741,7 +807,7 @@ class BlocksRepository {
|
||||
try {
|
||||
let query = `INSERT INTO blocks_prices(height, price_id) VALUES`;
|
||||
for (const price of blockPrices) {
|
||||
query += ` (${price.height}, ${price.priceId}),`
|
||||
query += ` (${price.height}, ${price.priceId}),`;
|
||||
}
|
||||
query = query.slice(0, -1);
|
||||
await DB.query(query);
|
||||
@@ -754,6 +820,132 @@ class BlocksRepository {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
* Get all indexed blocks with missing coinstatsindex data
*/
public async $getBlocksMissingCoinStatsIndex(maxHeight: number, minHeight: number): Promise<any> {
|
||||
try {
|
||||
const [blocks] = await DB.query(`
|
||||
SELECT height, hash
|
||||
FROM blocks
|
||||
WHERE height >= ${minHeight} AND height <= ${maxHeight} AND
|
||||
(utxoset_size IS NULL OR total_input_amt IS NULL)
|
||||
`);
|
||||
return blocks;
|
||||
} catch (e) {
|
||||
logger.err(`Cannot get blocks with missing coinstatsindex. Reason: ` + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Save indexed median fee to avoid recomputing it later
|
||||
*
|
||||
* @param id
|
||||
* @param feePercentiles
|
||||
*/
|
||||
public async $saveFeePercentilesForBlockId(id: string, feePercentiles: number[]): Promise<void> {
|
||||
try {
|
||||
await DB.query(`
|
||||
UPDATE blocks SET fee_percentiles = ?, median_fee_amt = ?
|
||||
WHERE hash = ?`,
|
||||
[JSON.stringify(feePercentiles), feePercentiles[3], id]
|
||||
);
|
||||
} catch (e) {
|
||||
logger.err(`Cannot update block fee_percentiles. Reason: ` + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
/**
* Convert a mysql row block into a BlockExtended. Note that you
* must provide the correct fields in the dbBlk object param
*
* @param dbBlk
*/
private async formatDbBlockIntoExtendedBlock(dbBlk: any): Promise<BlockExtended> {
|
||||
const blk: Partial<BlockExtended> = {};
|
||||
const extras: Partial<BlockExtension> = {};
|
||||
|
||||
// IEsploraApi.Block
|
||||
blk.id = dbBlk.id;
|
||||
blk.height = dbBlk.height;
|
||||
blk.version = dbBlk.version;
|
||||
blk.timestamp = dbBlk.timestamp;
|
||||
blk.bits = dbBlk.bits;
|
||||
blk.nonce = dbBlk.nonce;
|
||||
blk.difficulty = dbBlk.difficulty;
|
||||
blk.merkle_root = dbBlk.merkle_root;
|
||||
blk.tx_count = dbBlk.tx_count;
|
||||
blk.size = dbBlk.size;
|
||||
blk.weight = dbBlk.weight;
|
||||
blk.previousblockhash = dbBlk.previousblockhash;
|
||||
blk.mediantime = dbBlk.mediantime;
|
||||
|
||||
// BlockExtension
|
||||
extras.totalFees = dbBlk.totalFees;
|
||||
extras.medianFee = dbBlk.medianFee;
|
||||
extras.feeRange = JSON.parse(dbBlk.feeRange);
|
||||
extras.reward = dbBlk.reward;
|
||||
extras.pool = {
|
||||
id: dbBlk.poolId,
|
||||
name: dbBlk.poolName,
|
||||
slug: dbBlk.poolSlug,
|
||||
};
|
||||
extras.avgFee = dbBlk.avgFee;
|
||||
extras.avgFeeRate = dbBlk.avgFeeRate;
|
||||
extras.coinbaseRaw = dbBlk.coinbaseRaw;
|
||||
extras.coinbaseAddress = dbBlk.coinbaseAddress;
|
||||
extras.coinbaseSignature = dbBlk.coinbaseSignature;
|
||||
extras.coinbaseSignatureAscii = dbBlk.coinbaseSignatureAscii;
|
||||
extras.avgTxSize = dbBlk.avgTxSize;
|
||||
extras.totalInputs = dbBlk.totalInputs;
|
||||
extras.totalOutputs = dbBlk.totalOutputs;
|
||||
extras.totalOutputAmt = dbBlk.totalOutputAmt;
|
||||
extras.medianFeeAmt = dbBlk.medianFeeAmt;
|
||||
extras.feePercentiles = JSON.parse(dbBlk.feePercentiles);
|
||||
extras.segwitTotalTxs = dbBlk.segwitTotalTxs;
|
||||
extras.segwitTotalSize = dbBlk.segwitTotalSize;
|
||||
extras.segwitTotalWeight = dbBlk.segwitTotalWeight;
|
||||
extras.header = dbBlk.header,
|
||||
extras.utxoSetChange = dbBlk.utxoSetChange;
|
||||
extras.utxoSetSize = dbBlk.utxoSetSize;
|
||||
extras.totalInputAmt = dbBlk.totalInputAmt;
|
||||
extras.virtualSize = dbBlk.weight / 4.0;
|
||||
|
||||
// Re-org can happen after indexing so we need to always get the
|
||||
// latest state from core
|
||||
extras.orphans = chainTips.getOrphanedBlocksAtHeight(dbBlk.height);
|
||||
|
||||
// Match rate is not part of the blocks table, but it is part of APIs so we must include it
|
||||
extras.matchRate = null;
|
||||
if (config.MEMPOOL.AUDIT) {
|
||||
const auditScore = await BlocksAuditsRepository.$getBlockAuditScore(dbBlk.id);
|
||||
if (auditScore != null) {
|
||||
extras.matchRate = auditScore.matchRate;
|
||||
}
|
||||
}
|
||||
|
||||
// If we're missing block summary related field, check if we can populate them on the fly now
|
||||
if (Common.blocksSummariesIndexingEnabled() &&
|
||||
(extras.medianFeeAmt === null || extras.feePercentiles === null))
|
||||
{
|
||||
extras.feePercentiles = await BlocksSummariesRepository.$getFeePercentilesByBlockId(dbBlk.id);
|
||||
if (extras.feePercentiles === null) {
|
||||
const block = await bitcoinClient.getBlock(dbBlk.id, 2);
|
||||
const summary = blocks.summarizeBlock(block);
|
||||
await BlocksSummariesRepository.$saveSummary({ height: block.height, mined: summary });
|
||||
extras.feePercentiles = await BlocksSummariesRepository.$getFeePercentilesByBlockId(dbBlk.id);
|
||||
}
|
||||
if (extras.feePercentiles !== null) {
|
||||
extras.medianFeeAmt = extras.feePercentiles[3];
|
||||
}
|
||||
}
|
||||
|
||||
blk.extras = <BlockExtension>extras;
|
||||
return <BlockExtended>blk;
|
||||
}
|
||||
}
|
||||
|
||||
export default new BlocksRepository();
|
||||
|
||||
@@ -80,6 +80,48 @@ class BlocksSummariesRepository {
|
||||
logger.err('Cannot delete indexed blocks summaries. Reason: ' + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
}
|
||||
|
||||
/**
* Get the fee percentiles if the block has already been indexed, null otherwise
*
* @param id
*/
public async $getFeePercentilesByBlockId(id: string): Promise<number[] | null> {
|
||||
try {
|
||||
const [rows]: any[] = await DB.query(`
|
||||
SELECT transactions
|
||||
FROM blocks_summaries
|
||||
WHERE id = ?`,
|
||||
[id]
|
||||
);
|
||||
if (rows === null || rows.length === 0) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const transactions = JSON.parse(rows[0].transactions);
|
||||
if (transactions === null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
transactions.shift(); // Ignore coinbase
|
||||
transactions.sort((a: any, b: any) => a.fee - b.fee);
|
||||
const fees = transactions.map((t: any) => t.fee);
|
||||
|
||||
return [
|
||||
fees[0] ?? 0, // min
|
||||
fees[Math.max(0, Math.floor(fees.length * 0.1) - 1)] ?? 0, // 10th
|
||||
fees[Math.max(0, Math.floor(fees.length * 0.25) - 1)] ?? 0, // 25th
|
||||
fees[Math.max(0, Math.floor(fees.length * 0.5) - 1)] ?? 0, // median
|
||||
fees[Math.max(0, Math.floor(fees.length * 0.75) - 1)] ?? 0, // 75th
|
||||
fees[Math.max(0, Math.floor(fees.length * 0.9) - 1)] ?? 0, // 90th
|
||||
fees[fees.length - 1] ?? 0, // max
|
||||
];
|
||||
|
||||
} catch (e) {
|
||||
logger.err(`Cannot get block summaries transactions. Reason: ` + (e instanceof Error ? e.message : e));
|
||||
return null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export default new BlocksSummariesRepository();
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import { Common } from '../api/common';
|
||||
import poolsParser from '../api/pools-parser';
|
||||
import config from '../config';
|
||||
import DB from '../database';
|
||||
import logger from '../logger';
|
||||
@@ -9,7 +10,7 @@ class PoolsRepository {
|
||||
* Get all pools tagging info
|
||||
*/
|
||||
public async $getPools(): Promise<PoolTag[]> {
|
||||
const [rows] = await DB.query('SELECT id, name, addresses, regexes, slug FROM pools;');
|
||||
const [rows] = await DB.query('SELECT id, unique_id as uniqueId, name, addresses, regexes, slug FROM pools');
|
||||
return <PoolTag[]>rows;
|
||||
}
|
||||
|
||||
@@ -17,7 +18,11 @@ class PoolsRepository {
|
||||
* Get unknown pool tagging info
|
||||
*/
|
||||
public async $getUnknownPool(): Promise<PoolTag> {
|
||||
const [rows] = await DB.query('SELECT id, name, slug FROM pools where name = "Unknown"');
|
||||
let [rows]: any[] = await DB.query('SELECT id, unique_id as uniqueId, name, slug FROM pools where name = "Unknown"');
|
||||
if (rows && rows.length === 0 && config.DATABASE.ENABLED) {
|
||||
await poolsParser.$insertUnknownPool();
|
||||
[rows] = await DB.query('SELECT id, unique_id as uniqueId, name, slug FROM pools where name = "Unknown"');
|
||||
}
|
||||
return <PoolTag>rows[0];
|
||||
}
|
||||
|
||||
@@ -27,16 +32,25 @@ class PoolsRepository {
|
||||
public async $getPoolsInfo(interval: string | null = null): Promise<PoolInfo[]> {
|
||||
interval = Common.getSqlInterval(interval);
|
||||
|
||||
let query = `SELECT COUNT(height) as blockCount, pool_id as poolId, pools.name as name, pools.link as link, slug
|
||||
let query = `
|
||||
SELECT
|
||||
COUNT(blocks.height) As blockCount,
|
||||
pool_id AS poolId,
|
||||
pools.name AS name,
|
||||
pools.link AS link,
|
||||
slug,
|
||||
AVG(blocks_audits.match_rate) AS avgMatchRate
|
||||
FROM blocks
|
||||
JOIN pools on pools.id = pool_id`;
|
||||
JOIN pools on pools.id = pool_id
|
||||
LEFT JOIN blocks_audits ON blocks_audits.height = blocks.height
|
||||
`;
|
||||
|
||||
if (interval) {
|
||||
query += ` WHERE blocks.blockTimestamp BETWEEN DATE_SUB(NOW(), INTERVAL ${interval}) AND NOW()`;
|
||||
}
|
||||
|
||||
query += ` GROUP BY pool_id
|
||||
ORDER BY COUNT(height) DESC`;
|
||||
ORDER BY COUNT(blocks.height) DESC`;
|
||||
|
||||
try {
|
||||
const [rows] = await DB.query(query);
|
||||
@@ -50,7 +64,7 @@ class PoolsRepository {
|
||||
/**
|
||||
* Get basic pool info and block count between two timestamp
|
||||
*/
|
||||
public async $getPoolsInfoBetween(from: number, to: number): Promise<PoolInfo[]> {
|
||||
public async $getPoolsInfoBetween(from: number, to: number): Promise<PoolInfo[]> {
|
||||
const query = `SELECT COUNT(height) as blockCount, pools.id as poolId, pools.name as poolName
|
||||
FROM pools
|
||||
LEFT JOIN blocks on pools.id = blocks.pool_id AND blocks.blockTimestamp BETWEEN FROM_UNIXTIME(?) AND FROM_UNIXTIME(?)
|
||||
@@ -66,9 +80,9 @@ class PoolsRepository {
|
||||
}
|
||||
|
||||
/**
|
||||
* Get mining pool statistics for one pool
|
||||
* Get a mining pool info
|
||||
*/
|
||||
public async $getPool(slug: string): Promise<PoolTag | null> {
|
||||
public async $getPool(slug: string, parse: boolean = true): Promise<PoolTag | null> {
|
||||
const query = `
|
||||
SELECT *
|
||||
FROM pools
|
||||
@@ -81,10 +95,12 @@ class PoolsRepository {
|
||||
return null;
|
||||
}
|
||||
|
||||
rows[0].regexes = JSON.parse(rows[0].regexes);
|
||||
if (parse) {
|
||||
rows[0].regexes = JSON.parse(rows[0].regexes);
|
||||
}
|
||||
if (['testnet', 'signet'].includes(config.MEMPOOL.NETWORK)) {
|
||||
rows[0].addresses = []; // pools.json only contains mainnet addresses
|
||||
} else {
|
||||
rows[0].addresses = []; // pools-v2.json only contains mainnet addresses
|
||||
} else if (parse) {
|
||||
rows[0].addresses = JSON.parse(rows[0].addresses);
|
||||
}
|
||||
|
||||
@@ -94,6 +110,116 @@ class PoolsRepository {
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a mining pool info by its unique id
|
||||
*/
|
||||
public async $getPoolByUniqueId(id: number, parse: boolean = true): Promise<PoolTag | null> {
|
||||
const query = `
|
||||
SELECT *
|
||||
FROM pools
|
||||
WHERE pools.unique_id = ?`;
|
||||
|
||||
try {
|
||||
const [rows]: any[] = await DB.query(query, [id]);
|
||||
|
||||
if (rows.length < 1) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (parse) {
|
||||
rows[0].regexes = JSON.parse(rows[0].regexes);
|
||||
}
|
||||
if (['testnet', 'signet'].includes(config.MEMPOOL.NETWORK)) {
|
||||
rows[0].addresses = []; // pools.json only contains mainnet addresses
|
||||
} else if (parse) {
|
||||
rows[0].addresses = JSON.parse(rows[0].addresses);
|
||||
}
|
||||
|
||||
return rows[0];
|
||||
} catch (e) {
|
||||
logger.err('Cannot get pool from db. Reason: ' + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Insert a new mining pool in the database
|
||||
*
|
||||
* @param pool
|
||||
*/
|
||||
public async $insertNewMiningPool(pool: any, slug: string): Promise<void> {
|
||||
try {
|
||||
await DB.query(`
|
||||
INSERT INTO pools
|
||||
SET name = ?, link = ?, addresses = ?, regexes = ?, slug = ?, unique_id = ?`,
|
||||
[pool.name, pool.link, JSON.stringify(pool.addresses), JSON.stringify(pool.regexes), slug, pool.id]
|
||||
);
|
||||
} catch (e: any) {
|
||||
logger.err(`Cannot insert new mining pool into db. Reason: ` + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Rename an existing mining pool
|
||||
*
|
||||
* @param dbId
|
||||
* @param newSlug
|
||||
* @param newName
|
||||
*/
|
||||
public async $renameMiningPool(dbId: number, newSlug: string, newName: string): Promise<void> {
|
||||
try {
|
||||
await DB.query(`
|
||||
UPDATE pools
|
||||
SET slug = ?, name = ?
|
||||
WHERE id = ?`,
|
||||
[newSlug, newName, dbId]
|
||||
);
|
||||
} catch (e: any) {
|
||||
logger.err(`Cannot rename mining pool id ${dbId}. Reason: ` + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
}
|
||||
|
||||
/**
* Update an existing mining pool link
*
* @param dbId
* @param newLink
*/
public async $updateMiningPoolLink(dbId: number, newLink: string): Promise<void> {
|
||||
try {
|
||||
await DB.query(`
|
||||
UPDATE pools
|
||||
SET link = ?
|
||||
WHERE id = ?`,
|
||||
[newLink, dbId]
|
||||
);
|
||||
} catch (e: any) {
|
||||
logger.err(`Cannot update link for mining pool id ${dbId}. Reason: ` + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Update an existing mining pool addresses or coinbase tags
|
||||
*
|
||||
* @param dbId
|
||||
* @param addresses
|
||||
* @param regexes
|
||||
*/
|
||||
public async $updateMiningPoolTags(dbId: number, addresses: string, regexes: string): Promise<void> {
|
||||
try {
|
||||
await DB.query(`
|
||||
UPDATE pools
|
||||
SET addresses = ?, regexes = ?
|
||||
WHERE id = ?`,
|
||||
[JSON.stringify(addresses), JSON.stringify(regexes), dbId]
|
||||
);
|
||||
} catch (e: any) {
|
||||
logger.err(`Cannot update mining pool id ${dbId}. Reason: ` + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
export default new PoolsRepository();
|
||||
|
||||
@@ -3,14 +3,57 @@ import logger from '../logger';
|
||||
import { IConversionRates } from '../mempool.interfaces';
|
||||
import priceUpdater from '../tasks/price-updater';
|
||||
|
||||
export interface ApiPrice {
|
||||
time?: number,
|
||||
USD: number,
|
||||
EUR: number,
|
||||
GBP: number,
|
||||
CAD: number,
|
||||
CHF: number,
|
||||
AUD: number,
|
||||
JPY: number,
|
||||
}
|
||||
|
||||
export interface ExchangeRates {
|
||||
USDEUR: number,
|
||||
USDGBP: number,
|
||||
USDCAD: number,
|
||||
USDCHF: number,
|
||||
USDAUD: number,
|
||||
USDJPY: number,
|
||||
}
|
||||
|
||||
export interface Conversion {
|
||||
prices: ApiPrice[],
|
||||
exchangeRates: ExchangeRates;
|
||||
}
|
||||
|
||||
export const MAX_PRICES = {
|
||||
USD: 100000000,
|
||||
EUR: 100000000,
|
||||
GBP: 100000000,
|
||||
CAD: 100000000,
|
||||
CHF: 100000000,
|
||||
AUD: 100000000,
|
||||
JPY: 10000000000,
|
||||
};
|
||||
|
||||
class PricesRepository {
|
||||
public async $savePrices(time: number, prices: IConversionRates): Promise<void> {
|
||||
if (prices.USD === 0) {
|
||||
if (prices.USD === -1) {
|
||||
// Some historical price entries have no USD prices, so we just ignore them to avoid future UX issues
// As of today there are only 4 (on 2013-09-05, 2013-09-09, 2013-09-12 and 2013-09-26) so that's fine
|
||||
return;
|
||||
}
|
||||
|
||||
// Sanity check
|
||||
for (const currency of Object.keys(prices)) {
|
||||
if (prices[currency] < -1 || prices[currency] > MAX_PRICES[currency]) { // We use -1 to mark a "missing data, so it's a valid entry"
|
||||
logger.info(`Ignore BTC${currency} price of ${prices[currency]}`);
|
||||
prices[currency] = 0;
|
||||
}
|
||||
}
|
||||
|
||||
    try {
      await DB.query(`
        INSERT INTO prices(time, USD, EUR, GBP, CAD, CHF, AUD, JPY)
@@ -60,6 +103,73 @@ class PricesRepository {
    }
    return rates[0];
  }

  public async $getNearestHistoricalPrice(timestamp: number | undefined): Promise<Conversion | null> {
    try {
      const [rates]: any[] = await DB.query(`
        SELECT *, UNIX_TIMESTAMP(time) AS time
        FROM prices
        WHERE UNIX_TIMESTAMP(time) < ?
        ORDER BY time DESC
        LIMIT 1`,
        [timestamp]
      );
      if (!rates) {
        throw Error(`Cannot get single historical price from the database`);
      }

      // Compute fiat exchange rates
      const latestPrice = await this.$getLatestConversionRates();
      const exchangeRates: ExchangeRates = {
        USDEUR: Math.round(latestPrice.EUR / latestPrice.USD * 100) / 100,
        USDGBP: Math.round(latestPrice.GBP / latestPrice.USD * 100) / 100,
        USDCAD: Math.round(latestPrice.CAD / latestPrice.USD * 100) / 100,
        USDCHF: Math.round(latestPrice.CHF / latestPrice.USD * 100) / 100,
        USDAUD: Math.round(latestPrice.AUD / latestPrice.USD * 100) / 100,
        USDJPY: Math.round(latestPrice.JPY / latestPrice.USD * 100) / 100,
      };

      return {
        prices: rates,
        exchangeRates: exchangeRates
      };
    } catch (e) {
      logger.err(`Cannot fetch single historical prices from the db. Reason ${e instanceof Error ? e.message : e}`);
      return null;
    }
  }

  public async $getHistoricalPrices(): Promise<Conversion | null> {
    try {
      const [rates]: any[] = await DB.query(`
        SELECT *, UNIX_TIMESTAMP(time) AS time
        FROM prices
        ORDER BY time DESC
      `);
      if (!rates) {
        throw Error(`Cannot get average historical price from the database`);
      }

      // Compute fiat exchange rates
      const latestPrice: ApiPrice = rates[0];
      const exchangeRates: ExchangeRates = {
        USDEUR: Math.round(latestPrice.EUR / latestPrice.USD * 100) / 100,
        USDGBP: Math.round(latestPrice.GBP / latestPrice.USD * 100) / 100,
        USDCAD: Math.round(latestPrice.CAD / latestPrice.USD * 100) / 100,
        USDCHF: Math.round(latestPrice.CHF / latestPrice.USD * 100) / 100,
        USDAUD: Math.round(latestPrice.AUD / latestPrice.USD * 100) / 100,
        USDJPY: Math.round(latestPrice.JPY / latestPrice.USD * 100) / 100,
      };

      return {
        prices: rates,
        exchangeRates: exchangeRates
      };
    } catch (e) {
      logger.err(`Cannot fetch historical prices from the db. Reason ${e instanceof Error ? e.message : e}`);
      return null;
    }
  }
}

export default new PricesRepository();
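As a worked illustration of the rounding used in the exchange-rate blocks above (the numbers are made up): with a latest price of 20,000 USD and 18,500 EUR per BTC, `Math.round(18500 / 20000 * 100) / 100` gives `0.93`, i.e. rates are stored with two decimal places. The `-1` / `MAX_PRICES` convention from `$savePrices` can be summarized the same way:

```
// Illustration only, using hypothetical prices.
const latestPrice = { USD: 20000, EUR: 18500 };
const USDEUR = Math.round(latestPrice.EUR / latestPrice.USD * 100) / 100; // 0.93

// $savePrices treats -1 as the "missing data" marker and zeroes out anything
// outside the [-1, MAX_PRICES[currency]] range before writing to the db.
function sanitizePrice(price: number, maxPrice: number): number {
  return (price < -1 || price > maxPrice) ? 0 : price;
}
sanitizePrice(-1, 100000000);        // -1 (kept: missing data marker)
sanitizePrice(250000000, 100000000); // 0  (rejected: above MAX_PRICES)
```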
@@ -88,5 +88,7 @@ module.exports = {
  verifyTxOutProof: 'verifytxoutproof', // bitcoind v0.11.0+
  walletLock: 'walletlock',
  walletPassphrase: 'walletpassphrase',
  walletPassphraseChange: 'walletpassphrasechange'
}
  walletPassphraseChange: 'walletpassphrasechange',
  getTxoutSetinfo: 'gettxoutsetinfo',
  getIndexInfo: 'getindexinfo',
};
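The two new entries map camelCase helper names to bitcoind RPCs: `gettxoutsetinfo` returns UTXO-set statistics and `getindexinfo` reports the state of optional indexes such as `txindex` (bitcoind v0.21+). A minimal sketch of calling them directly over bitcoind's JSON-RPC interface; the host, port and credentials are placeholders, and the `axios` wrapper below is an illustration rather than the project's RPC client:

```
// Hypothetical sketch: raw JSON-RPC calls to bitcoind for the newly mapped methods.
import axios from 'axios';

async function rpc(method: string, params: any[] = []): Promise<any> {
  const res = await axios.post('http://127.0.0.1:8332/',
    { jsonrpc: '1.0', id: 'mempool', method, params },
    { auth: { username: 'rpcuser', password: 'rpcpassword' } } // placeholder credentials
  );
  return res.data.result;
}

async function example(): Promise<void> {
  const utxoStats = await rpc('gettxoutsetinfo'); // UTXO-set statistics
  const indexes = await rpc('getindexinfo');      // optional index status
  console.log(utxoStats.txouts, indexes);
}
```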
@@ -72,7 +72,7 @@ class NetworkSyncService {
    const graphNodesPubkeys: string[] = [];
    for (const node of nodes) {
      const latestUpdated = await channelsApi.$getLatestChannelUpdateForNode(node.pub_key);
      node.last_update = Math.max(node.last_update, latestUpdated);
      node.last_update = Math.max(node.last_update ?? 0, latestUpdated);

      await nodesApi.$saveNode(node);
      graphNodesPubkeys.push(node.pub_key);
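The `?? 0` fallback matters because `Math.max` coerces `undefined` to `NaN`, which would otherwise poison `last_update` for nodes that have never been updated. A one-line illustration, not part of the patch:

```
// Why the nullish-coalescing fallback is needed.
const lastUpdate: number | undefined = undefined;
Math.max(lastUpdate as any, 1700000000); // NaN: undefined coerces to NaN
Math.max(lastUpdate ?? 0, 1700000000);   // 1700000000
```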
@@ -8,7 +8,7 @@ import { SocksProxyAgent } from 'socks-proxy-agent';
import * as https from 'https';

/**
 * Maintain the most recent version of pools.json
 * Maintain the most recent version of pools-v2.json
 */
class PoolsUpdater {
  lastRun: number = 0;
@@ -31,14 +31,8 @@ class PoolsUpdater {

    this.lastRun = now;

    if (config.SOCKS5PROXY.ENABLED) {
      logger.info(`Updating latest mining pools from ${this.poolsUrl} over the Tor network`, logger.tags.mining);
    } else {
      logger.info(`Updating latest mining pools from ${this.poolsUrl} over clearnet`, logger.tags.mining);
    }

    try {
      const githubSha = await this.fetchPoolsSha(); // Fetch pools.json sha from github
      const githubSha = await this.fetchPoolsSha(); // Fetch pools-v2.json sha from github
      if (githubSha === undefined) {
        return;
      }
@@ -47,32 +41,57 @@ class PoolsUpdater {
        this.currentSha = await this.getShaFromDb();
      }

      logger.debug(`Pools.json sha | Current: ${this.currentSha} | Github: ${githubSha}`);
      logger.debug(`pools-v2.json sha | Current: ${this.currentSha} | Github: ${githubSha}`);
      if (this.currentSha !== undefined && this.currentSha === githubSha) {
        return;
      }

      // See backend README for more details about the mining pools update process
      if (this.currentSha !== undefined && // If we don't have any mining pool, download it at least once
        config.MEMPOOL.AUTOMATIC_BLOCK_REINDEXING !== true && // Automatic pools update is disabled
        !process.env.npm_config_update_pools // We're not manually updating mining pool
      ) {
        logger.warn(`Updated mining pools data is available (${githubSha}) but AUTOMATIC_BLOCK_REINDEXING is disabled`);
        logger.info(`You can update your mining pools using the --update-pools command flag. You may want to clear your nginx cache as well if applicable`);
        return;
      }

      const network = config.SOCKS5PROXY.ENABLED ? 'tor' : 'clearnet';
      if (this.currentSha === undefined) {
        logger.info(`Downloading pools.json for the first time from ${this.poolsUrl}`, logger.tags.mining);
        logger.info(`Downloading pools-v2.json for the first time from ${this.poolsUrl} over ${network}`, logger.tags.mining);
      } else {
        logger.warn(`Pools.json is outdated, fetch latest from ${this.poolsUrl}`, logger.tags.mining);
        logger.warn(`pools-v2.json is outdated, fetch latest from ${this.poolsUrl} over ${network}`, logger.tags.mining);
      }
      const poolsJson = await this.query(this.poolsUrl);
      if (poolsJson === undefined) {
        return;
      }
      await poolsParser.migratePoolsJson(poolsJson);
      await this.updateDBSha(githubSha);
      logger.notice(`PoolsUpdater completed`, logger.tags.mining);
      poolsParser.setMiningPools(poolsJson);

      if (config.DATABASE.ENABLED === false) { // Don't run db operations
        logger.info('Mining pools-v2.json import completed (no database)');
        return;
      }

      try {
        await DB.query('START TRANSACTION;');
        await poolsParser.migratePoolsJson();
        await this.updateDBSha(githubSha);
        await DB.query('COMMIT;');
      } catch (e) {
        logger.err(`Could not migrate mining pools, rolling back. Exception: ${JSON.stringify(e)}`, logger.tags.mining);
        await DB.query('ROLLBACK;');
      }
      logger.notice('PoolsUpdater completed');

    } catch (e) {
      this.lastRun = now - (oneWeek - oneDay); // Try again in 24h instead of waiting next week
      logger.err(`PoolsUpdater failed. Will try again in 24h. Reason: ${e instanceof Error ? e.message : e}`, logger.tags.mining);
      logger.err(`PoolsUpdater failed. Will try again in 24h. Exception: ${JSON.stringify(e)}`, logger.tags.mining);
    }
  }
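Condensing the branching above into a single predicate may make the update policy easier to follow; this is a paraphrase for readability, not new behavior:

```
// Paraphrase of the gating logic in the updater (illustration only).
function shouldDownloadPools(
  currentSha: string | undefined,     // sha recorded in the `state` table
  githubSha: string,                  // sha of pools-v2.json in the git tree
  automaticBlockReindexing: boolean,  // config.MEMPOOL.AUTOMATIC_BLOCK_REINDEXING
  manualUpdateRequested: boolean      // process.env.npm_config_update_pools (--update-pools)
): boolean {
  if (currentSha === githubSha) {
    return false; // already up to date
  }
  if (currentSha === undefined) {
    return true;  // no pools stored yet, always download at least once
  }
  return automaticBlockReindexing || manualUpdateRequested;
}
```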

  /**
   * Fetch our latest pools.json sha from the db
   * Fetch our latest pools-v2.json sha from the db
   */
  private async updateDBSha(githubSha: string): Promise<void> {
    this.currentSha = githubSha;
@@ -81,46 +100,46 @@ class PoolsUpdater {
        await DB.query('DELETE FROM state where name="pools_json_sha"');
        await DB.query(`INSERT INTO state VALUES('pools_json_sha', NULL, '${githubSha}')`);
      } catch (e) {
        logger.err('Cannot save github pools.json sha into the db. Reason: ' + (e instanceof Error ? e.message : e), logger.tags.mining);
        logger.err('Cannot save github pools-v2.json sha into the db. Reason: ' + (e instanceof Error ? e.message : e), logger.tags.mining);
      }
    }
  }

  /**
   * Fetch our latest pools.json sha from the db
   * Fetch our latest pools-v2.json sha from the db
   */
  private async getShaFromDb(): Promise<string | undefined> {
    try {
      const [rows]: any[] = await DB.query('SELECT string FROM state WHERE name="pools_json_sha"');
      return (rows.length > 0 ? rows[0].string : undefined);
    } catch (e) {
      logger.err('Cannot fetch pools.json sha from db. Reason: ' + (e instanceof Error ? e.message : e), logger.tags.mining);
      logger.err('Cannot fetch pools-v2.json sha from db. Reason: ' + (e instanceof Error ? e.message : e), logger.tags.mining);
      return undefined;
    }
  }

  /**
   * Fetch our latest pools.json sha from github
   * Fetch our latest pools-v2.json sha from github
   */
  private async fetchPoolsSha(): Promise<string | undefined> {
    const response = await this.query(this.treeUrl);

    if (response !== undefined) {
      for (const file of response['tree']) {
        if (file['path'] === 'pools.json') {
        if (file['path'] === 'pools-v2.json') {
          return file['sha'];
        }
      }
    }

    logger.err(`Cannot find "pools.json" in git tree (${this.treeUrl})`, logger.tags.mining);
    logger.err(`Cannot find "pools-v2.json" in git tree (${this.treeUrl})`, logger.tags.mining);
    return undefined;
  }

  /**
   * Http request wrapper
   */
  private async query(path): Promise<object | undefined> {
  private async query(path): Promise<any[] | undefined> {
    type axiosOptions = {
      headers: {
        'User-Agent': string
@@ -3,7 +3,7 @@ import path from 'path';
import config from '../config';
import logger from '../logger';
import { IConversionRates } from '../mempool.interfaces';
import PricesRepository from '../repositories/PricesRepository';
import PricesRepository, { MAX_PRICES } from '../repositories/PricesRepository';
import BitfinexApi from './price-feeds/bitfinex-api';
import BitflyerApi from './price-feeds/bitflyer-api';
import CoinbaseApi from './price-feeds/coinbase-api';
@@ -46,13 +46,13 @@ class PriceUpdater {

  public getEmptyPricesObj(): IConversionRates {
    return {
      USD: 0,
      EUR: 0,
      GBP: 0,
      CAD: 0,
      CHF: 0,
      AUD: 0,
      JPY: 0,
      USD: -1,
      EUR: -1,
      GBP: -1,
      CAD: -1,
      CHF: -1,
      AUD: -1,
      JPY: -1,
    };
  }

@@ -115,7 +115,7 @@ class PriceUpdater {
        if (feed.currencies.includes(currency)) {
          try {
            const price = await feed.$fetchPrice(currency);
            if (price > 0) {
            if (price > -1 && price < MAX_PRICES[currency]) {
              prices.push(price);
            }
            logger.debug(`${feed.name} BTC/${currency} price: ${price}`, logger.tags.mining);
@@ -239,7 +239,7 @@ class PriceUpdater {

      for (const currency of this.currencies) {
        const price = historicalEntry[time][currency];
        if (price > 0) {
        if (price > -1 && price < MAX_PRICES[currency]) {
          grouped[time][currency].push(typeof price === 'string' ? parseInt(price, 10) : price);
        }
      }
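Both hunks above replace the old `price > 0` test with the same bounded check. A tiny helper expressing that check, shown only to make the shared intent explicit; the project keeps the condition inline:

```
// Illustration of the validity check now applied to fetched and historical prices.
import { MAX_PRICES } from '../repositories/PricesRepository';

function isUsablePrice(price: number, currency: string): boolean {
  // -1 is the "missing data" marker, so only values above it and below the cap are kept.
  return price > -1 && price < MAX_PRICES[currency];
}
```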
@@ -1,33 +0,0 @@
import { BlockExtended } from '../mempool.interfaces';

export function prepareBlock(block: any): BlockExtended {
  return <BlockExtended>{
    id: block.id ?? block.hash, // hash for indexed block
    timestamp: block.timestamp ?? block.time ?? block.blockTimestamp, // blockTimestamp for indexed block
    height: block.height,
    version: block.version,
    bits: (typeof block.bits === 'string' ? parseInt(block.bits, 16): block.bits),
    nonce: block.nonce,
    difficulty: block.difficulty,
    merkle_root: block.merkle_root ?? block.merkleroot,
    tx_count: block.tx_count ?? block.nTx,
    size: block.size,
    weight: block.weight,
    previousblockhash: block.previousblockhash,
    extras: {
      coinbaseRaw: block.coinbase_raw ?? block.extras?.coinbaseRaw,
      medianFee: block.medianFee ?? block.median_fee ?? block.extras?.medianFee,
      feeRange: block.feeRange ?? block?.extras?.feeRange ?? block.fee_span,
      reward: block.reward ?? block?.extras?.reward,
      totalFees: block.totalFees ?? block?.fees ?? block?.extras?.totalFees,
      avgFee: block?.extras?.avgFee ?? block.avg_fee,
      avgFeeRate: block?.avgFeeRate ?? block.avg_fee_rate,
      pool: block?.extras?.pool ?? (block?.pool_id ? {
        id: block.pool_id,
        name: block.pool_name,
        slug: block.pool_slug,
      } : undefined),
      usd: block?.extras?.usd ?? block.usd ?? null,
    }
  };
}