Merge branch 'master' into nymkappa/feature/align-dashboards

This commit is contained in:
wiz 2023-02-26 13:07:35 +09:00 committed by GitHub
commit 5981e52534
43 changed files with 1272 additions and 536 deletions

View File

@ -1,5 +1,5 @@
The Mempool Open Source Project
Copyright (c) 2019-2022 The Mempool Open Source Project Developers
Copyright (c) 2019-2023 The Mempool Open Source Project Developers
This program is free software; you can redistribute it and/or modify it under
the terms of (at your option) either:

View File

@ -3,7 +3,6 @@
"ENABLED": true,
"NETWORK": "__MEMPOOL_NETWORK__",
"BACKEND": "__MEMPOOL_BACKEND__",
"ENABLED": true,
"BLOCKS_SUMMARIES_INDEXING": true,
"HTTP_PORT": 1,
"SPAWN_CLUSTER_PROCS": 2,
@ -28,7 +27,8 @@
"AUDIT": "__MEMPOOL_AUDIT__",
"ADVANCED_GBT_AUDIT": "__MEMPOOL_ADVANCED_GBT_AUDIT__",
"ADVANCED_GBT_MEMPOOL": "__MEMPOOL_ADVANCED_GBT_MEMPOOL__",
"CPFP_INDEXING": "__MEMPOOL_CPFP_INDEXING__"
"CPFP_INDEXING": "__MEMPOOL_CPFP_INDEXING__",
"MAX_BLOCKS_BULK_QUERY": "__MEMPOOL_MAX_BLOCKS_BULK_QUERY__"
},
"CORE_RPC": {
"HOST": "__CORE_RPC_HOST__",

View File

@ -36,11 +36,12 @@ describe('Mempool Backend Config', () => {
USER_AGENT: 'mempool',
STDOUT_LOG_MIN_PRIORITY: 'debug',
POOLS_JSON_TREE_URL: 'https://api.github.com/repos/mempool/mining-pools/git/trees/master',
POOLS_JSON_URL: 'https://raw.githubusercontent.com/mempool/mining-pools/master/pools.json',
POOLS_JSON_URL: 'https://raw.githubusercontent.com/mempool/mining-pools/master/pools-v2.json',
AUDIT: false,
ADVANCED_GBT_AUDIT: false,
ADVANCED_GBT_MEMPOOL: false,
CPFP_INDEXING: false,
MAX_BLOCKS_BULK_QUERY: 0,
});
expect(config.ELECTRUM).toStrictEqual({ HOST: '127.0.0.1', PORT: 3306, TLS_ENABLED: true });

View File

@ -28,6 +28,7 @@ class BitcoinApi implements AbstractBitcoinApi {
size: block.size,
weight: block.weight,
previousblockhash: block.previousblockhash,
medianTime: block.mediantime,
};
}

View File

@ -95,6 +95,8 @@ class BitcoinRoutes {
.get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash/summary', this.getStrippedBlockTransactions)
.get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash/audit-summary', this.getBlockAuditSummary)
.post(config.MEMPOOL.API_URL_PREFIX + 'psbt/addparents', this.postPsbtCompletion)
.get(config.MEMPOOL.API_URL_PREFIX + 'blocks-bulk/:from', this.getBlocksByBulk.bind(this))
.get(config.MEMPOOL.API_URL_PREFIX + 'blocks-bulk/:from/:to', this.getBlocksByBulk.bind(this))
;
if (config.MEMPOOL.BACKEND !== 'esplora') {
@ -402,6 +404,41 @@ class BitcoinRoutes {
}
}
private async getBlocksByBulk(req: Request, res: Response) {
try {
if (['mainnet', 'testnet', 'signet'].includes(config.MEMPOOL.NETWORK) === false) { // Liquid, Bisq - Not implemented
return res.status(404).send(`This API is only available for Bitcoin networks`);
}
if (config.MEMPOOL.MAX_BLOCKS_BULK_QUERY <= 0) {
return res.status(404).send(`This API is disabled. Set config.MEMPOOL.MAX_BLOCKS_BULK_QUERY to a positive number to enable it.`);
}
if (!Common.indexingEnabled()) {
return res.status(404).send(`Indexing is required for this API`);
}
const from = parseInt(req.params.from, 10);
if (!req.params.from || from < 0) {
return res.status(400).send(`Parameter 'from' must be a block height (integer)`);
}
const to = req.params.to === undefined ? await bitcoinApi.$getBlockHeightTip() : parseInt(req.params.to, 10);
if (to < 0) {
return res.status(400).send(`Parameter 'to' must be a block height (integer)`);
}
if (from > to) {
return res.status(400).send(`Parameter 'to' must be a higher block height than 'from'`);
}
if ((to - from + 1) > config.MEMPOOL.MAX_BLOCKS_BULK_QUERY) {
return res.status(400).send(`You can only query ${config.MEMPOOL.MAX_BLOCKS_BULK_QUERY} blocks at once.`);
}
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
res.json(await blocks.$getBlocksBetweenHeight(from, to));
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
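A minimal sketch of how the new bulk endpoint might be called from a client, assuming a locally running backend (the host, port, and '/api/v1/' prefix below are deployment-specific assumptions, and MEMPOOL.MAX_BLOCKS_BULK_QUERY must cover the requested range):
// Hypothetical client-side sketch for the new blocks-bulk endpoint.
async function fetchBulkBlocks(from: number, to: number): Promise<any[]> {
  // Host, port and prefix are assumptions about a local deployment, not fixed values.
  const response = await fetch(`http://127.0.0.1:8999/api/v1/blocks-bulk/${from}/${to}`);
  if (!response.ok) {
    throw new Error(await response.text()); // surfaces the 400/404 messages sent above
  }
  return response.json(); // array of the "cleanBlock" objects built in $getBlocksBetweenHeight
}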
private async getLegacyBlocks(req: Request, res: Response) {
try {
const returnBlocks: IEsploraApi.Block[] = [];

View File

@ -88,6 +88,7 @@ export namespace IEsploraApi {
size: number;
weight: number;
previousblockhash: string;
medianTime?: number;
}
export interface Address {

View File

@ -25,6 +25,7 @@ import mining from './mining/mining';
import DifficultyAdjustmentsRepository from '../repositories/DifficultyAdjustmentsRepository';
import PricesRepository from '../repositories/PricesRepository';
import priceUpdater from '../tasks/price-updater';
import chainTips from './chain-tips';
class Blocks {
private blocks: BlockExtended[] = [];
@ -165,33 +166,80 @@ class Blocks {
* @returns BlockExtended
*/
private async $getBlockExtended(block: IEsploraApi.Block, transactions: TransactionExtended[]): Promise<BlockExtended> {
const blockExtended: BlockExtended = Object.assign({ extras: {} }, block);
blockExtended.extras.reward = transactions[0].vout.reduce((acc, curr) => acc + curr.value, 0);
blockExtended.extras.coinbaseTx = transactionUtils.stripCoinbaseTransaction(transactions[0]);
blockExtended.extras.coinbaseRaw = blockExtended.extras.coinbaseTx.vin[0].scriptsig;
blockExtended.extras.usd = priceUpdater.latestPrices.USD;
const blk: BlockExtended = Object.assign({ extras: {} }, block);
blk.extras.reward = transactions[0].vout.reduce((acc, curr) => acc + curr.value, 0);
blk.extras.coinbaseTx = transactionUtils.stripCoinbaseTransaction(transactions[0]);
blk.extras.coinbaseRaw = blk.extras.coinbaseTx.vin[0].scriptsig;
blk.extras.usd = priceUpdater.latestPrices.USD;
blk.extras.medianTimestamp = block.medianTime;
blk.extras.orphans = chainTips.getOrphanedBlocksAtHeight(blk.height);
if (block.height === 0) {
blockExtended.extras.medianFee = 0; // 50th percentiles
blockExtended.extras.feeRange = [0, 0, 0, 0, 0, 0, 0];
blockExtended.extras.totalFees = 0;
blockExtended.extras.avgFee = 0;
blockExtended.extras.avgFeeRate = 0;
blk.extras.medianFee = 0; // 50th percentiles
blk.extras.feeRange = [0, 0, 0, 0, 0, 0, 0];
blk.extras.totalFees = 0;
blk.extras.avgFee = 0;
blk.extras.avgFeeRate = 0;
blk.extras.utxoSetChange = 0;
blk.extras.avgTxSize = 0;
blk.extras.totalInputs = 0;
blk.extras.totalOutputs = 1;
blk.extras.totalOutputAmt = 0;
blk.extras.segwitTotalTxs = 0;
blk.extras.segwitTotalSize = 0;
blk.extras.segwitTotalWeight = 0;
} else {
const stats = await bitcoinClient.getBlockStats(block.id, [
'feerate_percentiles', 'minfeerate', 'maxfeerate', 'totalfee', 'avgfee', 'avgfeerate'
]);
blockExtended.extras.medianFee = stats.feerate_percentiles[2]; // 50th percentiles
blockExtended.extras.feeRange = [stats.minfeerate, stats.feerate_percentiles, stats.maxfeerate].flat();
blockExtended.extras.totalFees = stats.totalfee;
blockExtended.extras.avgFee = stats.avgfee;
blockExtended.extras.avgFeeRate = stats.avgfeerate;
const stats = await bitcoinClient.getBlockStats(block.id);
blk.extras.medianFee = stats.feerate_percentiles[2]; // 50th percentiles
blk.extras.feeRange = [stats.minfeerate, stats.feerate_percentiles, stats.maxfeerate].flat();
blk.extras.totalFees = stats.totalfee;
blk.extras.avgFee = stats.avgfee;
blk.extras.avgFeeRate = stats.avgfeerate;
blk.extras.utxoSetChange = stats.utxo_increase;
blk.extras.avgTxSize = Math.round(stats.total_size / stats.txs * 100) * 0.01;
blk.extras.totalInputs = stats.ins;
blk.extras.totalOutputs = stats.outs;
blk.extras.totalOutputAmt = stats.total_out;
blk.extras.segwitTotalTxs = stats.swtxs;
blk.extras.segwitTotalSize = stats.swtotal_size;
blk.extras.segwitTotalWeight = stats.swtotal_weight;
}
if (Common.blocksSummariesIndexingEnabled()) {
blk.extras.feePercentiles = await BlocksSummariesRepository.$getFeePercentilesByBlockId(block.id);
if (blk.extras.feePercentiles !== null) {
blk.extras.medianFeeAmt = blk.extras.feePercentiles[3];
}
}
blk.extras.virtualSize = block.weight / 4.0;
if (blk.extras.coinbaseTx.vout.length > 0) {
blk.extras.coinbaseAddress = blk.extras.coinbaseTx.vout[0].scriptpubkey_address ?? null;
blk.extras.coinbaseSignature = blk.extras.coinbaseTx.vout[0].scriptpubkey_asm ?? null;
blk.extras.coinbaseSignatureAscii = transactionUtils.hex2ascii(blk.extras.coinbaseTx.vin[0].scriptsig) ?? null;
} else {
blk.extras.coinbaseAddress = null;
blk.extras.coinbaseSignature = null;
blk.extras.coinbaseSignatureAscii = null;
}
const header = await bitcoinClient.getBlockHeader(block.id, false);
blk.extras.header = header;
const coinStatsIndex = indexer.isCoreIndexReady('coinstatsindex');
if (coinStatsIndex !== null && coinStatsIndex.best_block_height >= block.height) {
const txoutset = await bitcoinClient.getTxoutSetinfo('none', block.height);
blk.extras.utxoSetSize = txoutset.txouts;
blk.extras.totalInputAmt = Math.round(txoutset.block_info.prevout_spent * 100000000);
} else {
blk.extras.utxoSetSize = null;
blk.extras.totalInputAmt = null;
}
if (['mainnet', 'testnet', 'signet'].includes(config.MEMPOOL.NETWORK)) {
let pool: PoolTag;
if (blockExtended.extras?.coinbaseTx !== undefined) {
pool = await this.$findBlockMiner(blockExtended.extras?.coinbaseTx);
if (blk.extras?.coinbaseTx !== undefined) {
pool = await this.$findBlockMiner(blk.extras?.coinbaseTx);
} else {
if (config.DATABASE.ENABLED === true) {
pool = await poolsRepository.$getUnknownPool();
@ -201,10 +249,10 @@ class Blocks {
}
if (!pool) { // We should never have this situation in practice
logger.warn(`Cannot assign pool to block ${blockExtended.height} and 'unknown' pool does not exist. ` +
logger.warn(`Cannot assign pool to block ${blk.height} and 'unknown' pool does not exist. ` +
`Check your "pools" table entries`);
} else {
blockExtended.extras.pool = {
blk.extras.pool = {
id: pool.id,
name: pool.name,
slug: pool.slug,
@ -214,12 +262,12 @@ class Blocks {
if (config.MEMPOOL.AUDIT) {
const auditScore = await BlocksAuditsRepository.$getBlockAuditScore(block.id);
if (auditScore != null) {
blockExtended.extras.matchRate = auditScore.matchRate;
blk.extras.matchRate = auditScore.matchRate;
}
}
}
return blockExtended;
return blk;
}
/**
@ -500,6 +548,7 @@ class Blocks {
} else {
this.currentBlockHeight++;
logger.debug(`New block found (#${this.currentBlockHeight})!`);
await chainTips.updateOrphanedBlocks();
}
const blockHash = await bitcoinApi.$getBlockHash(this.currentBlockHeight);
@ -688,7 +737,6 @@ class Blocks {
}
public async $getBlocks(fromHeight?: number, limit: number = 15): Promise<BlockExtended[]> {
let currentHeight = fromHeight !== undefined ? fromHeight : this.currentBlockHeight;
if (currentHeight > this.currentBlockHeight) {
limit -= currentHeight - this.currentBlockHeight;
@ -728,6 +776,113 @@ class Blocks {
return returnBlocks;
}
/**
* Used for bulk block data query
*
* @param fromHeight
* @param toHeight
*/
public async $getBlocksBetweenHeight(fromHeight: number, toHeight: number): Promise<any> {
if (!Common.indexingEnabled()) {
return [];
}
const blocks: any[] = [];
while (fromHeight <= toHeight) {
let block: any = await blocksRepository.$getBlockByHeight(fromHeight);
if (!block) {
await this.$indexBlock(fromHeight);
block = await blocksRepository.$getBlockByHeight(fromHeight);
if (!block) {
continue;
}
}
// Cleanup fields before sending the response
const cleanBlock: any = {
height: block.height ?? null,
hash: block.id ?? null,
timestamp: block.blockTimestamp ?? null,
median_timestamp: block.medianTime ?? null,
previous_block_hash: block.previousblockhash ?? null,
difficulty: block.difficulty ?? null,
header: block.header ?? null,
version: block.version ?? null,
bits: block.bits ?? null,
nonce: block.nonce ?? null,
size: block.size ?? null,
weight: block.weight ?? null,
tx_count: block.tx_count ?? null,
merkle_root: block.merkle_root ?? null,
reward: block.reward ?? null,
total_fee_amt: block.fees ?? null,
avg_fee_amt: block.avg_fee ?? null,
median_fee_amt: block.median_fee_amt ?? null,
fee_amt_percentiles: block.fee_percentiles ?? null,
avg_fee_rate: block.avg_fee_rate ?? null,
median_fee_rate: block.median_fee ?? null,
fee_rate_percentiles: block.fee_span ?? null,
total_inputs: block.total_inputs ?? null,
total_input_amt: block.total_input_amt ?? null,
total_outputs: block.total_outputs ?? null,
total_output_amt: block.total_output_amt ?? null,
segwit_total_txs: block.segwit_total_txs ?? null,
segwit_total_size: block.segwit_total_size ?? null,
segwit_total_weight: block.segwit_total_weight ?? null,
avg_tx_size: block.avg_tx_size ?? null,
utxoset_change: block.utxoset_change ?? null,
utxoset_size: block.utxoset_size ?? null,
coinbase_raw: block.coinbase_raw ?? null,
coinbase_address: block.coinbase_address ?? null,
coinbase_signature: block.coinbase_signature ?? null,
coinbase_signature_ascii: block.coinbase_signature_ascii ?? null,
pool_slug: block.pool_slug ?? null,
};
if (Common.blocksSummariesIndexingEnabled() && cleanBlock.fee_amt_percentiles === null) {
cleanBlock.fee_amt_percentiles = await BlocksSummariesRepository.$getFeePercentilesByBlockId(cleanBlock.hash);
if (cleanBlock.fee_amt_percentiles === null) {
const block = await bitcoinClient.getBlock(cleanBlock.hash, 2);
const summary = this.summarizeBlock(block);
await BlocksSummariesRepository.$saveSummary({ height: block.height, mined: summary });
cleanBlock.fee_amt_percentiles = await BlocksSummariesRepository.$getFeePercentilesByBlockId(cleanBlock.hash);
}
if (cleanBlock.fee_amt_percentiles !== null) {
cleanBlock.median_fee_amt = cleanBlock.fee_amt_percentiles[3];
}
}
cleanBlock.fee_amt_percentiles = {
'min': cleanBlock.fee_amt_percentiles[0],
'perc_10': cleanBlock.fee_amt_percentiles[1],
'perc_25': cleanBlock.fee_amt_percentiles[2],
'perc_50': cleanBlock.fee_amt_percentiles[3],
'perc_75': cleanBlock.fee_amt_percentiles[4],
'perc_90': cleanBlock.fee_amt_percentiles[5],
'max': cleanBlock.fee_amt_percentiles[6],
};
cleanBlock.fee_rate_percentiles = {
'min': cleanBlock.fee_rate_percentiles[0],
'perc_10': cleanBlock.fee_rate_percentiles[1],
'perc_25': cleanBlock.fee_rate_percentiles[2],
'perc_50': cleanBlock.fee_rate_percentiles[3],
'perc_75': cleanBlock.fee_rate_percentiles[4],
'perc_90': cleanBlock.fee_rate_percentiles[5],
'max': cleanBlock.fee_rate_percentiles[6],
};
// Re-org can happen after indexing so we need to always get the
// latest state from core
cleanBlock.orphans = chainTips.getOrphanedBlocksAtHeight(cleanBlock.height);
blocks.push(cleanBlock);
fromHeight++;
}
return blocks;
}
public async $getBlockAuditSummary(hash: string): Promise<any> {
let summary;
if (['mainnet', 'testnet', 'signet'].includes(config.MEMPOOL.NETWORK)) {

View File

@ -0,0 +1,57 @@
import logger from "../logger";
import bitcoinClient from "./bitcoin/bitcoin-client";
export interface ChainTip {
height: number;
hash: string;
branchlen: number;
status: 'invalid' | 'active' | 'valid-fork' | 'valid-headers' | 'headers-only';
};
export interface OrphanedBlock {
height: number;
hash: string;
status: 'valid-fork' | 'valid-headers' | 'headers-only';
}
class ChainTips {
private chainTips: ChainTip[] = [];
private orphanedBlocks: OrphanedBlock[] = [];
public async updateOrphanedBlocks(): Promise<void> {
try {
this.chainTips = await bitcoinClient.getChainTips();
this.orphanedBlocks = [];
for (const chain of this.chainTips) {
if (chain.status === 'valid-fork' || chain.status === 'valid-headers') {
let block = await bitcoinClient.getBlock(chain.hash);
while (block && block.confirmations === -1) {
this.orphanedBlocks.push({
height: block.height,
hash: block.hash,
status: chain.status
});
block = await bitcoinClient.getBlock(block.previousblockhash);
}
}
}
logger.debug(`Updated orphaned blocks cache. Found ${this.orphanedBlocks.length} orphaned blocks`);
} catch (e) {
logger.err(`Cannot fetch orphaned blocks. Reason: ${e instanceof Error ? e.message : e}`);
}
}
public getOrphanedBlocksAtHeight(height: number): OrphanedBlock[] {
const orphans: OrphanedBlock[] = [];
for (const block of this.orphanedBlocks) {
if (block.height === height) {
orphans.push(block);
}
}
return orphans;
}
}
export default new ChainTips();
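A brief usage sketch of the new ChainTips singleton (imported the same way index.ts does further below): refresh the orphan cache once, then query orphans for a given height.
// Usage sketch only; assumes the backend context where bitcoinClient is already configured.
import chainTips from './api/chain-tips';

async function logOrphansAt(height: number): Promise<void> {
  await chainTips.updateOrphanedBlocks(); // rebuild the cache from getchaintips
  const orphans = chainTips.getOrphanedBlocksAtHeight(height);
  console.log(`${orphans.length} orphaned block(s) at height ${height}`, orphans);
}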

View File

@ -7,7 +7,7 @@ import cpfpRepository from '../repositories/CpfpRepository';
import { RowDataPacket } from 'mysql2';
class DatabaseMigration {
private static currentVersion = 54;
private static currentVersion = 56;
private queryTimeout = 3600_000;
private statisticsAddedIndexed = false;
private uniqueLogs: string[] = [];
@ -62,8 +62,8 @@ class DatabaseMigration {
if (databaseSchemaVersion <= 2) {
// Disable some spam logs when they're not relevant
this.uniqueLogs.push(this.blocksTruncatedMessage);
this.uniqueLogs.push(this.hashratesTruncatedMessage);
this.uniqueLog(logger.notice, this.blocksTruncatedMessage);
this.uniqueLog(logger.notice, this.hashratesTruncatedMessage);
}
logger.debug('MIGRATIONS: Current state.schema_version ' + databaseSchemaVersion);
@ -483,6 +483,23 @@ class DatabaseMigration {
}
await this.updateToSchemaVersion(54);
}
if (databaseSchemaVersion < 55) {
await this.$executeQuery(this.getAdditionalBlocksDataQuery());
this.uniqueLog(logger.notice, this.blocksTruncatedMessage);
await this.$executeQuery('TRUNCATE blocks;'); // Need to re-index
await this.updateToSchemaVersion(55);
}
if (databaseSchemaVersion < 56) {
await this.$executeQuery('ALTER TABLE pools ADD unique_id int NOT NULL DEFAULT -1');
await this.$executeQuery('TRUNCATE TABLE `blocks`');
this.uniqueLog(logger.notice, this.blocksTruncatedMessage);
await this.$executeQuery('DELETE FROM `pools`');
await this.$executeQuery('ALTER TABLE pools AUTO_INCREMENT = 1');
this.uniqueLog(logger.notice, '`pools` table has been truncated');
await this.updateToSchemaVersion(56);
}
}
/**
@ -756,6 +773,28 @@ class DatabaseMigration {
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
}
private getAdditionalBlocksDataQuery(): string {
return `ALTER TABLE blocks
ADD median_timestamp timestamp NOT NULL,
ADD coinbase_address varchar(100) NULL,
ADD coinbase_signature varchar(500) NULL,
ADD coinbase_signature_ascii varchar(500) NULL,
ADD avg_tx_size double unsigned NOT NULL,
ADD total_inputs int unsigned NOT NULL,
ADD total_outputs int unsigned NOT NULL,
ADD total_output_amt bigint unsigned NOT NULL,
ADD fee_percentiles longtext NULL,
ADD median_fee_amt int unsigned NULL,
ADD segwit_total_txs int unsigned NOT NULL,
ADD segwit_total_size int unsigned NOT NULL,
ADD segwit_total_weight int unsigned NOT NULL,
ADD header varchar(160) NOT NULL,
ADD utxoset_change int NOT NULL,
ADD utxoset_size int unsigned NULL,
ADD total_input_amt bigint unsigned NULL
`;
}
private getCreateDailyStatsTableQuery(): string {
return `CREATE TABLE IF NOT EXISTS hashrates (
hashrate_timestamp timestamp NOT NULL,

View File

@ -9,7 +9,7 @@ import { TransactionExtended } from '../mempool.interfaces';
import { Common } from './common';
class DiskCache {
private cacheSchemaVersion = 1;
private cacheSchemaVersion = 2;
private static FILE_NAME = config.MEMPOOL.CACHE_DIR + '/cache.json';
private static FILE_NAMES = config.MEMPOOL.CACHE_DIR + '/cache{number}.json';

View File

@ -172,7 +172,7 @@ class Mining {
}
/**
* [INDEXING] Generate weekly mining pool hashrate history
* Generate weekly mining pool hashrate history
*/
public async $generatePoolHashrateHistory(): Promise<void> {
const now = new Date();
@ -279,7 +279,7 @@ class Mining {
}
/**
* [INDEXING] Generate daily hashrate data
* Generate daily hashrate data
*/
public async $generateNetworkHashrateHistory(): Promise<void> {
// We only run this once a day around midnight
@ -459,7 +459,7 @@ class Mining {
/**
* Create a link between blocks and the latest price at when they were mined
*/
public async $indexBlockPrices() {
public async $indexBlockPrices(): Promise<void> {
if (this.blocksPriceIndexingRunning === true) {
return;
}
@ -520,6 +520,41 @@ class Mining {
this.blocksPriceIndexingRunning = false;
}
/**
* Index core coinstatsindex
*/
public async $indexCoinStatsIndex(): Promise<void> {
let timer = new Date().getTime() / 1000;
let totalIndexed = 0;
const blockchainInfo = await bitcoinClient.getBlockchainInfo();
let currentBlockHeight = blockchainInfo.blocks;
while (currentBlockHeight > 0) {
const indexedBlocks = await BlocksRepository.$getBlocksMissingCoinStatsIndex(
currentBlockHeight, currentBlockHeight - 10000);
for (const block of indexedBlocks) {
const txoutset = await bitcoinClient.getTxoutSetinfo('none', block.height);
await BlocksRepository.$updateCoinStatsIndexData(block.hash, txoutset.txouts,
Math.round(txoutset.block_info.prevout_spent * 100000000));
++totalIndexed;
const elapsedSeconds = Math.max(1, new Date().getTime() / 1000 - timer);
if (elapsedSeconds > 5) {
logger.info(`Indexing coinstatsindex data for block #${block.height}. Indexed ${totalIndexed} blocks.`, logger.tags.mining);
timer = new Date().getTime() / 1000;
}
}
currentBlockHeight -= 10000;
}
if (totalIndexed) {
logger.info(`Indexing missing coinstatsindex data completed`, logger.tags.mining);
}
}
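For reference, a sketch of the only gettxoutsetinfo fields this indexing task consumes (field names taken from the calls above; the full RPC result contains more):
// Subset of the gettxoutsetinfo('none', height) result used by $indexCoinStatsIndex.
interface TxOutSetInfoSubset {
  txouts: number;            // stored as utxoset_size
  block_info: {
    prevout_spent: number;   // BTC, converted to sats with Math.round(x * 100000000)
  };
}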
private getDateMidnight(date: Date): Date {
date.setUTCHours(0);
date.setUTCMinutes(0);

View File

@ -1,15 +1,8 @@
import DB from '../database';
import logger from '../logger';
import config from '../config';
import BlocksRepository from '../repositories/BlocksRepository';
interface Pool {
name: string;
link: string;
regexes: string[];
addresses: string[];
slug: string;
}
import PoolsRepository from '../repositories/PoolsRepository';
import { PoolTag } from '../mempool.interfaces';
class PoolsParser {
miningPools: any[] = [];
@ -20,270 +13,142 @@ class PoolsParser {
'addresses': '[]',
'slug': 'unknown'
};
slugWarnFlag = false;
private uniqueLogs: string[] = [];
private uniqueLog(loggerFunction: any, msg: string): void {
if (this.uniqueLogs.includes(msg)) {
return;
}
this.uniqueLogs.push(msg);
loggerFunction(msg);
}
public setMiningPools(pools): void {
for (const pool of pools) {
pool.regexes = pool.tags;
delete(pool.tags);
}
this.miningPools = pools;
}
/**
* Parse the pools.json file, consolidate the data and dump it into the database
* Populate our db with updated mining pool definitions
* @param pools
*/
public async migratePoolsJson(poolsJson: object): Promise<void> {
if (['mainnet', 'testnet', 'signet'].includes(config.MEMPOOL.NETWORK) === false) {
return;
}
public async migratePoolsJson(): Promise<void> {
await this.$insertUnknownPool();
// First we save every entries without paying attention to pool duplication
const poolsDuplicated: Pool[] = [];
const coinbaseTags = Object.entries(poolsJson['coinbase_tags']);
for (let i = 0; i < coinbaseTags.length; ++i) {
poolsDuplicated.push({
'name': (<Pool>coinbaseTags[i][1]).name,
'link': (<Pool>coinbaseTags[i][1]).link,
'regexes': [coinbaseTags[i][0]],
'addresses': [],
'slug': ''
});
}
const addressesTags = Object.entries(poolsJson['payout_addresses']);
for (let i = 0; i < addressesTags.length; ++i) {
poolsDuplicated.push({
'name': (<Pool>addressesTags[i][1]).name,
'link': (<Pool>addressesTags[i][1]).link,
'regexes': [],
'addresses': [addressesTags[i][0]],
'slug': ''
});
}
// Then, we find unique mining pool names
const poolNames: string[] = [];
for (let i = 0; i < poolsDuplicated.length; ++i) {
if (poolNames.indexOf(poolsDuplicated[i].name) === -1) {
poolNames.push(poolsDuplicated[i].name);
for (const pool of this.miningPools) {
if (!pool.id) {
logger.info(`Mining pool ${pool.name} has no unique 'id' defined. Skipping.`);
continue;
}
}
logger.debug(`Found ${poolNames.length} unique mining pools`, logger.tags.mining);
// Get existing pools from the db
let existingPools;
try {
if (config.DATABASE.ENABLED === true) {
[existingPools] = await DB.query({ sql: 'SELECT * FROM pools;', timeout: 120000 });
const poolDB = await PoolsRepository.$getPoolByUniqueId(pool.id, false);
if (!poolDB) {
// New mining pool
const slug = pool.name.replace(/[^a-z0-9]/gi, '').toLowerCase();
logger.debug(`Inserting new mining pool ${pool.name}`);
await PoolsRepository.$insertNewMiningPool(pool, slug);
await this.$deleteUnknownBlocks();
} else {
existingPools = [];
}
} catch (e) {
logger.err('Cannot get existing pools from the database, skipping pools.json import', logger.tags.mining);
return;
}
this.miningPools = [];
// Finally, we generate the final consolidated pools data
const finalPoolDataAdd: Pool[] = [];
const finalPoolDataUpdate: Pool[] = [];
const finalPoolDataRename: Pool[] = [];
for (let i = 0; i < poolNames.length; ++i) {
let allAddresses: string[] = [];
let allRegexes: string[] = [];
const match = poolsDuplicated.filter((pool: Pool) => pool.name === poolNames[i]);
for (let y = 0; y < match.length; ++y) {
allAddresses = allAddresses.concat(match[y].addresses);
allRegexes = allRegexes.concat(match[y].regexes);
}
const finalPoolName = poolNames[i].replace(`'`, `''`); // To support single quote in names when doing db queries
let slug: string | undefined;
try {
slug = poolsJson['slugs'][poolNames[i]];
} catch (e) {
if (this.slugWarnFlag === false) {
logger.warn(`pools.json does not seem to contain the 'slugs' object`, logger.tags.mining);
this.slugWarnFlag = true;
if (poolDB.name !== pool.name) {
// Pool has been renamed
const newSlug = pool.name.replace(/[^a-z0-9]/gi, '').toLowerCase();
logger.warn(`Renaming ${poolDB.name} mining pool to ${pool.name}. Slug has been updated. Maybe you want to make a redirection from 'https://mempool.space/mining/pool/${poolDB.slug}' to 'https://mempool.space/mining/pool/${newSlug}'`);
await PoolsRepository.$renameMiningPool(poolDB.id, newSlug, pool.name);
}
}
if (slug === undefined) {
// Only keep alphanumerical
slug = poolNames[i].replace(/[^a-z0-9]/gi, '').toLowerCase();
logger.warn(`No slug found for '${poolNames[i]}', generating it => '${slug}'`, logger.tags.mining);
}
const poolObj = {
'name': finalPoolName,
'link': match[0].link,
'regexes': allRegexes,
'addresses': allAddresses,
'slug': slug
};
const existingPool = existingPools.find((pool) => pool.name === poolNames[i]);
if (existingPool !== undefined) {
// Check if any data was actually updated
const equals = (a, b) =>
a.length === b.length &&
a.every((v, i) => v === b[i]);
if (!equals(JSON.parse(existingPool.addresses), poolObj.addresses) || !equals(JSON.parse(existingPool.regexes), poolObj.regexes)) {
finalPoolDataUpdate.push(poolObj);
if (poolDB.link !== pool.link) {
// Pool link has changed
logger.debug(`Updating link for ${pool.name} mining pool`);
await PoolsRepository.$updateMiningPoolLink(poolDB.id, pool.link);
}
} else if (config.DATABASE.ENABLED) {
// Double check that if we're not just renaming a pool (same address same regex)
const [poolToRename]: any[] = await DB.query(`
SELECT * FROM pools
WHERE addresses = ? OR regexes = ?`,
[JSON.stringify(poolObj.addresses), JSON.stringify(poolObj.regexes)]
);
if (poolToRename && poolToRename.length > 0) {
// We're actually renaming an existing pool
finalPoolDataRename.push({
'name': poolObj.name,
'link': poolObj.link,
'regexes': allRegexes,
'addresses': allAddresses,
'slug': slug
});
logger.debug(`Rename '${poolToRename[0].name}' mining pool to ${poolObj.name}`, logger.tags.mining);
} else {
logger.debug(`Add '${finalPoolName}' mining pool`, logger.tags.mining);
finalPoolDataAdd.push(poolObj);
if (JSON.stringify(pool.addresses) !== poolDB.addresses ||
JSON.stringify(pool.regexes) !== poolDB.regexes) {
// Pool addresses changed or coinbase tags changed
logger.notice(`Updating addresses and/or coinbase tags for ${pool.name} mining pool. If 'AUTOMATIC_BLOCK_REINDEXING' is enabled, we will re-index its blocks and 'unknown' blocks`);
await PoolsRepository.$updateMiningPoolTags(poolDB.id, pool.addresses, pool.regexes);
await this.$deleteBlocksForPool(poolDB);
}
}
this.miningPools.push({
'name': finalPoolName,
'link': match[0].link,
'regexes': JSON.stringify(allRegexes),
'addresses': JSON.stringify(allAddresses),
'slug': slug
});
}
if (config.DATABASE.ENABLED === false) { // Don't run db operations
logger.info('Mining pools.json import completed (no database)', logger.tags.mining);
return;
}
if (finalPoolDataAdd.length > 0 || finalPoolDataUpdate.length > 0 ||
finalPoolDataRename.length > 0
) {
logger.debug(`Update pools table now`, logger.tags.mining);
// Add new mining pools into the database
let queryAdd: string = 'INSERT INTO pools(name, link, regexes, addresses, slug) VALUES ';
for (let i = 0; i < finalPoolDataAdd.length; ++i) {
queryAdd += `('${finalPoolDataAdd[i].name}', '${finalPoolDataAdd[i].link}',
'${JSON.stringify(finalPoolDataAdd[i].regexes)}', '${JSON.stringify(finalPoolDataAdd[i].addresses)}',
${JSON.stringify(finalPoolDataAdd[i].slug)}),`;
}
queryAdd = queryAdd.slice(0, -1) + ';';
// Updated existing mining pools in the database
const updateQueries: string[] = [];
for (let i = 0; i < finalPoolDataUpdate.length; ++i) {
updateQueries.push(`
UPDATE pools
SET name='${finalPoolDataUpdate[i].name}', link='${finalPoolDataUpdate[i].link}',
regexes='${JSON.stringify(finalPoolDataUpdate[i].regexes)}', addresses='${JSON.stringify(finalPoolDataUpdate[i].addresses)}',
slug='${finalPoolDataUpdate[i].slug}'
WHERE name='${finalPoolDataUpdate[i].name}'
;`);
}
// Rename mining pools
const renameQueries: string[] = [];
for (let i = 0; i < finalPoolDataRename.length; ++i) {
renameQueries.push(`
UPDATE pools
SET name='${finalPoolDataRename[i].name}', link='${finalPoolDataRename[i].link}',
slug='${finalPoolDataRename[i].slug}'
WHERE regexes='${JSON.stringify(finalPoolDataRename[i].regexes)}'
AND addresses='${JSON.stringify(finalPoolDataRename[i].addresses)}'
;`);
}
try {
if (finalPoolDataAdd.length > 0 || updateQueries.length > 0) {
await this.$deleteBlocskToReindex(finalPoolDataUpdate);
}
if (finalPoolDataAdd.length > 0) {
await DB.query({ sql: queryAdd, timeout: 120000 });
}
for (const query of updateQueries) {
await DB.query({ sql: query, timeout: 120000 });
}
for (const query of renameQueries) {
await DB.query({ sql: query, timeout: 120000 });
}
await this.insertUnknownPool();
logger.info('Mining pools.json import completed', logger.tags.mining);
} catch (e) {
logger.err(`Cannot import pools in the database`, logger.tags.mining);
throw e;
}
}
try {
await this.insertUnknownPool();
} catch (e) {
logger.err(`Cannot insert unknown pool in the database`, logger.tags.mining);
throw e;
}
logger.info('Mining pools.json import completed');
}
/**
* Manually add the 'unknown pool'
*/
private async insertUnknownPool() {
public async $insertUnknownPool(): Promise<void> {
if (!config.DATABASE.ENABLED) {
return;
}
try {
const [rows]: any[] = await DB.query({ sql: 'SELECT name from pools where name="Unknown"', timeout: 120000 });
if (rows.length === 0) {
await DB.query({
sql: `INSERT INTO pools(name, link, regexes, addresses, slug)
VALUES("Unknown", "https://learnmeabitcoin.com/technical/coinbase-transaction", "[]", "[]", "unknown");
sql: `INSERT INTO pools(name, link, regexes, addresses, slug, unique_id)
VALUES("${this.unknownPool.name}", "${this.unknownPool.link}", "[]", "[]", "${this.unknownPool.slug}", 0);
`});
} else {
await DB.query(`UPDATE pools
SET name='Unknown', link='https://learnmeabitcoin.com/technical/coinbase-transaction',
SET name='${this.unknownPool.name}', link='${this.unknownPool.link}',
regexes='[]', addresses='[]',
slug='unknown'
WHERE name='Unknown'
slug='${this.unknownPool.slug}',
unique_id=0
WHERE slug='${this.unknownPool.slug}'
`);
}
} catch (e) {
logger.err('Unable to insert "Unknown" mining pool', logger.tags.mining);
logger.err(`Unable to insert or update "Unknown" mining pool. Reason: ${e instanceof Error ? e.message : e}`);
}
}
/**
* Delete blocks which needs to be reindexed
* Delete indexed blocks for an updated mining pool
*
* @param pool
*/
private async $deleteBlocskToReindex(finalPoolDataUpdate: any[]) {
private async $deleteBlocksForPool(pool: PoolTag): Promise<void> {
if (config.MEMPOOL.AUTOMATIC_BLOCK_REINDEXING === false) {
return;
}
const blockCount = await BlocksRepository.$blockCount(null, null);
if (blockCount === 0) {
return;
}
for (const updatedPool of finalPoolDataUpdate) {
const [pool]: any[] = await DB.query(`SELECT id, name from pools where slug = "${updatedPool.slug}"`);
if (pool.length > 0) {
logger.notice(`Deleting blocks from ${pool[0].name} mining pool for future re-indexing`, logger.tags.mining);
await DB.query(`DELETE FROM blocks WHERE pool_id = ${pool[0].id}`);
}
}
// Ignore early days of Bitcoin as there were not mining pool yet
logger.notice(`Deleting blocks with unknown mining pool from height 130635 for future re-indexing`, logger.tags.mining);
// Get oldest blocks mined by the pool and assume pools.json updates only concern most recent years
// Ignore early days of Bitcoin as there were no mining pools yet
const [oldestPoolBlock]: any[] = await DB.query(`
SELECT height
FROM blocks
WHERE pool_id = ?
ORDER BY height
LIMIT 1`,
[pool.id]
);
const oldestBlockHeight = oldestPoolBlock.length ?? 0 > 0 ? oldestPoolBlock[0].height : 130635;
const [unknownPool] = await DB.query(`SELECT id from pools where slug = "unknown"`);
await DB.query(`DELETE FROM blocks WHERE pool_id = ${unknownPool[0].id} AND height > 130635`);
this.uniqueLog(logger.notice, `Deleting blocks with unknown mining pool from height ${oldestBlockHeight} for re-indexing`);
await DB.query(`
DELETE FROM blocks
WHERE pool_id = ? AND height >= ${oldestBlockHeight}`,
[unknownPool[0].id]
);
logger.notice(`Deleting blocks from ${pool.name} mining pool for re-indexing`);
await DB.query(`
DELETE FROM blocks
WHERE pool_id = ?`,
[pool.id]
);
}
logger.notice(`Truncating hashrates for future re-indexing`, logger.tags.mining);
await DB.query(`DELETE FROM hashrates`);
private async $deleteUnknownBlocks(): Promise<void> {
const [unknownPool] = await DB.query(`SELECT id from pools where slug = "unknown"`);
this.uniqueLog(logger.notice, `Deleting blocks with unknown mining pool from height 130635 for re-indexing`);
await DB.query(`
DELETE FROM blocks
WHERE pool_id = ? AND height >= 130635`,
[unknownPool[0].id]
);
}
}
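For illustration, a hypothetical pools-v2.json entry shaped the way setMiningPools() and migratePoolsJson() expect it (field names inferred from the code above; all values are made up):
// Hypothetical pool definition; 'tags' is renamed to 'regexes' by setMiningPools(),
// the slug is derived from the name, and 'id' becomes the stable unique_id column.
const examplePoolDefinition = {
  id: 999,                           // hypothetical unique id
  name: 'Example Pool',
  link: 'https://example-pool.invalid',
  addresses: ['bc1qexampleaddress'], // hypothetical payout address
  tags: ['/ExamplePool/'],           // coinbase tag patterns
};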

View File

@ -14,6 +14,7 @@ class TransactionUtils {
vout: tx.vout
.map((vout) => ({
scriptpubkey_address: vout.scriptpubkey_address,
scriptpubkey_asm: vout.scriptpubkey_asm,
value: vout.value
}))
.filter((vout) => vout.value)

View File

@ -32,6 +32,7 @@ interface IConfig {
ADVANCED_GBT_AUDIT: boolean;
ADVANCED_GBT_MEMPOOL: boolean;
CPFP_INDEXING: boolean;
MAX_BLOCKS_BULK_QUERY: number;
};
ESPLORA: {
REST_API_URL: string;
@ -147,12 +148,13 @@ const defaults: IConfig = {
'USER_AGENT': 'mempool',
'STDOUT_LOG_MIN_PRIORITY': 'debug',
'AUTOMATIC_BLOCK_REINDEXING': false,
'POOLS_JSON_URL': 'https://raw.githubusercontent.com/mempool/mining-pools/master/pools.json',
'POOLS_JSON_URL': 'https://raw.githubusercontent.com/mempool/mining-pools/master/pools-v2.json',
'POOLS_JSON_TREE_URL': 'https://api.github.com/repos/mempool/mining-pools/git/trees/master',
'AUDIT': false,
'ADVANCED_GBT_AUDIT': false,
'ADVANCED_GBT_MEMPOOL': false,
'CPFP_INDEXING': false,
'MAX_BLOCKS_BULK_QUERY': 0,
},
'ESPLORA': {
'REST_API_URL': 'http://127.0.0.1:3000',

View File

@ -24,7 +24,8 @@ import { FieldPacket, OkPacket, PoolOptions, ResultSetHeader, RowDataPacket } fr
private checkDBFlag() {
if (config.DATABASE.ENABLED === false) {
logger.err('Trying to use DB feature but config.DATABASE.ENABLED is set to false, please open an issue');
const stack = new Error().stack;
logger.err(`Trying to use DB feature but config.DATABASE.ENABLED is set to false, please open an issue.\nStack trace: ${stack}`);
}
}

View File

@ -36,6 +36,8 @@ import bitcoinRoutes from './api/bitcoin/bitcoin.routes';
import fundingTxFetcher from './tasks/lightning/sync-tasks/funding-tx-fetcher';
import forensicsService from './tasks/lightning/forensics.service';
import priceUpdater from './tasks/price-updater';
import mining from './api/mining/mining';
import chainTips from './api/chain-tips';
import { AxiosError } from 'axios';
class Server {
@ -133,6 +135,7 @@ class Server {
}
priceUpdater.$run();
await chainTips.updateOrphanedBlocks();
this.setUpHttpApiRoutes();

View File

@ -8,18 +8,67 @@ import bitcoinClient from './api/bitcoin/bitcoin-client';
import priceUpdater from './tasks/price-updater';
import PricesRepository from './repositories/PricesRepository';
export interface CoreIndex {
name: string;
synced: boolean;
best_block_height: number;
}
class Indexer {
runIndexer = true;
indexerRunning = false;
tasksRunning: string[] = [];
coreIndexes: CoreIndex[] = [];
public reindex() {
/**
* Check which core index is available for indexing
*/
public async checkAvailableCoreIndexes(): Promise<void> {
const updatedCoreIndexes: CoreIndex[] = [];
const indexes: any = await bitcoinClient.getIndexInfo();
for (const indexName in indexes) {
const newState = {
name: indexName,
synced: indexes[indexName].synced,
best_block_height: indexes[indexName].best_block_height,
};
logger.info(`Core index '${indexName}' is ${indexes[indexName].synced ? 'synced' : 'not synced'}. Best block height is ${indexes[indexName].best_block_height}`);
updatedCoreIndexes.push(newState);
if (indexName === 'coinstatsindex' && newState.synced === true) {
const previousState = this.isCoreIndexReady('coinstatsindex');
// if (!previousState || previousState.synced === false) {
this.runSingleTask('coinStatsIndex');
// }
}
}
this.coreIndexes = updatedCoreIndexes;
}
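For context, the shape of the getindexinfo response that the loop above iterates over (which keys appear depends on the indexes the node actually runs; heights here are illustrative):
// Illustrative getindexinfo result consumed by checkAvailableCoreIndexes().
const exampleIndexInfo = {
  txindex:        { synced: true, best_block_height: 777000 },
  coinstatsindex: { synced: true, best_block_height: 777000 },
};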
/**
* Return the core index if it exists and is synced, or null otherwise
*
* @param name
* @returns
*/
public isCoreIndexReady(name: string): CoreIndex | null {
for (const index of this.coreIndexes) {
if (index.name === name && index.synced === true) {
return index;
}
}
return null;
}
public reindex(): void {
if (Common.indexingEnabled()) {
this.runIndexer = true;
}
}
public async runSingleTask(task: 'blocksPrices') {
public async runSingleTask(task: 'blocksPrices' | 'coinStatsIndex'): Promise<void> {
if (!Common.indexingEnabled()) {
return;
}
@ -28,20 +77,27 @@ class Indexer {
this.tasksRunning.push(task);
const lastestPriceId = await PricesRepository.$getLatestPriceId();
if (priceUpdater.historyInserted === false || lastestPriceId === null) {
logger.debug(`Blocks prices indexer is waiting for the price updater to complete`)
logger.debug(`Blocks prices indexer is waiting for the price updater to complete`);
setTimeout(() => {
this.tasksRunning = this.tasksRunning.filter(runningTask => runningTask != task)
this.tasksRunning = this.tasksRunning.filter(runningTask => runningTask !== task);
this.runSingleTask('blocksPrices');
}, 10000);
} else {
logger.debug(`Blocks prices indexer will run now`)
logger.debug(`Blocks prices indexer will run now`);
await mining.$indexBlockPrices();
this.tasksRunning = this.tasksRunning.filter(runningTask => runningTask != task)
this.tasksRunning = this.tasksRunning.filter(runningTask => runningTask !== task);
}
}
if (task === 'coinStatsIndex' && !this.tasksRunning.includes(task)) {
this.tasksRunning.push(task);
logger.debug(`Indexing coinStatsIndex now`);
await mining.$indexCoinStatsIndex();
this.tasksRunning = this.tasksRunning.filter(runningTask => runningTask !== task);
}
}
public async $run() {
public async $run(): Promise<void> {
if (!Common.indexingEnabled() || this.runIndexer === false ||
this.indexerRunning === true || mempool.hasPriority()
) {
@ -57,7 +113,9 @@ class Indexer {
this.runIndexer = false;
this.indexerRunning = true;
logger.debug(`Running mining indexer`);
logger.info(`Running mining indexer`);
await this.checkAvailableCoreIndexes();
try {
await priceUpdater.$run();
@ -93,7 +151,7 @@ class Indexer {
setTimeout(() => this.reindex(), runEvery);
}
async $resetHashratesIndexingState() {
async $resetHashratesIndexingState(): Promise<void> {
try {
await HashratesRepository.$setLatestRun('last_hashrates_indexing', 0);
await HashratesRepository.$setLatestRun('last_weekly_hashrates_indexing', 0);

View File

@ -1,4 +1,5 @@
import { IEsploraApi } from './api/bitcoin/esplora-api.interface';
import { OrphanedBlock } from './api/chain-tips';
import { HeapNode } from "./utils/pairing-heap";
export interface PoolTag {
@ -64,6 +65,7 @@ interface VinStrippedToScriptsig {
interface VoutStrippedToScriptPubkey {
scriptpubkey_address: string | undefined;
scriptpubkey_asm: string | undefined;
value: number;
}
@ -160,6 +162,27 @@ export interface BlockExtension {
avgFeeRate?: number;
coinbaseRaw?: string;
usd?: number | null;
medianTimestamp?: number;
blockTime?: number;
orphans?: OrphanedBlock[] | null;
coinbaseAddress?: string | null;
coinbaseSignature?: string | null;
coinbaseSignatureAscii?: string | null;
virtualSize?: number;
avgTxSize?: number;
totalInputs?: number;
totalOutputs?: number;
totalOutputAmt?: number;
medianFeeAmt?: number | null;
feePercentiles?: number[] | null,
segwitTotalTxs?: number;
segwitTotalSize?: number;
segwitTotalWeight?: number;
header?: string;
utxoSetChange?: number;
// Requires coinstatsindex, will be set to NULL otherwise
utxoSetSize?: number | null;
totalInputAmt?: number | null;
}
export interface BlockExtended extends IEsploraApi.Block {

View File

@ -18,17 +18,27 @@ class BlocksRepository {
public async $saveBlockInDatabase(block: BlockExtended) {
try {
const query = `INSERT INTO blocks(
height, hash, blockTimestamp, size,
weight, tx_count, coinbase_raw, difficulty,
pool_id, fees, fee_span, median_fee,
reward, version, bits, nonce,
merkle_root, previous_block_hash, avg_fee, avg_fee_rate
height, hash, blockTimestamp, size,
weight, tx_count, coinbase_raw, difficulty,
pool_id, fees, fee_span, median_fee,
reward, version, bits, nonce,
merkle_root, previous_block_hash, avg_fee, avg_fee_rate,
median_timestamp, header, coinbase_address,
coinbase_signature, utxoset_size, utxoset_change, avg_tx_size,
total_inputs, total_outputs, total_input_amt, total_output_amt,
fee_percentiles, segwit_total_txs, segwit_total_size, segwit_total_weight,
median_fee_amt, coinbase_signature_ascii
) VALUE (
?, ?, FROM_UNIXTIME(?), ?,
?, ?, ?, ?,
?, ?, ?, ?,
?, ?, ?, ?,
?, ?, ?, ?
?, ?, ?, ?,
FROM_UNIXTIME(?), ?, ?,
?, ?, ?, ?,
?, ?, ?, ?,
?, ?, ?, ?,
?, ?
)`;
const params: any[] = [
@ -52,6 +62,23 @@ class BlocksRepository {
block.previousblockhash,
block.extras.avgFee,
block.extras.avgFeeRate,
block.extras.medianTimestamp,
block.extras.header,
block.extras.coinbaseAddress,
block.extras.coinbaseSignature,
block.extras.utxoSetSize,
block.extras.utxoSetChange,
block.extras.avgTxSize,
block.extras.totalInputs,
block.extras.totalOutputs,
block.extras.totalInputAmt,
block.extras.totalOutputAmt,
block.extras.feePercentiles ? JSON.stringify(block.extras.feePercentiles) : null,
block.extras.segwitTotalTxs,
block.extras.segwitTotalSize,
block.extras.segwitTotalWeight,
block.extras.medianFeeAmt,
block.extras.coinbaseSignatureAscii,
];
await DB.query(query, params);
@ -65,6 +92,33 @@ class BlocksRepository {
}
}
/**
* Save newly indexed data from core coinstatsindex
*
* @param utxoSetSize
* @param totalInputAmt
*/
public async $updateCoinStatsIndexData(blockHash: string, utxoSetSize: number,
totalInputAmt: number
) : Promise<void> {
try {
const query = `
UPDATE blocks
SET utxoset_size = ?, total_input_amt = ?
WHERE hash = ?
`;
const params: any[] = [
utxoSetSize,
totalInputAmt,
blockHash
];
await DB.query(query, params);
} catch (e: any) {
logger.err('Cannot update indexed block coinstatsindex. Reason: ' + (e instanceof Error ? e.message : e));
throw e;
}
}
/**
* Get all block heights that have not been indexed between [startHeight, endHeight]
*/
@ -310,32 +364,17 @@ class BlocksRepository {
public async $getBlockByHeight(height: number): Promise<object | null> {
try {
const [rows]: any[] = await DB.query(`SELECT
blocks.height,
hash,
blocks.*,
hash as id,
UNIX_TIMESTAMP(blocks.blockTimestamp) as blockTimestamp,
size,
weight,
tx_count,
coinbase_raw,
difficulty,
UNIX_TIMESTAMP(blocks.median_timestamp) as medianTime,
pools.id as pool_id,
pools.name as pool_name,
pools.link as pool_link,
pools.slug as pool_slug,
pools.addresses as pool_addresses,
pools.regexes as pool_regexes,
fees,
fee_span,
median_fee,
reward,
version,
bits,
nonce,
merkle_root,
previous_block_hash as previousblockhash,
avg_fee,
avg_fee_rate
previous_block_hash as previousblockhash
FROM blocks
JOIN pools ON blocks.pool_id = pools.id
WHERE blocks.height = ${height}
@ -346,6 +385,7 @@ class BlocksRepository {
}
rows[0].fee_span = JSON.parse(rows[0].fee_span);
rows[0].fee_percentiles = JSON.parse(rows[0].fee_percentiles);
return rows[0];
} catch (e) {
logger.err(`Cannot get indexed block ${height}. Reason: ` + (e instanceof Error ? e.message : e));
@ -694,7 +734,6 @@ class BlocksRepository {
logger.err('Cannot fetch CPFP unindexed blocks. Reason: ' + (e instanceof Error ? e.message : e));
throw e;
}
return [];
}
/**
@ -741,7 +780,7 @@ class BlocksRepository {
try {
let query = `INSERT INTO blocks_prices(height, price_id) VALUES`;
for (const price of blockPrices) {
query += ` (${price.height}, ${price.priceId}),`
query += ` (${price.height}, ${price.priceId}),`;
}
query = query.slice(0, -1);
await DB.query(query);
@ -754,6 +793,43 @@ class BlocksRepository {
}
}
}
/**
* Get all indexed blocks with missing coinstatsindex data
*/
public async $getBlocksMissingCoinStatsIndex(maxHeight: number, minHeight: number): Promise<any> {
try {
const [blocks] = await DB.query(`
SELECT height, hash
FROM blocks
WHERE height >= ${minHeight} AND height <= ${maxHeight} AND
(utxoset_size IS NULL OR total_input_amt IS NULL)
`);
return blocks;
} catch (e) {
logger.err(`Cannot get blocks with missing coinstatsindex. Reason: ` + (e instanceof Error ? e.message : e));
throw e;
}
}
/**
* Save indexed median fee to avoid recomputing it later
*
* @param id
* @param feePercentiles
*/
public async $saveFeePercentilesForBlockId(id: string, feePercentiles: number[]): Promise<void> {
try {
await DB.query(`
UPDATE blocks SET fee_percentiles = ?, median_fee_amt = ?
WHERE hash = ?`,
[JSON.stringify(feePercentiles), feePercentiles[3], id]
);
} catch (e) {
logger.err(`Cannot update block fee_percentiles. Reason: ` + (e instanceof Error ? e.message : e));
throw e;
}
}
}
export default new BlocksRepository();

View File

@ -80,6 +80,48 @@ class BlocksSummariesRepository {
logger.err('Cannot delete indexed blocks summaries. Reason: ' + (e instanceof Error ? e.message : e));
}
}
/**
* Get the fee percentiles if the block has already been indexed, null otherwise
*
* @param id
*/
public async $getFeePercentilesByBlockId(id: string): Promise<number[] | null> {
try {
const [rows]: any[] = await DB.query(`
SELECT transactions
FROM blocks_summaries
WHERE id = ?`,
[id]
);
if (rows === null || rows.length === 0) {
return null;
}
const transactions = JSON.parse(rows[0].transactions);
if (transactions === null) {
return null;
}
transactions.shift(); // Ignore coinbase
transactions.sort((a: any, b: any) => a.fee - b.fee);
const fees = transactions.map((t: any) => t.fee);
return [
fees[0] ?? 0, // min
fees[Math.max(0, Math.floor(fees.length * 0.1) - 1)] ?? 0, // 10th
fees[Math.max(0, Math.floor(fees.length * 0.25) - 1)] ?? 0, // 25th
fees[Math.max(0, Math.floor(fees.length * 0.5) - 1)] ?? 0, // median
fees[Math.max(0, Math.floor(fees.length * 0.75) - 1)] ?? 0, // 75th
fees[Math.max(0, Math.floor(fees.length * 0.9) - 1)] ?? 0, // 90th
fees[fees.length - 1] ?? 0, // max
];
} catch (e) {
logger.err(`Cannot get block summaries transactions. Reason: ` + (e instanceof Error ? e.message : e));
return null;
}
}
}
export default new BlocksSummariesRepository();
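A worked sketch of the percentile index arithmetic used in $getFeePercentilesByBlockId, with a hypothetical list of ten non-coinbase fees already sorted ascending:
// Hypothetical sorted fee list (sats); indices follow Math.max(0, Math.floor(n * p) - 1).
const fees = [100, 120, 150, 180, 200, 250, 300, 400, 550, 900];
const pct = (p: number): number => fees[Math.max(0, Math.floor(fees.length * p) - 1)] ?? 0;
// min = fees[0] = 100, 10th = pct(0.1) = 100, 25th = pct(0.25) = 120,
// median = pct(0.5) = 200, 75th = pct(0.75) = 300, 90th = pct(0.9) = 550, max = 900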

View File

@ -1,4 +1,5 @@
import { Common } from '../api/common';
import poolsParser from '../api/pools-parser';
import config from '../config';
import DB from '../database';
import logger from '../logger';
@ -17,7 +18,11 @@ class PoolsRepository {
* Get unknown pool tagging info
*/
public async $getUnknownPool(): Promise<PoolTag> {
const [rows] = await DB.query('SELECT id, name, slug FROM pools where name = "Unknown"');
let [rows]: any[] = await DB.query('SELECT id, name, slug FROM pools where name = "Unknown"');
if (rows && rows.length === 0 && config.DATABASE.ENABLED) {
await poolsParser.$insertUnknownPool();
[rows] = await DB.query('SELECT id, name, slug FROM pools where name = "Unknown"');
}
return <PoolTag>rows[0];
}
@ -59,7 +64,7 @@ class PoolsRepository {
/**
* Get basic pool info and block count between two timestamp
*/
public async $getPoolsInfoBetween(from: number, to: number): Promise<PoolInfo[]> {
public async $getPoolsInfoBetween(from: number, to: number): Promise<PoolInfo[]> {
const query = `SELECT COUNT(height) as blockCount, pools.id as poolId, pools.name as poolName
FROM pools
LEFT JOIN blocks on pools.id = blocks.pool_id AND blocks.blockTimestamp BETWEEN FROM_UNIXTIME(?) AND FROM_UNIXTIME(?)
@ -75,9 +80,9 @@ class PoolsRepository {
}
/**
* Get mining pool statistics for one pool
* Get info for a mining pool
*/
public async $getPool(slug: string): Promise<PoolTag | null> {
public async $getPool(slug: string, parse: boolean = true): Promise<PoolTag | null> {
const query = `
SELECT *
FROM pools
@ -90,10 +95,12 @@ class PoolsRepository {
return null;
}
rows[0].regexes = JSON.parse(rows[0].regexes);
if (parse) {
rows[0].regexes = JSON.parse(rows[0].regexes);
}
if (['testnet', 'signet'].includes(config.MEMPOOL.NETWORK)) {
rows[0].addresses = []; // pools.json only contains mainnet addresses
} else {
} else if (parse) {
rows[0].addresses = JSON.parse(rows[0].addresses);
}
@ -103,6 +110,116 @@ class PoolsRepository {
throw e;
}
}
/**
* Get info for a mining pool by its unique id
*/
public async $getPoolByUniqueId(id: number, parse: boolean = true): Promise<PoolTag | null> {
const query = `
SELECT *
FROM pools
WHERE pools.unique_id = ?`;
try {
const [rows]: any[] = await DB.query(query, [id]);
if (rows.length < 1) {
return null;
}
if (parse) {
rows[0].regexes = JSON.parse(rows[0].regexes);
}
if (['testnet', 'signet'].includes(config.MEMPOOL.NETWORK)) {
rows[0].addresses = []; // pools.json only contains mainnet addresses
} else if (parse) {
rows[0].addresses = JSON.parse(rows[0].addresses);
}
return rows[0];
} catch (e) {
logger.err('Cannot get pool from db. Reason: ' + (e instanceof Error ? e.message : e));
throw e;
}
}
/**
* Insert a new mining pool in the database
*
* @param pool
*/
public async $insertNewMiningPool(pool: any, slug: string): Promise<void> {
try {
await DB.query(`
INSERT INTO pools
SET name = ?, link = ?, addresses = ?, regexes = ?, slug = ?, unique_id = ?`,
[pool.name, pool.link, JSON.stringify(pool.addresses), JSON.stringify(pool.regexes), slug, pool.id]
);
} catch (e: any) {
logger.err(`Cannot insert new mining pool into db. Reason: ` + (e instanceof Error ? e.message : e));
}
}
/**
* Rename an existing mining pool
*
* @param dbId
* @param newSlug
* @param newName
*/
public async $renameMiningPool(dbId: number, newSlug: string, newName: string): Promise<void> {
try {
await DB.query(`
UPDATE pools
SET slug = ?, name = ?
WHERE id = ?`,
[newSlug, newName, dbId]
);
} catch (e: any) {
logger.err(`Cannot rename mining pool id ${dbId}. Reason: ` + (e instanceof Error ? e.message : e));
}
}
/**
* Update an existing mining pool link
*
* @param dbId
* @param newLink
*/
public async $updateMiningPoolLink(dbId: number, newLink: string): Promise<void> {
try {
await DB.query(`
UPDATE pools
SET link = ?
WHERE id = ?`,
[newLink, dbId]
);
} catch (e: any) {
logger.err(`Cannot update link for mining pool id ${dbId}. Reason: ` + (e instanceof Error ? e.message : e));
}
}
/**
* Update an existing mining pool's addresses or coinbase tags
*
* @param dbId
* @param addresses
* @param regexes
*/
public async $updateMiningPoolTags(dbId: number, addresses: string, regexes: string): Promise<void> {
try {
await DB.query(`
UPDATE pools
SET addresses = ?, regexes = ?
WHERE id = ?`,
[JSON.stringify(addresses), JSON.stringify(regexes), dbId]
);
} catch (e: any) {
logger.err(`Cannot update mining pool id ${dbId}. Reason: ` + (e instanceof Error ? e.message : e));
}
}
}
export default new PoolsRepository();

View File

@ -88,5 +88,7 @@ module.exports = {
verifyTxOutProof: 'verifytxoutproof', // bitcoind v0.11.0+
walletLock: 'walletlock',
walletPassphrase: 'walletpassphrase',
walletPassphraseChange: 'walletpassphrasechange'
}
walletPassphraseChange: 'walletpassphrasechange',
getTxoutSetinfo: 'gettxoutsetinfo',
getIndexInfo: 'getindexinfo',
};

View File

@ -8,7 +8,7 @@ import { SocksProxyAgent } from 'socks-proxy-agent';
import * as https from 'https';
/**
* Maintain the most recent version of pools.json
* Maintain the most recent version of pools-v2.json
*/
class PoolsUpdater {
lastRun: number = 0;
@ -17,6 +17,11 @@ class PoolsUpdater {
treeUrl: string = config.MEMPOOL.POOLS_JSON_TREE_URL;
public async updatePoolsJson(): Promise<void> {
if (config.MEMPOOL.AUTOMATIC_BLOCK_REINDEXING === false) {
logger.info(`Not updating mining pools to avoid inconsistency because AUTOMATIC_BLOCK_REINDEXING is set to false`);
return;
}
if (['mainnet', 'testnet', 'signet'].includes(config.MEMPOOL.NETWORK) === false) {
return;
}
@ -38,7 +43,7 @@ class PoolsUpdater {
}
try {
const githubSha = await this.fetchPoolsSha(); // Fetch pools.json sha from github
const githubSha = await this.fetchPoolsSha(); // Fetch pools-v2.json sha from github
if (githubSha === undefined) {
return;
}
@ -47,32 +52,46 @@ class PoolsUpdater {
this.currentSha = await this.getShaFromDb();
}
logger.debug(`Pools.json sha | Current: ${this.currentSha} | Github: ${githubSha}`);
logger.debug(`pools-v2.json sha | Current: ${this.currentSha} | Github: ${githubSha}`);
if (this.currentSha !== undefined && this.currentSha === githubSha) {
return;
}
if (this.currentSha === undefined) {
logger.info(`Downloading pools.json for the first time from ${this.poolsUrl}`, logger.tags.mining);
logger.info(`Downloading pools-v2.json for the first time from ${this.poolsUrl}`, logger.tags.mining);
} else {
logger.warn(`Pools.json is outdated, fetch latest from ${this.poolsUrl}`, logger.tags.mining);
logger.warn(`pools-v2.json is outdated, fetch latest from ${this.poolsUrl}`, logger.tags.mining);
}
const poolsJson = await this.query(this.poolsUrl);
if (poolsJson === undefined) {
return;
}
await poolsParser.migratePoolsJson(poolsJson);
await this.updateDBSha(githubSha);
logger.notice(`PoolsUpdater completed`, logger.tags.mining);
poolsParser.setMiningPools(poolsJson);
if (config.DATABASE.ENABLED === false) { // Don't run db operations
logger.info('Mining pools-v2.json import completed (no database)');
return;
}
try {
await DB.query('START TRANSACTION;');
await poolsParser.migratePoolsJson();
await this.updateDBSha(githubSha);
await DB.query('COMMIT;');
} catch (e) {
logger.err(`Could not migrate mining pools, rolling back. Exception: ${JSON.stringify(e)}`, logger.tags.mining);
await DB.query('ROLLBACK;');
}
logger.notice('PoolsUpdater completed');
} catch (e) {
this.lastRun = now - (oneWeek - oneDay); // Try again in 24h instead of waiting next week
logger.err(`PoolsUpdater failed. Will try again in 24h. Reason: ${e instanceof Error ? e.message : e}`, logger.tags.mining);
logger.err(`PoolsUpdater failed. Will try again in 24h. Exception: ${JSON.stringify(e)}`, logger.tags.mining);
}
}
/**
* Fetch our latest pools.json sha from the db
* Save our latest pools-v2.json sha into the db
*/
private async updateDBSha(githubSha: string): Promise<void> {
this.currentSha = githubSha;
@ -81,46 +100,46 @@ class PoolsUpdater {
await DB.query('DELETE FROM state where name="pools_json_sha"');
await DB.query(`INSERT INTO state VALUES('pools_json_sha', NULL, '${githubSha}')`);
} catch (e) {
logger.err('Cannot save github pools.json sha into the db. Reason: ' + (e instanceof Error ? e.message : e), logger.tags.mining);
logger.err('Cannot save github pools-v2.json sha into the db. Reason: ' + (e instanceof Error ? e.message : e), logger.tags.mining);
}
}
}
/**
* Fetch our latest pools.json sha from the db
* Fetch our latest pools-v2.json sha from the db
*/
private async getShaFromDb(): Promise<string | undefined> {
try {
const [rows]: any[] = await DB.query('SELECT string FROM state WHERE name="pools_json_sha"');
return (rows.length > 0 ? rows[0].string : undefined);
} catch (e) {
logger.err('Cannot fetch pools.json sha from db. Reason: ' + (e instanceof Error ? e.message : e), logger.tags.mining);
logger.err('Cannot fetch pools-v2.json sha from db. Reason: ' + (e instanceof Error ? e.message : e), logger.tags.mining);
return undefined;
}
}
/**
* Fetch our latest pools.json sha from github
* Fetch our latest pools-v2.json sha from github
*/
private async fetchPoolsSha(): Promise<string | undefined> {
const response = await this.query(this.treeUrl);
if (response !== undefined) {
for (const file of response['tree']) {
if (file['path'] === 'pools.json') {
if (file['path'] === 'pools-v2.json') {
return file['sha'];
}
}
}
logger.err(`Cannot find "pools.json" in git tree (${this.treeUrl})`, logger.tags.mining);
logger.err(`Cannot find "pools-v2.json" in git tree (${this.treeUrl})`, logger.tags.mining);
return undefined;
}
/**
* Http request wrapper
*/
private async query(path): Promise<object | undefined> {
private async query(path): Promise<any[] | undefined> {
type axiosOptions = {
headers: {
'User-Agent': string

View File

@ -111,6 +111,7 @@ Below we list all settings from `mempool-config.json` and the corresponding over
"ADVANCED_GBT_AUDIT": false,
"ADVANCED_GBT_MEMPOOL": false,
"CPFP_INDEXING": false,
"MAX_BLOCKS_BULK_QUERY": 0,
},
```
@ -141,6 +142,7 @@ Corresponding `docker-compose.yml` overrides:
MEMPOOL_ADVANCED_GBT_AUDIT: ""
MEMPOOL_ADVANCED_GBT_MEMPOOL: ""
MEMPOOL_CPFP_INDEXING: ""
MEMPOOL_MAX_BLOCKS_BULK_QUERY: ""
...
```
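For example, to enable the bulk blocks API with a limit of 100 blocks per query (the value 100 is purely illustrative), the override is set alongside the others:
```
  ...
  MEMPOOL_MAX_BLOCKS_BULK_QUERY: "100"
  ...
```
`start.sh` substitutes this value into `MAX_BLOCKS_BULK_QUERY` in `mempool-config.json`; leaving it unset keeps the default of `0`.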

View File

@ -25,7 +25,8 @@
"AUDIT": __MEMPOOL_AUDIT__,
"ADVANCED_GBT_AUDIT": __MEMPOOL_ADVANCED_GBT_AUDIT__,
"ADVANCED_GBT_MEMPOOL": __MEMPOOL_ADVANCED_GBT_MEMPOOL__,
"CPFP_INDEXING": __MEMPOOL_CPFP_INDEXING__
"CPFP_INDEXING": __MEMPOOL_CPFP_INDEXING__,
"MAX_BLOCKS_BULK_QUERY": __MEMPOOL__MAX_BLOCKS_BULK_QUERY__
},
"CORE_RPC": {
"HOST": "__CORE_RPC_HOST__",

View File

@ -30,6 +30,7 @@ __MEMPOOL_AUDIT__=${MEMPOOL_AUDIT:=false}
__MEMPOOL_ADVANCED_GBT_AUDIT__=${MEMPOOL_ADVANCED_GBT_AUDIT:=false}
__MEMPOOL_ADVANCED_GBT_MEMPOOL__=${MEMPOOL_ADVANCED_GBT_MEMPOOL:=false}
__MEMPOOL_CPFP_INDEXING__=${MEMPOOL_CPFP_INDEXING:=false}
__MEMPOOL_MAX_BLOCKS_BULK_QUERY__=${MEMPOOL_MAX_BLOCKS_BULK_QUERY:=0}
# CORE_RPC
__CORE_RPC_HOST__=${CORE_RPC_HOST:=127.0.0.1}
@ -142,6 +143,7 @@ sed -i "s!__MEMPOOL_AUDIT__!${__MEMPOOL_AUDIT__}!g" mempool-config.json
sed -i "s!__MEMPOOL_ADVANCED_GBT_MEMPOOL__!${__MEMPOOL_ADVANCED_GBT_MEMPOOL__}!g" mempool-config.json
sed -i "s!__MEMPOOL_ADVANCED_GBT_AUDIT__!${__MEMPOOL_ADVANCED_GBT_AUDIT__}!g" mempool-config.json
sed -i "s!__MEMPOOL_CPFP_INDEXING__!${__MEMPOOL_CPFP_INDEXING__}!g" mempool-config.json
sed -i "s!__MEMPOOL_MAX_BLOCKS_BULK_QUERY__!${__MEMPOOL_MAX_BLOCKS_BULK_QUERY__}!g" mempool-config.json
sed -i "s/__CORE_RPC_HOST__/${__CORE_RPC_HOST__}/g" mempool-config.json
sed -i "s/__CORE_RPC_PORT__/${__CORE_RPC_PORT__}/g" mempool-config.json

View File

@ -352,7 +352,7 @@
<div class="copyright">
<div class="title">
Copyright &copy; 2019-2022<br>
Copyright &copy; 2019-2023<br>
The Mempool Open Source Project
</div>
<p>

View File

@ -8,7 +8,7 @@
<div class="block-titles">
<h1 class="title">
<ng-template [ngIf]="blockHeight === 0"><ng-container i18n="@@2303359202781425764">Genesis</ng-container></ng-template>
<ng-template [ngIf]="blockHeight" i18n="shared.block-title">{{ blockHeight }}</ng-template>
<ng-template [ngIf]="blockHeight">{{ blockHeight }}</ng-template>
</h1>
<div class="blockhash" *ngIf="blockHash">
<h2 class="truncate right">{{ blockHash.slice(0,32) }}</h2>

View File

@ -1,8 +1,8 @@
<div class="blocks-container blockchain-blocks-container" [class.time-ltr]="timeLtr"
[style.left]="static ? (offset || 0) + 'px' : null"
*ngIf="(loadingBlocks$ | async) === false; else loadingBlocksTemplate">
*ngIf="static || (loadingBlocks$ | async) === false; else loadingBlocksTemplate">
<div *ngFor="let block of blocks; let i = index; trackBy: trackByBlocksFn">
<ng-container *ngIf="block && !block.loading && !block.placeholder; else placeholderBlock">
<ng-container *ngIf="connected && block && !block.loading && !block.placeholder; else placeholderBlock">
<div [attr.data-cy]="'bitcoin-block-offset-' + offset + '-index-' + i"
class="text-center bitcoin-block mined-block blockchain-blocks-offset-{{ offset }}-index-{{ i }}"
id="bitcoin-block-{{ block.height }}" [ngStyle]="blockStyles[i]"
@ -43,10 +43,8 @@
<div [attr.data-cy]="'bitcoin-block-' + i + '-transactions'" class="transaction-count">
<ng-container
*ngTemplateOutlet="block.tx_count === 1 ? transactionsSingular : transactionsPlural; context: {$implicit: block.tx_count | number}"></ng-container>
<ng-template #transactionsSingular let-i i18n="shared.transaction-count.singular">{{ i }}
transaction</ng-template>
<ng-template #transactionsPlural let-i i18n="shared.transaction-count.plural">{{ i }}
transactions</ng-template>
<ng-template #transactionsSingular let-i i18n="shared.transaction-count.singular">{{ i }} transaction</ng-template>
<ng-template #transactionsPlural let-i i18n="shared.transaction-count.plural">{{ i }} transactions</ng-template>
</div>
<div [attr.data-cy]="'bitcoin-block-' + offset + '-index-' + i + '-time'" class="time-difference">
<app-time-since [time]="block.timestamp" [fastRender]="true"></app-time-since></div>
@ -59,19 +57,19 @@
</div>
</ng-container>
<ng-template #placeholderBlock>
<ng-container *ngIf="block && block.placeholder; else loadingBlock">
<ng-container *ngIf="block && block.placeholder && connected && !loadingTip; else loadingBlock">
<div [attr.data-cy]="'bitcoin-block-' + offset + '-index-' + i"
class="text-center bitcoin-block mined-block placeholder-block blockchain-blocks-{{ i }}"
id="bitcoin-block-{{ block.height }}" [ngStyle]="blockStyles[i]">
</div>
</ng-container>
</ng-template>
<ng-template #loadingBlock>
<ng-container *ngIf="block && block.loading">
<div class="flashing">
<ng-container *ngIf="!connected || loadingTip || (block && block.loading)">
<div class="flashing loading">
<div class="text-center bitcoin-block mined-block" id="bitcoin-block-{{ block.height }}"
[ngStyle]="blockStyles[i]"></div>
[ngStyle]="convertStyleForLoadingBlock(blockStyles[i])"></div>
</div>
</ng-container>
</ng-template>

View File

@ -137,6 +137,10 @@
opacity: 1;
}
.loading .bitcoin-block.mined-block {
background: #2d3348;
}
@keyframes opacityPulse {
0% {opacity: 0.7;}
50% {opacity: 1.0;}

View File

@ -22,6 +22,8 @@ export class BlockchainBlocksComponent implements OnInit, OnChanges, OnDestroy {
@Input() offset: number = 0;
@Input() height: number = 0;
@Input() count: number = 8;
@Input() loadingTip: boolean = false;
@Input() connected: boolean = true;
specialBlocks = specialBlocks;
network = '';
@ -288,6 +290,13 @@ export class BlockchainBlocksComponent implements OnInit, OnChanges, OnDestroy {
};
}
convertStyleForLoadingBlock(style) {
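// Keep the computed size/position but replace the gradient with the flat loading color used by the .loading styles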
return {
...style,
background: "#2d3348",
};
}
getStyleForLoadingBlock(index: number, animateEnterFrom: number = 0) {
const addLeft = animateEnterFrom || 0;

View File

@ -6,7 +6,7 @@
<app-mempool-blocks [hidden]="pageIndex > 0"></app-mempool-blocks>
<app-blockchain-blocks [hidden]="pageIndex > 0"></app-blockchain-blocks>
<ng-container *ngFor="let page of pages; trackBy: trackByPageFn">
<app-blockchain-blocks [static]="true" [offset]="page.offset" [height]="page.height" [count]="blocksPerPage"></app-blockchain-blocks>
<app-blockchain-blocks [static]="true" [offset]="page.offset" [height]="page.height" [count]="blocksPerPage" [loadingTip]="loadingTip" [connected]="connected"></app-blockchain-blocks>
</ng-container>
</div>
<div id="divider" [hidden]="pageIndex > 0">

View File

@ -1,5 +1,5 @@
import { Component, OnInit, OnDestroy, ChangeDetectionStrategy, Input, OnChanges, SimpleChanges } from '@angular/core';
import { Subscription } from 'rxjs';
import { firstValueFrom, Subscription } from 'rxjs';
import { StateService } from '../../services/state.service';
@Component({
@ -18,6 +18,9 @@ export class BlockchainComponent implements OnInit, OnDestroy {
timeLtrSubscription: Subscription;
timeLtr: boolean = this.stateService.timeLtr.value;
ltrTransitionEnabled = false;
connectionStateSubscription: Subscription;
loadingTip: boolean = true;
connected: boolean = true;
constructor(
public stateService: StateService,
@ -28,10 +31,17 @@ export class BlockchainComponent implements OnInit, OnDestroy {
this.timeLtrSubscription = this.stateService.timeLtr.subscribe((ltr) => {
this.timeLtr = !!ltr;
});
this.connectionStateSubscription = this.stateService.connectionState$.subscribe(state => {
this.connected = (state === 2);
})
firstValueFrom(this.stateService.chainTip$).then(tip => {
this.loadingTip = false;
});
}
ngOnDestroy() {
this.timeLtrSubscription.unsubscribe();
this.connectionStateSubscription.unsubscribe();
}
trackByPageFn(index: number, item: { index: number }) {

View File

@ -1,30 +1,30 @@
<div class="dropdown-menu show" *ngIf="results" [hidden]="!results.hashQuickMatch && !results.addresses.length && !results.nodes.length && !results.channels.length">
<ng-template [ngIf]="results.blockHeight">
<div class="card-title">Bitcoin Block Height</div>
<div class="card-title" i18n="search.bitcoin-block-height">Bitcoin Block Height</div>
<button (click)="clickItem(0)" [class.active]="0 === activeIdx" type="button" role="option" class="dropdown-item">
Go to "{{ results.searchText }}"
<ng-container *ngTemplateOutlet="goTo; context: { $implicit: results.searchText }"></ng-container>
</button>
</ng-template>
<ng-template [ngIf]="results.txId">
<div class="card-title">Bitcoin Transaction</div>
<div class="card-title" i18n="search.bitcoin-transaction">Bitcoin Transaction</div>
<button (click)="clickItem(0)" [class.active]="0 === activeIdx" type="button" role="option" class="dropdown-item">
Go to "{{ results.searchText | shortenString : 13 }}"
<ng-container *ngTemplateOutlet="goTo; context: { $implicit: results.searchText | shortenString : 13 }"></ng-container>
</button>
</ng-template>
<ng-template [ngIf]="results.address">
<div class="card-title">Bitcoin Address</div>
<div class="card-title" i18n="search.bitcoin-address">Bitcoin Address</div>
<button (click)="clickItem(0)" [class.active]="0 === activeIdx" type="button" role="option" class="dropdown-item">
Go to "{{ results.searchText | shortenString : isMobile ? 20 : 30 }}"
<ng-container *ngTemplateOutlet="goTo; context: { $implicit: results.searchText | shortenString : isMobile ? 20 : 30 }"></ng-container>
</button>
</ng-template>
<ng-template [ngIf]="results.blockHash">
<div class="card-title">Bitcoin Block</div>
<div class="card-title" i18n="search.bitcoin-block">Bitcoin Block</div>
<button (click)="clickItem(0)" [class.active]="0 === activeIdx" type="button" role="option" class="dropdown-item">
Go to "{{ results.searchText | shortenString : 13 }}"
<ng-container *ngTemplateOutlet="goTo; context: { $implicit: results.searchText | shortenString : 13 }"></ng-container>
</button>
</ng-template>
<ng-template [ngIf]="results.addresses.length">
<div class="card-title">Bitcoin Addresses</div>
<div class="card-title" i18n="search.bitcoin-addresses">Bitcoin Addresses</div>
<ng-template ngFor [ngForOf]="results.addresses" let-address let-i="index">
<button (click)="clickItem(results.hashQuickMatch + i)" [class.active]="(results.hashQuickMatch + i) === activeIdx" type="button" role="option" class="dropdown-item">
<ngb-highlight [result]="address | shortenString : isMobile ? 25 : 36" [term]="results.searchText"></ngb-highlight>
@ -32,7 +32,7 @@
</ng-template>
</ng-template>
<ng-template [ngIf]="results.nodes.length">
<div class="card-title">Lightning Nodes</div>
<div class="card-title" i18n="search.lightning-nodes">Lightning Nodes</div>
<ng-template ngFor [ngForOf]="results.nodes" let-node let-i="index">
<button (click)="clickItem(results.hashQuickMatch + results.addresses.length + i)" [class.inactive]="node.status === 0" [class.active]="results.hashQuickMatch + results.addresses.length + i === activeIdx" [routerLink]="['/lightning/node' | relativeUrl, node.public_key]" type="button" role="option" class="dropdown-item">
<ngb-highlight [result]="node.alias" [term]="results.searchText"></ngb-highlight> &nbsp;<span class="symbol">{{ node.public_key | shortenString : 10 }}</span>
@ -40,7 +40,7 @@
</ng-template>
</ng-template>
<ng-template [ngIf]="results.channels.length">
<div class="card-title">Lightning Channels</div>
<div class="card-title" i18n="search.lightning-channels">Lightning Channels</div>
<ng-template ngFor [ngForOf]="results.channels" let-channel let-i="index">
<button (click)="clickItem(results.hashQuickMatch + results.addresses.length + results.nodes.length + i)" [class.inactive]="channel.status === 2" [class.active]="results.hashQuickMatch + results.addresses.length + results.nodes.length + i === activeIdx" type="button" role="option" class="dropdown-item">
<ngb-highlight [result]="channel.short_id" [term]="results.searchText"></ngb-highlight> &nbsp;<span class="symbol">{{ channel.id }}</span>
@ -48,3 +48,5 @@
</ng-template>
</ng-template>
</div>
<ng-template #goTo let-x i18n="search.go-to">Go to "{{ x }}"</ng-template>

View File

@ -21,6 +21,7 @@ export class StartComponent implements OnInit, OnDestroy {
timeLtr: boolean = this.stateService.timeLtr.value;
chainTipSubscription: Subscription;
chainTip: number = -1;
tipIsSet: boolean = false;
markBlockSubscription: Subscription;
blockCounterSubscription: Subscription;
@ViewChild('blockchainContainer') blockchainContainer: ElementRef;
@ -58,6 +59,7 @@ export class StartComponent implements OnInit, OnDestroy {
});
this.chainTipSubscription = this.stateService.chainTip$.subscribe((height) => {
this.chainTip = height;
this.tipIsSet = true;
this.updatePages();
if (this.pendingMark != null) {
this.scrollToBlock(this.pendingMark);
@ -66,7 +68,7 @@ export class StartComponent implements OnInit, OnDestroy {
});
this.markBlockSubscription = this.stateService.markBlock$.subscribe((mark) => {
if (mark?.blockHeight != null) {
if (this.chainTip >=0) {
if (this.tipIsSet) {
if (!this.blockInViewport(mark.blockHeight)) {
this.scrollToBlock(mark.blockHeight);
}

View File

@ -10,7 +10,7 @@
<div class="doc-content">
<div id="disclaimer">
<table><tr><td><svg viewBox="0 0 304 304" xmlns="http://www.w3.org/2000/svg"><g fill-rule="evenodd" style="fill:#ffc107;fill-opacity:1"><path d="M135.3 34.474c-15.62 27.306-54.206 95.63-85.21 150.534L9.075 257.583C5.382 264.08 6.76 269.217 7.908 271.7c2.326 5.028 7.29 7.537 11.155 8.215l.78.133 264.698.006-.554-.02c4.152.255 9.664-1.24 12.677-6.194 1.926-3.18 3.31-8.589-1.073-16.278L213.637 114.37l-45.351-79.205c-5.681-9.932-12.272-12.022-16.8-12.022-4.42 0-10.818 1.964-16.181 11.331h-.006zm-69.072 159.94c30.997-54.885 69.563-123.184 85.16-150.446l.186-.297c.2.303.393.582.618.981l45.363 79.22s72.377 126.47 78.569 137.283l-247.618-.007 37.719-66.734" style="fill:#ffc107;fill-opacity:1"/><path d="M152.597 247.445c8.02 0 14.518-6.728 14.518-15.025 0-8.29-6.499-15.018-14.518-15.018-8.031 0-14.529 6.728-14.529 15.018 0 8.297 6.498 15.025 14.53 15.025m-.001-147.18c11.586 0 22.23 10.958 20.977 21.7l-9.922 75.564c-.966 6.601-4.95 11.433-11.055 11.433s-10.102-4.832-11.056-11.433l-9.927-75.564c-1.26-10.742 9.39-21.7 20.983-21.7" style="fill:#ffc107;fill-opacity:1"/></g></svg></td><td><p><b>mempool.space merely provides data about the Bitcoin network.</b> It cannot help you with retrieving funds, confirming your transaction quicker, etc.</p><p>For any such requests, you need to get in touch with the entity that helped make the transaction (wallet software, exchange company, etc).</p></td></tr></table>
<table><tr><td><svg viewBox="0 0 304 304" xmlns="http://www.w3.org/2000/svg"><g fill-rule="evenodd" style="fill:#ffc107;fill-opacity:1"><path d="M135.3 34.474c-15.62 27.306-54.206 95.63-85.21 150.534L9.075 257.583C5.382 264.08 6.76 269.217 7.908 271.7c2.326 5.028 7.29 7.537 11.155 8.215l.78.133 264.698.006-.554-.02c4.152.255 9.664-1.24 12.677-6.194 1.926-3.18 3.31-8.589-1.073-16.278L213.637 114.37l-45.351-79.205c-5.681-9.932-12.272-12.022-16.8-12.022-4.42 0-10.818 1.964-16.181 11.331h-.006zm-69.072 159.94c30.997-54.885 69.563-123.184 85.16-150.446l.186-.297c.2.303.393.582.618.981l45.363 79.22s72.377 126.47 78.569 137.283l-247.618-.007 37.719-66.734" style="fill:#ffc107;fill-opacity:1"/><path d="M152.597 247.445c8.02 0 14.518-6.728 14.518-15.025 0-8.29-6.499-15.018-14.518-15.018-8.031 0-14.529 6.728-14.529 15.018 0 8.297 6.498 15.025 14.53 15.025m-.001-147.18c11.586 0 22.23 10.958 20.977 21.7l-9.922 75.564c-.966 6.601-4.95 11.433-11.055 11.433s-10.102-4.832-11.056-11.433l-9.927-75.564c-1.26-10.742 9.39-21.7 20.983-21.7" style="fill:#ffc107;fill-opacity:1"/></g></svg></td><td><p i18n="faq.big-disclaimer"><b>mempool.space merely provides data about the Bitcoin network.</b> It cannot help you with retrieving funds, confirming your transaction quicker, etc.</p><p>For any such requests, you need to get in touch with the entity that helped make the transaction (wallet software, exchange company, etc).</p></td></tr></table>
</div>

View File

@ -17,19 +17,19 @@ export class ClosingTypeComponent implements OnChanges {
getLabelFromType(type: number): { label: string; class: string } {
switch (type) {
case 1: return {
label: 'Mutually closed',
label: $localize`Mutually closed`,
class: 'success',
};
case 2: return {
label: 'Force closed',
label: $localize`Force closed`,
class: 'warning',
};
case 3: return {
label: 'Force closed with penalty',
label: $localize`Force closed with penalty`,
class: 'danger',
};
default: return {
label: 'Unknown',
label: $localize`:@@e5d8bb389c702588877f039d72178f219453a72d:Unknown`,
class: 'secondary',
};
}

View File

@ -1,9 +1,9 @@
<div class="widget-toggler">
<a href="" (click)="switchMode('avg')" class="toggler-option"
[ngClass]="{'inactive': mode === 'avg'}"><small>avg</small></a>
[ngClass]="{'inactive': mode === 'avg'}"><small i18n="statistics.average-small">avg</small></a>
<span style="color: #ffffff66; font-size: 8px"> | </span>
<a href="" (click)="switchMode('med')" class="toggler-option"
[ngClass]="{'inactive': mode === 'med'}"><small>med</small></a>
[ngClass]="{'inactive': mode === 'med'}"><small i18n="statistics.median-small">med</small></a>
</div>
<div class="fee-estimation-wrapper" *ngIf="statistics$ | async as statistics; else loadingReward">

View File

@ -167,7 +167,7 @@ export class NodeFeeChartComponent implements OnInit {
padding: 10,
data: [
{
name: 'Outgoing Fees',
name: $localize`Outgoing Fees`,
inactiveColor: 'rgb(110, 112, 121)',
textStyle: {
color: 'white',
@ -175,7 +175,7 @@ export class NodeFeeChartComponent implements OnInit {
icon: 'roundRect',
},
{
name: 'Incoming Fees',
name: $localize`Incoming Fees`,
inactiveColor: 'rgb(110, 112, 121)',
textStyle: {
color: 'white',
@ -205,7 +205,7 @@ export class NodeFeeChartComponent implements OnInit {
series: outgoingData.length === 0 ? undefined : [
{
zlevel: 0,
name: 'Outgoing Fees',
name: $localize`Outgoing Fees`,
data: outgoingData.map(bucket => ({
value: bucket.capacity,
label: bucket.label,
@ -219,7 +219,7 @@ export class NodeFeeChartComponent implements OnInit {
},
{
zlevel: 0,
name: 'Incoming Fees',
name: $localize`Incoming Fees`,
data: incomingData.map(bucket => ({
value: -bucket.capacity,
label: bucket.label,

View File

@ -62,7 +62,12 @@ export class CacheService {
for (let i = 0; i < chunkSize; i++) {
this.blockLoading[maxHeight - i] = true;
}
const result = await firstValueFrom(this.apiService.getBlocks$(maxHeight));
let result;
try {
result = await firstValueFrom(this.apiService.getBlocks$(maxHeight));
} catch (e) {
console.log("failed to load blocks: ", e.message);
}
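// Clear the per-block loading flags whether or not the request succeeded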
for (let i = 0; i < chunkSize; i++) {
delete this.blockLoading[maxHeight - i];
}

File diff suppressed because it is too large

View File

@ -1,6 +1,7 @@
datadir=/bitcoin
server=1
txindex=1
coinstatsindex=1
listen=1
discover=1
par=16

View File

@ -11,6 +11,7 @@
"INDEXING_BLOCKS_AMOUNT": -1,
"BLOCKS_SUMMARIES_INDEXING": true,
"AUDIT": true,
"CPFP_INDEXING": true,
"ADVANCED_GBT_AUDIT": true,
"ADVANCED_GBT_MEMPOOL": false,
"USE_SECOND_NODE_FOR_MINFEE": true