Merge branch 'master' into ops/esplora-unix-sockets

wiz
2023-04-03 15:34:47 +09:00
committed by GitHub
88 changed files with 5614 additions and 2576 deletions

View File

@@ -1,4 +1,5 @@
import config from '../config';
import logger from '../logger';
import { TransactionExtended, MempoolBlockWithTransactions } from '../mempool.interfaces';
const PROPAGATION_MARGIN = 180; // in seconds, time since a transaction is first seen after which it is assumed to have propagated to all miners
@@ -39,17 +40,19 @@ class Audit {
} else {
isCensored[txid] = true;
}
displacedWeight += mempool[txid].weight;
displacedWeight += mempool[txid]?.weight || 0;
} else {
matchedWeight += mempool[txid].weight;
matchedWeight += mempool[txid]?.weight || 0;
}
projectedWeight += mempool[txid].weight;
projectedWeight += mempool[txid]?.weight || 0;
inTemplate[txid] = true;
}
displacedWeight += (4000 - transactions[0].weight);
projectedWeight += transactions[0].weight;
matchedWeight += transactions[0].weight;
if (transactions[0]) {
displacedWeight += (4000 - transactions[0].weight);
projectedWeight += transactions[0].weight;
matchedWeight += transactions[0].weight;
}
// we can expect an honest miner to include 'displaced' transactions in place of recent arrivals and censored txs
// these displaced transactions should occupy the first N weight units of the next projected block
@@ -59,19 +62,24 @@ class Audit {
let failures = 0;
while (projectedBlocks[1] && index < projectedBlocks[1].transactionIds.length && failures < 500) {
const txid = projectedBlocks[1].transactionIds[index];
const fits = (mempool[txid].weight - displacedWeightRemaining) < 4000;
const feeMatches = mempool[txid].effectiveFeePerVsize >= lastFeeRate;
if (fits || feeMatches) {
isDisplaced[txid] = true;
if (fits) {
lastFeeRate = Math.min(lastFeeRate, mempool[txid].effectiveFeePerVsize);
const tx = mempool[txid];
if (tx) {
const fits = (tx.weight - displacedWeightRemaining) < 4000;
const feeMatches = tx.effectiveFeePerVsize >= lastFeeRate;
if (fits || feeMatches) {
isDisplaced[txid] = true;
if (fits) {
lastFeeRate = Math.min(lastFeeRate, tx.effectiveFeePerVsize);
}
if (tx.firstSeen == null || (now - (tx?.firstSeen || 0)) > PROPAGATION_MARGIN) {
displacedWeightRemaining -= tx.weight;
}
failures = 0;
} else {
failures++;
}
if (mempool[txid].firstSeen == null || (now - (mempool[txid]?.firstSeen || 0)) > PROPAGATION_MARGIN) {
displacedWeightRemaining -= mempool[txid].weight;
}
failures = 0;
} else {
failures++;
logger.warn('projected transaction missing from mempool cache');
}
index++;
}
@@ -108,20 +116,25 @@ class Audit {
index = projectedBlocks[0].transactionIds.length - 1;
while (index >= 0) {
const txid = projectedBlocks[0].transactionIds[index];
if (overflowWeightRemaining > 0) {
if (isCensored[txid]) {
delete isCensored[txid];
}
if (mempool[txid].effectiveFeePerVsize > maxOverflowRate) {
maxOverflowRate = mempool[txid].effectiveFeePerVsize;
rateThreshold = (Math.ceil(maxOverflowRate * 100) / 100) + 0.005;
}
} else if (mempool[txid].effectiveFeePerVsize <= rateThreshold) { // tolerance of 0.01 sat/vb + rounding
if (isCensored[txid]) {
delete isCensored[txid];
const tx = mempool[txid];
if (tx) {
if (overflowWeightRemaining > 0) {
if (isCensored[txid]) {
delete isCensored[txid];
}
if (tx.effectiveFeePerVsize > maxOverflowRate) {
maxOverflowRate = tx.effectiveFeePerVsize;
rateThreshold = (Math.ceil(maxOverflowRate * 100) / 100) + 0.005;
}
} else if (tx.effectiveFeePerVsize <= rateThreshold) { // tolerance of 0.01 sat/vb + rounding
if (isCensored[txid]) {
delete isCensored[txid];
}
}
overflowWeightRemaining -= (mempool[txid]?.weight || 0);
} else {
logger.warn('projected transaction missing from mempool cache');
}
overflowWeightRemaining -= (mempool[txid]?.weight || 0);
index--;
}
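Note on the Audit changes above: every direct `mempool[txid]` access is now guarded, because a projected block template can reference a transaction that has since been evicted from the mempool cache. A minimal sketch of the guard pattern; the helper name is illustrative, not from the codebase:

```ts
// Illustrative sketch of the guard pattern used above; `addWeight` is a
// hypothetical helper, not part of the mempool backend.
function addWeight(mempool: { [txid: string]: { weight: number } | undefined }, txid: string, total: number): number {
  const tx = mempool[txid];
  if (!tx) {
    // missing entries are logged and skipped instead of throwing
    console.warn('projected transaction missing from mempool cache');
    return total;
  }
  return total + tx.weight;
}
```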

View File

@@ -16,7 +16,7 @@ class BitcoindElectrsApi extends BitcoinApi implements AbstractBitcoinApi {
super(bitcoinClient);
const electrumConfig = { client: 'mempool-v2', version: '1.4' };
const electrumPersistencePolicy = { retryPeriod: 10000, maxRetry: 1000, callback: null };
const electrumPersistencePolicy = { retryPeriod: 1000, maxRetry: Number.MAX_SAFE_INTEGER, callback: null };
const electrumCallbacks = {
onConnect: (client, versionInfo) => { logger.info(`Connected to Electrum Server at ${config.ELECTRUM.HOST}:${config.ELECTRUM.PORT} (${JSON.stringify(versionInfo)})`); },

View File

@@ -2,7 +2,7 @@ import config from '../config';
import bitcoinApi, { bitcoinCoreApi } from './bitcoin/bitcoin-api-factory';
import logger from '../logger';
import memPool from './mempool';
import { BlockExtended, BlockExtension, BlockSummary, PoolTag, TransactionExtended, TransactionStripped, TransactionMinerInfo } from '../mempool.interfaces';
import { BlockExtended, BlockExtension, BlockSummary, PoolTag, TransactionExtended, TransactionStripped, TransactionMinerInfo, CpfpSummary } from '../mempool.interfaces';
import { Common } from './common';
import diskCache from './disk-cache';
import transactionUtils from './transaction-utils';
@@ -200,8 +200,15 @@ class Blocks {
extras.segwitTotalWeight = 0;
} else {
const stats: IBitcoinApi.BlockStats = await bitcoinClient.getBlockStats(block.id);
extras.medianFee = stats.feerate_percentiles[2]; // 50th percentiles
extras.feeRange = [stats.minfeerate, stats.feerate_percentiles, stats.maxfeerate].flat();
let feeStats = {
medianFee: stats.feerate_percentiles[2], // 50th percentiles
feeRange: [stats.minfeerate, stats.feerate_percentiles, stats.maxfeerate].flat(),
};
if (transactions?.length > 1) {
feeStats = Common.calcEffectiveFeeStatistics(transactions);
}
extras.medianFee = feeStats.medianFee;
extras.feeRange = feeStats.feeRange;
extras.totalFees = stats.totalfee;
extras.avgFee = stats.avgfee;
extras.avgFeeRate = stats.avgfeerate;
@@ -403,12 +410,13 @@ class Blocks {
try {
// Get all indexed block hash
const unindexedBlockHeights = await blocksRepository.$getCPFPUnindexedBlocks();
logger.info(`Indexing cpfp data for ${unindexedBlockHeights.length} blocks`);
if (!unindexedBlockHeights?.length) {
return;
}
logger.info(`Indexing cpfp data for ${unindexedBlockHeights.length} blocks`);
// Logging
let count = 0;
let countThisRun = 0;
@@ -558,7 +566,7 @@ class Blocks {
}
while (this.currentBlockHeight < blockHeightTip) {
if (this.currentBlockHeight < blockHeightTip - config.MEMPOOL.INITIAL_BLOCKS_AMOUNT) {
if (this.currentBlockHeight === 0) {
this.currentBlockHeight = blockHeightTip;
} else {
this.currentBlockHeight++;
@@ -571,7 +579,8 @@ class Blocks {
const block = BitcoinApi.convertBlock(verboseBlock);
const txIds: string[] = await bitcoinApi.$getTxIdsForBlock(blockHash);
const transactions = await this.$getTransactionsExtended(blockHash, block.height, false);
const blockExtended: BlockExtended = await this.$getBlockExtended(block, transactions);
const cpfpSummary: CpfpSummary = Common.calculateCpfp(block.height, transactions);
const blockExtended: BlockExtended = await this.$getBlockExtended(block, cpfpSummary.transactions);
const blockSummary: BlockSummary = this.summarizeBlock(verboseBlock);
// start async callbacks
@@ -581,11 +590,10 @@ class Blocks {
if (!fastForwarded) {
const lastBlock = await blocksRepository.$getBlockByHeight(blockExtended.height - 1);
if (lastBlock !== null && blockExtended.previousblockhash !== lastBlock.id) {
logger.warn(`Chain divergence detected at block ${lastBlock.height}, re-indexing most recent data`);
logger.warn(`Chain divergence detected at block ${lastBlock.height}, re-indexing most recent data`, logger.tags.mining);
// We assume there won't be a reorg with more than 10 block depth
await BlocksRepository.$deleteBlocksFrom(lastBlock.height - 10);
await HashratesRepository.$deleteLastEntries();
await BlocksSummariesRepository.$deleteBlocksFrom(lastBlock.height - 10);
await cpfpRepository.$deleteClustersFrom(lastBlock.height - 10);
for (let i = 10; i >= 0; --i) {
const newBlock = await this.$indexBlock(lastBlock.height - i);
@@ -596,7 +604,7 @@ class Blocks {
}
await mining.$indexDifficultyAdjustments();
await DifficultyAdjustmentsRepository.$deleteLastAdjustment();
logger.info(`Re-indexed 10 blocks and summaries. Also re-indexed the last difficulty adjustments. Will re-index latest hashrates in a few seconds.`);
logger.info(`Re-indexed 10 blocks and summaries. Also re-indexed the last difficulty adjustments. Will re-index latest hashrates in a few seconds.`, logger.tags.mining);
indexer.reindex();
}
await blocksRepository.$saveBlockInDatabase(blockExtended);
@@ -608,7 +616,7 @@ class Blocks {
priceId: lastestPriceId,
}]);
} else {
logger.info(`Cannot save block price for ${blockExtended.height} because the price updater hasnt completed yet. Trying again in 10 seconds.`, logger.tags.mining);
logger.debug(`Cannot save block price for ${blockExtended.height} because the price updater hasnt completed yet. Trying again in 10 seconds.`, logger.tags.mining);
setTimeout(() => {
indexer.runSingleTask('blocksPrices');
}, 10000);
@@ -619,7 +627,7 @@ class Blocks {
await this.$getStrippedBlockTransactions(blockExtended.id, true);
}
if (config.MEMPOOL.CPFP_INDEXING) {
this.$indexCPFP(blockExtended.id, this.currentBlockHeight);
this.$saveCpfp(blockExtended.id, this.currentBlockHeight, cpfpSummary);
}
}
}
@@ -728,7 +736,7 @@ class Blocks {
// Index the response if needed
if (Common.blocksSummariesIndexingEnabled() === true) {
await BlocksSummariesRepository.$saveSummary({height: block.height, mined: summary});
await BlocksSummariesRepository.$saveTransactions(block.height, block.hash, summary.transactions);
}
return summary.transactions;
@@ -844,7 +852,7 @@ class Blocks {
if (cleanBlock.fee_amt_percentiles === null) {
const block = await bitcoinClient.getBlock(cleanBlock.hash, 2);
const summary = this.summarizeBlock(block);
await BlocksSummariesRepository.$saveSummary({ height: block.height, mined: summary });
await BlocksSummariesRepository.$saveTransactions(cleanBlock.height, cleanBlock.hash, summary.transactions);
cleanBlock.fee_amt_percentiles = await BlocksSummariesRepository.$getFeePercentilesByBlockId(cleanBlock.hash);
}
if (cleanBlock.fee_amt_percentiles !== null) {
@@ -913,42 +921,20 @@ class Blocks {
public async $indexCPFP(hash: string, height: number): Promise<void> {
const block = await bitcoinClient.getBlock(hash, 2);
const transactions = block.tx.map(tx => {
tx.vsize = tx.weight / 4;
tx.fee *= 100_000_000;
return tx;
});
const clusters: any[] = [];
const summary = Common.calculateCpfp(height, transactions);
let cluster: TransactionStripped[] = [];
let ancestors: { [txid: string]: boolean } = {};
for (let i = transactions.length - 1; i >= 0; i--) {
const tx = transactions[i];
if (!ancestors[tx.txid]) {
let totalFee = 0;
let totalVSize = 0;
cluster.forEach(tx => {
totalFee += tx?.fee || 0;
totalVSize += tx.vsize;
});
const effectiveFeePerVsize = totalFee / totalVSize;
if (cluster.length > 1) {
clusters.push({
root: cluster[0].txid,
height,
txs: cluster.map(tx => { return { txid: tx.txid, weight: tx.vsize * 4, fee: tx.fee || 0 }; }),
effectiveFeePerVsize,
});
}
cluster = [];
ancestors = {};
}
cluster.push(tx);
tx.vin.forEach(vin => {
ancestors[vin.txid] = true;
});
}
const result = await cpfpRepository.$batchSaveClusters(clusters);
await this.$saveCpfp(hash, height, summary);
const effectiveFeeStats = Common.calcEffectiveFeeStatistics(summary.transactions);
await blocksRepository.$saveEffectiveFeeStats(hash, effectiveFeeStats);
}
public async $saveCpfp(hash: string, height: number, cpfpSummary: CpfpSummary): Promise<void> {
const result = await cpfpRepository.$batchSaveClusters(cpfpSummary.clusters);
if (!result) {
await cpfpRepository.$insertProgressMarker(height);
}
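The `$indexCPFP` rewrite above delegates cluster construction to `Common.calculateCpfp` (added in common.ts below): transactions are walked in reverse block order, each transaction's inputs mark its ancestors, and whenever a non-ancestor is reached the accumulated cluster is flushed with a shared effective fee rate of total fee over total vsize. A simplified sketch of that flush step; the types and helper name here are illustrative, not the real interfaces:

```ts
// Simplified sketch of the cluster flush performed by Common.calculateCpfp;
// SimpleTx and clusterEffectiveRate are illustrative, not the real interfaces.
interface SimpleTx { txid: string; fee: number; weight: number; }

function clusterEffectiveRate(cluster: SimpleTx[]): number {
  let totalFee = 0;
  let totalVSize = 0;
  for (const tx of cluster) {
    totalFee += tx.fee;
    totalVSize += tx.weight / 4; // vsize = weight / 4
  }
  // every member of a CPFP cluster is assigned the same aggregate rate
  return totalVSize ? totalFee / totalVSize : 0;
}

// a 1 sat/vB parent bumped by a 20 sat/vB child:
// (100 + 2000) sats over (100 + 100) vbytes = 10.5 sat/vB for both
clusterEffectiveRate([
  { txid: 'parent', fee: 100, weight: 400 },
  { txid: 'child', fee: 2000, weight: 400 },
]);
```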

View File

@@ -1,4 +1,4 @@
import { CpfpInfo, MempoolBlockWithTransactions, TransactionExtended, TransactionStripped } from '../mempool.interfaces';
import { Ancestor, CpfpInfo, CpfpSummary, EffectiveFeeStats, MempoolBlockWithTransactions, TransactionExtended, TransactionStripped } from '../mempool.interfaces';
import config from '../config';
import { NodeSocket } from '../repositories/NodesSocketsRepository';
import { isIP } from 'net';
@@ -345,4 +345,99 @@ export class Common {
};
}
}
static calculateCpfp(height: number, transactions: TransactionExtended[]): CpfpSummary {
const clusters: { root: string, height: number, txs: Ancestor[], effectiveFeePerVsize: number }[] = [];
let cluster: TransactionExtended[] = [];
let ancestors: { [txid: string]: boolean } = {};
const txMap = {};
for (let i = transactions.length - 1; i >= 0; i--) {
const tx = transactions[i];
txMap[tx.txid] = tx;
if (!ancestors[tx.txid]) {
let totalFee = 0;
let totalVSize = 0;
cluster.forEach(tx => {
totalFee += tx?.fee || 0;
totalVSize += (tx.weight / 4);
});
const effectiveFeePerVsize = totalFee / totalVSize;
if (cluster.length > 1) {
clusters.push({
root: cluster[0].txid,
height,
txs: cluster.map(tx => { return { txid: tx.txid, weight: tx.weight, fee: tx.fee || 0 }; }),
effectiveFeePerVsize,
});
}
cluster.forEach(tx => {
txMap[tx.txid].effectiveFeePerVsize = effectiveFeePerVsize;
});
cluster = [];
ancestors = {};
}
cluster.push(tx);
tx.vin.forEach(vin => {
ancestors[vin.txid] = true;
});
}
return {
transactions,
clusters,
};
}
static calcEffectiveFeeStatistics(transactions: { weight: number, fee: number, effectiveFeePerVsize?: number, txid: string }[]): EffectiveFeeStats {
const sortedTxs = transactions.map(tx => { return { txid: tx.txid, weight: tx.weight, rate: tx.effectiveFeePerVsize || ((tx.fee || 0) / (tx.weight / 4)) }; }).sort((a, b) => a.rate - b.rate);
let weightCount = 0;
let medianFee = 0;
let medianWeight = 0;
// calculate the "medianFee" as the average fee rate of the middle 10000 weight units of transactions
const leftBound = 1995000;
const rightBound = 2005000;
for (let i = 0; i < sortedTxs.length && weightCount < rightBound; i++) {
const left = weightCount;
const right = weightCount + sortedTxs[i].weight;
if (right > leftBound) {
const weight = Math.min(right, rightBound) - Math.max(left, leftBound);
medianFee += (sortedTxs[i].rate * (weight / 4) );
medianWeight += weight;
}
weightCount += sortedTxs[i].weight;
}
const medianFeeRate = medianWeight ? (medianFee / (medianWeight / 4)) : 0;
// minimum effective fee heuristic:
// lowest of
// a) the 1st percentile of effective fee rates
// b) the minimum effective fee rate in the last 2% of transactions (in block order)
const minFee = Math.min(
Common.getNthPercentile(1, sortedTxs).rate,
transactions.slice(-transactions.length / 50).reduce((min, tx) => { return Math.min(min, tx.effectiveFeePerVsize || ((tx.fee || 0) / (tx.weight / 4))); }, Infinity)
);
// maximum effective fee heuristic:
// highest of
// a) the 99th percentile of effective fee rates
// b) the maximum effective fee rate in the first 2% of transactions (in block order)
const maxFee = Math.max(
Common.getNthPercentile(99, sortedTxs).rate,
transactions.slice(0, transactions.length / 50).reduce((max, tx) => { return Math.max(max, tx.effectiveFeePerVsize || ((tx.fee || 0) / (tx.weight / 4))); }, 0)
);
return {
medianFee: medianFeeRate,
feeRange: [
minFee,
[10,25,50,75,90].map(n => Common.getNthPercentile(n, sortedTxs).rate),
maxFee,
].flat(),
};
}
static getNthPercentile(n: number, sortedDistribution: any[]): any {
return sortedDistribution[Math.floor((sortedDistribution.length - 1) * (n / 100))];
}
}
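The `medianFee` computed by `calcEffectiveFeeStatistics` above is a weight-weighted average of effective fee rates over the middle 10,000 weight units of a 4,000,000 WU block (the 1,995,000–2,005,000 band), not a per-transaction median. A small worked sketch with made-up numbers, dropping the vsize conversion since it cancels out:

```ts
// Worked sketch of the middle-band average above (illustrative values only).
const leftBound = 1995000;
const rightBound = 2005000;
const sorted = [
  { weight: 1990000, rate: 1 },  // ends before the band: contributes nothing
  { weight: 10000, rate: 5 },    // overlaps the band by 5,000 WU
  { weight: 2000000, rate: 20 }, // overlaps the band by 5,000 WU
];
let weightCount = 0;
let weightedSum = 0;
let bandWeight = 0;
for (const tx of sorted) {
  const left = weightCount;
  const right = weightCount + tx.weight;
  if (right > leftBound && left < rightBound) {
    const overlap = Math.min(right, rightBound) - Math.max(left, leftBound);
    weightedSum += tx.rate * overlap;
    bandWeight += overlap;
  }
  weightCount += tx.weight;
}
const medianFee = bandWeight ? weightedSum / bandWeight : 0;
// (5 * 5000 + 20 * 5000) / 10000 = 12.5 sat/vB
```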

View File

@@ -497,6 +497,7 @@ class DatabaseMigration {
this.uniqueLog(logger.notice, this.blocksTruncatedMessage);
await this.$executeQuery('DELETE FROM `pools`');
await this.$executeQuery('ALTER TABLE pools AUTO_INCREMENT = 1');
await this.$executeQuery(`UPDATE state SET string = NULL WHERE name = 'pools_json_sha'`);
this.uniqueLog(logger.notice, '`pools` table has been truncated`');
await this.updateToSchemaVersion(56);
}

View File

@@ -24,12 +24,11 @@ export function calcDifficultyAdjustment(
network: string,
latestBlockTimestamp: number,
): DifficultyAdjustment {
const ESTIMATE_LAG_BLOCKS = 146; // For first 7.2% of epoch, don't estimate.
const EPOCH_BLOCK_LENGTH = 2016; // Bitcoin mainnet
const BLOCK_SECONDS_TARGET = 600; // Bitcoin mainnet
const TESTNET_MAX_BLOCK_SECONDS = 1200; // Bitcoin testnet
const diffSeconds = nowSeconds - DATime;
const diffSeconds = Math.max(0, nowSeconds - DATime);
const blocksInEpoch = (blockHeight >= 0) ? blockHeight % EPOCH_BLOCK_LENGTH : 0;
const progressPercent = (blockHeight >= 0) ? blocksInEpoch / EPOCH_BLOCK_LENGTH * 100 : 100;
const remainingBlocks = EPOCH_BLOCK_LENGTH - blocksInEpoch;
@@ -37,18 +36,16 @@ export function calcDifficultyAdjustment(
const expectedBlocks = diffSeconds / BLOCK_SECONDS_TARGET;
let difficultyChange = 0;
let timeAvgSecs = diffSeconds / blocksInEpoch;
// Only calculate the estimate once we have 7.2% of blocks in current epoch
if (blocksInEpoch >= ESTIMATE_LAG_BLOCKS) {
difficultyChange = (BLOCK_SECONDS_TARGET / timeAvgSecs - 1) * 100;
// Max increase is x4 (+300%)
if (difficultyChange > 300) {
difficultyChange = 300;
}
// Max decrease is /4 (-75%)
if (difficultyChange < -75) {
difficultyChange = -75;
}
let timeAvgSecs = blocksInEpoch ? diffSeconds / blocksInEpoch : BLOCK_SECONDS_TARGET;
difficultyChange = (BLOCK_SECONDS_TARGET / timeAvgSecs - 1) * 100;
// Max increase is x4 (+300%)
if (difficultyChange > 300) {
difficultyChange = 300;
}
// Max decrease is /4 (-75%)
if (difficultyChange < -75) {
difficultyChange = -75;
}
// Testnet difficulty is set to 1 after 20 minutes of no blocks,
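With the ESTIMATE_LAG_BLOCKS gate removed above, the retarget estimate reduces to (600 / timeAvgSecs − 1) × 100, clamped to the protocol bounds of −75% and +300%, with timeAvgSecs falling back to 600 when the epoch has zero blocks. A hedged sketch; the function name is illustrative, not the exported calcDifficultyAdjustment API:

```ts
// Illustrative sketch of the estimate above; estimateDifficultyChange is a
// hypothetical helper, not the real calcDifficultyAdjustment signature.
function estimateDifficultyChange(diffSeconds: number, blocksInEpoch: number): number {
  const BLOCK_SECONDS_TARGET = 600;
  const timeAvgSecs = blocksInEpoch ? diffSeconds / blocksInEpoch : BLOCK_SECONDS_TARGET;
  let change = (BLOCK_SECONDS_TARGET / timeAvgSecs - 1) * 100;
  change = Math.min(300, Math.max(-75, change)); // max x4 increase, /4 decrease
  return change;
}

// 1,000 blocks found in 540,000 seconds (540s average) -> (600/540 - 1) * 100 ≈ +11.1%
estimateDifficultyChange(540000, 1000);
```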

View File

@@ -43,7 +43,9 @@ class DiskCache {
const mempool = memPool.getMempool();
const mempoolArray: TransactionExtended[] = [];
for (const tx in mempool) {
mempoolArray.push(mempool[tx]);
if (mempool[tx] && !mempool[tx].deleteAfter) {
mempoolArray.push(mempool[tx]);
}
}
Common.shuffleArray(mempoolArray);
@@ -162,7 +164,7 @@ class DiskCache {
}
}
} catch (e) {
logger.info('Error parsing ' + fileName + '. Skipping. Reason: ' + (e instanceof Error ? e.message : e));
logger.err('Error parsing ' + fileName + '. Skipping. Reason: ' + (e instanceof Error ? e.message : e));
}
}

View File

@@ -4,21 +4,29 @@ import * as fs from 'fs';
import { AbstractLightningApi } from '../lightning-api-abstract-factory';
import { ILightningApi } from '../lightning-api.interface';
import config from '../../../config';
import logger from '../../../logger';
class LndApi implements AbstractLightningApi {
axiosConfig: AxiosRequestConfig = {};
constructor() {
if (config.LIGHTNING.ENABLED) {
if (!config.LIGHTNING.ENABLED) {
return;
}
try {
this.axiosConfig = {
headers: {
'Grpc-Metadata-macaroon': fs.readFileSync(config.LND.MACAROON_PATH).toString('hex')
'Grpc-Metadata-macaroon': fs.readFileSync(config.LND.MACAROON_PATH).toString('hex'),
},
httpsAgent: new Agent({
ca: fs.readFileSync(config.LND.TLS_CERT_PATH)
}),
timeout: config.LND.TIMEOUT
};
} catch (e) {
config.LIGHTNING.ENABLED = false;
logger.updateNetwork();
logger.err(`Could not initialize LND Macaroon/TLS Cert. Disabling LIGHTNING. ` + (e instanceof Error ? e.message : e));
}
}

View File

@@ -59,7 +59,7 @@ class MempoolBlocks {
// Loop through and traverse all ancestors and sum up all the sizes + fees
// Pass down size + fee to all unconfirmed children
let sizes = 0;
memPoolArray.forEach((tx, i) => {
memPoolArray.forEach((tx) => {
sizes += tx.weight;
if (sizes > 4000000 * 8) {
return;
@@ -74,7 +74,7 @@ class MempoolBlocks {
const time = end - start;
logger.debug('Mempool blocks calculated in ' + time / 1000 + ' seconds');
const blocks = this.calculateMempoolBlocks(memPoolArray, this.mempoolBlocks);
const blocks = this.calculateMempoolBlocks(memPoolArray);
if (saveResults) {
const deltas = this.calculateMempoolDeltas(this.mempoolBlocks, blocks);
@@ -85,26 +85,23 @@ class MempoolBlocks {
return blocks;
}
private calculateMempoolBlocks(transactionsSorted: TransactionExtended[], prevBlocks: MempoolBlockWithTransactions[]): MempoolBlockWithTransactions[] {
private calculateMempoolBlocks(transactionsSorted: TransactionExtended[]): MempoolBlockWithTransactions[] {
const mempoolBlocks: MempoolBlockWithTransactions[] = [];
let blockWeight = 0;
let blockSize = 0;
let transactions: TransactionExtended[] = [];
transactionsSorted.forEach((tx) => {
if (blockWeight + tx.weight <= config.MEMPOOL.BLOCK_WEIGHT_UNITS
|| mempoolBlocks.length === config.MEMPOOL.MEMPOOL_BLOCKS_AMOUNT - 1) {
blockWeight += tx.weight;
blockSize += tx.size;
transactions.push(tx);
} else {
mempoolBlocks.push(this.dataToMempoolBlocks(transactions, mempoolBlocks.length));
mempoolBlocks.push(this.dataToMempoolBlocks(transactions));
blockWeight = tx.weight;
blockSize = tx.size;
transactions = [tx];
}
});
if (transactions.length) {
mempoolBlocks.push(this.dataToMempoolBlocks(transactions, mempoolBlocks.length));
mempoolBlocks.push(this.dataToMempoolBlocks(transactions));
}
return mempoolBlocks;
@@ -151,7 +148,7 @@ class MempoolBlocks {
// prepare a stripped down version of the mempool with only the minimum necessary data
// to reduce the overhead of passing this data to the worker thread
const strippedMempool: { [txid: string]: ThreadTransaction } = {};
Object.values(newMempool).forEach(entry => {
Object.values(newMempool).filter(tx => !tx.deleteAfter).forEach(entry => {
strippedMempool[entry.txid] = {
txid: entry.txid,
fee: entry.fee,
@@ -186,7 +183,14 @@ class MempoolBlocks {
this.txSelectionWorker?.once('error', reject);
});
this.txSelectionWorker.postMessage({ type: 'set', mempool: strippedMempool });
const { blocks, clusters } = await workerResultPromise;
let { blocks, clusters } = await workerResultPromise;
// filter out stale transactions
const unfilteredCount = blocks.reduce((total, block) => { return total + block.length; }, 0);
blocks = blocks.map(block => block.filter(tx => (tx.txid && tx.txid in newMempool)));
const filteredCount = blocks.reduce((total, block) => { return total + block.length; }, 0);
if (filteredCount < unfilteredCount) {
logger.warn(`tx selection worker thread returned ${unfilteredCount - filteredCount} stale transactions from makeBlockTemplates`);
}
// clean up thread error listener
this.txSelectionWorker?.removeListener('error', threadErrorListener);
@@ -228,7 +232,14 @@ class MempoolBlocks {
this.txSelectionWorker?.once('error', reject);
});
this.txSelectionWorker.postMessage({ type: 'update', added: addedStripped, removed });
const { blocks, clusters } = await workerResultPromise;
let { blocks, clusters } = await workerResultPromise;
// filter out stale transactions
const unfilteredCount = blocks.reduce((total, block) => { return total + block.length; }, 0);
blocks = blocks.map(block => block.filter(tx => (tx.txid && tx.txid in newMempool)));
const filteredCount = blocks.reduce((total, block) => { return total + block.length; }, 0);
if (filteredCount < unfilteredCount) {
logger.warn(`tx selection worker thread returned ${unfilteredCount - filteredCount} stale transactions from updateBlockTemplates`);
}
// clean up thread error listener
this.txSelectionWorker?.removeListener('error', threadErrorListener);
@@ -243,7 +254,7 @@ class MempoolBlocks {
// update this thread's mempool with the results
blocks.forEach(block => {
block.forEach(tx => {
if (tx.txid in mempool) {
if (tx.txid && tx.txid in mempool) {
if (tx.effectiveFeePerVsize != null) {
mempool[tx.txid].effectiveFeePerVsize = tx.effectiveFeePerVsize;
}
@@ -253,6 +264,10 @@ class MempoolBlocks {
const cluster = clusters[tx.cpfpRoot];
let matched = false;
cluster.forEach(txid => {
if (!txid || !mempool[txid]) {
logger.warn('projected transaction ancestor missing from mempool cache');
return;
}
if (txid === tx.txid) {
matched = true;
} else {
@@ -273,15 +288,17 @@ class MempoolBlocks {
mempool[tx.txid].bestDescendant = null;
}
mempool[tx.txid].cpfpChecked = tx.cpfpChecked;
} else {
logger.warn('projected transaction missing from mempool cache');
}
});
});
// unpack the condensed blocks into proper mempool blocks
const mempoolBlocks = blocks.map((transactions, blockIndex) => {
const mempoolBlocks = blocks.map((transactions) => {
return this.dataToMempoolBlocks(transactions.map(tx => {
return mempool[tx.txid] || null;
}).filter(tx => !!tx), blockIndex);
}).filter(tx => !!tx));
});
if (saveResults) {
@@ -293,7 +310,7 @@ class MempoolBlocks {
return mempoolBlocks;
}
private dataToMempoolBlocks(transactions: TransactionExtended[], blocksIndex: number): MempoolBlockWithTransactions {
private dataToMempoolBlocks(transactions: TransactionExtended[]): MempoolBlockWithTransactions {
let totalSize = 0;
let totalWeight = 0;
const fitTransactions: TransactionExtended[] = [];
@@ -304,22 +321,14 @@ class MempoolBlocks {
fitTransactions.push(tx);
}
});
let rangeLength = 4;
if (blocksIndex === 0) {
rangeLength = 8;
}
if (transactions.length > 4000) {
rangeLength = 6;
} else if (transactions.length > 10000) {
rangeLength = 8;
}
const feeStats = Common.calcEffectiveFeeStatistics(transactions);
return {
blockSize: totalSize,
blockVSize: totalWeight / 4,
nTx: transactions.length,
totalFees: transactions.reduce((acc, cur) => acc + cur.fee, 0),
medianFee: Common.percentile(transactions.map((tx) => tx.effectiveFeePerVsize), config.MEMPOOL.RECOMMENDED_FEE_PERCENTILE),
feeRange: Common.getFeesInRange(transactions, rangeLength),
medianFee: feeStats.medianFee, // Common.percentile(transactions.map((tx) => tx.effectiveFeePerVsize), config.MEMPOOL.RECOMMENDED_FEE_PERCENTILE),
feeRange: feeStats.feeRange, //Common.getFeesInRange(transactions, rangeLength),
transactionIds: transactions.map((tx) => tx.txid),
transactions: fitTransactions.map((tx) => Common.stripTransaction(tx)),
};
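The worker-result handling above counts template entries before and after dropping txids that are no longer in the live mempool, so the discrepancy can be logged. A compact sketch of that filter; the generic helper name is illustrative:

```ts
// Illustrative sketch of the stale-transaction filter added above;
// filterStale is a hypothetical helper, not part of MempoolBlocks.
function filterStale<T extends { txid?: string }>(
  blocks: T[][],
  mempool: { [txid: string]: unknown },
): T[][] {
  const before = blocks.reduce((n, block) => n + block.length, 0);
  const filtered = blocks.map(block => block.filter(tx => tx.txid && tx.txid in mempool));
  const after = filtered.reduce((n, block) => n + block.length, 0);
  if (after < before) {
    console.warn(`tx selection worker returned ${before - after} stale transactions`);
  }
  return filtered;
}
```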

View File

@@ -38,7 +38,6 @@ class Mempool {
constructor() {
setInterval(this.updateTxPerSecond.bind(this), 1000);
setInterval(this.deleteExpiredTransactions.bind(this), 20000);
}
/**
@@ -256,7 +255,7 @@ class Mempool {
}
}
private deleteExpiredTransactions() {
public deleteExpiredTransactions() {
const now = new Date().getTime();
for (const tx in this.mempoolCache) {
const lazyDeleteAt = this.mempoolCache[tx].deleteAfter;

View File

@@ -452,7 +452,7 @@ class Mining {
const elapsedSeconds = Math.max(1, Math.round((new Date().getTime() / 1000) - timer));
if (elapsedSeconds > 5) {
const progress = Math.round(totalBlockChecked / blocks.length * 100);
logger.info(`Indexing difficulty adjustment at block #${block.height} | Progress: ${progress}%`, logger.tags.mining);
logger.debug(`Indexing difficulty adjustment at block #${block.height} | Progress: ${progress}%`, logger.tags.mining);
timer = new Date().getTime() / 1000;
}
}
@@ -558,8 +558,10 @@ class Mining {
currentBlockHeight -= 10000;
}
if (totalIndexed) {
logger.info(`Indexing missing coinstatsindex data completed`, logger.tags.mining);
if (totalIndexed > 0) {
logger.info(`Indexing missing coinstatsindex data completed. Indexed ${totalIndexed}`, logger.tags.mining);
} else {
logger.debug(`Indexing missing coinstatsindex data completed. Indexed 0.`, logger.tags.mining);
}
}

View File

@@ -211,6 +211,7 @@ class WebsocketHandler {
if (!_blocks) {
_blocks = blocks.getBlocks().slice(-config.MEMPOOL.INITIAL_BLOCKS_AMOUNT);
}
const da = difficultyAdjustment.getDifficultyAdjustment();
return {
'mempoolInfo': memPool.getMempoolInfo(),
'vBytesPerSecond': memPool.getVBytesPerSecond(),
@@ -220,7 +221,7 @@ class WebsocketHandler {
'transactions': memPool.getLatestTransactions(),
'backendInfo': backendInfo.getBackendInfo(),
'loadingIndicators': loadingIndicators.getLoadingIndicators(),
'da': difficultyAdjustment.getDifficultyAdjustment(),
'da': da?.previousTime ? da : undefined,
'fees': feeApi.getRecommendedFee(),
...this.extraInitProperties
};
@@ -278,7 +279,9 @@ class WebsocketHandler {
response['mempoolInfo'] = mempoolInfo;
response['vBytesPerSecond'] = vBytesPerSecond;
response['transactions'] = newTransactions.slice(0, 6).map((tx) => Common.stripTransaction(tx));
response['da'] = da;
if (da?.previousTime) {
response['da'] = da;
}
response['fees'] = recommendedFees;
}
@@ -505,7 +508,7 @@ class WebsocketHandler {
const response = {
'block': block,
'mempoolInfo': memPool.getMempoolInfo(),
'da': da,
'da': da?.previousTime ? da : undefined,
'fees': fees,
};

View File

@@ -178,6 +178,7 @@ class Server {
logger.debug(msg);
}
}
memPool.deleteExpiredTransactions();
await blocks.$updateBlocks();
await memPool.$updateMempool();
indexer.$run();
@@ -275,7 +276,7 @@ class Server {
if (!this.warnedHeapCritical && this.maxHeapSize > warnThreshold) {
this.warnedHeapCritical = true;
logger.warn(`Used ${(this.maxHeapSize / stats.heap_size_limit).toFixed(2)}% of heap limit (${formatBytes(this.maxHeapSize, byteUnits, true)} / ${formatBytes(stats.heap_size_limit, byteUnits)})!`);
logger.warn(`Used ${(this.maxHeapSize / stats.heap_size_limit * 100).toFixed(2)}% of heap limit (${formatBytes(this.maxHeapSize, byteUnits, true)} / ${formatBytes(stats.heap_size_limit, byteUnits)})!`);
}
if (this.lastHeapLogTime === null || (now - this.lastHeapLogTime) > (this.heapLogInterval * 1000)) {
logger.debug(`Memory usage: ${formatBytes(this.maxHeapSize, byteUnits)} / ${formatBytes(stats.heap_size_limit, byteUnits)}`);
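For scale on the heap-warning fix above: with maxHeapSize at 1.7 GB against a 2 GB heap_size_limit (illustrative numbers), the old string interpolated the raw ratio and printed "Used 0.85% of heap limit", whereas the intended output is "Used 85.00% of heap limit".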

View File

@@ -69,6 +69,10 @@ class Logger {
this.network = this.getNetwork();
}
public updateNetwork(): void {
this.network = this.getNetwork();
}
private addprio(prio): void {
this[prio] = (function(_this) {
return function(msg, tag?: string) {

View File

@@ -214,6 +214,16 @@ export interface MempoolStats {
tx_count: number;
}
export interface EffectiveFeeStats {
medianFee: number; // median effective fee rate
feeRange: number[]; // 2nd, 10th, 25th, 50th, 75th, 90th, 98th percentiles
}
export interface CpfpSummary {
transactions: TransactionExtended[];
clusters: { root: string, height: number, txs: Ancestor[], effectiveFeePerVsize: number }[];
}
export interface Statistic {
id?: number;
added: string;
@@ -309,9 +319,11 @@ export interface IDifficultyAdjustment {
remainingBlocks: number;
remainingTime: number;
previousRetarget: number;
previousTime: number;
nextRetargetHeight: number;
timeAvg: number;
timeOffset: number;
expectedBlocks: number;
}
export interface IndexedDifficultyAdjustment {

View File

@@ -1,4 +1,4 @@
import { BlockExtended, BlockExtension, BlockPrice } from '../mempool.interfaces';
import { BlockExtended, BlockExtension, BlockPrice, EffectiveFeeStats } from '../mempool.interfaces';
import DB from '../database';
import logger from '../logger';
import { Common } from '../api/common';
@@ -466,30 +466,6 @@ class BlocksRepository {
}
}
/**
* Get one block by hash
*/
public async $getBlockByHash(hash: string): Promise<object | null> {
try {
const query = `
SELECT ${BLOCK_DB_FIELDS}
FROM blocks
JOIN pools ON blocks.pool_id = pools.id
WHERE hash = ?;
`;
const [rows]: any[] = await DB.query(query, [hash]);
if (rows.length <= 0) {
return null;
}
return await this.formatDbBlockIntoExtendedBlock(rows[0]);
} catch (e) {
logger.err(`Cannot get indexed block ${hash}. Reason: ` + (e instanceof Error ? e.message : e));
throw e;
}
}
/**
* Return blocks difficulty
*/
@@ -599,7 +575,6 @@ class BlocksRepository {
if (blocks[idx].previous_block_hash !== blocks[idx - 1].hash) {
logger.warn(`Chain divergence detected at block ${blocks[idx - 1].height}`);
await this.$deleteBlocksFrom(blocks[idx - 1].height);
await BlocksSummariesRepository.$deleteBlocksFrom(blocks[idx - 1].height);
await HashratesRepository.$deleteHashratesFromTimestamp(blocks[idx - 1].timestamp - 604800);
await DifficultyAdjustmentsRepository.$deleteAdjustementsFromHeight(blocks[idx - 1].height);
return false;
@@ -619,7 +594,7 @@ class BlocksRepository {
* Delete blocks from the database from blockHeight
*/
public async $deleteBlocksFrom(blockHeight: number) {
logger.info(`Delete newer blocks from height ${blockHeight} from the database`);
logger.info(`Delete newer blocks from height ${blockHeight} from the database`, logger.tags.mining);
try {
await DB.query(`DELETE FROM blocks where height >= ${blockHeight}`);
@@ -908,6 +883,25 @@ class BlocksRepository {
}
}
/**
* Save indexed effective fee statistics
*
* @param id
* @param feeStats
*/
public async $saveEffectiveFeeStats(id: string, feeStats: EffectiveFeeStats): Promise<void> {
try {
await DB.query(`
UPDATE blocks SET median_fee = ?, fee_span = ?
WHERE hash = ?`,
[feeStats.medianFee, JSON.stringify(feeStats.feeRange), id]
);
} catch (e) {
logger.err(`Cannot update block fee stats. Reason: ` + (e instanceof Error ? e.message : e));
throw e;
}
}
/**
* Convert a mysql row block into a BlockExtended. Note that you
* must provide the correct field into dbBlk object param
@@ -978,6 +972,7 @@ class BlocksRepository {
}
// If we're missing block summary related field, check if we can populate them on the fly now
// This is for example triggered upon re-org
if (Common.blocksSummariesIndexingEnabled() &&
(extras.medianFeeAmt === null || extras.feePercentiles === null))
{
@@ -985,7 +980,7 @@ class BlocksRepository {
if (extras.feePercentiles === null) {
const block = await bitcoinClient.getBlock(dbBlk.id, 2);
const summary = blocks.summarizeBlock(block);
await BlocksSummariesRepository.$saveSummary({ height: block.height, mined: summary });
await BlocksSummariesRepository.$saveTransactions(dbBlk.height, dbBlk.hash, summary.transactions);
extras.feePercentiles = await BlocksSummariesRepository.$getFeePercentilesByBlockId(dbBlk.id);
}
if (extras.feePercentiles !== null) {

View File

@@ -1,6 +1,6 @@
import DB from '../database';
import logger from '../logger';
import { BlockSummary } from '../mempool.interfaces';
import { BlockSummary, TransactionStripped } from '../mempool.interfaces';
class BlocksSummariesRepository {
public async $getByBlockId(id: string): Promise<BlockSummary | undefined> {
@@ -17,7 +17,7 @@ class BlocksSummariesRepository {
return undefined;
}
public async $saveSummary(params: { height: number, mined?: BlockSummary}) {
public async $saveSummary(params: { height: number, mined?: BlockSummary}): Promise<void> {
const blockId = params.mined?.id;
try {
const transactions = JSON.stringify(params.mined?.transactions || []);
@@ -37,6 +37,20 @@ class BlocksSummariesRepository {
}
}
public async $saveTransactions(blockHeight: number, blockId: string, transactions: TransactionStripped[]): Promise<void> {
try {
const transactionsStr = JSON.stringify(transactions);
await DB.query(`
INSERT INTO blocks_summaries
SET height = ?, transactions = ?, id = ?
ON DUPLICATE KEY UPDATE transactions = ?`,
[blockHeight, transactionsStr, blockId, transactionsStr]);
} catch (e: any) {
logger.debug(`Cannot save block summary transactions for ${blockId}. Reason: ${e instanceof Error ? e.message : e}`);
throw e;
}
}
public async $saveTemplate(params: { height: number, template: BlockSummary}) {
const blockId = params.template?.id;
try {
@@ -68,19 +82,6 @@ class BlocksSummariesRepository {
return [];
}
/**
* Delete blocks from the database from blockHeight
*/
public async $deleteBlocksFrom(blockHeight: number) {
logger.info(`Delete newer blocks summary from height ${blockHeight} from the database`);
try {
await DB.query(`DELETE FROM blocks_summaries where height >= ${blockHeight}`);
} catch (e) {
logger.err('Cannot delete indexed blocks summaries. Reason: ' + (e instanceof Error ? e.message : e));
}
}
/**
* Get the fee percentiles if the block has already been indexed, [] otherwise
*

View File

@@ -48,7 +48,7 @@ class CpfpRepository {
}
}
public async $batchSaveClusters(clusters: { root: string, height: number, txs: any, effectiveFeePerVsize: number}[]): Promise<boolean> {
public async $batchSaveClusters(clusters: { root: string, height: number, txs: Ancestor[], effectiveFeePerVsize: number }[]): Promise<boolean> {
try {
const clusterValues: any[] = [];
const txs: any[] = [];

View File

@@ -220,7 +220,7 @@ class HashratesRepository {
* Delete hashrates from the database from timestamp
*/
public async $deleteHashratesFromTimestamp(timestamp: number) {
logger.info(`Delete newer hashrates from timestamp ${new Date(timestamp * 1000).toUTCString()} from the database`);
logger.info(`Delete newer hashrates from timestamp ${new Date(timestamp * 1000).toUTCString()} from the database`, logger.tags.mining);
try {
await DB.query(`DELETE FROM hashrates WHERE hashrate_timestamp >= FROM_UNIXTIME(?)`, [timestamp]);

View File

@@ -160,7 +160,7 @@ class PricesRepository {
// Compute fiat exchange rates
let latestPrice = rates[0] as ApiPrice;
if (latestPrice.USD === -1) {
if (!latestPrice || latestPrice.USD === -1) {
latestPrice = priceUpdater.getEmptyPricesObj();
}

View File

@@ -27,7 +27,7 @@ class ForensicsService {
private async $runTasks(): Promise<void> {
try {
logger.info(`Running forensics scans`);
logger.debug(`Running forensics scans`);
if (config.MEMPOOL.BACKEND === 'esplora') {
await this.$runClosedChannelsForensics(false);
@@ -73,7 +73,7 @@ class ForensicsService {
let progress = 0;
try {
logger.info(`Started running closed channel forensics...`);
logger.debug(`Started running closed channel forensics...`);
let channels;
if (onlyNewChannels) {
channels = await channelsApi.$getClosedChannelsWithoutReason();
@@ -156,7 +156,7 @@ class ForensicsService {
this.loggerTimer = new Date().getTime() / 1000;
}
}
logger.info(`Closed channels forensics scan complete.`);
logger.debug(`Closed channels forensics scan complete.`);
} catch (e) {
logger.err('$runClosedChannelsForensics() error: ' + (e instanceof Error ? e.message : e));
}
@@ -217,7 +217,7 @@ class ForensicsService {
let progress = 0;
try {
logger.info(`Started running open channel forensics...`);
logger.debug(`Started running open channel forensics...`);
const channels = await channelsApi.$getChannelsWithoutSourceChecked();
for (const openChannel of channels) {
@@ -266,7 +266,7 @@ class ForensicsService {
}
}
logger.info(`Open channels forensics scan complete.`);
logger.debug(`Open channels forensics scan complete.`);
} catch (e) {
logger.err('$runOpenedChannelsForensics() error: ' + (e instanceof Error ? e.message : e));
} finally {

View File

@@ -283,7 +283,7 @@ class NetworkSyncService {
} else {
log += ` for the first time`;
}
logger.info(`${log}`, logger.tags.ln);
logger.debug(`${log}`, logger.tags.ln);
const channels = await channelsApi.$getChannelsByStatus([0, 1]);
for (const channel of channels) {

View File

@@ -15,16 +15,20 @@ class LightningStatsImporter {
topologiesFolder = config.LIGHTNING.TOPOLOGY_FOLDER;
async $run(): Promise<void> {
const [channels]: any[] = await DB.query('SELECT short_id from channels;');
logger.info(`Caching funding txs for currently existing channels`, logger.tags.ln);
await fundingTxFetcher.$fetchChannelsFundingTxs(channels.map(channel => channel.short_id));
try {
const [channels]: any[] = await DB.query('SELECT short_id from channels;');
logger.info(`Caching funding txs for currently existing channels`, logger.tags.ln);
await fundingTxFetcher.$fetchChannelsFundingTxs(channels.map(channel => channel.short_id));
if (config.MEMPOOL.NETWORK !== 'mainnet' || config.DATABASE.ENABLED === false) {
return;
if (config.MEMPOOL.NETWORK !== 'mainnet' || config.DATABASE.ENABLED === false) {
return;
}
await this.$importHistoricalLightningStats();
await this.$cleanupIncorrectSnapshot();
} catch (e) {
logger.err(`Exception in LightningStatsImporter::$run(). ${e}`);
}
await this.$importHistoricalLightningStats();
await this.$cleanupIncorrectSnapshot();
}
/**

View File

@@ -62,7 +62,7 @@ class PoolsUpdater {
if (this.currentSha === null) {
logger.info(`Downloading pools-v2.json for the first time from ${this.poolsUrl} over ${network}`, logger.tags.mining);
} else {
logger.warn(`pools-v2.json is outdated, fetch latest from ${this.poolsUrl} over ${network}`, logger.tags.mining);
logger.warn(`pools-v2.json is outdated, fetching latest from ${this.poolsUrl} over ${network}`, logger.tags.mining);
}
const poolsJson = await this.query(this.poolsUrl);
if (poolsJson === undefined) {

View File

@@ -222,7 +222,7 @@ class PriceUpdater {
private async $insertMissingRecentPrices(type: 'hour' | 'day'): Promise<void> {
const existingPriceTimes = await PricesRepository.$getPricesTimes();
logger.info(`Fetching ${type === 'day' ? 'dai' : 'hour'}ly price history from exchanges and saving missing ones into the database`, logger.tags.mining);
logger.debug(`Fetching ${type === 'day' ? 'dai' : 'hour'}ly price history from exchanges and saving missing ones into the database`, logger.tags.mining);
const historicalPrices: PriceHistory[] = [];