Merge branch 'master' into ops/esplora-unix-sockets
@@ -5,9 +5,9 @@ const PROPAGATION_MARGIN = 180; // in seconds, time since a transaction is first
 
 class Audit {
   auditBlock(transactions: TransactionExtended[], projectedBlocks: MempoolBlockWithTransactions[], mempool: { [txId: string]: TransactionExtended })
-  : { censored: string[], added: string[], fresh: string[], score: number } {
+  : { censored: string[], added: string[], fresh: string[], score: number, similarity: number } {
     if (!projectedBlocks?.[0]?.transactionIds || !mempool) {
-      return { censored: [], added: [], fresh: [], score: 0 };
+      return { censored: [], added: [], fresh: [], score: 0, similarity: 1 };
     }
 
     const matches: string[] = []; // present in both mined block and template
@@ -16,6 +16,8 @@ class Audit {
     const isCensored = {}; // missing, without excuse
     const isDisplaced = {};
     let displacedWeight = 0;
+    let matchedWeight = 0;
+    let projectedWeight = 0;
 
     const inBlock = {};
     const inTemplate = {};
@@ -38,11 +40,16 @@ class Audit {
           isCensored[txid] = true;
         }
         displacedWeight += mempool[txid].weight;
+      } else {
+        matchedWeight += mempool[txid].weight;
       }
+      projectedWeight += mempool[txid].weight;
       inTemplate[txid] = true;
     }
 
     displacedWeight += (4000 - transactions[0].weight);
+    projectedWeight += transactions[0].weight;
+    matchedWeight += transactions[0].weight;
 
     // we can expect an honest miner to include 'displaced' transactions in place of recent arrivals and censored txs
     // these displaced transactions should occupy the first N weight units of the next projected block
@@ -121,12 +128,14 @@ class Audit {
     const numCensored = Object.keys(isCensored).length;
     const numMatches = matches.length - 1; // adjust for coinbase tx
     const score = numMatches > 0 ? (numMatches / (numMatches + numCensored)) : 0;
+    const similarity = projectedWeight ? matchedWeight / projectedWeight : 1;
 
     return {
      censored: Object.keys(isCensored),
      added,
      fresh,
-     score
+     score,
+     similarity,
    };
  }
}
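Taken together, the audit now reports two ratios: `score`, the count-based match rate that feeds the `matchRate` shown on block pages (coinbase excluded via `matches.length - 1`), and the new weight-based `similarity`. A minimal sketch of the two formulas, using made-up counts and weights rather than real mempool data:

```typescript
// Hypothetical audit tallies, for illustration only.
const numMatches = 2950;           // template txs found in the mined block (coinbase excluded)
const numCensored = 50;            // template txs missing without excuse
const matchedWeight = 3_900_000;   // weight units present in both block and template
const projectedWeight = 3_992_000; // total projected block weight, incl. coinbase allowance

// Count-based match rate, as in auditBlock():
const score = numMatches > 0 ? (numMatches / (numMatches + numCensored)) : 0; // ~0.983

// Weight-based similarity, the metric added in this merge:
const similarity = projectedWeight ? matchedWeight / projectedWeight : 1;     // ~0.977

console.log(score.toFixed(3), similarity.toFixed(3));
```

The two can diverge: dropping a handful of very large transactions barely moves `score` but shows up clearly in `similarity`.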
@@ -7,7 +7,7 @@ const nodeRpcCredentials: BitcoinRpcCredentials = {
   port: config.CORE_RPC.PORT,
   user: config.CORE_RPC.USERNAME,
   pass: config.CORE_RPC.PASSWORD,
-  timeout: 60000,
+  timeout: config.CORE_RPC.TIMEOUT,
 };
 
 export default new bitcoin.Client(nodeRpcCredentials);

@@ -7,7 +7,7 @@ const nodeRpcCredentials: BitcoinRpcCredentials = {
   port: config.SECOND_CORE_RPC.PORT,
   user: config.SECOND_CORE_RPC.USERNAME,
   pass: config.SECOND_CORE_RPC.PASSWORD,
-  timeout: 60000,
+  timeout: config.SECOND_CORE_RPC.TIMEOUT,
 };
 
 export default new bitcoin.Client(nodeRpcCredentials);
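Both Core RPC clients drop the hardcoded 60-second timeout in favor of a config value. The config definition itself is outside this diff; a sketch of what the relevant block presumably looks like (field shapes assumed, not confirmed by this commit):

```typescript
// Assumed shape of the CORE_RPC / SECOND_CORE_RPC sections consumed above;
// the real definition lives in the backend's config module, untouched here.
interface CoreRpcConfig {
  HOST: string;
  PORT: number;
  USERNAME: string;
  PASSWORD: string;
  TIMEOUT: number; // in ms; presumably defaulting to the old 60000
}
```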
@@ -143,7 +143,10 @@ class Blocks {
    * @returns BlockSummary
    */
   public summarizeBlock(block: IBitcoinApi.VerboseBlock): BlockSummary {
-    const stripped = block.tx.map((tx) => {
+    if (Common.isLiquid()) {
+      block = this.convertLiquidFees(block);
+    }
+    const stripped = block.tx.map((tx: IBitcoinApi.VerboseTransaction) => {
       return {
         txid: tx.txid,
         vsize: tx.weight / 4,

@@ -158,6 +161,13 @@ class Blocks {
     };
   }
 
+  private convertLiquidFees(block: IBitcoinApi.VerboseBlock): IBitcoinApi.VerboseBlock {
+    block.tx.forEach(tx => {
+      tx.fee = Object.values(tx.fee || {}).reduce((total, output) => total + output, 0);
+    });
+    return block;
+  }
+
   /**
    * Return a block with additional data (reward, coinbase, fees...)
    * @param block

@@ -641,7 +651,7 @@ class Blocks {
     if (this.newBlockCallbacks.length) {
       this.newBlockCallbacks.forEach((cb) => cb(blockExtended, txIds, transactions));
     }
-    if (!memPool.hasPriority() && (block.height % 6 === 0)) {
+    if (!memPool.hasPriority() && (block.height % config.MEMPOOL.DISK_CACHE_BLOCK_INTERVAL === 0)) {
       diskCache.$saveCacheToDisk();
     }
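The Liquid branch exists because Elements' verbose `getblock` apparently reports each transaction's `fee` as a per-asset map rather than a single number, so `convertLiquidFees` flattens it before the summary math. A standalone sketch of that reduction (the asset keys are placeholders, not real Liquid asset IDs):

```typescript
// A fee object as a Liquid node might report it (placeholder asset keys).
const fee: Record<string, number> = {
  'lbtc-asset-id': 0.00000265,
  'other-asset-id': 0.00000110,
};

// Same reduction as convertLiquidFees(): collapse per-asset amounts into one total.
const flatFee = Object.values(fee || {}).reduce((total, output) => total + output, 0);
console.log(flatFee); // 0.00000375
```

The `DISK_CACHE_BLOCK_INTERVAL` change in the last hunk follows the same theme as the RPC timeouts: a magic number (flush the disk cache every 6 blocks) promoted to configuration.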
@@ -1,4 +1,4 @@
-import { CpfpInfo, TransactionExtended, TransactionStripped } from '../mempool.interfaces';
+import { CpfpInfo, MempoolBlockWithTransactions, TransactionExtended, TransactionStripped } from '../mempool.interfaces';
 import config from '../config';
 import { NodeSocket } from '../repositories/NodesSocketsRepository';
 import { isIP } from 'net';

@@ -164,6 +164,30 @@ export class Common {
     return parents;
   }
 
+  // calculates the ratio of matched transactions to projected transactions by weight
+  static getSimilarity(projectedBlock: MempoolBlockWithTransactions, transactions: TransactionExtended[]): number {
+    let matchedWeight = 0;
+    let projectedWeight = 0;
+    const inBlock = {};
+
+    for (const tx of transactions) {
+      inBlock[tx.txid] = tx;
+    }
+
+    // look for transactions that were expected in the template, but missing from the mined block
+    for (const tx of projectedBlock.transactions) {
+      if (inBlock[tx.txid]) {
+        matchedWeight += tx.vsize * 4;
+      }
+      projectedWeight += tx.vsize * 4;
+    }
+
+    projectedWeight += transactions[0].weight;
+    matchedWeight += transactions[0].weight;
+
+    return projectedWeight ? matchedWeight / projectedWeight : 1;
+  }
+
   static getSqlInterval(interval: string | null): string | null {
     switch (interval) {
       case '24h': return '1 DAY';
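Note the unit conversion inside the loop: projected-block entries carry `vsize`, so `tx.vsize * 4` recovers weight units (1 vbyte = 4 WU) and keeps both accumulators in the same unit as `transactions[0].weight`, the coinbase added to both sides. A runnable sketch of the same logic against hand-made stubs (types reduced to the fields the function actually touches):

```typescript
type TemplateTx = { txid: string; vsize: number };
type BlockTx = { txid: string; weight: number };

// Mirrors the getSimilarity() logic above.
function similarity(projected: TemplateTx[], txs: BlockTx[]): number {
  const inBlock: Record<string, BlockTx> = {};
  for (const tx of txs) { inBlock[tx.txid] = tx; }
  let matchedWeight = 0;
  let projectedWeight = 0;
  for (const tx of projected) {
    if (inBlock[tx.txid]) { matchedWeight += tx.vsize * 4; }
    projectedWeight += tx.vsize * 4;
  }
  projectedWeight += txs[0].weight; // coinbase counts on both sides
  matchedWeight += txs[0].weight;
  return projectedWeight ? matchedWeight / projectedWeight : 1;
}

const template: TemplateTx[] = [
  { txid: 'aa', vsize: 500 }, // 2000 WU, present in the block below
  { txid: 'bb', vsize: 250 }, // 1000 WU, missing from the block
];
const blockTxs: BlockTx[] = [
  { txid: 'coinbase', weight: 800 },
  { txid: 'aa', weight: 2000 },
];

console.log(similarity(template, blockTxs)); // (2000 + 800) / (3000 + 800) ≈ 0.737
```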
@@ -7,7 +7,7 @@ import cpfpRepository from '../repositories/CpfpRepository';
 import { RowDataPacket } from 'mysql2';
 
 class DatabaseMigration {
-  private static currentVersion = 58;
+  private static currentVersion = 59;
   private queryTimeout = 3600_000;
   private statisticsAddedIndexed = false;
   private uniqueLogs: string[] = [];

@@ -510,6 +510,11 @@ class DatabaseMigration {
       // We only run some migration queries for this version
       await this.updateToSchemaVersion(58);
     }
+
+    if (databaseSchemaVersion < 59 && (config.MEMPOOL.NETWORK === 'signet' || config.MEMPOOL.NETWORK === 'testnet')) {
+      // https://github.com/mempool/mempool/issues/3360
+      await this.$executeQuery(`TRUNCATE prices`);
+    }
   }
 
   /**
@@ -19,20 +19,16 @@ class DiskCache {
   private isWritingCache = false;
 
   constructor() {
-    if (!cluster.isMaster) {
+    if (!cluster.isPrimary) {
       return;
     }
-    process.on('SIGINT', (e) => {
-      this.saveCacheToDiskSync();
-      process.exit(2);
-    });
     process.on('SIGTERM', (e) => {
-      this.saveCacheToDiskSync();
-      process.exit(2);
+      this.$saveCacheToDisk(true);
+      process.exit(0);
     });
   }
 
-  async $saveCacheToDisk(): Promise<void> {
+  async $saveCacheToDisk(sync: boolean = false): Promise<void> {
     if (!cluster.isPrimary) {
       return;
     }
@@ -41,7 +37,7 @@ class DiskCache {
       return;
     }
     try {
-      logger.debug('Writing mempool and blocks data to disk cache (async)...');
+      logger.debug(`Writing mempool and blocks data to disk cache (${ sync ? 'sync' : 'async' })...`);
       this.isWritingCache = true;
 
       const mempool = memPool.getMempool();
@@ -54,66 +50,46 @@ class DiskCache {
 
       const chunkSize = Math.floor(mempoolArray.length / DiskCache.CHUNK_FILES);
 
-      await fsPromises.writeFile(DiskCache.FILE_NAME, JSON.stringify({
-        cacheSchemaVersion: this.cacheSchemaVersion,
-        blocks: blocks.getBlocks(),
-        blockSummaries: blocks.getBlockSummaries(),
-        mempool: {},
-        mempoolArray: mempoolArray.splice(0, chunkSize),
-      }), { flag: 'w' });
-      for (let i = 1; i < DiskCache.CHUNK_FILES; i++) {
-        await fsPromises.writeFile(DiskCache.FILE_NAMES.replace('{number}', i.toString()), JSON.stringify({
-          mempool: {},
-          mempoolArray: mempoolArray.splice(0, chunkSize),
-        }), { flag: 'w' });
-      }
+      if (sync) {
+        fs.writeFileSync(DiskCache.TMP_FILE_NAME, JSON.stringify({
+          network: config.MEMPOOL.NETWORK,
+          cacheSchemaVersion: this.cacheSchemaVersion,
+          blocks: blocks.getBlocks(),
+          blockSummaries: blocks.getBlockSummaries(),
+          mempool: {},
+          mempoolArray: mempoolArray.splice(0, chunkSize),
+        }), { flag: 'w' });
+        for (let i = 1; i < DiskCache.CHUNK_FILES; i++) {
+          fs.writeFileSync(DiskCache.TMP_FILE_NAMES.replace('{number}', i.toString()), JSON.stringify({
+            mempool: {},
+            mempoolArray: mempoolArray.splice(0, chunkSize),
+          }), { flag: 'w' });
+        }
+
+        fs.renameSync(DiskCache.TMP_FILE_NAME, DiskCache.FILE_NAME);
+        for (let i = 1; i < DiskCache.CHUNK_FILES; i++) {
+          fs.renameSync(DiskCache.TMP_FILE_NAMES.replace('{number}', i.toString()), DiskCache.FILE_NAMES.replace('{number}', i.toString()));
+        }
+      } else {
+        await fsPromises.writeFile(DiskCache.TMP_FILE_NAME, JSON.stringify({
+          network: config.MEMPOOL.NETWORK,
+          cacheSchemaVersion: this.cacheSchemaVersion,
+          blocks: blocks.getBlocks(),
+          blockSummaries: blocks.getBlockSummaries(),
+          mempool: {},
+          mempoolArray: mempoolArray.splice(0, chunkSize),
+        }), { flag: 'w' });
+        for (let i = 1; i < DiskCache.CHUNK_FILES; i++) {
+          await fsPromises.writeFile(DiskCache.TMP_FILE_NAMES.replace('{number}', i.toString()), JSON.stringify({
+            mempool: {},
+            mempoolArray: mempoolArray.splice(0, chunkSize),
+          }), { flag: 'w' });
+        }
+
+        await fsPromises.rename(DiskCache.TMP_FILE_NAME, DiskCache.FILE_NAME);
+        for (let i = 1; i < DiskCache.CHUNK_FILES; i++) {
+          await fsPromises.rename(DiskCache.TMP_FILE_NAMES.replace('{number}', i.toString()), DiskCache.FILE_NAMES.replace('{number}', i.toString()));
+        }
+      }
 
       logger.debug('Mempool and blocks data saved to disk cache');
       this.isWritingCache = false;
     } catch (e) {
       logger.warn('Error writing to cache file: ' + (e instanceof Error ? e.message : e));
       this.isWritingCache = false;
     }
   }
-
-  saveCacheToDiskSync(): void {
-    if (!cluster.isPrimary) {
-      return;
-    }
-    if (this.isWritingCache) {
-      logger.debug('Saving cache already in progress. Skipping.');
-      return;
-    }
-    try {
-      logger.debug('Writing mempool and blocks data to disk cache (sync)...');
-      this.isWritingCache = true;
-
-      const mempool = memPool.getMempool();
-      const mempoolArray: TransactionExtended[] = [];
-      for (const tx in mempool) {
-        mempoolArray.push(mempool[tx]);
-      }
-
-      Common.shuffleArray(mempoolArray);
-
-      const chunkSize = Math.floor(mempoolArray.length / DiskCache.CHUNK_FILES);
-
-      fs.writeFileSync(DiskCache.TMP_FILE_NAME, JSON.stringify({
-        cacheSchemaVersion: this.cacheSchemaVersion,
-        blocks: blocks.getBlocks(),
-        blockSummaries: blocks.getBlockSummaries(),
-        mempool: {},
-        mempoolArray: mempoolArray.splice(0, chunkSize),
-      }), { flag: 'w' });
-      for (let i = 1; i < DiskCache.CHUNK_FILES; i++) {
-        fs.writeFileSync(DiskCache.TMP_FILE_NAMES.replace('{number}', i.toString()), JSON.stringify({
-          mempool: {},
-          mempoolArray: mempoolArray.splice(0, chunkSize),
-        }), { flag: 'w' });
-      }
-
-      fs.renameSync(DiskCache.TMP_FILE_NAME, DiskCache.FILE_NAME);
-      for (let i = 1; i < DiskCache.CHUNK_FILES; i++) {
-        fs.renameSync(DiskCache.TMP_FILE_NAMES.replace('{number}', i.toString()), DiskCache.FILE_NAMES.replace('{number}', i.toString()));
-      }
@@ -160,6 +136,10 @@ class DiskCache {
       logger.notice('Disk cache contains an outdated schema version. Clearing it and skipping the cache loading.');
       return this.wipeCache();
     }
+    if (data.network && data.network !== config.MEMPOOL.NETWORK) {
+      logger.notice('Disk cache contains data from a different network. Clearing it and skipping the cache loading.');
+      return this.wipeCache();
+    }
 
     if (data.mempoolArray) {
       for (const tx of data.mempoolArray) {
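Beyond the network tag and the sync/async split, the important behavioral change is that both paths now write to the `TMP_FILE` names and then rename them over the real cache files. Since a rename replaces its target in a single filesystem operation (on the same filesystem), a crash mid-write can no longer leave a truncated cache behind. A minimal standalone sketch of the pattern, with placeholder file names:

```typescript
import * as fs from 'fs';

// Write-then-rename: a reader of 'cache.json' sees either the old file or the
// complete new one, never a partially written file. This assumes both paths
// live on the same filesystem, where rename() is atomic.
function saveAtomically(payload: object): void {
  fs.writeFileSync('./tmp-cache.json', JSON.stringify(payload), { flag: 'w' });
  fs.renameSync('./tmp-cache.json', './cache.json');
}

saveAtomically({ savedAt: Date.now() });
```

The sync variant appears to exist for the signal handler: `process.exit()` would not wait for pending async writes, so the SIGTERM path passes `sync = true` to finish the write before exiting.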
@@ -17,7 +17,7 @@ class LndApi implements AbstractLightningApi {
       httpsAgent: new Agent({
         ca: fs.readFileSync(config.LND.TLS_CERT_PATH)
       }),
-      timeout: 10000
+      timeout: config.LND.TIMEOUT
     };
   }
 }
@@ -13,6 +13,7 @@ import BlocksAuditsRepository from '../../repositories/BlocksAuditsRepository';
 import PricesRepository from '../../repositories/PricesRepository';
+import { bitcoinCoreApi } from '../bitcoin/bitcoin-api-factory';
 import { IEsploraApi } from '../bitcoin/esplora-api.interface';
 import database from '../../database';
 
 class Mining {
   private blocksPriceIndexingRunning = false;

@@ -141,6 +142,9 @@ class Mining {
     const blockCount1w: number = await BlocksRepository.$blockCount(pool.id, '1w');
     const totalBlock1w: number = await BlocksRepository.$blockCount(null, '1w');
 
+    const avgHealth = await BlocksRepository.$getAvgBlockHealthPerPoolId(pool.id);
+    const totalReward = await BlocksRepository.$getTotalRewardForPoolId(pool.id);
+
     let currentEstimatedHashrate = 0;
     try {
       currentEstimatedHashrate = await bitcoinClient.getNetworkHashPs(totalBlock24h);

@@ -162,6 +166,8 @@ class Mining {
       },
       estimatedHashrate: currentEstimatedHashrate * (blockCount24h / totalBlock24h),
       reportedHashrate: null,
+      avgBlockHealth: avgHealth,
+      totalReward: totalReward,
     };
   }
@@ -432,7 +432,7 @@ class WebsocketHandler {
     }
 
     if (Common.indexingEnabled() && memPool.isInSync()) {
-      const { censored, added, fresh, score } = Audit.auditBlock(transactions, projectedBlocks, auditMempool);
+      const { censored, added, fresh, score, similarity } = Audit.auditBlock(transactions, projectedBlocks, auditMempool);
       const matchRate = Math.round(score * 100 * 100) / 100;
 
       const stripped = projectedBlocks[0]?.transactions ? projectedBlocks[0].transactions.map((tx) => {

@@ -464,8 +464,14 @@ class WebsocketHandler {
 
       if (block.extras) {
         block.extras.matchRate = matchRate;
+        block.extras.similarity = similarity;
       }
-    }
+    } else if (block.extras) {
+      const mBlocks = mempoolBlocks.getMempoolBlocksWithTransactions();
+      if (mBlocks?.length && mBlocks[0].transactions) {
+        block.extras.similarity = Common.getSimilarity(mBlocks[0], transactions);
+      }
+    }
 
     const removed: string[] = [];
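One idiom worth spelling out: `Math.round(score * 100 * 100) / 100` converts the 0–1 audit score into a percentage rounded to two decimal places:

```typescript
const score = 0.987654;                                 // raw audit score
const matchRate = Math.round(score * 100 * 100) / 100;  // 98.7654 -> 9876.54 -> 9877 -> 98.77
console.log(matchRate); // 98.77
```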