Merge branch 'master' into docker_vars_test

Authored by Felipe Knorr Kuhn on 2023-05-26 07:16:29 -07:00, committed by GitHub
20 changed files with 133 additions and 55 deletions

backend/src/api/audit.ts

@@ -6,14 +6,15 @@ const PROPAGATION_MARGIN = 180; // in seconds, time since a transaction is first
 class Audit {
   auditBlock(transactions: TransactionExtended[], projectedBlocks: MempoolBlockWithTransactions[], mempool: { [txId: string]: TransactionExtended })
-  : { censored: string[], added: string[], fresh: string[], score: number, similarity: number } {
+  : { censored: string[], added: string[], fresh: string[], sigop: string[], score: number, similarity: number } {
     if (!projectedBlocks?.[0]?.transactionIds || !mempool) {
-      return { censored: [], added: [], fresh: [], score: 0, similarity: 1 };
+      return { censored: [], added: [], fresh: [], sigop: [], score: 0, similarity: 1 };
     }

     const matches: string[] = []; // present in both mined block and template
     const added: string[] = []; // present in mined block, not in template
     const fresh: string[] = []; // missing, but firstSeen within PROPAGATION_MARGIN
+    const sigop: string[] = []; // missing, but possibly has an adjusted vsize due to high sigop count
     const isCensored = {}; // missing, without excuse
     const isDisplaced = {};
     let displacedWeight = 0;

@@ -37,6 +38,8 @@ class Audit {
         // tx is recent, may have reached the miner too late for inclusion
         if (mempool[txid]?.firstSeen != null && (now - (mempool[txid]?.firstSeen || 0)) <= PROPAGATION_MARGIN) {
           fresh.push(txid);
+        } else if (this.isPossibleHighSigop(mempool[txid])) {
+          sigop.push(txid);
         } else {
           isCensored[txid] = true;
         }

@@ -137,10 +140,19 @@ class Audit {
       censored: Object.keys(isCensored),
       added,
       fresh,
+      sigop,
       score,
       similarity,
     };
   }

+  // Detect transactions with a possibly adjusted vsize due to high sigop count
+  // very rough heuristic based on number of OP_CHECKMULTISIG outputs
+  // will miss cases with other sources of sigops
+  isPossibleHighSigop(tx: TransactionExtended): boolean {
+    const numBareMultisig = tx.vout.reduce((count, vout) => count + (vout.scriptpubkey_asm.includes('OP_CHECKMULTISIG') ? 1 : 0), 0);
+    return (numBareMultisig * 400) > tx.vsize;
+  }
 }

 export default new Audit();
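For context on the 400 vbyte constant in isPossibleHighSigop() (background not stated in the diff, but consistent with Bitcoin Core's default policy): a bare OP_CHECKMULTISIG output counts as 20 legacy sigops, each scaled by the witness factor of 4, and Core floors a transaction's virtual size at sigop cost times -bytespersigop (default 20). A minimal sketch of that arithmetic:

  // Sketch only: constants are Bitcoin Core policy defaults, not part of this PR.
  const BYTES_PER_SIGOP = 20;                  // Core's default -bytespersigop
  const WITNESS_SCALE_FACTOR = 4;
  const SIGOP_COST_PER_BARE_MULTISIG = 20 * WITNESS_SCALE_FACTOR; // 20 legacy sigops per output

  // Mirrors Core's GetVirtualTransactionSize(): vsize is floored by sigop cost.
  function sigopAdjustedVsize(weightWu: number, sigopCost: number): number {
    return Math.ceil(Math.max(weightWu, sigopCost * BYTES_PER_SIGOP) / WITNESS_SCALE_FACTOR);
  }

  // A single bare multisig output implies at least (80 * 20) / 4 = 400 vbytes,
  // which is where the numBareMultisig * 400 threshold above comes from.
  console.log(sigopAdjustedVsize(0, SIGOP_COST_PER_BARE_MULTISIG)); // 400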

backend/src/api/blocks.ts

@@ -306,7 +306,7 @@ class Blocks {
     }

     const asciiScriptSig = transactionUtils.hex2ascii(txMinerInfo.vin[0].scriptsig);
-    const address = txMinerInfo.vout[0].scriptpubkey_address;
+    const addresses = txMinerInfo.vout.map((vout) => vout.scriptpubkey_address).filter((address) => address);

     let pools: PoolTag[] = [];
     if (config.DATABASE.ENABLED === true) {
@@ -316,11 +316,13 @@ class Blocks {
     }

     for (let i = 0; i < pools.length; ++i) {
-      if (address !== undefined) {
-        const addresses: string[] = typeof pools[i].addresses === 'string' ?
+      if (addresses.length) {
+        const poolAddresses: string[] = typeof pools[i].addresses === 'string' ?
           JSON.parse(pools[i].addresses) : pools[i].addresses;
-        if (addresses.indexOf(address) !== -1) {
-          return pools[i];
+        for (let y = 0; y < poolAddresses.length; y++) {
+          if (addresses.indexOf(poolAddresses[y]) !== -1) {
+            return pools[i];
+          }
         }
       }
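Net effect of these two hunks: pool identification now compares every coinbase output address against each pool's payout-address list, instead of only vout[0]. A standalone sketch of the new matching logic (function name and addresses hypothetical):

  // Hypothetical standalone form of the matching introduced above: a pool
  // matches when any of its known payout addresses appears among the
  // coinbase outputs.
  function poolMatches(coinbaseAddresses: string[], poolAddresses: string[]): boolean {
    const outputs = new Set(coinbaseAddresses);
    return poolAddresses.some((addr) => outputs.has(addr));
  }

  // e.g. a coinbase paying two outputs, the second being a known pool address:
  poolMatches(['bc1q...change', 'bc1q...pool'], ['bc1q...pool']); // true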

backend/src/api/database-migration.ts

@@ -7,7 +7,7 @@ import cpfpRepository from '../repositories/CpfpRepository';
 import { RowDataPacket } from 'mysql2';

 class DatabaseMigration {
-  private static currentVersion = 59;
+  private static currentVersion = 60;
   private queryTimeout = 3600_000;
   private statisticsAddedIndexed = false;
   private uniqueLogs: string[] = [];

@@ -516,6 +516,11 @@ class DatabaseMigration {
       // https://github.com/mempool/mempool/issues/3360
       await this.$executeQuery(`TRUNCATE prices`);
     }
+
+    if (databaseSchemaVersion < 60 && isBitcoin === true) {
+      await this.$executeQuery('ALTER TABLE `blocks_audits` ADD sigop_txs JSON DEFAULT "[]"');
+      await this.updateToSchemaVersion(60);
+    }
   }
/**

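Note that the currentVersion bump and the new guarded block must move in lockstep. A condensed sketch of the migration pattern used above (the helper declarations are stand-ins for the class's private methods):

  // Each step is gated on the persisted schema version, so a restarted backend
  // re-enters safely and only the missing steps run.
  declare function $executeQuery(sql: string): Promise<void>;
  declare function updateToSchemaVersion(v: number): Promise<void>;

  async function runMigrations(databaseSchemaVersion: number, isBitcoin: boolean): Promise<void> {
    if (databaseSchemaVersion < 60 && isBitcoin === true) {
      // DEFAULT "[]" keeps pre-existing audit rows readable as empty JSON arrays
      await $executeQuery('ALTER TABLE `blocks_audits` ADD sigop_txs JSON DEFAULT "[]"');
      await updateToSchemaVersion(60); // persist progress before any later step
    }
  }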
backend/src/api/disk-cache.ts

@@ -21,6 +21,7 @@ class DiskCache {
   private static RBF_FILE_NAME = config.MEMPOOL.CACHE_DIR + '/rbfcache.json';
   private static CHUNK_FILES = 25;

   private isWritingCache = false;
+  private ignoreBlocksCache = false;

   private semaphore: { resume: (() => void)[], locks: number } = {
     resume: [],
@@ -218,8 +219,13 @@ class DiskCache {
       }

       await memPool.$setMempool(data.mempool);
-      blocks.setBlocks(data.blocks);
-      blocks.setBlockSummaries(data.blockSummaries || []);
+      if (!this.ignoreBlocksCache) {
+        blocks.setBlocks(data.blocks);
+        blocks.setBlockSummaries(data.blockSummaries || []);
+      } else {
+        logger.info('Re-saving cache with empty recent blocks data');
+        await this.$saveCacheToDisk(true);
+      }
     } catch (e) {
       logger.warn('Failed to parse mempoool and blocks cache. Skipping. Reason: ' + (e instanceof Error ? e.message : e));
     }
@@ -273,6 +279,10 @@ class DiskCache {
       }
     }
   }

+  public setIgnoreBlocksCache(): void {
+    this.ignoreBlocksCache = true;
+  }
 }

 export default new DiskCache();
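How the new flag is meant to be used (see the PoolsParser hunk below): a caller arms it before the cache is loaded, so the mempool portion is restored while the recent-blocks portion is dropped and the cache file is rewritten without it. A hypothetical calling sequence (the loader name and import path are assumed from context, not shown in this diff):

  import diskCache from './api/disk-cache'; // import path assumed

  diskCache.setIgnoreBlocksCache();    // arm the flag before loading
  await diskCache.$loadMempoolCache(); // mempool restored; blocks skipped and
                                       // the cache re-saved without them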

backend/src/api/pools-parser.ts

@@ -41,7 +41,7 @@ class PoolsParser {
   public async migratePoolsJson(): Promise<void> {
     // We also need to wipe the backend cache to make sure we don't serve blocks with
     // the wrong mining pool (usually happen with unknown blocks)
-    diskCache.wipeCache();
+    diskCache.setIgnoreBlocksCache();

     await this.$insertUnknownPool();
@@ -118,10 +118,6 @@ class PoolsParser {
    * @param pool
    */
   private async $deleteBlocksForPool(pool: PoolTag): Promise<void> {
-    if (config.MEMPOOL.AUTOMATIC_BLOCK_REINDEXING === false) {
-      return;
-    }
-
     // Get oldest blocks mined by the pool and assume pools-v2.json updates only concern most recent years
     // Ignore early days of Bitcoin as there were no mining pool yet
     const [oldestPoolBlock]: any[] = await DB.query(`

backend/src/api/websocket-handler.ts

@@ -557,7 +557,7 @@ class WebsocketHandler {
     }

     if (Common.indexingEnabled() && memPool.isInSync()) {
-      const { censored, added, fresh, score, similarity } = Audit.auditBlock(transactions, projectedBlocks, auditMempool);
+      const { censored, added, fresh, sigop, score, similarity } = Audit.auditBlock(transactions, projectedBlocks, auditMempool);
       const matchRate = Math.round(score * 100 * 100) / 100;

       const stripped = projectedBlocks[0]?.transactions ? projectedBlocks[0].transactions.map((tx) => {
@@ -584,6 +584,7 @@ class WebsocketHandler {
         addedTxs: added,
         missingTxs: censored,
         freshTxs: fresh,
+        sigopTxs: sigop,
         matchRate: matchRate,
       });
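For downstream consumers, the new field is one more array of txids in the audit payload. A hypothetical client-side typing for the fragment assembled above (field names mirror the object literal in this hunk):

  interface BlockAuditPayload {
    addedTxs: string[];   // in the mined block but not in the template
    missingTxs: string[]; // censored: in the template, absent without excuse
    freshTxs: string[];   // absent, but first seen within PROPAGATION_MARGIN
    sigopTxs: string[];   // absent, likely due to sigop-adjusted vsize (new here)
    matchRate: number;    // score * 100, rounded to two decimal places
  }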

backend/src/mempool.interfaces.ts

@@ -32,6 +32,7 @@ export interface BlockAudit {
   hash: string,
   missingTxs: string[],
   freshTxs: string[],
+  sigopTxs: string[],
   addedTxs: string[],
   matchRate: number,
 }

backend/src/repositories/BlocksAuditsRepository.ts

@@ -6,9 +6,9 @@ import { BlockAudit, AuditScore } from '../mempool.interfaces';
 class BlocksAuditRepositories {
   public async $saveAudit(audit: BlockAudit): Promise<void> {
     try {
-      await DB.query(`INSERT INTO blocks_audits(time, height, hash, missing_txs, added_txs, fresh_txs, match_rate)
-        VALUE (FROM_UNIXTIME(?), ?, ?, ?, ?, ?, ?)`, [audit.time, audit.height, audit.hash, JSON.stringify(audit.missingTxs),
-        JSON.stringify(audit.addedTxs), JSON.stringify(audit.freshTxs), audit.matchRate]);
+      await DB.query(`INSERT INTO blocks_audits(time, height, hash, missing_txs, added_txs, fresh_txs, sigop_txs, match_rate)
+        VALUE (FROM_UNIXTIME(?), ?, ?, ?, ?, ?, ?, ?)`, [audit.time, audit.height, audit.hash, JSON.stringify(audit.missingTxs),
+        JSON.stringify(audit.addedTxs), JSON.stringify(audit.freshTxs), JSON.stringify(audit.sigopTxs), audit.matchRate]);
     } catch (e: any) {
       if (e.errno === 1062) { // ER_DUP_ENTRY - This scenario is possible upon node backend restart
         logger.debug(`Cannot save block audit for block ${audit.hash} because it has already been indexed, ignoring`);
@@ -52,7 +52,7 @@ class BlocksAuditRepositories {
     const [rows]: any[] = await DB.query(
       `SELECT blocks.height, blocks.hash as id, UNIX_TIMESTAMP(blocks.blockTimestamp) as timestamp, blocks.size,
       blocks.weight, blocks.tx_count,
-      transactions, template, missing_txs as missingTxs, added_txs as addedTxs, fresh_txs as freshTxs, match_rate as matchRate
+      transactions, template, missing_txs as missingTxs, added_txs as addedTxs, fresh_txs as freshTxs, sigop_txs as sigopTxs, match_rate as matchRate
       FROM blocks_audits
       JOIN blocks ON blocks.hash = blocks_audits.hash
       JOIN blocks_summaries ON blocks_summaries.id = blocks_audits.hash
@@ -63,6 +63,7 @@ class BlocksAuditRepositories {
       rows[0].missingTxs = JSON.parse(rows[0].missingTxs);
       rows[0].addedTxs = JSON.parse(rows[0].addedTxs);
       rows[0].freshTxs = JSON.parse(rows[0].freshTxs);
+      rows[0].sigopTxs = JSON.parse(rows[0].sigopTxs);
       rows[0].transactions = JSON.parse(rows[0].transactions);
       rows[0].template = JSON.parse(rows[0].template);
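As with the neighboring fields, sigop_txs round-trips as serialized JSON: JSON.stringify() on INSERT (note the placeholder count grows from 7 to 8 to match the new column), JSON.parse() on read. An illustrative round-trip (txid is a placeholder):

  // Illustrative only; not part of this diff.
  const sigopTxs: string[] = ['4a5e1e4b...'];
  const stored: string = JSON.stringify(sigopTxs); // value bound to sigop_txs
  const revived: string[] = JSON.parse(stored);    // what API callers receive
  console.assert(revived[0] === sigopTxs[0]);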