save 'fresh' transactions in block audit repository
parent c43e4bb71b
commit 276470474d
@@ -10,9 +10,9 @@ const PROPAGATION_MARGIN = 180; // in seconds, time since a transaction is first
 
 class Audit {
   auditBlock(transactions: TransactionExtended[], projectedBlocks: MempoolBlockWithTransactions[], mempool: { [txId: string]: TransactionExtended })
-  : { censored: string[], added: string[], score: number } {
+  : { censored: string[], added: string[], fresh: string[], score: number } {
     if (!projectedBlocks?.[0]?.transactionIds || !mempool) {
-      return { censored: [], added: [], score: 0 };
+      return { censored: [], added: [], fresh: [], score: 0 };
     }
 
     const matches: string[] = []; // present in both mined block and template
@@ -83,7 +83,17 @@ class Audit {
       } else {
         if (!isDisplaced[tx.txid]) {
           added.push(tx.txid);
+        } else {
         }
+        let blockIndex = -1;
+        let index = -1;
+        projectedBlocks.forEach((block, bi) => {
+          const i = block.transactionIds.indexOf(tx.txid);
+          if (i >= 0) {
+            blockIndex = bi;
+            index = i;
+          }
+        });
         overflowWeight += tx.weight;
       }
       totalWeight += tx.weight;
@@ -119,6 +129,7 @@ class Audit {
     return {
       censored: Object.keys(isCensored),
       added,
+      fresh,
       score
     };
   }
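The hunks above widen auditBlock's return type with a fresh array, but the logic that actually populates it sits outside the visible diff. A minimal sketch of how such a classification could work, assuming transactions carry a firstSeen unix timestamp and reusing the PROPAGATION_MARGIN constant shown in the first hunk header:

const PROPAGATION_MARGIN = 180; // seconds; mirrors the constant referenced in the hunk header above

interface SeenTx {
  txid: string;
  firstSeen?: number; // unix timestamp (seconds) when the backend first saw the tx (assumed field)
}

function classifyUnexpectedTx(tx: SeenTx, nowSeconds: number): 'fresh' | 'added' {
  // A tx first seen within the propagation margin may simply have reached the miner
  // too late for template inclusion, so it is treated as "fresh" rather than "added".
  if (tx.firstSeen !== undefined && nowSeconds - tx.firstSeen <= PROPAGATION_MARGIN) {
    return 'fresh';
  }
  return 'added';
}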
@@ -4,7 +4,7 @@ import logger from '../logger';
 import { Common } from './common';
 
 class DatabaseMigration {
-  private static currentVersion = 44;
+  private static currentVersion = 45;
   private queryTimeout = 900_000;
   private statisticsAddedIndexed = false;
   private uniqueLogs: string[] = [];
@@ -365,6 +365,10 @@ class DatabaseMigration {
       await this.$executeQuery('TRUNCATE TABLE `blocks_audits`');
       await this.$executeQuery('UPDATE blocks_summaries SET template = NULL');
     }
+
+    if (databaseSchemaVersion < 45 && isBitcoin === true) {
+      await this.$executeQuery('ALTER TABLE `blocks_audits` ADD fresh_txs JSON DEFAULT "[]"');
+    }
   }
 
   /**
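Schema version 45 adds a fresh_txs JSON column (defaulting to "[]") to blocks_audits, gated to Bitcoin instances only. A quick way to confirm the migration applied, assuming a mysql2 client and placeholder connection settings:

import mysql from 'mysql2/promise';

async function hasFreshTxsColumn(): Promise<boolean> {
  // Placeholder credentials; substitute the instance's real mempool database settings.
  const connection = await mysql.createConnection({ host: '127.0.0.1', user: 'mempool', password: 'mempool', database: 'mempool' });
  const [rows]: any[] = await connection.query("SHOW COLUMNS FROM blocks_audits LIKE 'fresh_txs'");
  await connection.end();
  return Array.isArray(rows) && rows.length === 1;
}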
@@ -428,7 +428,7 @@ class WebsocketHandler {
     if (Common.indexingEnabled() && memPool.isInSync()) {
       const projectedBlocks = mempoolBlocks.getMempoolBlocksWithTransactions();
 
-      const { censored, added, score } = Audit.auditBlock(transactions, projectedBlocks, _memPool);
+      const { censored, added, fresh, score } = Audit.auditBlock(transactions, projectedBlocks, _memPool);
       matchRate = Math.round(score * 100 * 100) / 100;
 
       const stripped = projectedBlocks[0]?.transactions ? projectedBlocks[0].transactions.map((tx) => {
@@ -454,6 +454,7 @@ class WebsocketHandler {
         hash: block.id,
         addedTxs: added,
         missingTxs: censored,
+        freshTxs: fresh,
         matchRate: matchRate,
       });
 
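The handler now threads fresh from Audit.auditBlock into the persisted audit record. A self-contained sketch of that mapping, with local stand-in types (the field names come from the hunks; everything else is assumed):

interface AuditResult { censored: string[]; added: string[]; fresh: string[]; score: number; }

interface BlockAuditRecord {
  time: number;
  height: number;
  hash: string;
  missingTxs: string[];
  addedTxs: string[];
  freshTxs: string[];
  matchRate: number;
}

function toBlockAudit(result: AuditResult, height: number, hash: string, timestamp: number): BlockAuditRecord {
  return {
    time: timestamp,
    height,
    hash,
    missingTxs: result.censored,
    addedTxs: result.added,
    freshTxs: result.fresh,
    matchRate: Math.round(result.score * 100 * 100) / 100, // score expressed as a percentage with two decimals
  };
}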
@@ -28,6 +28,7 @@ export interface BlockAudit {
   height: number,
   hash: string,
   missingTxs: string[],
+  freshTxs: string[],
   addedTxs: string[],
   matchRate: number,
 }
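With freshTxs on the interface, consumers can distinguish recently broadcast transactions from genuinely unexpected ones. An illustrative helper (the interface fields are copied from context; the time field and the summary function are assumptions):

interface BlockAudit {
  time: number; // present on the persisted record (see the $saveAudit hunk below)
  height: number;
  hash: string;
  missingTxs: string[];
  freshTxs: string[];
  addedTxs: string[];
  matchRate: number;
}

function summarizeAudit(audit: BlockAudit): string {
  return `block ${audit.height}: ${audit.missingTxs.length} missing, `
    + `${audit.addedTxs.length} added, ${audit.freshTxs.length} fresh, `
    + `match rate ${audit.matchRate}%`;
}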
@@ -5,9 +5,9 @@ import { BlockAudit, AuditScore } from '../mempool.interfaces';
 class BlocksAuditRepositories {
   public async $saveAudit(audit: BlockAudit): Promise<void> {
     try {
-      await DB.query(`INSERT INTO blocks_audits(time, height, hash, missing_txs, added_txs, match_rate)
-        VALUE (FROM_UNIXTIME(?), ?, ?, ?, ?, ?)`, [audit.time, audit.height, audit.hash, JSON.stringify(audit.missingTxs),
-        JSON.stringify(audit.addedTxs), audit.matchRate]);
+      await DB.query(`INSERT INTO blocks_audits(time, height, hash, missing_txs, added_txs, fresh_txs, match_rate)
+        VALUE (FROM_UNIXTIME(?), ?, ?, ?, ?, ?, ?)`, [audit.time, audit.height, audit.hash, JSON.stringify(audit.missingTxs),
+        JSON.stringify(audit.addedTxs), JSON.stringify(audit.freshTxs), audit.matchRate]);
     } catch (e: any) {
       if (e.errno === 1062) { // ER_DUP_ENTRY - This scenario is possible upon node backend restart
         logger.debug(`Cannot save block audit for block ${audit.hash} because it has already been indexed, ignoring`);
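The INSERT now carries seven placeholders for seven columns, with all three transaction lists serialized via JSON.stringify. A standalone sketch of the same write using mysql2/promise directly, with placeholder connection options and the same ER_DUP_ENTRY tolerance (the project's own DB wrapper is not reproduced here):

import mysql from 'mysql2/promise';

interface BlockAuditRow {
  time: number;
  height: number;
  hash: string;
  missingTxs: string[];
  addedTxs: string[];
  freshTxs: string[];
  matchRate: number;
}

async function saveAuditStandalone(audit: BlockAuditRow): Promise<void> {
  // Placeholder credentials; substitute the instance's real mempool database settings.
  const pool = mysql.createPool({ host: '127.0.0.1', user: 'mempool', password: 'mempool', database: 'mempool' });
  try {
    await pool.query(
      `INSERT INTO blocks_audits(time, height, hash, missing_txs, added_txs, fresh_txs, match_rate)
       VALUES (FROM_UNIXTIME(?), ?, ?, ?, ?, ?, ?)`,
      [audit.time, audit.height, audit.hash, JSON.stringify(audit.missingTxs),
        JSON.stringify(audit.addedTxs), JSON.stringify(audit.freshTxs), audit.matchRate]
    );
  } catch (e: any) {
    if (e?.errno === 1062) { // ER_DUP_ENTRY: the block was already audited, e.g. after a backend restart
      return;
    }
    throw e;
  } finally {
    await pool.end();
  }
}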
@@ -51,7 +51,7 @@ class BlocksAuditRepositories {
       const [rows]: any[] = await DB.query(
         `SELECT blocks.height, blocks.hash as id, UNIX_TIMESTAMP(blocks.blockTimestamp) as timestamp, blocks.size,
         blocks.weight, blocks.tx_count,
-        transactions, template, missing_txs as missingTxs, added_txs as addedTxs, match_rate as matchRate
+        transactions, template, missing_txs as missingTxs, added_txs as addedTxs, fresh_txs as freshTxs, match_rate as matchRate
         FROM blocks_audits
         JOIN blocks ON blocks.hash = blocks_audits.hash
         JOIN blocks_summaries ON blocks_summaries.id = blocks_audits.hash
@@ -61,6 +61,7 @@ class BlocksAuditRepositories {
       if (rows.length) {
         rows[0].missingTxs = JSON.parse(rows[0].missingTxs);
         rows[0].addedTxs = JSON.parse(rows[0].addedTxs);
+        rows[0].freshTxs = JSON.parse(rows[0].freshTxs);
         rows[0].transactions = JSON.parse(rows[0].transactions);
         rows[0].template = JSON.parse(rows[0].template);
       }
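Because the JSON columns come back from MySQL as strings here, freshTxs is parsed back into a string[] alongside the other lists. An illustrative consumer of the parsed row (the type subset and helper are assumptions):

interface ParsedAudit {
  missingTxs: string[];
  addedTxs: string[];
  freshTxs: string[];
}

function wasFresh(audit: ParsedAudit, txid: string): boolean {
  // "fresh" txs were broadcast too close to the block to count against the template match.
  return audit.freshTxs.includes(txid);
}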