From 67295c1b9bc0a732828a51fc1b0d94fb1f29deee Mon Sep 17 00:00:00 2001
From: natsoni
Date: Fri, 4 Oct 2024 22:15:00 +0900
Subject: [PATCH 1/3] add debug.log path to backend config

---
 backend/mempool-config.sample.json | 3 ++-
 backend/src/config.ts              | 4 +++-
 docker/backend/mempool-config.json | 3 ++-
 docker/backend/start.sh            | 2 ++
 4 files changed, 9 insertions(+), 3 deletions(-)

diff --git a/backend/mempool-config.sample.json b/backend/mempool-config.sample.json
index ce403e943..fd57b5c29 100644
--- a/backend/mempool-config.sample.json
+++ b/backend/mempool-config.sample.json
@@ -45,7 +45,8 @@
     "PASSWORD": "mempool",
     "TIMEOUT": 60000,
     "COOKIE": false,
-    "COOKIE_PATH": "/path/to/bitcoin/.cookie"
+    "COOKIE_PATH": "/path/to/bitcoin/.cookie",
+    "DEBUG_LOG_PATH": "/path/to/bitcoin/debug.log"
   },
   "ELECTRUM": {
     "HOST": "127.0.0.1",
diff --git a/backend/src/config.ts b/backend/src/config.ts
index a3e0a40af..1c303b13d 100644
--- a/backend/src/config.ts
+++ b/backend/src/config.ts
@@ -85,6 +85,7 @@ interface IConfig {
     TIMEOUT: number;
     COOKIE: boolean;
     COOKIE_PATH: string;
+    DEBUG_LOG_PATH: string;
   };
   SECOND_CORE_RPC: {
     HOST: string;
@@ -225,7 +226,8 @@ const defaults: IConfig = {
     'PASSWORD': 'mempool',
     'TIMEOUT': 60000,
     'COOKIE': false,
-    'COOKIE_PATH': '/bitcoin/.cookie'
+    'COOKIE_PATH': '/bitcoin/.cookie',
+    'DEBUG_LOG_PATH': '',
   },
   'SECOND_CORE_RPC': {
     'HOST': '127.0.0.1',
diff --git a/docker/backend/mempool-config.json b/docker/backend/mempool-config.json
index 79cd14644..335bec3f0 100644
--- a/docker/backend/mempool-config.json
+++ b/docker/backend/mempool-config.json
@@ -46,7 +46,8 @@
     "PASSWORD": "__CORE_RPC_PASSWORD__",
     "TIMEOUT": __CORE_RPC_TIMEOUT__,
     "COOKIE": __CORE_RPC_COOKIE__,
-    "COOKIE_PATH": "__CORE_RPC_COOKIE_PATH__"
+    "COOKIE_PATH": "__CORE_RPC_COOKIE_PATH__",
+    "DEBUG_LOG_PATH": "__CORE_RPC_DEBUG_LOG_PATH__"
   },
   "ELECTRUM": {
     "HOST": "__ELECTRUM_HOST__",
diff --git a/docker/backend/start.sh b/docker/backend/start.sh
index 39ca95a2a..bd986310d 100755
--- a/docker/backend/start.sh
+++ b/docker/backend/start.sh
@@ -48,6 +48,7 @@ __CORE_RPC_PASSWORD__=${CORE_RPC_PASSWORD:=mempool}
 __CORE_RPC_TIMEOUT__=${CORE_RPC_TIMEOUT:=60000}
 __CORE_RPC_COOKIE__=${CORE_RPC_COOKIE:=false}
 __CORE_RPC_COOKIE_PATH__=${CORE_RPC_COOKIE_PATH:=""}
+__CORE_RPC_DEBUG_LOG_PATH__=${CORE_RPC_DEBUG_LOG_PATH:=""}
 
 # ELECTRUM
 __ELECTRUM_HOST__=${ELECTRUM_HOST:=127.0.0.1}
@@ -205,6 +206,7 @@ sed -i "s!__CORE_RPC_PASSWORD__!${__CORE_RPC_PASSWORD__}!g" mempool-config.json
 sed -i "s!__CORE_RPC_TIMEOUT__!${__CORE_RPC_TIMEOUT__}!g" mempool-config.json
 sed -i "s!__CORE_RPC_COOKIE__!${__CORE_RPC_COOKIE__}!g" mempool-config.json
 sed -i "s!__CORE_RPC_COOKIE_PATH__!${__CORE_RPC_COOKIE_PATH__}!g" mempool-config.json
+sed -i "s!__CORE_RPC_DEBUG_LOG_PATH__!${__CORE_RPC_DEBUG_LOG_PATH__}!g" mempool-config.json
 
 sed -i "s!__ELECTRUM_HOST__!${__ELECTRUM_HOST__}!g" mempool-config.json
 sed -i "s!__ELECTRUM_PORT__!${__ELECTRUM_PORT__}!g" mempool-config.json

From 1a75e3e317511d48f69cebab3077f80629577603 Mon Sep 17 00:00:00 2001
From: natsoni
Date: Fri, 4 Oct 2024 21:54:13 +0900
Subject: [PATCH 2/3] Store block first seen time in block audits

---
 .../__fixtures__/mempool-config.template.json |  3 +-
 backend/src/__tests__/config.test.ts          |  3 +-
 backend/src/api/audit.ts                      | 40 +++++++++++++++++--
 backend/src/api/database-migration.ts         |  7 +++-
 backend/src/api/websocket-handler.ts          |  3 +-
 backend/src/mempool.interfaces.ts             |  2 +
 backend/src/replication/AuditReplication.ts   |  3 +-
 .../repositories/BlocksAuditsRepository.ts    | 16 ++++++--
 8 files changed, 65 insertions(+), 12 deletions(-)

diff --git a/backend/src/__fixtures__/mempool-config.template.json b/backend/src/__fixtures__/mempool-config.template.json
index 3796b7f22..decdd5cf2 100644
--- a/backend/src/__fixtures__/mempool-config.template.json
+++ b/backend/src/__fixtures__/mempool-config.template.json
@@ -46,7 +46,8 @@
     "PASSWORD": "__CORE_RPC_PASSWORD__",
     "TIMEOUT": 1000,
     "COOKIE": false,
-    "COOKIE_PATH": "__CORE_RPC_COOKIE_PATH__"
+    "COOKIE_PATH": "__CORE_RPC_COOKIE_PATH__",
+    "DEBUG_LOG_PATH": "__CORE_RPC_DEBUG_LOG_PATH__"
   },
   "ELECTRUM": {
     "HOST": "__ELECTRUM_HOST__",
diff --git a/backend/src/__tests__/config.test.ts b/backend/src/__tests__/config.test.ts
index 53ab546a8..d712fd736 100644
--- a/backend/src/__tests__/config.test.ts
+++ b/backend/src/__tests__/config.test.ts
@@ -73,7 +73,8 @@ describe('Mempool Backend Config', () => {
       PASSWORD: 'mempool',
       TIMEOUT: 60000,
       COOKIE: false,
-      COOKIE_PATH: '/bitcoin/.cookie'
+      COOKIE_PATH: '/bitcoin/.cookie',
+      DEBUG_LOG_PATH: '',
     });
 
     expect(config.SECOND_CORE_RPC).toStrictEqual({
diff --git a/backend/src/api/audit.ts b/backend/src/api/audit.ts
index e09234cdc..b29c3bf2b 100644
--- a/backend/src/api/audit.ts
+++ b/backend/src/api/audit.ts
@@ -1,3 +1,4 @@
+import * as fs from 'fs';
 import config from '../config';
 import logger from '../logger';
 import { MempoolTransactionExtended, MempoolBlockWithTransactions } from '../mempool.interfaces';
@@ -7,10 +8,10 @@ import transactionUtils from './transaction-utils';
 const PROPAGATION_MARGIN = 180; // in seconds, time since a transaction is first seen after which it is assumed to have propagated to all miners
 
 class Audit {
-  auditBlock(height: number, transactions: MempoolTransactionExtended[], projectedBlocks: MempoolBlockWithTransactions[], mempool: { [txId: string]: MempoolTransactionExtended })
-  : { unseen: string[], censored: string[], added: string[], prioritized: string[], fresh: string[], sigop: string[], fullrbf: string[], accelerated: string[], score: number, similarity: number } {
+  auditBlock(height: number, transactions: MempoolTransactionExtended[], projectedBlocks: MempoolBlockWithTransactions[], mempool: { [txId: string]: MempoolTransactionExtended }, hash: string)
+  : { unseen: string[], censored: string[], added: string[], prioritized: string[], fresh: string[], sigop: string[], fullrbf: string[], accelerated: string[], score: number, similarity: number, firstSeen: string | undefined } {
     if (!projectedBlocks?.[0]?.transactionIds || !mempool) {
-      return { unseen: [], censored: [], added: [], prioritized: [], fresh: [], sigop: [], fullrbf: [], accelerated: [], score: 1, similarity: 1 };
+      return { unseen: [], censored: [], added: [], prioritized: [], fresh: [], sigop: [], fullrbf: [], accelerated: [], score: 1, similarity: 1, firstSeen: undefined };
     }
 
     const matches: string[] = []; // present in both mined block and template
@@ -176,6 +177,8 @@
     }
     const similarity = projectedWeight ? matchedWeight / projectedWeight : 1;
 
+    const firstSeen = this.getFirstSeenFromLogs(hash);
+
     return {
       unseen,
       censored: Object.keys(isCensored),
@@ -187,8 +190,39 @@
       accelerated,
       score,
       similarity,
+      firstSeen
     };
   }
+
+  getFirstSeenFromLogs(hash: string): string | undefined {
+    const debugLogPath = config.CORE_RPC.DEBUG_LOG_PATH;
+    if (debugLogPath) {
+      try {
+        const fileDescriptor = fs.openSync(debugLogPath, 'r');
+        const bufferSize = 2048; // Read the last few lines of the file
+        const buffer = Buffer.alloc(bufferSize);
+        const fileSize = fs.statSync(debugLogPath).size;
+        const chunkSize = Math.min(bufferSize, fileSize);
+        fs.readSync(fileDescriptor, buffer, 0, chunkSize, fileSize - chunkSize);
+        const lines = buffer.toString('utf8', 0, chunkSize).split('\n');
+        fs.closeSync(fileDescriptor);
+
+        for (let i = lines.length - 1; i >= 0; i--) {
+          const line = lines[i];
+          if (line && line.includes(`Saw new header hash=${hash}`)) {
+            // Extract time from log: "2021-08-31T12:34:56Z" or "2021-08-31T12:34:56.123456Z" if logtimemicros=1
+            const dateMatch = line.match(/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(?:\.?\d{6})?Z/);
+            if (dateMatch) {
+              return dateMatch[0].replace("T", " ").replace("Z", "");
+            }
+          }
+        }
+      } catch (e) {
+        logger.err(`Cannot parse block first seen time from Core logs. Reason: ` + (e instanceof Error ? e.message : e));
+      }
+    }
+    return undefined;
+  }
 }
 
 export default new Audit();
\ No newline at end of file
diff --git a/backend/src/api/database-migration.ts b/backend/src/api/database-migration.ts
index 95f8c8707..8efa9d42a 100644
--- a/backend/src/api/database-migration.ts
+++ b/backend/src/api/database-migration.ts
@@ -7,7 +7,7 @@ import cpfpRepository from '../repositories/CpfpRepository';
 import { RowDataPacket } from 'mysql2';
 
 class DatabaseMigration {
-  private static currentVersion = 82;
+  private static currentVersion = 83;
   private queryTimeout = 3600_000;
   private statisticsAddedIndexed = false;
   private uniqueLogs: string[] = [];
@@ -705,6 +705,11 @@
       await this.$fixBadV1AuditBlocks();
       await this.updateToSchemaVersion(82);
     }
+
+    if (databaseSchemaVersion < 83 && isBitcoin === true) {
+      await this.$executeQuery('ALTER TABLE `blocks_audits` ADD first_seen timestamp(6) DEFAULT NULL');
+      await this.updateToSchemaVersion(83);
+    }
   }
 
   /**
diff --git a/backend/src/api/websocket-handler.ts b/backend/src/api/websocket-handler.ts
index 2a047472e..387f5e837 100644
--- a/backend/src/api/websocket-handler.ts
+++ b/backend/src/api/websocket-handler.ts
@@ -975,7 +975,7 @@ class WebsocketHandler {
     }
 
     if (Common.indexingEnabled()) {
-      const { unseen, censored, added, prioritized, fresh, sigop, fullrbf, accelerated, score, similarity } = Audit.auditBlock(block.height, blockTransactions, projectedBlocks, auditMempool);
+      const { unseen, censored, added, prioritized, fresh, sigop, fullrbf, accelerated, score, similarity, firstSeen } = Audit.auditBlock(block.height, blockTransactions, projectedBlocks, auditMempool, block.id);
       const matchRate = Math.round(score * 100 * 100) / 100;
       const stripped = projectedBlocks[0]?.transactions ? projectedBlocks[0].transactions : [];
 
@@ -1012,6 +1012,7 @@
           matchRate: matchRate,
           expectedFees: totalFees,
           expectedWeight: totalWeight,
+          firstSeen: firstSeen,
         });
 
         if (block.extras) {
diff --git a/backend/src/mempool.interfaces.ts b/backend/src/mempool.interfaces.ts
index 6eee1a9ee..fde465afb 100644
--- a/backend/src/mempool.interfaces.ts
+++ b/backend/src/mempool.interfaces.ts
@@ -45,6 +45,7 @@ export interface BlockAudit {
   expectedFees?: number,
   expectedWeight?: number,
   template?: any[];
+  firstSeen?: string;
 }
 
 export interface TransactionAudit {
@@ -57,6 +58,7 @@
   conflict?: boolean;
   coinbase?: boolean;
   firstSeen?: number;
+  blockFirstSeen?: string;
 }
 
 export interface AuditScore {
diff --git a/backend/src/replication/AuditReplication.ts b/backend/src/replication/AuditReplication.ts
index 6f616dbbe..720cbd9d7 100644
--- a/backend/src/replication/AuditReplication.ts
+++ b/backend/src/replication/AuditReplication.ts
@@ -124,7 +124,8 @@ class AuditReplication {
       matchRate: auditSummary.matchRate,
       expectedFees: auditSummary.expectedFees,
       expectedWeight: auditSummary.expectedWeight,
-    });
+      firstSeen: auditSummary.firstSeen,
+    }, true);
     // add missing data to cached blocks
     const cachedBlock = blocks.getBlocks().find(block => block.id === blockHash);
     if (cachedBlock) {
diff --git a/backend/src/repositories/BlocksAuditsRepository.ts b/backend/src/repositories/BlocksAuditsRepository.ts
index 3b3f79ce0..fabb7ecd9 100644
--- a/backend/src/repositories/BlocksAuditsRepository.ts
+++ b/backend/src/repositories/BlocksAuditsRepository.ts
@@ -15,11 +15,11 @@ interface MigrationAudit {
 }
 
 class BlocksAuditRepositories {
-  public async $saveAudit(audit: BlockAudit): Promise<void> {
+  public async $saveAudit(audit: BlockAudit, replication = false): Promise<void> {
     try {
-      await DB.query(`INSERT INTO blocks_audits(version, time, height, hash, unseen_txs, missing_txs, added_txs, prioritized_txs, fresh_txs, sigop_txs, fullrbf_txs, accelerated_txs, match_rate, expected_fees, expected_weight)
-        VALUE (?, FROM_UNIXTIME(?), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`, [audit.version, audit.time, audit.height, audit.hash, JSON.stringify(audit.unseenTxs), JSON.stringify(audit.missingTxs),
-        JSON.stringify(audit.addedTxs), JSON.stringify(audit.prioritizedTxs), JSON.stringify(audit.freshTxs), JSON.stringify(audit.sigopTxs), JSON.stringify(audit.fullrbfTxs), JSON.stringify(audit.acceleratedTxs), audit.matchRate, audit.expectedFees, audit.expectedWeight]);
+      await DB.query(`INSERT INTO blocks_audits(version, time, height, hash, unseen_txs, missing_txs, added_txs, prioritized_txs, fresh_txs, sigop_txs, fullrbf_txs, accelerated_txs, match_rate, expected_fees, expected_weight, first_seen)
+        VALUE (?, FROM_UNIXTIME(?), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ${replication ? 'FROM_UNIXTIME(?)' : '?'})`, [audit.version, audit.time, audit.height, audit.hash, JSON.stringify(audit.unseenTxs), JSON.stringify(audit.missingTxs),
+        JSON.stringify(audit.addedTxs), JSON.stringify(audit.prioritizedTxs), JSON.stringify(audit.freshTxs), JSON.stringify(audit.sigopTxs), JSON.stringify(audit.fullrbfTxs), JSON.stringify(audit.acceleratedTxs), audit.matchRate, audit.expectedFees, audit.expectedWeight, audit.firstSeen]);
     } catch (e: any) {
       if (e.errno === 1062) { // ER_DUP_ENTRY - This scenario is possible upon node backend restart
         logger.debug(`Cannot save block audit for block ${audit.hash} because it has already been indexed, ignoring`);
@@ -78,6 +78,7 @@
         blocks_audits.height,
         blocks_audits.hash as id,
         UNIX_TIMESTAMP(blocks_audits.time) as timestamp,
+        UNIX_TIMESTAMP(blocks_audits.first_seen) as firstSeen,
         template,
         unseen_txs as unseenTxs,
         missing_txs as missingTxs,
@@ -96,6 +97,7 @@
       `, [hash]);
 
       if (rows.length) {
+        console.log(rows[0].firstSeen);
         rows[0].unseenTxs = JSON.parse(rows[0].unseenTxs);
         rows[0].missingTxs = JSON.parse(rows[0].missingTxs);
         rows[0].addedTxs = JSON.parse(rows[0].addedTxs);
@@ -106,6 +108,10 @@
         rows[0].acceleratedTxs = JSON.parse(rows[0].acceleratedTxs);
         rows[0].template = JSON.parse(rows[0].template);
 
+        if (!rows[0].firstSeen) {
+          delete rows[0].firstSeen;
+        }
+
         return rows[0];
       }
       return null;
@@ -124,6 +130,7 @@
       const isPrioritized = blockAudit.prioritizedTxs.includes(txid);
       const isAccelerated = blockAudit.acceleratedTxs.includes(txid);
       const isConflict = blockAudit.fullrbfTxs.includes(txid);
+      const blockFirstSeen = blockAudit.firstSeen;
       let isExpected = false;
       let firstSeen = undefined;
       blockAudit.template?.forEach(tx => {
@@ -142,6 +149,7 @@
         conflict: isConflict,
         accelerated: isAccelerated,
         firstSeen,
+        ...(blockFirstSeen) && { blockFirstSeen },
       };
     }
     return null;

From dd0542bbe1bc3ca8fc835ec3e7171ef4e592d0d1 Mon Sep 17 00:00:00 2001
From: natsoni
Date: Wed, 9 Oct 2024 11:15:31 +0900
Subject: [PATCH 3/3] Store block first seen in db

---
 backend/src/api/audit.ts                      | 40 +------------
 backend/src/api/database-migration.ts         |  2 +-
 backend/src/api/websocket-handler.ts          | 13 ++++-
 backend/src/mempool.interfaces.ts             |  3 +-
 backend/src/replication/AuditReplication.ts   |  3 +-
 .../repositories/BlocksAuditsRepository.ts    | 16 ++---
 backend/src/repositories/BlocksRepository.ts  | 23 +++++++-
 backend/src/utils/file-read.ts                | 58 +++++++++++++++
 8 files changed, 101 insertions(+), 57 deletions(-)
 create mode 100644 backend/src/utils/file-read.ts

diff --git a/backend/src/api/audit.ts b/backend/src/api/audit.ts
index b29c3bf2b..e09234cdc 100644
--- a/backend/src/api/audit.ts
+++ b/backend/src/api/audit.ts
@@ -1,4 +1,3 @@
-import * as fs from 'fs';
 import config from '../config';
 import logger from '../logger';
 import { MempoolTransactionExtended, MempoolBlockWithTransactions } from '../mempool.interfaces';
@@ -8,10 +7,10 @@ import transactionUtils from './transaction-utils';
 const PROPAGATION_MARGIN = 180; // in seconds, time since a transaction is first seen after which it is assumed to have propagated to all miners
 
 class Audit {
-  auditBlock(height: number, transactions: MempoolTransactionExtended[], projectedBlocks: MempoolBlockWithTransactions[], mempool: { [txId: string]: MempoolTransactionExtended }, hash: string)
-  : { unseen: string[], censored: string[], added: string[], prioritized: string[], fresh: string[], sigop: string[], fullrbf: string[], accelerated: string[], score: number, similarity: number, firstSeen: string | undefined } {
+  auditBlock(height: number, transactions: MempoolTransactionExtended[], projectedBlocks: MempoolBlockWithTransactions[], mempool: { [txId: string]: MempoolTransactionExtended })
+  : { unseen: string[], censored: string[], added: string[], prioritized: string[], fresh: string[], sigop: string[], fullrbf: string[], accelerated: string[], score: number, similarity: number } {
     if (!projectedBlocks?.[0]?.transactionIds || !mempool) {
-      return { unseen: [], censored: [], added: [], prioritized: [], fresh: [], sigop: [], fullrbf: [], accelerated: [], score: 1, similarity: 1, firstSeen: undefined };
+      return { unseen: [], censored: [], added: [], prioritized: [], fresh: [], sigop: [], fullrbf: [], accelerated: [], score: 1, similarity: 1 };
     }
 
     const matches: string[] = []; // present in both mined block and template
@@ -177,8 +176,6 @@
     }
     const similarity = projectedWeight ? matchedWeight / projectedWeight : 1;
 
-    const firstSeen = this.getFirstSeenFromLogs(hash);
-
     return {
       unseen,
       censored: Object.keys(isCensored),
@@ -190,39 +187,8 @@
       accelerated,
       score,
       similarity,
-      firstSeen
     };
   }
-
-  getFirstSeenFromLogs(hash: string): string | undefined {
-    const debugLogPath = config.CORE_RPC.DEBUG_LOG_PATH;
-    if (debugLogPath) {
-      try {
-        const fileDescriptor = fs.openSync(debugLogPath, 'r');
-        const bufferSize = 2048; // Read the last few lines of the file
-        const buffer = Buffer.alloc(bufferSize);
-        const fileSize = fs.statSync(debugLogPath).size;
-        const chunkSize = Math.min(bufferSize, fileSize);
-        fs.readSync(fileDescriptor, buffer, 0, chunkSize, fileSize - chunkSize);
-        const lines = buffer.toString('utf8', 0, chunkSize).split('\n');
-        fs.closeSync(fileDescriptor);
-
-        for (let i = lines.length - 1; i >= 0; i--) {
-          const line = lines[i];
-          if (line && line.includes(`Saw new header hash=${hash}`)) {
-            // Extract time from log: "2021-08-31T12:34:56Z" or "2021-08-31T12:34:56.123456Z" if logtimemicros=1
-            const dateMatch = line.match(/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(?:\.?\d{6})?Z/);
-            if (dateMatch) {
-              return dateMatch[0].replace("T", " ").replace("Z", "");
-            }
-          }
-        }
-      } catch (e) {
-        logger.err(`Cannot parse block first seen time from Core logs. Reason: ` + (e instanceof Error ? e.message : e));
-      }
-    }
-    return undefined;
-  }
 }
 
 export default new Audit();
\ No newline at end of file
diff --git a/backend/src/api/database-migration.ts b/backend/src/api/database-migration.ts
index 8efa9d42a..ee9df9151 100644
--- a/backend/src/api/database-migration.ts
+++ b/backend/src/api/database-migration.ts
@@ -707,7 +707,7 @@ class DatabaseMigration {
     }
 
     if (databaseSchemaVersion < 83 && isBitcoin === true) {
-      await this.$executeQuery('ALTER TABLE `blocks_audits` ADD first_seen timestamp(6) DEFAULT NULL');
+      await this.$executeQuery('ALTER TABLE `blocks` ADD first_seen datetime(6) DEFAULT NULL');
       await this.updateToSchemaVersion(83);
     }
   }
diff --git a/backend/src/api/websocket-handler.ts b/backend/src/api/websocket-handler.ts
index 387f5e837..d0e8f2cbd 100644
--- a/backend/src/api/websocket-handler.ts
+++ b/backend/src/api/websocket-handler.ts
@@ -16,6 +16,7 @@ import transactionUtils from './transaction-utils';
 import rbfCache, { ReplacementInfo } from './rbf-cache';
 import difficultyAdjustment from './difficulty-adjustment';
 import feeApi from './fee-api';
+import BlocksRepository from '../repositories/BlocksRepository';
 import BlocksAuditsRepository from '../repositories/BlocksAuditsRepository';
 import BlocksSummariesRepository from '../repositories/BlocksSummariesRepository';
 import Audit from './audit';
@@ -34,6 +35,7 @@ interface AddressTransactions {
 }
 import bitcoinSecondClient from './bitcoin/bitcoin-second-client';
 import { calculateMempoolTxCpfp } from './cpfp';
+import { getRecentFirstSeen } from '../utils/file-read';
 
 // valid 'want' subscriptions
 const wantable = [
@@ -975,7 +977,7 @@ class WebsocketHandler {
     }
 
     if (Common.indexingEnabled()) {
-      const { unseen, censored, added, prioritized, fresh, sigop, fullrbf, accelerated, score, similarity, firstSeen } = Audit.auditBlock(block.height, blockTransactions, projectedBlocks, auditMempool, block.id);
+      const { unseen, censored, added, prioritized, fresh, sigop, fullrbf, accelerated, score, similarity } = Audit.auditBlock(block.height, blockTransactions, projectedBlocks, auditMempool);
       const matchRate = Math.round(score * 100 * 100) / 100;
       const stripped = projectedBlocks[0]?.transactions ? projectedBlocks[0].transactions : [];
 
@@ -1012,7 +1014,6 @@
           matchRate: matchRate,
           expectedFees: totalFees,
           expectedWeight: totalWeight,
-          firstSeen: firstSeen,
         });
 
         if (block.extras) {
@@ -1029,6 +1030,14 @@
         }
       }
 
+      if (config.CORE_RPC.DEBUG_LOG_PATH && block.extras) {
+        const firstSeen = getRecentFirstSeen(block.id);
+        if (firstSeen) {
+          BlocksRepository.$saveFirstSeenTime(block.id, firstSeen);
+          block.extras.firstSeen = firstSeen;
+        }
+      }
+
       const confirmedTxids: { [txid: string]: boolean } = {};
 
       // Update mempool to remove transactions included in the new block
diff --git a/backend/src/mempool.interfaces.ts b/backend/src/mempool.interfaces.ts
index fde465afb..dc703af21 100644
--- a/backend/src/mempool.interfaces.ts
+++ b/backend/src/mempool.interfaces.ts
@@ -45,7 +45,6 @@ export interface BlockAudit {
   expectedFees?: number,
   expectedWeight?: number,
   template?: any[];
-  firstSeen?: string;
 }
 
 export interface TransactionAudit {
@@ -58,7 +57,6 @@
   conflict?: boolean;
   coinbase?: boolean;
   firstSeen?: number;
-  blockFirstSeen?: string;
 }
 
 export interface AuditScore {
@@ -322,6 +320,7 @@
   segwitTotalSize: number;
   segwitTotalWeight: number;
   header: string;
+  firstSeen: number | null;
   utxoSetChange: number;
   // Requires coinstatsindex, will be set to NULL otherwise
   utxoSetSize: number | null;
diff --git a/backend/src/replication/AuditReplication.ts b/backend/src/replication/AuditReplication.ts
index 720cbd9d7..6f616dbbe 100644
--- a/backend/src/replication/AuditReplication.ts
+++ b/backend/src/replication/AuditReplication.ts
@@ -124,8 +124,7 @@ class AuditReplication {
       matchRate: auditSummary.matchRate,
       expectedFees: auditSummary.expectedFees,
       expectedWeight: auditSummary.expectedWeight,
-      firstSeen: auditSummary.firstSeen,
-    }, true);
+    });
     // add missing data to cached blocks
     const cachedBlock = blocks.getBlocks().find(block => block.id === blockHash);
     if (cachedBlock) {
diff --git a/backend/src/repositories/BlocksAuditsRepository.ts b/backend/src/repositories/BlocksAuditsRepository.ts
index fabb7ecd9..3b3f79ce0 100644
--- a/backend/src/repositories/BlocksAuditsRepository.ts
+++ b/backend/src/repositories/BlocksAuditsRepository.ts
@@ -15,11 +15,11 @@ interface MigrationAudit {
 }
 
 class BlocksAuditRepositories {
-  public async $saveAudit(audit: BlockAudit, replication = false): Promise<void> {
+  public async $saveAudit(audit: BlockAudit): Promise<void> {
     try {
-      await DB.query(`INSERT INTO blocks_audits(version, time, height, hash, unseen_txs, missing_txs, added_txs, prioritized_txs, fresh_txs, sigop_txs, fullrbf_txs, accelerated_txs, match_rate, expected_fees, expected_weight, first_seen)
-        VALUE (?, FROM_UNIXTIME(?), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ${replication ? 'FROM_UNIXTIME(?)' : '?'})`, [audit.version, audit.time, audit.height, audit.hash, JSON.stringify(audit.unseenTxs), JSON.stringify(audit.missingTxs),
-        JSON.stringify(audit.addedTxs), JSON.stringify(audit.prioritizedTxs), JSON.stringify(audit.freshTxs), JSON.stringify(audit.sigopTxs), JSON.stringify(audit.fullrbfTxs), JSON.stringify(audit.acceleratedTxs), audit.matchRate, audit.expectedFees, audit.expectedWeight, audit.firstSeen]);
+      await DB.query(`INSERT INTO blocks_audits(version, time, height, hash, unseen_txs, missing_txs, added_txs, prioritized_txs, fresh_txs, sigop_txs, fullrbf_txs, accelerated_txs, match_rate, expected_fees, expected_weight)
+        VALUE (?, FROM_UNIXTIME(?), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`, [audit.version, audit.time, audit.height, audit.hash, JSON.stringify(audit.unseenTxs), JSON.stringify(audit.missingTxs),
+        JSON.stringify(audit.addedTxs), JSON.stringify(audit.prioritizedTxs), JSON.stringify(audit.freshTxs), JSON.stringify(audit.sigopTxs), JSON.stringify(audit.fullrbfTxs), JSON.stringify(audit.acceleratedTxs), audit.matchRate, audit.expectedFees, audit.expectedWeight]);
     } catch (e: any) {
       if (e.errno === 1062) { // ER_DUP_ENTRY - This scenario is possible upon node backend restart
         logger.debug(`Cannot save block audit for block ${audit.hash} because it has already been indexed, ignoring`);
@@ -78,7 +78,6 @@
         blocks_audits.height,
         blocks_audits.hash as id,
         UNIX_TIMESTAMP(blocks_audits.time) as timestamp,
-        UNIX_TIMESTAMP(blocks_audits.first_seen) as firstSeen,
         template,
         unseen_txs as unseenTxs,
         missing_txs as missingTxs,
@@ -97,7 +96,6 @@
       `, [hash]);
 
       if (rows.length) {
-        console.log(rows[0].firstSeen);
         rows[0].unseenTxs = JSON.parse(rows[0].unseenTxs);
         rows[0].missingTxs = JSON.parse(rows[0].missingTxs);
         rows[0].addedTxs = JSON.parse(rows[0].addedTxs);
@@ -108,10 +106,6 @@
         rows[0].acceleratedTxs = JSON.parse(rows[0].acceleratedTxs);
         rows[0].template = JSON.parse(rows[0].template);
 
-        if (!rows[0].firstSeen) {
-          delete rows[0].firstSeen;
-        }
-
         return rows[0];
       }
       return null;
@@ -130,7 +124,6 @@
       const isPrioritized = blockAudit.prioritizedTxs.includes(txid);
       const isAccelerated = blockAudit.acceleratedTxs.includes(txid);
       const isConflict = blockAudit.fullrbfTxs.includes(txid);
-      const blockFirstSeen = blockAudit.firstSeen;
       let isExpected = false;
       let firstSeen = undefined;
       blockAudit.template?.forEach(tx => {
@@ -149,7 +142,6 @@
         conflict: isConflict,
         accelerated: isAccelerated,
         firstSeen,
-        ...(blockFirstSeen) && { blockFirstSeen },
       };
     }
     return null;
diff --git a/backend/src/repositories/BlocksRepository.ts b/backend/src/repositories/BlocksRepository.ts
index f958e5c8b..112cb1903 100644
--- a/backend/src/repositories/BlocksRepository.ts
+++ b/backend/src/repositories/BlocksRepository.ts
@@ -57,6 +57,7 @@ interface DatabaseBlock {
   utxoSetChange: number;
   utxoSetSize: number;
   totalInputAmt: number;
+  firstSeen: number;
 }
 
 const BLOCK_DB_FIELDS = `
@@ -99,7 +100,8 @@
   blocks.header,
   blocks.utxoset_change AS utxoSetChange,
   blocks.utxoset_size AS utxoSetSize,
-  blocks.total_input_amt AS totalInputAmt
+  blocks.total_input_amt AS totalInputAmt,
+  UNIX_TIMESTAMP(blocks.first_seen) AS firstSeen
 `;
 
 class BlocksRepository {
@@ -1021,6 +1023,24 @@
     }
   }
 
+  /**
+   * Save block first seen time
+   *
+   * @param id
+   */
+  public async $saveFirstSeenTime(id: string, firstSeen: number): Promise<void> {
+    try {
+      await DB.query(`
+        UPDATE blocks SET first_seen = FROM_UNIXTIME(?)
+        WHERE hash = ?`,
+        [firstSeen, id]
+      );
+    } catch (e) {
+      logger.err(`Cannot update block first seen time. Reason: ` + (e instanceof Error ? e.message : e));
+      throw e;
+    }
+  }
+
   /**
    * Convert a mysql row block into a BlockExtended. Note that you
    * must provide the correct field into dbBlk object param
@@ -1078,6 +1098,7 @@
     extras.utxoSetSize = dbBlk.utxoSetSize;
     extras.totalInputAmt = dbBlk.totalInputAmt;
     extras.virtualSize = dbBlk.weight / 4.0;
+    extras.firstSeen = dbBlk.firstSeen;
 
     // Re-org can happen after indexing so we need to always get the
     // latest state from core
diff --git a/backend/src/utils/file-read.ts b/backend/src/utils/file-read.ts
new file mode 100644
index 000000000..ddf8660c4
--- /dev/null
+++ b/backend/src/utils/file-read.ts
@@ -0,0 +1,58 @@
+import * as fs from 'fs';
+import logger from '../logger';
+import config from '../config';
+
+function readFile(filePath: string, bufferSize?: number): string[] {
+  const fileSize = fs.statSync(filePath).size;
+  const chunkSize = bufferSize || fileSize;
+  const fileDescriptor = fs.openSync(filePath, 'r');
+  const buffer = Buffer.alloc(chunkSize);
+
+  fs.readSync(fileDescriptor, buffer, 0, chunkSize, fileSize - chunkSize);
+  fs.closeSync(fileDescriptor);
+
+  const lines = buffer.toString('utf8', 0, chunkSize).split('\n');
+  return lines;
+}
+
+function extractDateFromLogLine(line: string): number | undefined {
+  // Extract time from log: "2021-08-31T12:34:56Z" or "2021-08-31T12:34:56.123456Z"
+  const dateMatch = line.match(/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(?:\.\d{6})?Z/);
+  if (!dateMatch) {
+    return undefined;
+  }
+
+  const dateStr = dateMatch[0];
+  const date = new Date(dateStr);
+  let timestamp = Math.floor(date.getTime() / 1000); // Remove decimal (microseconds are added later)
+
+  const timePart = dateStr.split('T')[1];
+  const microseconds = timePart.split('.')[1] || '';
+
+  if (!microseconds) {
+    return timestamp;
+  }
+
+  return parseFloat(timestamp + '.' + microseconds);
+}
+
+export function getRecentFirstSeen(hash: string): number | undefined {
+  const debugLogPath = config.CORE_RPC.DEBUG_LOG_PATH;
+  if (debugLogPath) {
+    try {
+      // Read the last few lines of debug.log
+      const lines = readFile(debugLogPath, 2048);
+
+      for (let i = lines.length - 1; i >= 0; i--) {
+        const line = lines[i];
+        if (line && line.includes(`Saw new header hash=${hash}`)) {
+          return extractDateFromLogLine(line);
+        }
+      }
+    } catch (e) {
+      logger.err(`Cannot parse block first seen time from Core logs. Reason: ` + (e instanceof Error ? e.message : e));
+    }
+  }
+
+  return undefined;
+}