diff --git a/backend/src/api/backend-info.ts b/backend/src/api/backend-info.ts
index 5a556ef18..d98675671 100644
--- a/backend/src/api/backend-info.ts
+++ b/backend/src/api/backend-info.ts
@@ -2,6 +2,7 @@ import * as fs from 'fs';
 import * as os from 'os';
 import logger from '../logger';
 import { IBackendInfo } from '../mempool.interfaces';
+const { spawnSync } = require('child_process');
 
 class BackendInfo {
   private gitCommitHash = '';
@@ -27,10 +28,23 @@ class BackendInfo {
   }
 
   private setLatestCommitHash(): void {
-    try {
-      this.gitCommitHash = fs.readFileSync('../.git/refs/heads/master').toString().trim();
-    } catch (e) {
-      logger.err('Could not load git commit info: ' + (e instanceof Error ? e.message : e));
+    //TODO: share this logic with `generate-config.js`
+    if (process.env.DOCKER_COMMIT_HASH) {
+      this.gitCommitHash = process.env.DOCKER_COMMIT_HASH;
+    } else {
+      try {
+        const gitRevParse = spawnSync('git', ['rev-parse', '--short', 'HEAD']);
+        if (!gitRevParse.error) {
+          const output = gitRevParse.stdout.toString('utf-8').replace(/[\n\r\s]+$/, '');
+          this.gitCommitHash = output ? output : '?';
+        } else if (gitRevParse.error.code === 'ENOENT') {
+          console.log('git not found, cannot parse git hash');
+          this.gitCommitHash = '?';
+        }
+      } catch (e: any) {
+        console.log('Could not load git commit info: ' + e.message);
+        this.gitCommitHash = '?';
+      }
     }
   }
 
diff --git a/backend/src/api/database-migration.ts b/backend/src/api/database-migration.ts
index ffa9041e3..20519cbf2 100644
--- a/backend/src/api/database-migration.ts
+++ b/backend/src/api/database-migration.ts
@@ -6,7 +6,7 @@ import logger from '../logger';
 const sleep = (ms: number) => new Promise(res => setTimeout(res, ms));
 
 class DatabaseMigration {
-  private static currentVersion = 15;
+  private static currentVersion = 16;
   private queryTimeout = 120000;
   private statisticsAddedIndexed = false;
@@ -175,6 +175,11 @@ class DatabaseMigration {
         await this.$executeQuery(connection, 'ALTER TABLE `hashrates` MODIFY `pool_id` SMALLINT UNSIGNED NOT NULL DEFAULT "0"');
       }
 
+      if (databaseSchemaVersion < 16 && isBitcoin === true) {
+        logger.warn(`'hashrates' table has been truncated. Re-indexing from scratch.`);
+        await this.$executeQuery(connection, 'TRUNCATE hashrates;'); // Need to re-index because we changed timestamps
+      }
+
       connection.release();
     } catch (e) {
       connection.release();
diff --git a/backend/src/api/mining.ts b/backend/src/api/mining.ts
index 8b277da57..35884efb3 100644
--- a/backend/src/api/mining.ts
+++ b/backend/src/api/mining.ts
@@ -86,9 +86,9 @@ class Mining {
     }
 
     // We only run this once a week
-    const latestTimestamp = await HashratesRepository.$getLatestRunTimestamp('last_weekly_hashrates_indexing');
+    const latestTimestamp = await HashratesRepository.$getLatestRunTimestamp('last_weekly_hashrates_indexing') * 1000;
     const now = new Date();
-    if ((now.getTime() / 1000) - latestTimestamp < 604800) {
+    if (now.getTime() - latestTimestamp < 604800000) {
       return;
     }
 
@@ -99,23 +99,23 @@ class Mining {
       const indexedTimestamp = await HashratesRepository.$getWeeklyHashrateTimestamps();
       const hashrates: any[] = [];
-      const genesisTimestamp = 1231006505; // bitcoin-cli getblock 000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f
+      const genesisTimestamp = 1231006505000; // bitcoin-cli getblock 000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f
       const lastMonday = new Date(now.setDate(now.getDate() - (now.getDay() + 6) % 7));
       const lastMondayMidnight = this.getDateMidnight(lastMonday);
-      let toTimestamp = Math.round((lastMondayMidnight.getTime() - 604800) / 1000);
+      let toTimestamp = lastMondayMidnight.getTime();
 
       const totalWeekIndexed = (await BlocksRepository.$blockCount(null, null)) / 1008;
       let indexedThisRun = 0;
       let totalIndexed = 0;
-      let startedAt = new Date().getTime() / 1000;
+      let startedAt = new Date().getTime();
 
       while (toTimestamp > genesisTimestamp) {
-        const fromTimestamp = toTimestamp - 604800;
+        const fromTimestamp = toTimestamp - 604800000;
 
         // Skip already indexed weeks
-        if (indexedTimestamp.includes(toTimestamp)) {
-          toTimestamp -= 604800;
+        if (indexedTimestamp.includes(toTimestamp / 1000)) {
+          toTimestamp -= 604800000;
           ++totalIndexed;
           continue;
         }
@@ -123,17 +123,17 @@ class Mining {
         // Check if we have blocks for the previous week (which mean that the week
         // we are currently indexing has complete data)
         const blockStatsPreviousWeek: any = await BlocksRepository.$blockCountBetweenTimestamp(
-          null, fromTimestamp - 604800, toTimestamp - 604800);
+          null, (fromTimestamp - 604800000) / 1000, (toTimestamp - 604800000) / 1000);
         if (blockStatsPreviousWeek.blockCount === 0) { // We are done indexing
           break;
         }
 
         const blockStats: any = await BlocksRepository.$blockCountBetweenTimestamp(
-          null, fromTimestamp, toTimestamp);
+          null, fromTimestamp / 1000, toTimestamp / 1000);
         const lastBlockHashrate = await bitcoinClient.getNetworkHashPs(blockStats.blockCount, blockStats.lastBlockHeight);
 
-        let pools = await PoolsRepository.$getPoolsInfoBetween(fromTimestamp, toTimestamp);
+        let pools = await PoolsRepository.$getPoolsInfoBetween(fromTimestamp / 1000, toTimestamp / 1000);
         const totalBlocks = pools.reduce((acc, pool) => acc + pool.blockCount, 0);
         pools = pools.map((pool: any) => {
           pool.hashrate = (pool.blockCount / totalBlocks) * lastBlockHashrate;
@@ -143,7 +143,7 @@ class Mining {
 
         for (const pool of pools) {
           hashrates.push({
-            hashrateTimestamp: toTimestamp,
+            hashrateTimestamp: toTimestamp / 1000,
             avgHashrate: pool['hashrate'],
             poolId: pool.poolId,
             share: pool['share'],
@@ -154,17 +154,17 @@ class Mining {
         await HashratesRepository.$saveHashrates(hashrates);
         hashrates.length = 0;
 
-        const elapsedSeconds = Math.max(1, Math.round((new Date().getTime() / 1000) - startedAt));
+        const elapsedSeconds = Math.max(1, Math.round((new Date().getTime()) - startedAt)) / 1000;
         if (elapsedSeconds > 1) {
           const weeksPerSeconds = (indexedThisRun / elapsedSeconds).toFixed(2);
-          const formattedDate = new Date(fromTimestamp * 1000).toUTCString();
+          const formattedDate = new Date(fromTimestamp).toUTCString();
           const weeksLeft = Math.round(totalWeekIndexed - totalIndexed);
           logger.debug(`Getting weekly pool hashrate for ${formattedDate} | ~${weeksPerSeconds} weeks/sec | ~${weeksLeft} weeks left to index`);
-          startedAt = new Date().getTime() / 1000;
+          startedAt = new Date().getTime();
           indexedThisRun = 0;
         }
 
-        toTimestamp -= 604800;
+        toTimestamp -= 604800000;
         ++indexedThisRun;
         ++totalIndexed;
       }
@@ -186,9 +186,9 @@ class Mining {
     }
 
     // We only run this once a day
-    const latestTimestamp = await HashratesRepository.$getLatestRunTimestamp('last_hashrates_indexing');
-    const now = new Date().getTime() / 1000;
-    if (now - latestTimestamp < 86400) {
+    const latestTimestamp = await HashratesRepository.$getLatestRunTimestamp('last_hashrates_indexing') * 1000;
+    const now = new Date().getTime();
+    if (now - latestTimestamp < 86400000) {
       return;
     }
 
@@ -198,22 +198,22 @@ class Mining {
       logger.info(`Indexing network daily hashrate`);
 
       const indexedTimestamp = (await HashratesRepository.$getNetworkDailyHashrate(null)).map(hashrate => hashrate.timestamp);
-      const genesisTimestamp = 1231006505; // bitcoin-cli getblock 000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f
+      const genesisTimestamp = 1231006505000; // bitcoin-cli getblock 000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f
       const lastMidnight = this.getDateMidnight(new Date());
-      let toTimestamp = Math.round(lastMidnight.getTime() / 1000);
+      let toTimestamp = Math.round(lastMidnight.getTime());
       const hashrates: any[] = [];
 
       const totalDayIndexed = (await BlocksRepository.$blockCount(null, null)) / 144;
       let indexedThisRun = 0;
       let totalIndexed = 0;
-      let startedAt = new Date().getTime() / 1000;
+      let startedAt = new Date().getTime();
 
       while (toTimestamp > genesisTimestamp) {
-        const fromTimestamp = toTimestamp - 86400;
+        const fromTimestamp = toTimestamp - 86400000;
 
         // Skip already indexed weeks
-        if (indexedTimestamp.includes(toTimestamp)) {
-          toTimestamp -= 86400;
+        if (indexedTimestamp.includes(toTimestamp / 1000)) {
+          toTimestamp -= 86400000;
           ++totalIndexed;
           continue;
         }
@@ -221,18 +221,18 @@ class Mining {
        // Check if we have blocks for the previous day (which mean that the day
        // we are currently indexing has complete data)
        const blockStatsPreviousDay: any = await BlocksRepository.$blockCountBetweenTimestamp(
-          null, fromTimestamp - 86400, toTimestamp - 86400);
+          null, (fromTimestamp - 86400000) / 1000, (toTimestamp - 86400000) / 1000);
        if (blockStatsPreviousDay.blockCount === 0) { // We are done indexing
          break;
        }
 
        const blockStats: any = await BlocksRepository.$blockCountBetweenTimestamp(
-          null, fromTimestamp, toTimestamp);
+          null, fromTimestamp / 1000, toTimestamp / 1000);
        const lastBlockHashrate = await bitcoinClient.getNetworkHashPs(blockStats.blockCount, blockStats.lastBlockHeight);
 
        hashrates.push({
-          hashrateTimestamp: toTimestamp,
+          hashrateTimestamp: toTimestamp / 1000,
          avgHashrate: lastBlockHashrate,
          poolId: 0,
          share: 1,
@@ -244,17 +244,17 @@ class Mining {
          hashrates.length = 0;
        }
 
-        const elapsedSeconds = Math.max(1, Math.round((new Date().getTime() / 1000) - startedAt));
+        const elapsedSeconds = Math.max(1, Math.round(new Date().getTime() - startedAt)) / 1000;
        if (elapsedSeconds > 1) {
          const daysPerSeconds = (indexedThisRun / elapsedSeconds).toFixed(2);
-          const formattedDate = new Date(fromTimestamp * 1000).toUTCString();
+          const formattedDate = new Date(fromTimestamp).toUTCString();
          const daysLeft = Math.round(totalDayIndexed - totalIndexed);
          logger.debug(`Getting network daily hashrate for ${formattedDate} | ~${daysPerSeconds} days/sec | ~${daysLeft} days left to index`);
-          startedAt = new Date().getTime() / 1000;
+          startedAt = new Date().getTime();
          indexedThisRun = 0;
        }
 
-        toTimestamp -= 86400;
+        toTimestamp -= 86400000;
        ++indexedThisRun;
        ++totalIndexed;
      }
diff --git a/docker/backend/Dockerfile b/docker/backend/Dockerfile
index ceddc9000..c013fc23a 100644
--- a/docker/backend/Dockerfile
+++ b/docker/backend/Dockerfile
@@ -1,5 +1,8 @@
 FROM node:16.10.0-buster-slim AS builder
 
+ARG commitHash
+ENV DOCKER_COMMIT_HASH=${commitHash}
+
 WORKDIR /build
 COPY . .
diff --git a/frontend/generate-config.js b/frontend/generate-config.js
index 617ab3c0e..1f37953b7 100644
--- a/frontend/generate-config.js
+++ b/frontend/generate-config.js
@@ -51,9 +51,9 @@ if (process.env.DOCKER_COMMIT_HASH) {
 } else {
   try {
     const gitRevParse = spawnSync('git', ['rev-parse', '--short', 'HEAD']);
     if (!gitRevParse.error) {
-      gitCommitHash = gitRevParse.stdout.toString('utf-8').replace(/[\n\r\s]+$/, '');
+      const output = gitRevParse.stdout.toString('utf-8').replace(/[\n\r\s]+$/, '');
+      gitCommitHash = output ? output : '?';
       console.log(`mempool revision ${gitCommitHash}`);
     } else if (gitRevParse.error.code === 'ENOENT') {
       console.log('git not found, cannot parse git hash');
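
Note on the mining.ts hunks: the indexing loops now do their arithmetic in milliseconds (the unit returned by Date.getTime()), while the repositories and the `hashrates` table keep unix seconds, hence the `* 1000` when reading the last-run timestamp and the `/ 1000` at every repository call. A minimal TypeScript sketch of that convention; the helper and variable names below are illustrative and not part of the patch:

    // Illustrative only: loop math in milliseconds, unix seconds at the DB boundary.
    const WEEK_MS = 7 * 24 * 3600 * 1000; // 604800000, as used in the patch

    function weeklyWindowsInSeconds(lastMondayMidnightMs: number, genesisMs: number = 1231006505000): Array<[number, number]> {
      const windows: Array<[number, number]> = [];
      let toTs = lastMondayMidnightMs;              // milliseconds, like lastMondayMidnight.getTime()
      while (toTs > genesisMs) {
        const fromTs = toTs - WEEK_MS;              // still milliseconds
        windows.push([fromTs / 1000, toTs / 1000]); // convert only when talking to the repositories
        toTs -= WEEK_MS;
      }
      return windows;
    }

Timestamps coming back from the database (for example the already-indexed list) stay in seconds, which is why the patch compares them against `toTimestamp / 1000` before skipping a week or a day.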
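The backend-info.ts hunk carries a TODO to share its commit-hash lookup with generate-config.js; both now resolve the hash the same way (DOCKER_COMMIT_HASH from the Docker build argument, then `git rev-parse --short HEAD`, then '?'). A sketch of what such a shared helper could look like; the function name and placement are illustrative, not part of the patch:

    import { spawnSync } from 'child_process';

    // Hypothetical shared helper mirroring the logic duplicated in the patch.
    // Lookup order: DOCKER_COMMIT_HASH env var -> `git rev-parse --short HEAD` -> '?'.
    export function resolveCommitHash(): string {
      if (process.env.DOCKER_COMMIT_HASH) {
        return process.env.DOCKER_COMMIT_HASH;
      }
      try {
        const gitRevParse = spawnSync('git', ['rev-parse', '--short', 'HEAD']);
        if (!gitRevParse.error) {
          const output = gitRevParse.stdout.toString('utf-8').replace(/[\n\r\s]+$/, '');
          return output ? output : '?';
        }
        if ((gitRevParse.error as NodeJS.ErrnoException).code === 'ENOENT') {
          console.log('git not found, cannot parse git hash');
        }
      } catch (e: any) {
        console.log('Could not load git commit info: ' + e.message);
      }
      return '?';
    }

At image build time the hash would be supplied through the new build argument, e.g. `docker build --build-arg commitHash=$(git rev-parse --short HEAD) ...`, which the Dockerfile exposes to the backend as DOCKER_COMMIT_HASH.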