Merge branch 'master' into bugfix/blocks-list-css
commit a384328f50
@@ -2,6 +2,7 @@ import * as fs from 'fs';
 import * as os from 'os';
 import logger from '../logger';
 import { IBackendInfo } from '../mempool.interfaces';
+const { spawnSync } = require('child_process');
 
 class BackendInfo {
   private gitCommitHash = '';
@@ -27,10 +28,23 @@ class BackendInfo {
   }
 
   private setLatestCommitHash(): void {
-    try {
-      this.gitCommitHash = fs.readFileSync('../.git/refs/heads/master').toString().trim();
-    } catch (e) {
-      logger.err('Could not load git commit info: ' + (e instanceof Error ? e.message : e));
+    //TODO: share this logic with `generate-config.js`
+    if (process.env.DOCKER_COMMIT_HASH) {
+      this.gitCommitHash = process.env.DOCKER_COMMIT_HASH;
+    } else {
+      try {
+        const gitRevParse = spawnSync('git', ['rev-parse', '--short', 'HEAD']);
+        if (!gitRevParse.error) {
+          const output = gitRevParse.stdout.toString('utf-8').replace(/[\n\r\s]+$/, '');
+          this.gitCommitHash = output ? output : '?';
+        } else if (gitRevParse.error.code === 'ENOENT') {
+          console.log('git not found, cannot parse git hash');
+          this.gitCommitHash = '?';
+        }
+      } catch (e: any) {
+        console.log('Could not load git commit info: ' + e.message);
+        this.gitCommitHash = '?';
+      }
     }
   }
 
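
Note: the old implementation read ../.git/refs/heads/master directly, which only ever worked for the master branch and breaks on detached HEADs and packed refs. The new resolution order is: the DOCKER_COMMIT_HASH environment variable, then `git rev-parse --short HEAD`, then a '?' sentinel. A minimal standalone sketch of that order (resolveCommitHash is a hypothetical name, not part of this diff):

    import { spawnSync } from 'child_process';

    function resolveCommitHash(): string {
      // 1) Baked in at Docker build time (see the Dockerfile hunk below).
      if (process.env.DOCKER_COMMIT_HASH) {
        return process.env.DOCKER_COMMIT_HASH;
      }
      // 2) Ask git directly; works on any branch or detached HEAD.
      const res = spawnSync('git', ['rev-parse', '--short', 'HEAD']);
      if (res.error) {
        return '?'; // git not installed, or the spawn itself failed
      }
      // 3) Guard against empty stdout (e.g. not inside a repository).
      const output = res.stdout.toString('utf-8').trim();
      return output || '?';
    }
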
@@ -6,7 +6,7 @@ import logger from '../logger';
 const sleep = (ms: number) => new Promise(res => setTimeout(res, ms));
 
 class DatabaseMigration {
-  private static currentVersion = 15;
+  private static currentVersion = 16;
   private queryTimeout = 120000;
   private statisticsAddedIndexed = false;
 
@@ -175,6 +175,11 @@ class DatabaseMigration {
       await this.$executeQuery(connection, 'ALTER TABLE `hashrates` MODIFY `pool_id` SMALLINT UNSIGNED NOT NULL DEFAULT "0"');
     }
 
+    if (databaseSchemaVersion < 16 && isBitcoin === true) {
+      logger.warn(`'hashrates' table has been truncated. Re-indexing from scratch.`);
+      await this.$executeQuery(connection, 'TRUNCATE hashrates;'); // Need to re-index because we changed timestamps
+    }
+
     connection.release();
   } catch (e) {
     connection.release();
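
Note: the TRUNCATE is needed because the mining indexer below changes how week/day anchor timestamps are computed, so rows written under schema version 15 can no longer be matched against the new anchors (see the worked example under the mining.ts hunks). The guard follows this file's usual pattern: each block runs at most once, keyed on the persisted schema version, and currentVersion is bumped to 16 so the block is skipped on later upgrades.
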
@@ -86,9 +86,9 @@ class Mining {
     }
 
     // We only run this once a week
-    const latestTimestamp = await HashratesRepository.$getLatestRunTimestamp('last_weekly_hashrates_indexing');
+    const latestTimestamp = await HashratesRepository.$getLatestRunTimestamp('last_weekly_hashrates_indexing') * 1000;
     const now = new Date();
-    if ((now.getTime() / 1000) - latestTimestamp < 604800) {
+    if (now.getTime() - latestTimestamp < 604800000) {
       return;
     }
 
@@ -99,23 +99,23 @@ class Mining {
 
     const indexedTimestamp = await HashratesRepository.$getWeeklyHashrateTimestamps();
     const hashrates: any[] = [];
-    const genesisTimestamp = 1231006505; // bitcoin-cli getblock 000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f
+    const genesisTimestamp = 1231006505000; // bitcoin-cli getblock 000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f
 
     const lastMonday = new Date(now.setDate(now.getDate() - (now.getDay() + 6) % 7));
     const lastMondayMidnight = this.getDateMidnight(lastMonday);
-    let toTimestamp = Math.round((lastMondayMidnight.getTime() - 604800) / 1000);
+    let toTimestamp = lastMondayMidnight.getTime();
 
     const totalWeekIndexed = (await BlocksRepository.$blockCount(null, null)) / 1008;
     let indexedThisRun = 0;
     let totalIndexed = 0;
-    let startedAt = new Date().getTime() / 1000;
+    let startedAt = new Date().getTime();
 
     while (toTimestamp > genesisTimestamp) {
-      const fromTimestamp = toTimestamp - 604800;
+      const fromTimestamp = toTimestamp - 604800000;
 
       // Skip already indexed weeks
-      if (indexedTimestamp.includes(toTimestamp)) {
-        toTimestamp -= 604800;
+      if (indexedTimestamp.includes(toTimestamp / 1000)) {
+        toTimestamp -= 604800000;
         ++totalIndexed;
         continue;
       }
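
Note: besides the unit change, the old week anchor had a genuine unit-mixing bug: `(lastMondayMidnight.getTime() - 604800) / 1000` subtracts 604800 milliseconds (about ten minutes), not one week, from a millisecond value before rounding. A worked example for Monday 2022-01-03 00:00 UTC (a date chosen purely for illustration):

    const lastMondayMidnight = new Date('2022-01-03T00:00:00Z'); // getTime() = 1641168000000 ms
    const oldAnchor = Math.round((1641168000000 - 604800) / 1000); // 1641167395 -- not a week boundary
    const newAnchor = 1641168000000 / 1000;                        // 1641168000 -- what toTimestamp / 1000 now stores

Because every previously stored hashrateTimestamp carries the old skewed anchor, `indexedTimestamp.includes(toTimestamp / 1000)` would never match them, which is why the version-16 migration above truncates the hashrates table.
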
@@ -123,17 +123,17 @@ class Mining {
       // Check if we have blocks for the previous week (which mean that the week
       // we are currently indexing has complete data)
       const blockStatsPreviousWeek: any = await BlocksRepository.$blockCountBetweenTimestamp(
-        null, fromTimestamp - 604800, toTimestamp - 604800);
+        null, (fromTimestamp - 604800000) / 1000, (toTimestamp - 604800000) / 1000);
       if (blockStatsPreviousWeek.blockCount === 0) { // We are done indexing
         break;
       }
 
       const blockStats: any = await BlocksRepository.$blockCountBetweenTimestamp(
-        null, fromTimestamp, toTimestamp);
+        null, fromTimestamp / 1000, toTimestamp / 1000);
       const lastBlockHashrate = await bitcoinClient.getNetworkHashPs(blockStats.blockCount,
         blockStats.lastBlockHeight);
 
-      let pools = await PoolsRepository.$getPoolsInfoBetween(fromTimestamp, toTimestamp);
+      let pools = await PoolsRepository.$getPoolsInfoBetween(fromTimestamp / 1000, toTimestamp / 1000);
       const totalBlocks = pools.reduce((acc, pool) => acc + pool.blockCount, 0);
       pools = pools.map((pool: any) => {
         pool.hashrate = (pool.blockCount / totalBlocks) * lastBlockHashrate;
@@ -143,7 +143,7 @@ class Mining {
 
       for (const pool of pools) {
         hashrates.push({
-          hashrateTimestamp: toTimestamp,
+          hashrateTimestamp: toTimestamp / 1000,
           avgHashrate: pool['hashrate'],
           poolId: pool.poolId,
           share: pool['share'],
@@ -154,17 +154,17 @@ class Mining {
       await HashratesRepository.$saveHashrates(hashrates);
       hashrates.length = 0;
 
-      const elapsedSeconds = Math.max(1, Math.round((new Date().getTime() / 1000) - startedAt));
+      const elapsedSeconds = Math.max(1, Math.round((new Date().getTime()) - startedAt)) / 1000;
       if (elapsedSeconds > 1) {
         const weeksPerSeconds = (indexedThisRun / elapsedSeconds).toFixed(2);
-        const formattedDate = new Date(fromTimestamp * 1000).toUTCString();
+        const formattedDate = new Date(fromTimestamp).toUTCString();
         const weeksLeft = Math.round(totalWeekIndexed - totalIndexed);
         logger.debug(`Getting weekly pool hashrate for ${formattedDate} | ~${weeksPerSeconds} weeks/sec | ~${weeksLeft} weeks left to index`);
-        startedAt = new Date().getTime() / 1000;
+        startedAt = new Date().getTime();
         indexedThisRun = 0;
       }
 
-      toTimestamp -= 604800;
+      toTimestamp -= 604800000;
       ++indexedThisRun;
       ++totalIndexed;
     }
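
Note: the pattern across these mining.ts hunks is a single convention change. Date.getTime() returns milliseconds, so all in-memory loop arithmetic now stays in ms (one week = 604800000 ms), while the database keeps storing Unix seconds; conversions appear only at repository boundaries, with `* 1000` when reading a stored timestamp and `/ 1000` when writing or comparing one. A sketch of the convention, with hypothetical helper names:

    const WEEK_MS = 7 * 24 * 60 * 60 * 1000; // 604800000

    // DB rows store Unix seconds; memory uses milliseconds.
    const toDbSeconds = (ms: number): number => ms / 1000;
    const fromDbSeconds = (s: number): number => s * 1000;

    // e.g. $getLatestRunTimestamp returns seconds, hence the `* 1000` on read,
    // and hashrateTimestamp is written as toTimestamp / 1000.
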
@@ -186,9 +186,9 @@ class Mining {
     }
 
     // We only run this once a day
-    const latestTimestamp = await HashratesRepository.$getLatestRunTimestamp('last_hashrates_indexing');
-    const now = new Date().getTime() / 1000;
-    if (now - latestTimestamp < 86400) {
+    const latestTimestamp = await HashratesRepository.$getLatestRunTimestamp('last_hashrates_indexing') * 1000;
+    const now = new Date().getTime();
+    if (now - latestTimestamp < 86400000) {
       return;
     }
 
@@ -198,22 +198,22 @@ class Mining {
     logger.info(`Indexing network daily hashrate`);
 
     const indexedTimestamp = (await HashratesRepository.$getNetworkDailyHashrate(null)).map(hashrate => hashrate.timestamp);
-    const genesisTimestamp = 1231006505; // bitcoin-cli getblock 000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f
+    const genesisTimestamp = 1231006505000; // bitcoin-cli getblock 000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f
     const lastMidnight = this.getDateMidnight(new Date());
-    let toTimestamp = Math.round(lastMidnight.getTime() / 1000);
+    let toTimestamp = Math.round(lastMidnight.getTime());
     const hashrates: any[] = [];
 
     const totalDayIndexed = (await BlocksRepository.$blockCount(null, null)) / 144;
     let indexedThisRun = 0;
     let totalIndexed = 0;
-    let startedAt = new Date().getTime() / 1000;
+    let startedAt = new Date().getTime();
 
     while (toTimestamp > genesisTimestamp) {
-      const fromTimestamp = toTimestamp - 86400;
+      const fromTimestamp = toTimestamp - 86400000;
 
       // Skip already indexed weeks
-      if (indexedTimestamp.includes(toTimestamp)) {
-        toTimestamp -= 86400;
+      if (indexedTimestamp.includes(toTimestamp / 1000)) {
+        toTimestamp -= 86400000;
         ++totalIndexed;
         continue;
       }
@@ -221,18 +221,18 @@ class Mining {
       // Check if we have blocks for the previous day (which mean that the day
       // we are currently indexing has complete data)
       const blockStatsPreviousDay: any = await BlocksRepository.$blockCountBetweenTimestamp(
-        null, fromTimestamp - 86400, toTimestamp - 86400);
+        null, (fromTimestamp - 86400000) / 1000, (toTimestamp - 86400000) / 1000);
       if (blockStatsPreviousDay.blockCount === 0) { // We are done indexing
         break;
       }
 
       const blockStats: any = await BlocksRepository.$blockCountBetweenTimestamp(
-        null, fromTimestamp, toTimestamp);
+        null, fromTimestamp / 1000, toTimestamp / 1000);
       const lastBlockHashrate = await bitcoinClient.getNetworkHashPs(blockStats.blockCount,
         blockStats.lastBlockHeight);
 
       hashrates.push({
-        hashrateTimestamp: toTimestamp,
+        hashrateTimestamp: toTimestamp / 1000,
         avgHashrate: lastBlockHashrate,
         poolId: 0,
         share: 1,
@@ -244,17 +244,17 @@ class Mining {
         hashrates.length = 0;
       }
 
-      const elapsedSeconds = Math.max(1, Math.round((new Date().getTime() / 1000) - startedAt));
+      const elapsedSeconds = Math.max(1, Math.round(new Date().getTime() - startedAt)) / 1000;
       if (elapsedSeconds > 1) {
         const daysPerSeconds = (indexedThisRun / elapsedSeconds).toFixed(2);
-        const formattedDate = new Date(fromTimestamp * 1000).toUTCString();
+        const formattedDate = new Date(fromTimestamp).toUTCString();
         const daysLeft = Math.round(totalDayIndexed - totalIndexed);
         logger.debug(`Getting network daily hashrate for ${formattedDate} | ~${daysPerSeconds} days/sec | ~${daysLeft} days left to index`);
-        startedAt = new Date().getTime() / 1000;
+        startedAt = new Date().getTime();
         indexedThisRun = 0;
       }
 
-      toTimestamp -= 86400;
+      toTimestamp -= 86400000;
       ++indexedThisRun;
       ++totalIndexed;
     }
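
Note: one behavioral subtlety in the elapsed-time rewrite (both the weekly and daily loops): previously Math.max(1, ...) clamped whole seconds, so elapsedSeconds was always at least 1; now the clamp applies to milliseconds before the division, so elapsedSeconds can be as small as 0.001. The surrounding `if (elapsedSeconds > 1)` check is what keeps the weeks/sec and days/sec rates from being computed over a near-zero window:

    // For a 5 ms window between progress logs:
    const oldElapsed = Math.max(1, Math.round(0.005));    // 1 (clamped to a full second)
    const newElapsed = Math.max(1, Math.round(5)) / 1000; // 0.005
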
@@ -1,5 +1,8 @@
 FROM node:16.10.0-buster-slim AS builder
 
+ARG commitHash
+ENV DOCKER_COMMIT_HASH=${commitHash}
+
 WORKDIR /build
 COPY . .
 
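
Note: ARG values exist only at build time; the ENV line copies the build arg into the image environment so the backend's process.env.DOCKER_COMMIT_HASH branch (first hunk above) and generate-config.js can see it at runtime. The hash would typically be injected with something like `docker build --build-arg commitHash="$(git rev-parse --short HEAD)" .` (an assumed invocation; the exact build command is not part of this diff).
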
@@ -51,9 +51,9 @@ if (process.env.DOCKER_COMMIT_HASH) {
 } else {
   try {
     const gitRevParse = spawnSync('git', ['rev-parse', '--short', 'HEAD']);
 
     if (!gitRevParse.error) {
-      gitCommitHash = gitRevParse.stdout.toString('utf-8').replace(/[\n\r\s]+$/, '');
+      const output = gitRevParse.stdout.toString('utf-8').replace(/[\n\r\s]+$/, '');
+      gitCommitHash = output ? output : '?';
       console.log(`mempool revision ${gitCommitHash}`);
     } else if (gitRevParse.error.code === 'ENOENT') {
       console.log('git not found, cannot parse git hash');
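
Note: this mirrors the guard added in backend-info.ts, hence the //TODO there about sharing the logic. The case it handles is a Node spawnSync quirk:

    const res = spawnSync('git', ['rev-parse', '--short', 'HEAD']);
    // res.error  -> set only if the process could not be spawned at all
    //               (e.g. ENOENT when git is not installed)
    // res.status -> git's exit code; a git that launches but exits non-zero
    //               (say, outside a repository) leaves res.error undefined
    //               and stdout empty
    // hence the fallback `output ? output : '?'`, which stops gitCommitHash
    // from silently becoming an empty string.
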
|
Loading…
x
Reference in New Issue
Block a user