Merge branch 'master' into nymkappa/bugfix/blocks-list-pagination-error

wiz 2022-07-10 12:37:51 +02:00 committed by GitHub
commit ac257b4165
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
10 changed files with 136 additions and 71 deletions

View File

@@ -20,7 +20,8 @@
"EXTERNAL_MAX_RETRY": 1,
"EXTERNAL_RETRY_INTERVAL": 0,
"USER_AGENT": "mempool",
"STDOUT_LOG_MIN_PRIORITY": "debug"
"STDOUT_LOG_MIN_PRIORITY": "debug",
"AUTOMATIC_BLOCK_REINDEXING": false
},
"CORE_RPC": {
"HOST": "127.0.0.1",

View File

@@ -173,26 +173,25 @@ class Mining {
*/
public async $generatePoolHashrateHistory(): Promise<void> {
const now = new Date();
const lastestRunDate = await HashratesRepository.$getLatestRun('last_weekly_hashrates_indexing');
try {
const lastestRunDate = await HashratesRepository.$getLatestRun('last_weekly_hashrates_indexing');
// Run only if:
// * lastestRunDate is set to 0 (node backend restart, reorg)
// * we started a new week (around Monday midnight)
const runIndexing = lastestRunDate === 0 || now.getUTCDay() === 1 && lastestRunDate !== now.getUTCDate();
if (!runIndexing) {
return;
}
} catch (e) {
throw e;
// Run only if:
// * lastestRunDate is set to 0 (node backend restart, reorg)
// * we started a new week (around Monday midnight)
const runIndexing = lastestRunDate === 0 || now.getUTCDay() === 1 && lastestRunDate !== now.getUTCDate();
if (!runIndexing) {
return;
}
try {
const oldestConsecutiveBlockTimestamp = 1000 * (await BlocksRepository.$getOldestConsecutiveBlockTimestamp());
const genesisBlock = await bitcoinClient.getBlock(await bitcoinClient.getBlockHash(0));
const genesisTimestamp = genesisBlock.time * 1000;
const indexedTimestamp = await HashratesRepository.$getWeeklyHashrateTimestamps();
const hashrates: any[] = [];
const genesisTimestamp = 1231006505000; // bitcoin-cli getblock 000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f
const lastMonday = new Date(now.setDate(now.getDate() - (now.getDay() + 6) % 7));
const lastMondayMidnight = this.getDateMidnight(lastMonday);
let toTimestamp = lastMondayMidnight.getTime();
@@ -207,7 +206,7 @@ class Mining {
logger.debug(`Indexing weekly mining pool hashrate`);
loadingIndicators.setProgress('weekly-hashrate-indexing', 0);
while (toTimestamp > genesisTimestamp) {
while (toTimestamp > genesisTimestamp && toTimestamp > oldestConsecutiveBlockTimestamp) {
const fromTimestamp = toTimestamp - 604800000;
// Skip already indexed weeks
@@ -217,14 +216,6 @@ class Mining {
continue;
}
// Check if we have blocks for the previous week (which mean that the week
// we are currently indexing has complete data)
const blockStatsPreviousWeek: any = await BlocksRepository.$blockCountBetweenTimestamp(
null, (fromTimestamp - 604800000) / 1000, (toTimestamp - 604800000) / 1000);
if (blockStatsPreviousWeek.blockCount === 0) { // We are done indexing
break;
}
const blockStats: any = await BlocksRepository.$blockCountBetweenTimestamp(
null, fromTimestamp / 1000, toTimestamp / 1000);
const lastBlockHashrate = await bitcoinClient.getNetworkHashPs(blockStats.blockCount,
@@ -232,25 +223,27 @@ class Mining {
let pools = await PoolsRepository.$getPoolsInfoBetween(fromTimestamp / 1000, toTimestamp / 1000);
const totalBlocks = pools.reduce((acc, pool) => acc + pool.blockCount, 0);
pools = pools.map((pool: any) => {
pool.hashrate = (pool.blockCount / totalBlocks) * lastBlockHashrate;
pool.share = (pool.blockCount / totalBlocks);
return pool;
});
for (const pool of pools) {
hashrates.push({
hashrateTimestamp: toTimestamp / 1000,
avgHashrate: pool['hashrate'],
poolId: pool.poolId,
share: pool['share'],
type: 'weekly',
if (totalBlocks > 0) {
pools = pools.map((pool: any) => {
pool.hashrate = (pool.blockCount / totalBlocks) * lastBlockHashrate;
pool.share = (pool.blockCount / totalBlocks);
return pool;
});
}
newlyIndexed += hashrates.length;
await HashratesRepository.$saveHashrates(hashrates);
hashrates.length = 0;
for (const pool of pools) {
hashrates.push({
hashrateTimestamp: toTimestamp / 1000,
avgHashrate: pool['hashrate'],
poolId: pool.poolId,
share: pool['share'],
type: 'weekly',
});
}
newlyIndexed += hashrates.length;
await HashratesRepository.$saveHashrates(hashrates);
hashrates.length = 0;
}
const elapsedSeconds = Math.max(1, Math.round((new Date().getTime() / 1000) - timer));
if (elapsedSeconds > 1) {
@@ -285,20 +278,19 @@ class Mining {
* [INDEXING] Generate daily hashrate data
*/
public async $generateNetworkHashrateHistory(): Promise<void> {
try {
// We only run this once a day around midnight
const latestRunDate = await HashratesRepository.$getLatestRun('last_hashrates_indexing');
const now = new Date().getUTCDate();
if (now === latestRunDate) {
return;
}
} catch (e) {
throw e;
// We only run this once a day around midnight
const latestRunDate = await HashratesRepository.$getLatestRun('last_hashrates_indexing');
const now = new Date().getUTCDate();
if (now === latestRunDate) {
return;
}
const oldestConsecutiveBlockTimestamp = 1000 * (await BlocksRepository.$getOldestConsecutiveBlockTimestamp());
try {
const indexedTimestamp = (await HashratesRepository.$getNetworkDailyHashrate(null)).map(hashrate => hashrate.timestamp);
const genesisTimestamp = (config.MEMPOOL.NETWORK === 'signet') ? 1598918400000 : 1231006505000; // bitcoin-cli getblock 000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f
const genesisBlock = await bitcoinClient.getBlock(await bitcoinClient.getBlockHash(0));
const genesisTimestamp = genesisBlock.time * 1000;
const indexedTimestamp = (await HashratesRepository.$getRawNetworkDailyHashrate(null)).map(hashrate => hashrate.timestamp);
const lastMidnight = this.getDateMidnight(new Date());
let toTimestamp = Math.round(lastMidnight.getTime());
const hashrates: any[] = [];
@@ -313,7 +305,7 @@ class Mining {
logger.debug(`Indexing daily network hashrate`);
loadingIndicators.setProgress('daily-hashrate-indexing', 0);
while (toTimestamp > genesisTimestamp) {
while (toTimestamp > genesisTimestamp && toTimestamp > oldestConsecutiveBlockTimestamp) {
const fromTimestamp = toTimestamp - 86400000;
// Skip already indexed days
@@ -323,17 +315,9 @@ class Mining {
continue;
}
// Check if we have blocks for the previous day (which mean that the day
// we are currently indexing has complete data)
const blockStatsPreviousDay: any = await BlocksRepository.$blockCountBetweenTimestamp(
null, (fromTimestamp - 86400000) / 1000, (toTimestamp - 86400000) / 1000);
if (blockStatsPreviousDay.blockCount === 0 && config.MEMPOOL.NETWORK === 'mainnet') { // We are done indexing
break;
}
const blockStats: any = await BlocksRepository.$blockCountBetweenTimestamp(
null, fromTimestamp / 1000, toTimestamp / 1000);
const lastBlockHashrate = await bitcoinClient.getNetworkHashPs(blockStats.blockCount,
const lastBlockHashrate = blockStats.blockCount === 0 ? 0 : await bitcoinClient.getNetworkHashPs(blockStats.blockCount,
blockStats.lastBlockHeight);
hashrates.push({
@@ -368,8 +352,8 @@ class Mining {
++totalIndexed;
}
// Add genesis block manually on mainnet and testnet
if ('signet' !== config.MEMPOOL.NETWORK && toTimestamp <= genesisTimestamp && !indexedTimestamp.includes(genesisTimestamp)) {
// Add genesis block manually
if (config.MEMPOOL.INDEXING_BLOCKS_AMOUNT === -1 && !indexedTimestamp.includes(genesisTimestamp / 1000)) {
hashrates.push({
hashrateTimestamp: genesisTimestamp / 1000,
avgHashrate: await bitcoinClient.getNetworkHashPs(1, 1),
@@ -410,14 +394,18 @@ class Mining {
let totalIndexed = 0;
if (indexedHeights[0] !== true) {
const genesisBlock = await bitcoinClient.getBlock(await bitcoinClient.getBlockHash(0));
await DifficultyAdjustmentsRepository.$saveAdjustments({
time: (config.MEMPOOL.NETWORK === 'signet') ? 1598918400 : 1231006505,
time: genesisBlock.time,
height: 0,
difficulty: (config.MEMPOOL.NETWORK === 'signet') ? 0.001126515290698186 : 1.0,
difficulty: genesisBlock.difficulty,
adjustment: 0.0,
});
}
let totalBlockChecked = 0;
let timer = new Date().getTime() / 1000;
for (const block of blocks) {
if (block.difficulty !== currentDifficulty) {
if (block.height === 0 || indexedHeights[block.height] === true) { // Already indexed
@@ -438,6 +426,14 @@ class Mining {
totalIndexed++;
currentDifficulty = block.difficulty;
}
totalBlockChecked++;
const elapsedSeconds = Math.max(1, Math.round((new Date().getTime() / 1000) - timer));
if (elapsedSeconds > 5) {
const progress = Math.round(totalBlockChecked / blocks.length * 100);
logger.info(`Indexing difficulty adjustment at block #${block.height} | Progress: ${progress}%`);
timer = new Date().getTime() / 1000;
}
}
if (totalIndexed > 0) {
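
Both hashrate indexing loops above now stop at the oldest block belonging to a gap-free chain from the tip ($getOldestConsecutiveBlockTimestamp, added further down in this diff), instead of probing whether the previous week or day already has blocks. A simplified sketch of the new loop bound, written as a window generator for illustration only (the actual code mutates toTimestamp inside $generatePoolHashrateHistory and $generateNetworkHashrateHistory):

// Yield weekly indexing windows until we reach either the genesis timestamp or the
// oldest consecutively indexed block, whichever comes first.
const WEEK_MS = 604800000;

function* weeklyWindows(start: number, genesisTimestamp: number, oldestConsecutiveBlockTimestamp: number):
  Generator<{ fromTimestamp: number; toTimestamp: number }> {
  let toTimestamp = start;
  while (toTimestamp > genesisTimestamp && toTimestamp > oldestConsecutiveBlockTimestamp) {
    yield { fromTimestamp: toTimestamp - WEEK_MS, toTimestamp };
    toTimestamp -= WEEK_MS;
  }
}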

View File

@@ -222,6 +222,10 @@ class PoolsParser {
* Delete blocks which need to be reindexed
*/
private async $deleteBlocskToReindex(finalPoolDataUpdate: any[]) {
if (config.MEMPOOL.AUTOMATIC_BLOCK_REINDEXING === false) {
return;
}
const blockCount = await BlocksRepository.$blockCount(null, null);
if (blockCount === 0) {
return;

View File

@@ -23,6 +23,7 @@ interface IConfig {
EXTERNAL_RETRY_INTERVAL: number;
USER_AGENT: string;
STDOUT_LOG_MIN_PRIORITY: 'emerg' | 'alert' | 'crit' | 'err' | 'warn' | 'notice' | 'info' | 'debug';
AUTOMATIC_BLOCK_REINDEXING: boolean;
};
ESPLORA: {
REST_API_URL: string;
@@ -113,6 +114,7 @@ const defaults: IConfig = {
'EXTERNAL_RETRY_INTERVAL': 0,
'USER_AGENT': 'mempool',
'STDOUT_LOG_MIN_PRIORITY': 'debug',
'AUTOMATIC_BLOCK_REINDEXING': false,
},
'ESPLORA': {
'REST_API_URL': 'http://127.0.0.1:3000',
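
The flag is also declared in the IConfig interface and given a false default above. A rough sketch of the assumed override behaviour, where values from mempool-config.json win over the typed defaults (the real merge logic lives in config.ts and may differ):

import * as fs from 'fs';

// Assumption: user-provided JSON overrides the typed defaults shown above.
const userConfig = JSON.parse(fs.readFileSync('./mempool-config.json', 'utf8'));
const automaticBlockReindexing: boolean =
  userConfig?.MEMPOOL?.AUTOMATIC_BLOCK_REINDEXING ?? false; // falls back to the default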

View File

@@ -35,6 +35,8 @@ class Indexer {
this.runIndexer = false;
this.indexerRunning = true;
logger.debug(`Running mining indexer`);
try {
const chainValid = await blocks.$generateBlockDatabase();
if (chainValid === false) {
@@ -54,9 +56,15 @@
this.indexerRunning = false;
logger.err(`Indexer failed, trying again in 10 seconds. Reason: ` + (e instanceof Error ? e.message : e));
setTimeout(() => this.reindex(), 10000);
this.indexerRunning = false;
return;
}
this.indexerRunning = false;
const runEvery = 1000 * 3600; // 1 hour
logger.debug(`Indexing completed. Next run planned at ${new Date(new Date().getTime() + runEvery).toUTCString()}`);
setTimeout(() => this.reindex(), runEvery);
}
async $resetHashratesIndexingState() {
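
The indexer's run loop above now clears indexerRunning on both the failure and success paths and reschedules itself via reindex(): retry after 10 seconds on error, otherwise run again an hour later. A condensed sketch of that retry-and-reschedule pattern as a standalone helper (not the actual class):

// Fail -> retry soon, succeed -> schedule the next full pass later.
const RETRY_MS = 10 * 1000;
const RUN_EVERY_MS = 1000 * 3600; // 1 hour

async function runIndexerOnce(doIndex: () => Promise<void>): Promise<void> {
  try {
    await doIndex();
  } catch (e) {
    setTimeout(() => runIndexerOnce(doIndex), RETRY_MS);
    return;
  }
  setTimeout(() => runIndexerOnce(doIndex), RUN_EVERY_MS);
}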

View File

@@ -610,6 +610,24 @@ class BlocksRepository {
throw e;
}
}
/**
* Return the oldest block timestamp from a consecutive chain of blocks, starting from the most recent one
*/
public async $getOldestConsecutiveBlockTimestamp(): Promise<number> {
try {
const [rows]: any = await DB.query(`SELECT height, UNIX_TIMESTAMP(blockTimestamp) as timestamp FROM blocks ORDER BY height DESC`);
for (let i = 0; i < rows.length - 1; ++i) {
if (rows[i].height - rows[i + 1].height > 1) {
return rows[i].timestamp;
}
}
return rows[rows.length - 1].timestamp;
} catch (e) {
logger.err('Cannot get oldest consecutive block timestamp. Reason: ' + (e instanceof Error ? e.message : e));
throw e;
}
}
}
export default new BlocksRepository();
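
$getOldestConsecutiveBlockTimestamp walks the indexed heights from the tip downwards and returns the timestamp just above the first gap. A standalone sketch of that scan on plain data, assuming at least one indexed row (as the query above would return):

// Heights sorted descending, matching the SQL result above.
function oldestConsecutiveTimestamp(rows: { height: number; timestamp: number }[]): number {
  for (let i = 0; i < rows.length - 1; ++i) {
    if (rows[i].height - rows[i + 1].height > 1) {
      return rows[i].timestamp; // first gap while walking down: the consecutive chain ends here
    }
  }
  return rows[rows.length - 1].timestamp; // no gap: the whole index is consecutive
}

// e.g. heights [744000, 743999, 743998, 700000] -> timestamp of block 743998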

View File

@@ -1,6 +1,5 @@
import { escape } from 'mysql2';
import { Common } from '../api/common';
import config from '../config';
import DB from '../database';
import logger from '../logger';
import PoolsRepository from './PoolsRepository';
@@ -30,6 +29,32 @@ class HashratesRepository {
}
}
public async $getRawNetworkDailyHashrate(interval: string | null): Promise<any[]> {
interval = Common.getSqlInterval(interval);
let query = `SELECT
UNIX_TIMESTAMP(hashrate_timestamp) as timestamp,
avg_hashrate as avgHashrate
FROM hashrates`;
if (interval) {
query += ` WHERE hashrate_timestamp BETWEEN DATE_SUB(NOW(), INTERVAL ${interval}) AND NOW()
AND hashrates.type = 'daily'`;
} else {
query += ` WHERE hashrates.type = 'daily'`;
}
query += ` ORDER by hashrate_timestamp`;
try {
const [rows]: any[] = await DB.query(query);
return rows;
} catch (e) {
logger.err('Cannot fetch network hashrate history. Reason: ' + (e instanceof Error ? e.message : e));
throw e;
}
}
public async $getNetworkDailyHashrate(interval: string | null): Promise<any[]> {
interval = Common.getSqlInterval(interval);

View File

@@ -4,6 +4,12 @@ import { Prices } from '../tasks/price-updater';
class PricesRepository {
public async $savePrices(time: number, prices: Prices): Promise<void> {
if (prices.USD === -1) {
// Some historical price entries have no USD price, so we just ignore them to avoid future UX issues
// As of today there are only 4 (on 2013-09-05, 2013-09-19, 2013-09-12 and 2013-09-26) so that's fine
return;
}
try {
await DB.query(`
INSERT INTO prices(time, USD, EUR, GBP, CAD, CHF, AUD, JPY)
@@ -17,17 +23,17 @@
}
public async $getOldestPriceTime(): Promise<number> {
const [oldestRow] = await DB.query(`SELECT UNIX_TIMESTAMP(time) as time from prices ORDER BY time LIMIT 1`);
const [oldestRow] = await DB.query(`SELECT UNIX_TIMESTAMP(time) as time from prices WHERE USD != -1 ORDER BY time LIMIT 1`);
return oldestRow[0] ? oldestRow[0].time : 0;
}
public async $getLatestPriceTime(): Promise<number> {
const [oldestRow] = await DB.query(`SELECT UNIX_TIMESTAMP(time) as time from prices ORDER BY time DESC LIMIT 1`);
const [oldestRow] = await DB.query(`SELECT UNIX_TIMESTAMP(time) as time from prices WHERE USD != -1 ORDER BY time DESC LIMIT 1`);
return oldestRow[0] ? oldestRow[0].time : 0;
}
public async $getPricesTimes(): Promise<number[]> {
const [times]: any[] = await DB.query(`SELECT UNIX_TIMESTAMP(time) as time from prices`);
const [times]: any[] = await DB.query(`SELECT UNIX_TIMESTAMP(time) as time from prices WHERE USD != -1`);
return times.map(time => time.time);
}
}
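
PricesRepository now treats USD = -1 as "price unknown": such rows are neither inserted ($savePrices returns early above) nor considered by the time lookups (WHERE USD != -1). A small illustrative predicate capturing that invariant (the PriceRow shape below is simplified):

interface PriceRow { time: number; USD: number; }

// Rows with USD = -1 carry no usable price and are ignored on both write and read.
function usablePriceRows(rows: PriceRow[]): PriceRow[] {
  return rows.filter((row) => row.USD !== -1);
}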

View File

@@ -20,7 +20,8 @@
"USER_AGENT": "__MEMPOOL_USER_AGENT__",
"STDOUT_LOG_MIN_PRIORITY": "__MEMPOOL_STDOUT_LOG_MIN_PRIORITY__",
"INDEXING_BLOCKS_AMOUNT": __MEMPOOL_INDEXING_BLOCKS_AMOUNT__,
"BLOCKS_SUMMARIES_INDEXING": __MEMPOOL_BLOCKS_SUMMARIES_INDEXING__
"BLOCKS_SUMMARIES_INDEXING": __MEMPOOL_BLOCKS_SUMMARIES_INDEXING__,
"AUTOMATIC_BLOCK_REINDEXING": __MEMPOOL_AUTOMATIC_BLOCK_REINDEXING__
},
"CORE_RPC": {
"HOST": "__CORE_RPC_HOST__",

View File

@@ -22,6 +22,8 @@ __MEMPOOL_EXTERNAL_MAX_RETRY__=${MEMPOOL_EXTERNAL_MAX_RETRY:=1}
__MEMPOOL_EXTERNAL_RETRY_INTERVAL__=${MEMPOOL_EXTERNAL_RETRY_INTERVAL:=0}
__MEMPOOL_USER_AGENT__=${MEMPOOL_USER_AGENT:=mempool}
__MEMPOOL_STDOUT_LOG_MIN_PRIORITY__=${MEMPOOL_STDOUT_LOG_MIN_PRIORITY:=info}
__MEMPOOL_INDEXING_BLOCKS_AMOUNT__=${MEMPOOL_INDEXING_BLOCKS_AMOUNT:=false}
__MEMPOOL_AUTOMATIC_BLOCK_REINDEXING__=${MEMPOOL_AUTOMATIC_BLOCK_REINDEXING:=false}
# CORE_RPC
__CORE_RPC_HOST__=${CORE_RPC_HOST:=127.0.0.1}
@@ -110,6 +112,8 @@ sed -i "s!__MEMPOOL_EXTERNAL_MAX_RETRY__!${__MEMPOOL_EXTERNAL_MAX_RETRY__}!g" me
sed -i "s!__MEMPOOL_EXTERNAL_RETRY_INTERVAL__!${__MEMPOOL_EXTERNAL_RETRY_INTERVAL__}!g" mempool-config.json
sed -i "s!__MEMPOOL_USER_AGENT__!${__MEMPOOL_USER_AGENT__}!g" mempool-config.json
sed -i "s/__MEMPOOL_STDOUT_LOG_MIN_PRIORITY__/${__MEMPOOL_STDOUT_LOG_MIN_PRIORITY__}/g" mempool-config.json
sed -i "s/__MEMPOOL_INDEXING_BLOCKS_AMOUNT__/${__MEMPOOL_INDEXING_BLOCKS_AMOUNT__}/g" mempool-config.json
sed -i "s/__MEMPOOL_AUTOMATIC_BLOCK_REINDEXING__/${__MEMPOOL_AUTOMATIC_BLOCK_REINDEXING__}/g" mempool-config.json
sed -i "s/__CORE_RPC_HOST__/${__CORE_RPC_HOST__}/g" mempool-config.json
sed -i "s/__CORE_RPC_PORT__/${__CORE_RPC_PORT__}/g" mempool-config.json