Merge branch 'master' into mononaut/heap-monitor

wiz 2023-03-05 15:35:00 +09:00 committed by GitHub
commit 9eeaf76369
16 changed files with 185 additions and 124 deletions

View File

@@ -1037,7 +1037,7 @@ class DatabaseMigration {
 await this.$executeQuery('DELETE FROM `pools`');
 await this.$executeQuery('ALTER TABLE pools AUTO_INCREMENT = 1');
 await this.$executeQuery(`UPDATE state SET string = NULL WHERE name = 'pools_json_sha'`);
 }
 private async $convertCompactCpfpTables(): Promise<void> {
 try {

View File

@@ -117,7 +117,7 @@ class Mining {
 poolsStatistics['lastEstimatedHashrate'] = await bitcoinClient.getNetworkHashPs(totalBlock24h);
 } catch (e) {
 poolsStatistics['lastEstimatedHashrate'] = 0;
-logger.debug('Bitcoin Core is not available, using zeroed value for current hashrate');
+logger.debug('Bitcoin Core is not available, using zeroed value for current hashrate', logger.tags.mining);
 }
 return poolsStatistics;
@@ -145,7 +145,7 @@ class Mining {
 try {
 currentEstimatedHashrate = await bitcoinClient.getNetworkHashPs(totalBlock24h);
 } catch (e) {
-logger.debug('Bitcoin Core is not available, using zeroed value for current hashrate');
+logger.debug('Bitcoin Core is not available, using zeroed value for current hashrate', logger.tags.mining);
 }
 return {
@@ -208,7 +208,7 @@ class Mining {
 const startedAt = new Date().getTime() / 1000;
 let timer = new Date().getTime() / 1000;
-logger.debug(`Indexing weekly mining pool hashrate`);
+logger.debug(`Indexing weekly mining pool hashrate`, logger.tags.mining);
 loadingIndicators.setProgress('weekly-hashrate-indexing', 0);
 while (toTimestamp > genesisTimestamp && toTimestamp > oldestConsecutiveBlockTimestamp) {
@@ -245,7 +245,7 @@ class Mining {
 });
 }
-newlyIndexed += hashrates.length;
+newlyIndexed += hashrates.length / Math.max(1, pools.length);
 await HashratesRepository.$saveHashrates(hashrates);
 hashrates.length = 0;
 }
@@ -256,7 +256,7 @@ class Mining {
 const weeksPerSeconds = Math.max(1, Math.round(indexedThisRun / elapsedSeconds));
 const progress = Math.round(totalIndexed / totalWeekIndexed * 10000) / 100;
 const formattedDate = new Date(fromTimestamp).toUTCString();
-logger.debug(`Getting weekly pool hashrate for ${formattedDate} | ~${weeksPerSeconds.toFixed(2)} weeks/sec | total: ~${totalIndexed}/${Math.round(totalWeekIndexed)} (${progress}%) | elapsed: ${runningFor} seconds`);
+logger.debug(`Getting weekly pool hashrate for ${formattedDate} | ~${weeksPerSeconds.toFixed(2)} weeks/sec | total: ~${totalIndexed}/${Math.round(totalWeekIndexed)} (${progress}%) | elapsed: ${runningFor} seconds`, logger.tags.mining);
 timer = new Date().getTime() / 1000;
 indexedThisRun = 0;
 loadingIndicators.setProgress('weekly-hashrate-indexing', progress, false);
@@ -268,14 +268,14 @@ class Mining {
 }
 this.lastWeeklyHashrateIndexingDate = new Date().getUTCDate();
 if (newlyIndexed > 0) {
-logger.notice(`Weekly mining pools hashrates indexing completed: indexed ${newlyIndexed}`, logger.tags.mining);
+logger.info(`Weekly mining pools hashrates indexing completed: indexed ${newlyIndexed} weeks`, logger.tags.mining);
 } else {
-logger.debug(`Weekly mining pools hashrates indexing completed: indexed ${newlyIndexed}`, logger.tags.mining);
+logger.debug(`Weekly mining pools hashrates indexing completed: indexed ${newlyIndexed} weeks`, logger.tags.mining);
 }
 loadingIndicators.setProgress('weekly-hashrate-indexing', 100);
 } catch (e) {
 loadingIndicators.setProgress('weekly-hashrate-indexing', 100);
-logger.err(`Weekly mining pools hashrates indexing failed. Trying again in 10 seconds. Reason: ${(e instanceof Error ? e.message : e)}`);
+logger.err(`Weekly mining pools hashrates indexing failed. Trying again in 10 seconds. Reason: ${(e instanceof Error ? e.message : e)}`, logger.tags.mining);
 throw e;
 }
 }
@@ -308,7 +308,7 @@ class Mining {
 const startedAt = new Date().getTime() / 1000;
 let timer = new Date().getTime() / 1000;
-logger.debug(`Indexing daily network hashrate`);
+logger.debug(`Indexing daily network hashrate`, logger.tags.mining);
 loadingIndicators.setProgress('daily-hashrate-indexing', 0);
 while (toTimestamp > genesisTimestamp && toTimestamp > oldestConsecutiveBlockTimestamp) {
@@ -346,7 +346,7 @@ class Mining {
 const daysPerSeconds = Math.max(1, Math.round(indexedThisRun / elapsedSeconds));
 const progress = Math.round(totalIndexed / totalDayIndexed * 10000) / 100;
 const formattedDate = new Date(fromTimestamp).toUTCString();
-logger.debug(`Getting network daily hashrate for ${formattedDate} | ~${daysPerSeconds.toFixed(2)} days/sec | total: ~${totalIndexed}/${Math.round(totalDayIndexed)} (${progress}%) | elapsed: ${runningFor} seconds`);
+logger.debug(`Getting network daily hashrate for ${formattedDate} | ~${daysPerSeconds.toFixed(2)} days/sec | total: ~${totalIndexed}/${Math.round(totalDayIndexed)} (${progress}%) | elapsed: ${runningFor} seconds`, logger.tags.mining);
 timer = new Date().getTime() / 1000;
 indexedThisRun = 0;
 loadingIndicators.setProgress('daily-hashrate-indexing', progress);
@@ -373,14 +373,14 @@ class Mining {
 this.lastHashrateIndexingDate = new Date().getUTCDate();
 if (newlyIndexed > 0) {
-logger.notice(`Daily network hashrate indexing completed: indexed ${newlyIndexed} days`, logger.tags.mining);
+logger.info(`Daily network hashrate indexing completed: indexed ${newlyIndexed} days`, logger.tags.mining);
 } else {
 logger.debug(`Daily network hashrate indexing completed: indexed ${newlyIndexed} days`, logger.tags.mining);
 }
 loadingIndicators.setProgress('daily-hashrate-indexing', 100);
 } catch (e) {
 loadingIndicators.setProgress('daily-hashrate-indexing', 100);
-logger.err(`Daily network hashrate indexing failed. Trying again in 10 seconds. Reason: ${(e instanceof Error ? e.message : e)}`, logger.tags.mining);
+logger.err(`Daily network hashrate indexing failed. Trying again later. Reason: ${(e instanceof Error ? e.message : e)}`, logger.tags.mining);
 throw e;
 }
 }
@@ -446,13 +446,13 @@ class Mining {
 const elapsedSeconds = Math.max(1, Math.round((new Date().getTime() / 1000) - timer));
 if (elapsedSeconds > 5) {
 const progress = Math.round(totalBlockChecked / blocks.length * 100);
-logger.info(`Indexing difficulty adjustment at block #${block.height} | Progress: ${progress}%`);
+logger.info(`Indexing difficulty adjustment at block #${block.height} | Progress: ${progress}%`, logger.tags.mining);
 timer = new Date().getTime() / 1000;
 }
 }
 if (totalIndexed > 0) {
-logger.notice(`Indexed ${totalIndexed} difficulty adjustments`, logger.tags.mining);
+logger.info(`Indexed ${totalIndexed} difficulty adjustments`, logger.tags.mining);
 } else {
 logger.debug(`Indexed ${totalIndexed} difficulty adjustments`, logger.tags.mining);
 }
@@ -499,7 +499,7 @@ class Mining {
 if (blocksWithoutPrices.length > 200000) {
 logStr += ` | Progress ${Math.round(totalInserted / blocksWithoutPrices.length * 100)}%`;
 }
-logger.debug(logStr);
+logger.debug(logStr, logger.tags.mining);
 await BlocksRepository.$saveBlockPrices(blocksPrices);
 blocksPrices.length = 0;
 }
@@ -511,7 +511,7 @@ class Mining {
 if (blocksWithoutPrices.length > 200000) {
 logStr += ` | Progress ${Math.round(totalInserted / blocksWithoutPrices.length * 100)}%`;
 }
-logger.debug(logStr);
+logger.debug(logStr, logger.tags.mining);
 await BlocksRepository.$saveBlockPrices(blocksPrices);
 }
 } catch (e) {
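
Note on the newlyIndexed change above: the weekly indexer apparently saves one hashrate row per pool for each indexed week, so dividing by the pool count reports weeks instead of raw rows, which matches the reworded "indexed N weeks" log line. A minimal sketch of that arithmetic (illustrative values only, not code from this commit):

// Illustration only: 3 pools indexed over 2 weeks produce 6 hashrate rows;
// dividing by the pool count recovers the number of weeks for the log message.
const pools = ['PoolA', 'PoolB', 'PoolC'];      // hypothetical pool list
const hashrates = new Array(2 * pools.length);  // 6 rows saved by the indexer
const newlyIndexed = hashrates.length / Math.max(1, pools.length);
console.log(newlyIndexed); // 2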

View File

@@ -1,8 +1,8 @@
 import logger from '../logger';
 import * as WebSocket from 'ws';
 import {
-BlockExtended, TransactionExtended, WebsocketResponse, MempoolBlock, MempoolBlockDelta,
-OptimizedStatistic, ILoadingIndicators, IConversionRates
+BlockExtended, TransactionExtended, WebsocketResponse,
+OptimizedStatistic, ILoadingIndicators
 } from '../mempool.interfaces';
 import blocks from './blocks';
 import memPool from './mempool';
@@ -20,6 +20,7 @@ import BlocksSummariesRepository from '../repositories/BlocksSummariesRepository
 import Audit from './audit';
 import { deepClone } from '../utils/clone';
 import priceUpdater from '../tasks/price-updater';
+import { ApiPrice } from '../repositories/PricesRepository';
 class WebsocketHandler {
 private wss: WebSocket.Server | undefined;
@@ -193,7 +194,7 @@ class WebsocketHandler {
 });
 }
-handleNewConversionRates(conversionRates: IConversionRates) {
+handleNewConversionRates(conversionRates: ApiPrice) {
 if (!this.wss) {
 throw new Error('WebSocket.Server is not set');
 }
@@ -214,7 +215,7 @@ class WebsocketHandler {
 'mempoolInfo': memPool.getMempoolInfo(),
 'vBytesPerSecond': memPool.getVBytesPerSecond(),
 'blocks': _blocks,
-'conversions': priceUpdater.latestPrices,
+'conversions': priceUpdater.getLatestPrices(),
 'mempool-blocks': mempoolBlocks.getMempoolBlocks(),
 'transactions': memPool.getLatestTransactions(),
 'backendInfo': backendInfo.getBackendInfo(),
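
The 'conversions' payload and handleNewConversionRates now use ApiPrice instead of the removed IConversionRates. Pieced together from the PricesRepository and price-updater hunks further down, the shape is roughly the following (a reconstruction for reference, not a new definition introduced by this commit):

// Reconstructed from this commit's hunks: a unix timestamp plus the seven
// supported fiat currencies, with -1 used as the "no price available" sentinel.
export interface ApiPrice {
  time: number;
  USD: number;
  EUR: number;
  GBP: number;
  CAD: number;
  CHF: number;
  AUD: number;
  JPY: number;
}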

View File

@@ -76,13 +76,13 @@ class Indexer {
 this.tasksRunning.push(task);
 const lastestPriceId = await PricesRepository.$getLatestPriceId();
 if (priceUpdater.historyInserted === false || lastestPriceId === null) {
-logger.debug(`Blocks prices indexer is waiting for the price updater to complete`);
+logger.debug(`Blocks prices indexer is waiting for the price updater to complete`, logger.tags.mining);
 setTimeout(() => {
 this.tasksRunning = this.tasksRunning.filter(runningTask => runningTask !== task);
 this.runSingleTask('blocksPrices');
 }, 10000);
 } else {
-logger.debug(`Blocks prices indexer will run now`);
+logger.debug(`Blocks prices indexer will run now`, logger.tags.mining);
 await mining.$indexBlockPrices();
 this.tasksRunning = this.tasksRunning.filter(runningTask => runningTask !== task);
 }
@@ -112,7 +112,7 @@ class Indexer {
 this.runIndexer = false;
 this.indexerRunning = true;
-logger.info(`Running mining indexer`);
+logger.debug(`Running mining indexer`);
 await this.checkAvailableCoreIndexes();
@@ -122,7 +122,7 @@ class Indexer {
 const chainValid = await blocks.$generateBlockDatabase();
 if (chainValid === false) {
 // Chain of block hash was invalid, so we need to reindex. Stop here and continue at the next iteration
-logger.warn(`The chain of block hash is invalid, re-indexing invalid data in 10 seconds.`);
+logger.warn(`The chain of block hash is invalid, re-indexing invalid data in 10 seconds.`, logger.tags.mining);
 setTimeout(() => this.reindex(), 10000);
 this.indexerRunning = false;
 return;

View File

@@ -293,7 +293,6 @@ interface RequiredParams {
 }
 export interface ILoadingIndicators { [name: string]: number; }
-export interface IConversionRates { [currency: string]: number; }
 export interface IBackendInfo {
 hostname: string;

View File

@@ -20,9 +20,9 @@ class DifficultyAdjustmentsRepository {
 await DB.query(query, params);
 } catch (e: any) {
 if (e.errno === 1062) { // ER_DUP_ENTRY - This scenario is possible upon node backend restart
-logger.debug(`Cannot save difficulty adjustment at block ${adjustment.height}, already indexed, ignoring`);
+logger.debug(`Cannot save difficulty adjustment at block ${adjustment.height}, already indexed, ignoring`, logger.tags.mining);
 } else {
-logger.err(`Cannot save difficulty adjustment at block ${adjustment.height}. Reason: ${e instanceof Error ? e.message : e}`);
+logger.err(`Cannot save difficulty adjustment at block ${adjustment.height}. Reason: ${e instanceof Error ? e.message : e}`, logger.tags.mining);
 throw e;
 }
 }
@@ -54,7 +54,7 @@ class DifficultyAdjustmentsRepository {
 const [rows] = await DB.query(query);
 return rows as IndexedDifficultyAdjustment[];
 } catch (e) {
-logger.err(`Cannot get difficulty adjustments from the database. Reason: ` + (e instanceof Error ? e.message : e));
+logger.err(`Cannot get difficulty adjustments from the database. Reason: ` + (e instanceof Error ? e.message : e), logger.tags.mining);
 throw e;
 }
 }
@@ -83,7 +83,7 @@ class DifficultyAdjustmentsRepository {
 const [rows] = await DB.query(query);
 return rows as IndexedDifficultyAdjustment[];
 } catch (e) {
-logger.err(`Cannot get difficulty adjustments from the database. Reason: ` + (e instanceof Error ? e.message : e));
+logger.err(`Cannot get difficulty adjustments from the database. Reason: ` + (e instanceof Error ? e.message : e), logger.tags.mining);
 throw e;
 }
 }
@@ -93,27 +93,27 @@ class DifficultyAdjustmentsRepository {
 const [rows]: any[] = await DB.query(`SELECT height FROM difficulty_adjustments`);
 return rows.map(block => block.height);
 } catch (e: any) {
-logger.err(`Cannot get difficulty adjustment block heights. Reason: ${e instanceof Error ? e.message : e}`);
+logger.err(`Cannot get difficulty adjustment block heights. Reason: ${e instanceof Error ? e.message : e}`, logger.tags.mining);
 throw e;
 }
 }
 public async $deleteAdjustementsFromHeight(height: number): Promise<void> {
 try {
-logger.info(`Delete newer difficulty adjustments from height ${height} from the database`);
+logger.info(`Delete newer difficulty adjustments from height ${height} from the database`, logger.tags.mining);
 await DB.query(`DELETE FROM difficulty_adjustments WHERE height >= ?`, [height]);
 } catch (e: any) {
-logger.err(`Cannot delete difficulty adjustments from the database. Reason: ${e instanceof Error ? e.message : e}`);
+logger.err(`Cannot delete difficulty adjustments from the database. Reason: ${e instanceof Error ? e.message : e}`, logger.tags.mining);
 throw e;
 }
 }
 public async $deleteLastAdjustment(): Promise<void> {
 try {
-logger.info(`Delete last difficulty adjustment from the database`);
+logger.info(`Delete last difficulty adjustment from the database`, logger.tags.mining);
 await DB.query(`DELETE FROM difficulty_adjustments ORDER BY time LIMIT 1`);
 } catch (e: any) {
-logger.err(`Cannot delete last difficulty adjustment from the database. Reason: ${e instanceof Error ? e.message : e}`);
+logger.err(`Cannot delete last difficulty adjustment from the database. Reason: ${e instanceof Error ? e.message : e}`, logger.tags.mining);
 throw e;
 }
 }

View File

@@ -25,7 +25,7 @@ class HashratesRepository {
 try {
 await DB.query(query);
 } catch (e: any) {
-logger.err('Cannot save indexed hashrate into db. Reason: ' + (e instanceof Error ? e.message : e));
+logger.err('Cannot save indexed hashrate into db. Reason: ' + (e instanceof Error ? e.message : e), logger.tags.mining);
 throw e;
 }
 }
@@ -51,7 +51,7 @@ class HashratesRepository {
 const [rows]: any[] = await DB.query(query);
 return rows;
 } catch (e) {
-logger.err('Cannot fetch network hashrate history. Reason: ' + (e instanceof Error ? e.message : e));
+logger.err('Cannot fetch network hashrate history. Reason: ' + (e instanceof Error ? e.message : e), logger.tags.mining);
 throw e;
 }
 }
@@ -78,7 +78,7 @@ class HashratesRepository {
 const [rows]: any[] = await DB.query(query);
 return rows;
 } catch (e) {
-logger.err('Cannot fetch network hashrate history. Reason: ' + (e instanceof Error ? e.message : e));
+logger.err('Cannot fetch network hashrate history. Reason: ' + (e instanceof Error ? e.message : e), logger.tags.mining);
 throw e;
 }
 }
@@ -93,7 +93,7 @@ class HashratesRepository {
 const [rows]: any[] = await DB.query(query);
 return rows.map(row => row.timestamp);
 } catch (e) {
-logger.err('Cannot retreive indexed weekly hashrate timestamps. Reason: ' + (e instanceof Error ? e.message : e));
+logger.err('Cannot retreive indexed weekly hashrate timestamps. Reason: ' + (e instanceof Error ? e.message : e), logger.tags.mining);
 throw e;
 }
 }
@@ -128,7 +128,7 @@ class HashratesRepository {
 const [rows]: any[] = await DB.query(query);
 return rows;
 } catch (e) {
-logger.err('Cannot fetch weekly pools hashrate history. Reason: ' + (e instanceof Error ? e.message : e));
+logger.err('Cannot fetch weekly pools hashrate history. Reason: ' + (e instanceof Error ? e.message : e), logger.tags.mining);
 throw e;
 }
 }
@@ -158,7 +158,7 @@ class HashratesRepository {
 const [rows]: any[] = await DB.query(query, [pool.id]);
 boundaries = rows[0];
 } catch (e) {
-logger.err('Cannot fetch hashrate start/end timestamps for this pool. Reason: ' + (e instanceof Error ? e.message : e));
+logger.err('Cannot fetch hashrate start/end timestamps for this pool. Reason: ' + (e instanceof Error ? e.message : e), logger.tags.mining);
 }
 // Get hashrates entries between boundaries
@@ -173,7 +173,7 @@ class HashratesRepository {
 const [rows]: any[] = await DB.query(query, [boundaries.firstTimestamp, boundaries.lastTimestamp, pool.id]);
 return rows;
 } catch (e) {
-logger.err('Cannot fetch pool hashrate history for this pool. Reason: ' + (e instanceof Error ? e.message : e));
+logger.err('Cannot fetch pool hashrate history for this pool. Reason: ' + (e instanceof Error ? e.message : e), logger.tags.mining);
 throw e;
 }
 }
@@ -192,7 +192,7 @@ class HashratesRepository {
 }
 return rows[0]['number'];
 } catch (e) {
-logger.err(`Cannot retrieve last indexing run for ${key}. Reason: ` + (e instanceof Error ? e.message : e));
+logger.err(`Cannot retrieve last indexing run for ${key}. Reason: ` + (e instanceof Error ? e.message : e), logger.tags.mining);
 throw e;
 }
 }
@@ -201,7 +201,7 @@ class HashratesRepository {
 * Delete most recent data points for re-indexing
 */
 public async $deleteLastEntries() {
-logger.info(`Delete latest hashrates data points from the database`);
+logger.info(`Delete latest hashrates data points from the database`, logger.tags.mining);
 try {
 const [rows]: any[] = await DB.query(`SELECT MAX(hashrate_timestamp) as timestamp FROM hashrates GROUP BY type`);
@@ -212,7 +212,7 @@ class HashratesRepository {
 mining.lastHashrateIndexingDate = null;
 mining.lastWeeklyHashrateIndexingDate = null;
 } catch (e) {
-logger.err('Cannot delete latest hashrates data points. Reason: ' + (e instanceof Error ? e.message : e));
+logger.err('Cannot delete latest hashrates data points. Reason: ' + (e instanceof Error ? e.message : e), logger.tags.mining);
 }
 }
@@ -228,7 +228,7 @@ class HashratesRepository {
 mining.lastHashrateIndexingDate = null;
 mining.lastWeeklyHashrateIndexingDate = null;
 } catch (e) {
-logger.err('Cannot delete latest hashrates data points. Reason: ' + (e instanceof Error ? e.message : e));
+logger.err('Cannot delete latest hashrates data points. Reason: ' + (e instanceof Error ? e.message : e), logger.tags.mining);
 }
 }
 }

View File

@@ -1,6 +1,5 @@
 import DB from '../database';
 import logger from '../logger';
-import { IConversionRates } from '../mempool.interfaces';
 import priceUpdater from '../tasks/price-updater';
 export interface ApiPrice {
@@ -13,6 +12,16 @@ export interface ApiPrice {
 AUD: number,
 JPY: number,
 }
+const ApiPriceFields = `
+UNIX_TIMESTAMP(time) as time,
+USD,
+EUR,
+GBP,
+CAD,
+CHF,
+AUD,
+JPY
+`;
 export interface ExchangeRates {
 USDEUR: number,
@@ -39,7 +48,7 @@ export const MAX_PRICES = {
 };
 class PricesRepository {
-public async $savePrices(time: number, prices: IConversionRates): Promise<void> {
+public async $savePrices(time: number, prices: ApiPrice): Promise<void> {
 if (prices.USD === -1) {
 // Some historical price entries have no USD prices, so we just ignore them to avoid future UX issues
 // As of today there are only 4 (on 2013-09-05, 2013-0909, 2013-09-12 and 2013-09-26) so that's fine
@@ -60,77 +69,115 @@ class PricesRepository {
 VALUE (FROM_UNIXTIME(?), ?, ?, ?, ?, ?, ?, ? )`,
 [time, prices.USD, prices.EUR, prices.GBP, prices.CAD, prices.CHF, prices.AUD, prices.JPY]
 );
-} catch (e: any) {
+} catch (e) {
 logger.err(`Cannot save exchange rate into db. Reason: ` + (e instanceof Error ? e.message : e));
 throw e;
 }
 }
 public async $getOldestPriceTime(): Promise<number> {
-const [oldestRow] = await DB.query(`SELECT UNIX_TIMESTAMP(time) as time from prices WHERE USD != 0 ORDER BY time LIMIT 1`);
+const [oldestRow] = await DB.query(`
+SELECT UNIX_TIMESTAMP(time) AS time
+FROM prices
+ORDER BY time
+LIMIT 1
+`);
 return oldestRow[0] ? oldestRow[0].time : 0;
 }
 public async $getLatestPriceId(): Promise<number | null> {
-const [oldestRow] = await DB.query(`SELECT id from prices WHERE USD != 0 ORDER BY time DESC LIMIT 1`);
-return oldestRow[0] ? oldestRow[0].id : null;
-}
-public async $getLatestPriceTime(): Promise<number> {
-const [oldestRow] = await DB.query(`SELECT UNIX_TIMESTAMP(time) as time from prices WHERE USD != 0 ORDER BY time DESC LIMIT 1`);
-return oldestRow[0] ? oldestRow[0].time : 0;
-}
-public async $getPricesTimes(): Promise<number[]> {
-const [times]: any[] = await DB.query(`SELECT UNIX_TIMESTAMP(time) as time from prices WHERE USD != 0 ORDER BY time`);
-return times.map(time => time.time);
-}
-public async $getPricesTimesAndId(): Promise<number[]> {
-const [times]: any[] = await DB.query(`SELECT UNIX_TIMESTAMP(time) as time, id, USD from prices ORDER BY time`);
-return times;
-}
-public async $getLatestConversionRates(): Promise<any> {
-const [rates]: any[] = await DB.query(`
-SELECT USD, EUR, GBP, CAD, CHF, AUD, JPY
-FROM prices
-ORDER BY time DESC
-LIMIT 1`
-);
-if (!rates || rates.length === 0) {
-return priceUpdater.getEmptyPricesObj();
-}
-return rates[0];
-}
+const [oldestRow] = await DB.query(`
+SELECT id
+FROM prices
+ORDER BY time DESC
+LIMIT 1`
+);
+return oldestRow[0] ? oldestRow[0].id : null;
+}
+public async $getLatestPriceTime(): Promise<number> {
+const [oldestRow] = await DB.query(`
+SELECT UNIX_TIMESTAMP(time) AS time
+FROM prices
+ORDER BY time DESC
+LIMIT 1`
+);
+return oldestRow[0] ? oldestRow[0].time : 0;
+}
+public async $getPricesTimes(): Promise<number[]> {
+const [times] = await DB.query(`
+SELECT UNIX_TIMESTAMP(time) AS time
+FROM prices
+WHERE USD != -1
+ORDER BY time
+`);
+if (!Array.isArray(times)) {
+return [];
+}
+return times.map(time => time.time);
+}
+public async $getPricesTimesAndId(): Promise<{time: number, id: number, USD: number}[]> {
+const [times] = await DB.query(`
+SELECT
+UNIX_TIMESTAMP(time) AS time,
+id,
+USD
+FROM prices
+ORDER BY time
+`);
+return times as {time: number, id: number, USD: number}[];
+}
+public async $getLatestConversionRates(): Promise<ApiPrice> {
+const [rates] = await DB.query(`
+SELECT ${ApiPriceFields}
+FROM prices
+ORDER BY time DESC
+LIMIT 1`
+);
+if (!Array.isArray(rates) || rates.length === 0) {
+return priceUpdater.getEmptyPricesObj();
+}
+return rates[0] as ApiPrice;
+}
 public async $getNearestHistoricalPrice(timestamp: number | undefined): Promise<Conversion | null> {
 try {
-const [rates]: any[] = await DB.query(`
-SELECT *, UNIX_TIMESTAMP(time) AS time
+const [rates] = await DB.query(`
+SELECT ${ApiPriceFields}
 FROM prices
 WHERE UNIX_TIMESTAMP(time) < ?
 ORDER BY time DESC
 LIMIT 1`,
 [timestamp]
 );
-if (!rates) {
+if (!Array.isArray(rates)) {
 throw Error(`Cannot get single historical price from the database`);
 }
 // Compute fiat exchange rates
-const latestPrice = await this.$getLatestConversionRates();
+let latestPrice = rates[0] as ApiPrice;
+if (latestPrice.USD === -1) {
+latestPrice = priceUpdater.getEmptyPricesObj();
+}
+const computeFx = (usd: number, other: number): number =>
+Math.round(Math.max(other, 0) / Math.max(usd, 1) * 100) / 100;
 const exchangeRates: ExchangeRates = {
-USDEUR: Math.round(latestPrice.EUR / latestPrice.USD * 100) / 100,
-USDGBP: Math.round(latestPrice.GBP / latestPrice.USD * 100) / 100,
-USDCAD: Math.round(latestPrice.CAD / latestPrice.USD * 100) / 100,
-USDCHF: Math.round(latestPrice.CHF / latestPrice.USD * 100) / 100,
-USDAUD: Math.round(latestPrice.AUD / latestPrice.USD * 100) / 100,
-USDJPY: Math.round(latestPrice.JPY / latestPrice.USD * 100) / 100,
+USDEUR: computeFx(latestPrice.USD, latestPrice.EUR),
+USDGBP: computeFx(latestPrice.USD, latestPrice.GBP),
+USDCAD: computeFx(latestPrice.USD, latestPrice.CAD),
+USDCHF: computeFx(latestPrice.USD, latestPrice.CHF),
+USDAUD: computeFx(latestPrice.USD, latestPrice.AUD),
+USDJPY: computeFx(latestPrice.USD, latestPrice.JPY),
 };
 return {
-prices: rates,
+prices: rates as ApiPrice[],
 exchangeRates: exchangeRates
 };
 } catch (e) {
@@ -141,28 +188,35 @@ class PricesRepository {
 public async $getHistoricalPrices(): Promise<Conversion | null> {
 try {
-const [rates]: any[] = await DB.query(`
-SELECT *, UNIX_TIMESTAMP(time) AS time
+const [rates] = await DB.query(`
+SELECT ${ApiPriceFields}
 FROM prices
 ORDER BY time DESC
 `);
-if (!rates) {
+if (!Array.isArray(rates)) {
 throw Error(`Cannot get average historical price from the database`);
 }
 // Compute fiat exchange rates
-const latestPrice: ApiPrice = rates[0];
+let latestPrice = rates[0] as ApiPrice;
+if (latestPrice.USD === -1) {
+latestPrice = priceUpdater.getEmptyPricesObj();
+}
+const computeFx = (usd: number, other: number): number =>
+Math.round(Math.max(other, 0) / Math.max(usd, 1) * 100) / 100;
 const exchangeRates: ExchangeRates = {
-USDEUR: Math.round(latestPrice.EUR / latestPrice.USD * 100) / 100,
-USDGBP: Math.round(latestPrice.GBP / latestPrice.USD * 100) / 100,
-USDCAD: Math.round(latestPrice.CAD / latestPrice.USD * 100) / 100,
-USDCHF: Math.round(latestPrice.CHF / latestPrice.USD * 100) / 100,
-USDAUD: Math.round(latestPrice.AUD / latestPrice.USD * 100) / 100,
-USDJPY: Math.round(latestPrice.JPY / latestPrice.USD * 100) / 100,
+USDEUR: computeFx(latestPrice.USD, latestPrice.EUR),
+USDGBP: computeFx(latestPrice.USD, latestPrice.GBP),
+USDCAD: computeFx(latestPrice.USD, latestPrice.CAD),
+USDCHF: computeFx(latestPrice.USD, latestPrice.CHF),
+USDAUD: computeFx(latestPrice.USD, latestPrice.AUD),
+USDJPY: computeFx(latestPrice.USD, latestPrice.JPY),
 };
 return {
-prices: rates,
+prices: rates as ApiPrice[],
 exchangeRates: exchangeRates
 };
 } catch (e) {
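
The repeated exchange-rate expressions are now funneled through the small computeFx helper, which clamps its inputs before dividing. A quick sketch of its behaviour with example values only (not code from this commit):

// The computeFx helper introduced above, with the same rounding and the
// divide-by-zero guard on the USD price.
const computeFx = (usd: number, other: number): number =>
  Math.round(Math.max(other, 0) / Math.max(usd, 1) * 100) / 100;

console.log(computeFx(20000, 18000)); // 0.9   (USDEUR when USD = 20000, EUR = 18000)
console.log(computeFx(0, 18000));     // 18000 rather than Infinity, thanks to Math.max(usd, 1)
console.log(computeFx(20000, -1));    // 0     (missing fiat price clamps to 0)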

View File

@@ -411,7 +411,7 @@ class LightningStatsImporter {
 }
 if (totalProcessed > 0) {
-logger.notice(`Lightning network stats historical import completed`, logger.tags.ln);
+logger.info(`Lightning network stats historical import completed`, logger.tags.ln);
 }
 } catch (e) {
 logger.err(`Lightning network stats historical failed. Reason: ${e instanceof Error ? e.message : e}`, logger.tags.ln);

View File

@@ -82,7 +82,7 @@ class PoolsUpdater {
 logger.err(`Could not migrate mining pools, rolling back. Exception: ${JSON.stringify(e)}`, logger.tags.mining);
 await DB.query('ROLLBACK;');
 }
-logger.notice('PoolsUpdater completed');
+logger.info('PoolsUpdater completed');
 } catch (e) {
 this.lastRun = now - (oneWeek - oneDay); // Try again in 24h instead of waiting next week

View File

@@ -8,9 +8,6 @@ class BitfinexApi implements PriceFeed {
 public url: string = 'https://api.bitfinex.com/v1/pubticker/BTC';
 public urlHist: string = 'https://api-pub.bitfinex.com/v2/candles/trade:{GRANULARITY}:tBTC{CURRENCY}/hist';
-constructor() {
-}
 public async $fetchPrice(currency): Promise<number> {
 const response = await query(this.url + currency);
 if (response && response['last_price']) {

View File

@@ -98,7 +98,7 @@ class KrakenApi implements PriceFeed {
 }
 if (Object.keys(priceHistory).length > 0) {
-logger.notice(`Inserted ${Object.keys(priceHistory).length} Kraken EUR, USD, GBP, JPY, CAD, CHF and AUD weekly price history into db`, logger.tags.mining);
+logger.info(`Inserted ${Object.keys(priceHistory).length} Kraken EUR, USD, GBP, JPY, CAD, CHF and AUD weekly price history into db`, logger.tags.mining);
 }
 }
 }

View File

@@ -2,8 +2,7 @@ import * as fs from 'fs';
 import path from 'path';
 import config from '../config';
 import logger from '../logger';
-import { IConversionRates } from '../mempool.interfaces';
-import PricesRepository, { MAX_PRICES } from '../repositories/PricesRepository';
+import PricesRepository, { ApiPrice, MAX_PRICES } from '../repositories/PricesRepository';
 import BitfinexApi from './price-feeds/bitfinex-api';
 import BitflyerApi from './price-feeds/bitflyer-api';
 import CoinbaseApi from './price-feeds/coinbase-api';
@@ -21,18 +20,18 @@ export interface PriceFeed {
 }
 export interface PriceHistory {
-[timestamp: number]: IConversionRates;
+[timestamp: number]: ApiPrice;
 }
 class PriceUpdater {
 public historyInserted = false;
-lastRun = 0;
-lastHistoricalRun = 0;
-running = false;
-feeds: PriceFeed[] = [];
-currencies: string[] = ['USD', 'EUR', 'GBP', 'CAD', 'CHF', 'AUD', 'JPY'];
-latestPrices: IConversionRates;
-private ratesChangedCallback: ((rates: IConversionRates) => void) | undefined;
+private lastRun = 0;
+private lastHistoricalRun = 0;
+private running = false;
+private feeds: PriceFeed[] = [];
+private currencies: string[] = ['USD', 'EUR', 'GBP', 'CAD', 'CHF', 'AUD', 'JPY'];
+private latestPrices: ApiPrice;
+private ratesChangedCallback: ((rates: ApiPrice) => void) | undefined;
 constructor() {
 this.latestPrices = this.getEmptyPricesObj();
@@ -44,8 +43,13 @@ class PriceUpdater {
 this.feeds.push(new GeminiApi());
 }
-public getEmptyPricesObj(): IConversionRates {
+public getLatestPrices(): ApiPrice {
+return this.latestPrices;
+}
+public getEmptyPricesObj(): ApiPrice {
 return {
+time: 0,
 USD: -1,
 EUR: -1,
 GBP: -1,
@@ -56,7 +60,7 @@ class PriceUpdater {
 };
 }
-public setRatesChangedCallback(fn: (rates: IConversionRates) => void) {
+public setRatesChangedCallback(fn: (rates: ApiPrice) => void): void {
 this.ratesChangedCallback = fn;
 }
@@ -156,6 +160,10 @@ class PriceUpdater {
 }
 this.lastRun = new Date().getTime() / 1000;
+if (this.latestPrices.USD === -1) {
+this.latestPrices = await PricesRepository.$getLatestConversionRates();
+}
 }
 /**
@@ -224,7 +232,7 @@ class PriceUpdater {
 // Group them by timestamp and currency, for example
 // grouped[123456789]['USD'] = [1, 2, 3, 4];
-const grouped: any = {};
+const grouped = {};
 for (const historicalEntry of historicalPrices) {
 for (const time in historicalEntry) {
 if (existingPriceTimes.includes(parseInt(time, 10))) {
@@ -249,7 +257,7 @@ class PriceUpdater {
 // Average prices and insert everything into the db
 let totalInserted = 0;
 for (const time in grouped) {
-const prices: IConversionRates = this.getEmptyPricesObj();
+const prices: ApiPrice = this.getEmptyPricesObj();
 for (const currency in grouped[time]) {
 if (grouped[time][currency].length === 0) {
 continue;
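
For context, the new getLatestPrices() accessor and the ApiPrice-typed callback are presumably consumed along these lines; the wiring below is an assumption for illustration (module paths included) and is not part of this commit:

// Assumed wiring (illustration only; import paths are hypothetical):
import priceUpdater from './tasks/price-updater';
import websocketHandler from './api/websocket-handler';

// Push fresh rates to websocket clients whenever the updater fetches new prices.
priceUpdater.setRatesChangedCallback((rates) => {
  websocketHandler.handleNewConversionRates(rates); // rates is an ApiPrice
});

// Read the cached prices through the getter that replaces the public latestPrices field.
const current = priceUpdater.getLatestPrices();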

View File

@@ -3,13 +3,15 @@
 {{ addPlus && satoshis >= 0 ? '+' : '' }}
 {{
 (
-(blockConversion.price[currency] >= 0 ? blockConversion.price[currency] : null) ??
-(blockConversion.price['USD'] * blockConversion.exchangeRates['USD' + currency]) ?? 0
+(blockConversion.price[currency] > -1 ? blockConversion.price[currency] : null) ??
+(blockConversion.price['USD'] > -1 ? blockConversion.price['USD'] * blockConversion.exchangeRates['USD' + currency] : null) ?? 0
 ) * satoshis / 100000000 | fiatCurrency : digitsInfo : currency
 }}
 </span>
 <ng-template #noblockconversion>
-<span class="fiat">{{ addPlus && satoshis >= 0 ? '+' : '' }}{{ (conversions ? conversions[currency] : 0) * satoshis / 100000000 | fiatCurrency : digitsInfo : currency }}</span>
+<span class="fiat">{{ addPlus && satoshis >= 0 ? '+' : '' }}
+{{ (conversions[currency] > -1 ? conversions[currency] : 0) * satoshis / 100000000 | fiatCurrency : digitsInfo : currency }}
+</span>
 </ng-template>
 </ng-container>
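
The template change tightens the fallback chain: use the block-time price in the requested currency only when it is a real price (greater than -1), otherwise derive it from USD only when a real USD price exists, otherwise show 0. The equivalent logic in plain TypeScript (a sketch, not code from the commit):

// Sketch of the template's fiat fallback, with -1 as the "no price" sentinel.
function fiatValue(price: Record<string, number>, exchangeRates: Record<string, number>, currency: string, satoshis: number): number {
  const direct = price[currency] > -1 ? price[currency] : null;
  const viaUsd = price['USD'] > -1 ? price['USD'] * exchangeRates['USD' + currency] : null;
  return (direct ?? viaUsd ?? 0) * satoshis / 100000000;
}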

View File

@@ -1,14 +1,14 @@
 <span class="green-color" *ngIf="blockConversion; else noblockconversion">
 {{
 (
-(blockConversion.price[currency] >= 0 ? blockConversion.price[currency] : null) ??
-(blockConversion.price['USD'] * blockConversion.exchangeRates['USD' + currency]) ?? 0
+(blockConversion.price[currency] > -1 ? blockConversion.price[currency] : null) ??
+(blockConversion.price['USD'] > -1 ? blockConversion.price['USD'] * blockConversion.exchangeRates['USD' + currency] : null) ?? 0
 ) * value / 100000000 | fiatCurrency : digitsInfo : currency
 }}
 </span>
 <ng-template #noblockconversion>
 <span class="green-color" *ngIf="(conversions$ | async) as conversions">
-{{ (conversions[currency] ?? conversions['USD'] ?? 0) * value / 100000000 | fiatCurrency : digitsInfo : currency }}
+{{ (conversions[currency] > -1 ? conversions[currency] : 0) * value / 100000000 | fiatCurrency : digitsInfo : currency }}
 </span>
 </ng-template>

View File

@@ -89,7 +89,7 @@ export class PriceService {
 return this.singlePriceObservable$.pipe(
 map((conversion) => {
 if (conversion.prices.length <= 0) {
-return this.getEmptyPrice();
+return undefined;
 }
 return {
 price: {
@@ -113,7 +113,7 @@ export class PriceService {
 return this.priceObservable$.pipe(
 map((conversion) => {
-if (!blockTimestamp) {
+if (!blockTimestamp || !conversion) {
 return undefined;
 }