Merge branch 'master' into script-display-2

This commit is contained in:
Antoni Spaanderman
2022-04-11 21:46:06 +02:00
47 changed files with 1896 additions and 339 deletions

View File

@@ -25,7 +25,7 @@ class BitcoinApi implements AbstractBitcoinApi {
.then((transaction: IBitcoinApi.Transaction) => {
if (skipConversion) {
transaction.vout.forEach((vout) => {
vout.value = vout.value * 100000000;
vout.value = Math.round(vout.value * 100000000);
});
return transaction;
}
@@ -143,7 +143,7 @@ class BitcoinApi implements AbstractBitcoinApi {
esploraTransaction.vout = transaction.vout.map((vout) => {
return {
value: vout.value * 100000000,
value: Math.round(vout.value * 100000000),
scriptpubkey: vout.scriptPubKey.hex,
scriptpubkey_address: vout.scriptPubKey && vout.scriptPubKey.address ? vout.scriptPubKey.address
: vout.scriptPubKey.addresses ? vout.scriptPubKey.addresses[0] : '',
@@ -236,7 +236,7 @@ class BitcoinApi implements AbstractBitcoinApi {
} else {
mempoolEntry = await this.$getMempoolEntry(transaction.txid);
}
transaction.fee = mempoolEntry.fees.base * 100000000;
transaction.fee = Math.round(mempoolEntry.fees.base * 100000000);
return transaction;
}

View File

@@ -23,6 +23,7 @@ class Blocks {
private newBlockCallbacks: ((block: BlockExtended, txIds: string[], transactions: TransactionExtended[]) => void)[] = [];
private blockIndexingStarted = false;
public blockIndexingCompleted = false;
public reindexFlag = true; // Always re-index the latest indexed data in case the node went offline with an invalid block tip (reorg)
constructor() { }
@@ -135,6 +136,12 @@ class Blocks {
} else {
pool = await poolsRepository.$getUnknownPool();
}
if (!pool) { // Something is wrong with the pools table, ignore pool indexing
logger.err('Unable to find pool, nor getting the unknown pool. Is the "pools" table empty?');
return blockExtended;
}
blockExtended.extras.pool = {
id: pool.id,
name: pool.name,
@@ -183,16 +190,19 @@ class Blocks {
* [INDEXING] Index all blocks metadata for the mining dashboard
*/
public async $generateBlockDatabase() {
if (this.blockIndexingStarted) {
if (this.blockIndexingStarted && !this.reindexFlag) {
return;
}
this.reindexFlag = false;
const blockchainInfo = await bitcoinClient.getBlockchainInfo();
if (blockchainInfo.blocks !== blockchainInfo.headers) { // Wait for node to sync
return;
}
this.blockIndexingStarted = true;
this.blockIndexingCompleted = false;
try {
let currentBlockHeight = blockchainInfo.blocks;
@@ -310,6 +320,12 @@ class Blocks {
if (Common.indexingEnabled()) {
await blocksRepository.$saveBlockInDatabase(blockExtended);
// If the last 10 blocks chain is not valid, re-index them (reorg)
const chainValid = await blocksRepository.$validateRecentBlocks();
if (!chainValid) {
this.reindexFlag = true;
}
}
if (block.height % 2016 === 0) {

View File

@@ -5,6 +5,7 @@ import HashratesRepository from '../repositories/HashratesRepository';
import bitcoinClient from './bitcoin/bitcoin-client';
import logger from '../logger';
import blocks from './blocks';
import { Common } from './common';
class Mining {
hashrateIndexingStarted = false;
@@ -13,6 +14,26 @@ class Mining {
constructor() {
}
/**
 * Get historical block reward and total fee
 */
public async $getHistoricalBlockFees(interval: string | null = null): Promise<any> {
  // Resolve the averaging granularity and the SQL time window for the interval
  const div = this.getTimeRange(interval);
  const sqlInterval = Common.getSqlInterval(interval);
  return await BlocksRepository.$getHistoricalBlockFees(div, sqlInterval);
}
/**
 * Get historical block rewards
 */
public async $getHistoricalBlockRewards(interval: string | null = null): Promise<any> {
  // Resolve the averaging granularity and the SQL time window for the interval
  const div = this.getTimeRange(interval);
  const sqlInterval = Common.getSqlInterval(interval);
  return await BlocksRepository.$getHistoricalBlockRewards(div, sqlInterval);
}
/**
* Generate high level overview of the pool ranks and general stats
*/
@@ -45,8 +66,8 @@ class Mining {
const blockCount: number = await BlocksRepository.$blockCount(null, interval);
poolsStatistics['blockCount'] = blockCount;
const blockHeightTip = await bitcoinClient.getBlockCount();
const lastBlockHashrate = await bitcoinClient.getNetworkHashPs(144, blockHeightTip);
const totalBlock24h: number = await BlocksRepository.$blockCount(null, '24h');
const lastBlockHashrate = await bitcoinClient.getNetworkHashPs(totalBlock24h);
poolsStatistics['lastEstimatedHashrate'] = lastBlockHashrate;
return poolsStatistics;
@@ -62,12 +83,30 @@ class Mining {
}
const blockCount: number = await BlocksRepository.$blockCount(pool.id);
const emptyBlocksCount = await BlocksRepository.$countEmptyBlocks(pool.id);
const totalBlock: number = await BlocksRepository.$blockCount(null, null);
const blockCount24h: number = await BlocksRepository.$blockCount(pool.id, '24h');
const totalBlock24h: number = await BlocksRepository.$blockCount(null, '24h');
const blockCount1w: number = await BlocksRepository.$blockCount(pool.id, '1w');
const totalBlock1w: number = await BlocksRepository.$blockCount(null, '1w');
const currentEstimatedkHashrate = await bitcoinClient.getNetworkHashPs(totalBlock24h);
return {
pool: pool,
blockCount: blockCount,
emptyBlocks: emptyBlocksCount.length > 0 ? emptyBlocksCount[0]['count'] : 0,
blockCount: {
'all': blockCount,
'24h': blockCount24h,
'1w': blockCount1w,
},
blockShare: {
'all': blockCount / totalBlock,
'24h': blockCount24h / totalBlock24h,
'1w': blockCount1w / totalBlock1w,
},
estimatedHashrate: currentEstimatedkHashrate * (blockCount24h / totalBlock24h),
reportedHashrate: null,
};
}
@@ -303,6 +342,21 @@ class Mining {
return date;
}
/**
 * Map an interval keyword to the sampling granularity (in seconds) used
 * when averaging historical block data; unknown or null intervals fall
 * back to a 24h granularity.
 */
private getTimeRange(interval: string | null): number {
  const granularities: { [interval: string]: number } = {
    '3y': 43200, // 12h
    '2y': 28800, // 8h
    '1y': 28800, // 8h
    '6m': 10800, // 3h
    '3m': 7200,  // 2h
    '1m': 1800,  // 30min
    '1w': 300,   // 5min
    '3d': 1,
    '24h': 1,
  };
  const seconds = interval !== null ? granularities[interval] : undefined;
  return seconds !== undefined ? seconds : 86400; // default: 24h
}
}
export default new Mining();

View File

@@ -17,23 +17,11 @@ class PoolsParser {
/**
* Parse the pools.json file, consolidate the data and dump it into the database
*/
public async migratePoolsJson() {
public async migratePoolsJson(poolsJson: object) {
if (['mainnet', 'testnet', 'signet'].includes(config.MEMPOOL.NETWORK) === false) {
return;
}
logger.debug('Importing pools.json to the database, open ./pools.json');
let poolsJson: object = {};
try {
const fileContent: string = readFileSync('./pools.json', 'utf8');
poolsJson = JSON.parse(fileContent);
} catch (e) {
logger.err('Unable to open ./pools.json, does the file exist?');
await this.insertUnknownPool();
return;
}
// First we save every entries without paying attention to pool duplication
const poolsDuplicated: Pool[] = [];

View File

@@ -22,12 +22,13 @@ import loadingIndicators from './api/loading-indicators';
import mempool from './api/mempool';
import elementsParser from './api/liquid/elements-parser';
import databaseMigration from './api/database-migration';
import poolsParser from './api/pools-parser';
import syncAssets from './sync-assets';
import icons from './api/liquid/icons';
import { Common } from './api/common';
import mining from './api/mining';
import HashratesRepository from './repositories/HashratesRepository';
import BlocksRepository from './repositories/BlocksRepository';
import poolsUpdater from './tasks/pools-updater';
class Server {
private wss: WebSocket.Server | undefined;
@@ -99,7 +100,6 @@ class Server {
await databaseMigration.$initializeOrMigrateDatabase();
if (Common.indexingEnabled()) {
await this.$resetHashratesIndexingState();
await poolsParser.migratePoolsJson();
}
} catch (e) {
throw new Error(e instanceof Error ? e.message : 'Error');
@@ -179,6 +179,11 @@ class Server {
}
try {
await poolsUpdater.updatePoolsJson();
if (blocks.reindexFlag) {
await BlocksRepository.$deleteBlocks(10);
await HashratesRepository.$deleteLastEntries();
}
blocks.$generateBlockDatabase();
await mining.$generateNetworkHashrateHistory();
await mining.$generatePoolHashrateHistory();
@@ -311,6 +316,8 @@ class Server {
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/hashrate', routes.$getHistoricalHashrate)
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/hashrate/:interval', routes.$getHistoricalHashrate)
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/reward-stats/:blockCount', routes.$getRewardStats)
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/fees/:interval', routes.$getHistoricalBlockFees)
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/rewards/:interval', routes.$getHistoricalBlockRewards)
;
}

View File

@@ -10,9 +10,11 @@ class BlocksRepository {
* Save indexed block data in the database
*/
public async $saveBlockInDatabase(block: BlockExtended) {
const connection = await DB.getConnection();
let connection;
try {
connection = await DB.getConnection();
const query = `INSERT INTO blocks(
height, hash, blockTimestamp, size,
weight, tx_count, coinbase_raw, difficulty,
@@ -72,8 +74,9 @@ class BlocksRepository {
return [];
}
const connection = await DB.getConnection();
let connection;
try {
connection = await DB.getConnection();
const [rows]: any[] = await connection.query(`
SELECT height
FROM blocks
@@ -118,8 +121,9 @@ class BlocksRepository {
query += ` GROUP by pools.id`;
const connection = await DB.getConnection();
let connection;
try {
connection = await DB.getConnection();
const [rows] = await connection.query(query, params);
connection.release();
@@ -155,8 +159,9 @@ class BlocksRepository {
query += ` blockTimestamp BETWEEN DATE_SUB(NOW(), INTERVAL ${interval}) AND NOW()`;
}
const connection = await DB.getConnection();
let connection;
try {
connection = await DB.getConnection();
const [rows] = await connection.query(query, params);
connection.release();
@@ -194,8 +199,9 @@ class BlocksRepository {
}
query += ` blockTimestamp BETWEEN FROM_UNIXTIME('${from}') AND FROM_UNIXTIME('${to}')`;
const connection = await DB.getConnection();
let connection;
try {
connection = await DB.getConnection();
const [rows] = await connection.query(query, params);
connection.release();
@@ -216,8 +222,9 @@ class BlocksRepository {
ORDER BY height
LIMIT 1;`;
const connection = await DB.getConnection();
let connection;
try {
connection = await DB.getConnection();
const [rows]: any[] = await connection.query(query);
connection.release();
@@ -257,8 +264,9 @@ class BlocksRepository {
query += ` ORDER BY height DESC
LIMIT 10`;
const connection = await DB.getConnection();
let connection;
try {
connection = await DB.getConnection();
const [rows] = await connection.query(query, params);
connection.release();
@@ -279,8 +287,9 @@ class BlocksRepository {
* Get one block by height
*/
public async $getBlockByHeight(height: number): Promise<object | null> {
const connection = await DB.getConnection();
let connection;
try {
connection = await DB.getConnection();
const [rows]: any[] = await connection.query(`
SELECT *, UNIX_TIMESTAMP(blocks.blockTimestamp) as blockTimestamp,
pools.id as pool_id, pools.name as pool_name, pools.link as pool_link, pools.slug as pool_slug,
@@ -310,8 +319,6 @@ class BlocksRepository {
public async $getBlocksDifficulty(interval: string | null): Promise<object[]> {
interval = Common.getSqlInterval(interval);
const connection = await DB.getConnection();
// :D ... Yeah don't ask me about this one https://stackoverflow.com/a/40303162
// Basically, using temporary user defined fields, we are able to extract all
// difficulty adjustments from the blocks tables.
@@ -344,14 +351,17 @@ class BlocksRepository {
ORDER BY t.height
`;
let connection;
try {
connection = await DB.getConnection();
const [rows]: any[] = await connection.query(query);
connection.release();
for (let row of rows) {
for (const row of rows) {
delete row['rn'];
}
connection.release();
return rows;
} catch (e) {
connection.release();
@@ -360,23 +370,6 @@ class BlocksRepository {
}
}
/**
* Return oldest blocks height
*/
public async $getOldestIndexedBlockHeight(): Promise<number> {
const connection = await DB.getConnection();
try {
const [rows]: any[] = await connection.query(`SELECT MIN(height) as minHeight FROM blocks`);
connection.release();
return rows[0].minHeight;
} catch (e) {
connection.release();
logger.err('$getOldestIndexedBlockHeight() error' + (e instanceof Error ? e.message : e));
throw e;
}
}
/**
* Get general block stats
*/
@@ -403,6 +396,107 @@ class BlocksRepository {
throw e;
}
}
/*
 * Check if the last 10 blocks chain is valid
 *
 * Returns false when a divergence (reorg) is detected between two
 * consecutive indexed blocks; returns true otherwise, including on any
 * database error (we don't want a transient db failure to trigger
 * re-indexing).
 */
public async $validateRecentBlocks(): Promise<boolean> {
  let connection;
  try {
    connection = await DB.getConnection();
    const [lastBlocks] = await connection.query(`SELECT height, hash, previous_block_hash FROM blocks ORDER BY height DESC LIMIT 10`);
    connection.release();

    // Walk the chain tip-first: each block must reference the hash of the next (older) one
    for (let i = 0; i < lastBlocks.length - 1; ++i) {
      if (lastBlocks[i].previous_block_hash !== lastBlocks[i + 1].hash) {
        logger.notice(`Chain divergence detected at block ${lastBlocks[i].height}, re-indexing most recent data`);
        return false;
      }
    }

    return true;
  } catch (e) {
    // Guarded: getConnection() itself may have thrown, leaving connection
    // undefined — calling release() on it would throw and mask the error.
    if (connection) {
      connection.release();
    }
    return true; // Don't do anything if there is a db error
  }
}
/**
 * Delete $count blocks from the database
 *
 * Removes the $count most recently indexed blocks (used when re-indexing
 * after a reorg). Errors are logged but not re-thrown.
 */
public async $deleteBlocks(count: number) {
  let connection;
  try {
    connection = await DB.getConnection();
    logger.debug(`Delete ${count} most recent indexed blocks from the database`);
    await connection.query(`DELETE FROM blocks ORDER BY height DESC LIMIT ${count};`);
  } catch (e) {
    logger.err('$deleteBlocks() error' + (e instanceof Error ? e.message : e));
  } finally {
    // Guarded release: the unconditional release after the try/catch would
    // throw (uncaught) on an undefined connection if getConnection() failed.
    if (connection) {
      connection.release();
    }
  }
}
/**
 * Get the historical averaged block reward and total fees
 *
 * @param div - bucket size in seconds used to group/average blocks
 * @param interval - optional SQL interval string limiting the time window
 * @returns rows of { timestamp, avg_fees }
 * @throws re-throws any database error after logging it
 */
public async $getHistoricalBlockFees(div: number, interval: string | null): Promise<any> {
  let connection;
  try {
    connection = await DB.getConnection();

    let query = `SELECT CAST(AVG(UNIX_TIMESTAMP(blockTimestamp)) as INT) as timestamp,
      CAST(AVG(fees) as INT) as avg_fees
      FROM blocks`;

    if (interval !== null) {
      query += ` WHERE blockTimestamp BETWEEN DATE_SUB(NOW(), INTERVAL ${interval}) AND NOW()`;
    }

    query += ` GROUP BY UNIX_TIMESTAMP(blockTimestamp) DIV ${div}`;

    const [rows]: any = await connection.query(query);
    connection.release();

    return rows;
  } catch (e) {
    // Guarded: connection is undefined when getConnection() itself threw
    if (connection) {
      connection.release();
    }
    logger.err('$getHistoricalBlockFees() error: ' + (e instanceof Error ? e.message : e));
    throw e;
  }
}
/**
 * Get the historical averaged block rewards
 *
 * @param div - bucket size in seconds used to group/average blocks
 * @param interval - optional SQL interval string limiting the time window
 * @returns rows of { timestamp, avg_rewards }
 * @throws re-throws any database error after logging it
 */
public async $getHistoricalBlockRewards(div: number, interval: string | null): Promise<any> {
  let connection;
  try {
    connection = await DB.getConnection();

    let query = `SELECT CAST(AVG(UNIX_TIMESTAMP(blockTimestamp)) as INT) as timestamp,
      CAST(AVG(reward) as INT) as avg_rewards
      FROM blocks`;

    if (interval !== null) {
      query += ` WHERE blockTimestamp BETWEEN DATE_SUB(NOW(), INTERVAL ${interval}) AND NOW()`;
    }

    query += ` GROUP BY UNIX_TIMESTAMP(blockTimestamp) DIV ${div}`;

    const [rows]: any = await connection.query(query);
    connection.release();

    return rows;
  } catch (e) {
    // Guarded: connection is undefined when getConnection() itself threw
    if (connection) {
      connection.release();
    }
    logger.err('$getHistoricalBlockRewards() error: ' + (e instanceof Error ? e.message : e));
    throw e;
  }
}
}
export default new BlocksRepository();

View File

@@ -169,6 +169,9 @@ class HashratesRepository {
}
}
/**
* Set latest run timestamp
*/
public async $setLatestRunTimestamp(key: string, val: any = null) {
const connection = await DB.getConnection();
const query = `UPDATE state SET number = ? WHERE name = ?`;
@@ -181,6 +184,9 @@ class HashratesRepository {
}
}
/**
* Get latest run timestamp
*/
public async $getLatestRunTimestamp(key: string): Promise<number> {
const connection = await DB.getConnection();
const query = `SELECT number FROM state WHERE name = ?`;
@@ -199,6 +205,29 @@ class HashratesRepository {
throw e;
}
}
/**
 * Delete most recent data points for re-indexing
 *
 * Removes the newest hashrate row of every `type`, then resets the
 * indexing-run timestamps so the next indexing pass re-computes the
 * deleted data. Errors are logged but not re-thrown.
 */
public async $deleteLastEntries() {
  logger.debug(`Delete latest hashrates data points from the database`);

  let connection;
  try {
    connection = await DB.getConnection();
    // One MAX(hashrate_timestamp) row per hashrate type
    const [rows] = await connection.query(`SELECT MAX(hashrate_timestamp) as timestamp FROM hashrates GROUP BY type`);
    for (const row of rows) {
      await connection.query(`DELETE FROM hashrates WHERE hashrate_timestamp = ?`, [row.timestamp]);
    }
    // Re-run the hashrate indexing to fill up missing data
    await this.$setLatestRunTimestamp('last_hashrates_indexing', 0);
    await this.$setLatestRunTimestamp('last_weekly_hashrates_indexing', 0);
  } catch (e) {
    logger.err('$deleteLastEntries() error' + (e instanceof Error ? e.message : e));
  } finally {
    // Guarded release: the unconditional release after the try/catch would
    // throw (uncaught) on an undefined connection if getConnection() failed.
    if (connection) {
      connection.release();
    }
  }
}
}
export default new HashratesRepository();

View File

@@ -638,6 +638,38 @@ class Routes {
}
}
/**
 * Serve averaged historical block fees, together with the oldest indexed
 * block timestamp so clients know how far back the data reaches.
 */
public async $getHistoricalBlockFees(req: Request, res: Response) {
  try {
    const fees = await mining.$getHistoricalBlockFees(req.params.interval ?? null);
    const oldestTimestamp = await BlocksRepository.$oldestBlockTimestamp();

    const fiveMinutesMs = 1000 * 300;
    res.header('Pragma', 'public');
    res.header('Cache-control', 'public');
    res.setHeader('Expires', new Date(Date.now() + fiveMinutesMs).toUTCString());

    res.json({
      oldestIndexedBlockTimestamp: oldestTimestamp,
      blockFees: fees,
    });
  } catch (e) {
    res.status(500).send(e instanceof Error ? e.message : e);
  }
}
/**
 * Serve averaged historical block rewards, together with the oldest
 * indexed block timestamp so clients know how far back the data reaches.
 */
public async $getHistoricalBlockRewards(req: Request, res: Response) {
  try {
    const rewards = await mining.$getHistoricalBlockRewards(req.params.interval ?? null);
    const oldestTimestamp = await BlocksRepository.$oldestBlockTimestamp();

    const fiveMinutesMs = 1000 * 300;
    res.header('Pragma', 'public');
    res.header('Cache-control', 'public');
    res.setHeader('Expires', new Date(Date.now() + fiveMinutesMs).toUTCString());

    res.json({
      oldestIndexedBlockTimestamp: oldestTimestamp,
      blockRewards: rewards,
    });
  } catch (e) {
    res.status(500).send(e instanceof Error ? e.message : e);
  }
}
public async getBlock(req: Request, res: Response) {
try {
const result = await bitcoinApi.$getBlock(req.params.hash);

View File

@@ -0,0 +1,148 @@
const https = require('https');
import poolsParser from "../api/pools-parser";
import config from "../config";
import { DB } from "../database";
import logger from "../logger";
/**
 * Maintain the most recent version of pools.json
 */
class PoolsUpdater {
  lastRun: number = 0;

  constructor() {
  }

  /**
   * Refresh pools.json from github if our stored sha is outdated.
   * Runs at most once a week; on failure, retries after 24h.
   */
  public async updatePoolsJson() {
    if (['mainnet', 'testnet', 'signet'].includes(config.MEMPOOL.NETWORK) === false) {
      return;
    }

    const oneWeek = 604800;
    const oneDay = 86400;

    const now = new Date().getTime() / 1000;
    if (now - this.lastRun < oneWeek) { // Execute the PoolsUpdate only once a week, or upon restart
      return;
    }
    this.lastRun = now;

    try {
      const dbSha = await this.getShaFromDb();
      const githubSha = await this.fetchPoolsSha(); // Fetch pools.json sha from github
      if (githubSha === undefined) {
        return;
      }

      logger.debug(`Pools.json sha | Current: ${dbSha} | Github: ${githubSha}`);
      if (dbSha !== undefined && dbSha === githubSha) {
        return;
      }

      logger.warn('Pools.json is outdated, fetch latest from github');
      const poolsJson = await this.fetchPools();
      await poolsParser.migratePoolsJson(poolsJson);
      await this.updateDBSha(githubSha);
      logger.notice('PoolsUpdater completed');

    } catch (e) {
      this.lastRun = now - (oneWeek - oneDay); // Try again in 24h instead of waiting next week
      logger.err('PoolsUpdater failed. Will try again in 24h. Error: ' + e);
    }
  }

  /**
   * Fetch pools.json from github repo
   */
  private async fetchPools(): Promise<object> {
    const response = await this.query('/repos/mempool/mining-pools/contents/pools.json');
    // Github content API returns the file base64-encoded in the 'content' field
    return JSON.parse(Buffer.from(response['content'], 'base64').toString('utf8'));
  }

  /**
   * Save the latest pools.json sha into the db
   */
  private async updateDBSha(githubSha: string) {
    let connection;
    try {
      connection = await DB.getConnection();
      await connection.query('DELETE FROM state where name="pools_json_sha"');
      // Parameterized so externally-sourced (github API) data is never
      // interpolated into the SQL string
      await connection.query(`INSERT INTO state VALUES('pools_json_sha', NULL, ?)`, [githubSha]);
      connection.release();
    } catch (e) {
      logger.err('Unable to save github pools.json sha into the DB, error: ' + e);
      // Guarded: connection is undefined when getConnection() itself threw
      if (connection) {
        connection.release();
      }
      return undefined;
    }
  }

  /**
   * Fetch our latest pools.json sha from the db
   */
  private async getShaFromDb(): Promise<string | undefined> {
    let connection;
    try {
      connection = await DB.getConnection();
      const [rows] = await connection.query('SELECT string FROM state WHERE name="pools_json_sha"');
      connection.release();
      return (rows.length > 0 ? rows[0].string : undefined);
    } catch (e) {
      logger.err('Unable to fetch pools.json sha from DB, error: ' + e);
      // Guarded: connection is undefined when getConnection() itself threw
      if (connection) {
        connection.release();
      }
      return undefined;
    }
  }

  /**
   * Fetch our latest pools.json sha from github
   */
  private async fetchPoolsSha(): Promise<string | undefined> {
    const response = await this.query('/repos/mempool/mining-pools/git/trees/master');

    for (const file of response['tree']) {
      if (file['path'] === 'pools.json') {
        return file['sha'];
      }
    }

    logger.err('Unable to find latest pools.json sha from github');
    return undefined;
  }

  /**
   * Http request wrapper
   * @returns the parsed JSON response body
   */
  private query(path: string): Promise<any> {
    return new Promise((resolve, reject) => {
      const options = {
        host: 'api.github.com',
        path: path,
        method: 'GET',
        headers: { 'user-agent': 'node.js' }
      };

      logger.debug('Querying: api.github.com' + path);

      const request = https.get(options, (response) => {
        const chunks_of_data: any[] = [];

        response.on('data', (fragments) => {
          chunks_of_data.push(fragments);
        });

        response.on('end', () => {
          // JSON.parse may throw on an invalid body; reject instead of
          // letting the exception escape the stream handler uncaught
          try {
            resolve(JSON.parse(Buffer.concat(chunks_of_data).toString()));
          } catch (e) {
            reject(e);
          }
        });

        response.on('error', (error) => {
          reject(error);
        });
      });

      request.on('error', (error) => {
        logger.err('Query failed with error: ' + error);
        reject(error);
      });
    });
  }
}

export default new PoolsUpdater();