import { Common } from '../api/common';
import DB from '../database';
import logger from '../logger';
import PoolsRepository from './PoolsRepository';
class HashratesRepository {
|
|
|
|
/**
|
|
|
|
* Save indexed block data in the database
|
|
|
|
*/
|
2022-02-21 17:34:07 +09:00
|
|
|
public async $saveHashrates(hashrates: any) {
|
2022-03-06 12:32:16 +01:00
|
|
|
if (hashrates.length === 0) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
2022-02-21 17:34:07 +09:00
|
|
|
let query = `INSERT INTO
|
2022-02-24 16:55:18 +09:00
|
|
|
hashrates(hashrate_timestamp, avg_hashrate, pool_id, share, type) VALUES`;
|
2022-02-19 20:45:02 +09:00
|
|
|
|
2022-02-21 17:34:07 +09:00
|
|
|
for (const hashrate of hashrates) {
|
2022-02-24 16:55:18 +09:00
|
|
|
query += ` (FROM_UNIXTIME(${hashrate.hashrateTimestamp}), ${hashrate.avgHashrate}, ${hashrate.poolId}, ${hashrate.share}, "${hashrate.type}"),`;
|
2022-02-21 17:34:07 +09:00
|
|
|
}
|
|
|
|
query = query.slice(0, -1);
|
2022-02-19 20:45:02 +09:00
|
|
|
|
2022-02-21 17:34:07 +09:00
|
|
|
try {
|
2022-04-12 15:15:57 +09:00
|
|
|
await DB.query(query);
|
2022-02-19 20:45:02 +09:00
|
|
|
} catch (e: any) {
|
2022-04-13 16:29:52 +09:00
|
|
|
logger.err('Cannot save indexed hashrate into db. Reason: ' + (e instanceof Error ? e.message : e));
|
2022-03-05 16:23:01 +01:00
|
|
|
throw e;
|
2022-02-19 20:45:02 +09:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-02-24 16:55:18 +09:00
|
|
|
public async $getNetworkDailyHashrate(interval: string | null): Promise<any[]> {
|
2022-02-19 22:09:35 +09:00
|
|
|
interval = Common.getSqlInterval(interval);
|
|
|
|
|
|
|
|
let query = `SELECT UNIX_TIMESTAMP(hashrate_timestamp) as timestamp, avg_hashrate as avgHashrate
|
|
|
|
FROM hashrates`;
|
|
|
|
|
|
|
|
if (interval) {
|
2022-02-24 16:55:18 +09:00
|
|
|
query += ` WHERE hashrate_timestamp BETWEEN DATE_SUB(NOW(), INTERVAL ${interval}) AND NOW()
|
2022-03-12 14:47:33 +01:00
|
|
|
AND hashrates.type = 'daily'`;
|
2022-02-24 16:55:18 +09:00
|
|
|
} else {
|
2022-03-12 14:47:33 +01:00
|
|
|
query += ` WHERE hashrates.type = 'daily'`;
|
2022-02-19 22:09:35 +09:00
|
|
|
}
|
|
|
|
|
2022-02-22 00:17:41 +09:00
|
|
|
query += ` ORDER by hashrate_timestamp`;
|
2022-02-21 12:22:20 +09:00
|
|
|
|
2022-03-06 16:44:09 +01:00
|
|
|
try {
|
2022-04-12 15:15:57 +09:00
|
|
|
const [rows]: any[] = await DB.query(query);
|
2022-03-06 16:44:09 +01:00
|
|
|
return rows;
|
|
|
|
} catch (e) {
|
2022-04-13 16:29:52 +09:00
|
|
|
logger.err('Cannot fetch network hashrate history. Reason: ' + (e instanceof Error ? e.message : e));
|
2022-03-06 16:44:09 +01:00
|
|
|
throw e;
|
|
|
|
}
|
2022-02-19 20:45:02 +09:00
|
|
|
}
|
2022-02-21 12:22:20 +09:00
|
|
|
|
2022-03-06 12:32:16 +01:00
|
|
|
public async $getWeeklyHashrateTimestamps(): Promise<number[]> {
|
2022-03-12 14:47:33 +01:00
|
|
|
const query = `SELECT UNIX_TIMESTAMP(hashrate_timestamp) as timestamp
|
|
|
|
FROM hashrates
|
|
|
|
WHERE type = 'weekly'
|
|
|
|
GROUP BY hashrate_timestamp`;
|
2022-03-06 12:32:16 +01:00
|
|
|
|
2022-03-06 16:44:09 +01:00
|
|
|
try {
|
2022-04-12 15:15:57 +09:00
|
|
|
const [rows]: any[] = await DB.query(query);
|
2022-03-06 16:44:09 +01:00
|
|
|
return rows.map(row => row.timestamp);
|
|
|
|
} catch (e) {
|
2022-04-13 16:29:52 +09:00
|
|
|
logger.err('Cannot retreive indexed weekly hashrate timestamps. Reason: ' + (e instanceof Error ? e.message : e));
|
2022-03-06 16:44:09 +01:00
|
|
|
throw e;
|
|
|
|
}
|
2022-03-06 12:32:16 +01:00
|
|
|
}
|
|
|
|
|
2022-02-24 16:55:18 +09:00
|
|
|
/**
|
|
|
|
* Returns the current biggest pool hashrate history
|
|
|
|
*/
|
|
|
|
public async $getPoolsWeeklyHashrate(interval: string | null): Promise<any[]> {
|
|
|
|
interval = Common.getSqlInterval(interval);
|
|
|
|
|
|
|
|
const topPoolsId = (await PoolsRepository.$getPoolsInfo('1w')).map((pool) => pool.poolId);
|
|
|
|
|
|
|
|
let query = `SELECT UNIX_TIMESTAMP(hashrate_timestamp) as timestamp, avg_hashrate as avgHashrate, share, pools.name as poolName
|
|
|
|
FROM hashrates
|
|
|
|
JOIN pools on pools.id = pool_id`;
|
|
|
|
|
|
|
|
if (interval) {
|
|
|
|
query += ` WHERE hashrate_timestamp BETWEEN DATE_SUB(NOW(), INTERVAL ${interval}) AND NOW()
|
|
|
|
AND hashrates.type = 'weekly'
|
|
|
|
AND pool_id IN (${topPoolsId})`;
|
|
|
|
} else {
|
|
|
|
query += ` WHERE hashrates.type = 'weekly'
|
|
|
|
AND pool_id IN (${topPoolsId})`;
|
|
|
|
}
|
|
|
|
|
|
|
|
query += ` ORDER by hashrate_timestamp, FIELD(pool_id, ${topPoolsId})`;
|
|
|
|
|
2022-03-06 16:44:09 +01:00
|
|
|
try {
|
2022-04-12 15:15:57 +09:00
|
|
|
const [rows]: any[] = await DB.query(query);
|
2022-03-06 16:44:09 +01:00
|
|
|
return rows;
|
|
|
|
} catch (e) {
|
2022-04-13 16:29:52 +09:00
|
|
|
logger.err('Cannot fetch weekly pools hashrate history. Reason: ' + (e instanceof Error ? e.message : e));
|
2022-03-06 16:44:09 +01:00
|
|
|
throw e;
|
|
|
|
}
|
2022-02-24 16:55:18 +09:00
|
|
|
}
|
|
|
|
|
2022-03-08 12:50:47 +01:00
|
|
|
/**
|
|
|
|
* Returns a pool hashrate history
|
|
|
|
*/
|
2022-04-23 15:50:45 +09:00
|
|
|
public async $getPoolWeeklyHashrate(slug: string): Promise<any[]> {
|
2022-03-25 14:22:22 +09:00
|
|
|
const pool = await PoolsRepository.$getPool(slug);
|
|
|
|
if (!pool) {
|
|
|
|
throw new Error(`This mining pool does not exist`);
|
|
|
|
}
|
2022-03-08 12:50:47 +01:00
|
|
|
|
2022-03-08 13:54:04 +01:00
|
|
|
// Find hashrate boundaries
|
|
|
|
let query = `SELECT MIN(hashrate_timestamp) as firstTimestamp, MAX(hashrate_timestamp) as lastTimestamp
|
2022-04-12 15:15:57 +09:00
|
|
|
FROM hashrates
|
|
|
|
JOIN pools on pools.id = pool_id
|
2022-03-08 16:55:49 +01:00
|
|
|
WHERE hashrates.type = 'weekly' AND pool_id = ? AND avg_hashrate != 0
|
|
|
|
ORDER by hashrate_timestamp LIMIT 1`;
|
2022-03-08 13:54:04 +01:00
|
|
|
|
|
|
|
let boundaries = {
|
|
|
|
firstTimestamp: '1970-01-01',
|
|
|
|
lastTimestamp: '9999-01-01'
|
|
|
|
};
|
2022-03-25 14:22:22 +09:00
|
|
|
|
2022-03-08 13:54:04 +01:00
|
|
|
try {
|
2022-04-12 15:15:57 +09:00
|
|
|
const [rows]: any[] = await DB.query(query, [pool.id]);
|
2022-03-08 13:54:04 +01:00
|
|
|
boundaries = rows[0];
|
|
|
|
} catch (e) {
|
2022-04-13 16:29:52 +09:00
|
|
|
logger.err('Cannot fetch hashrate start/end timestamps for this pool. Reason: ' + (e instanceof Error ? e.message : e));
|
2022-03-08 12:50:47 +01:00
|
|
|
}
|
|
|
|
|
2022-03-08 13:54:04 +01:00
|
|
|
// Get hashrates entries between boundaries
|
|
|
|
query = `SELECT UNIX_TIMESTAMP(hashrate_timestamp) as timestamp, avg_hashrate as avgHashrate, share, pools.name as poolName
|
|
|
|
FROM hashrates
|
|
|
|
JOIN pools on pools.id = pool_id
|
|
|
|
WHERE hashrates.type = 'weekly' AND hashrate_timestamp BETWEEN ? AND ?
|
2022-03-08 16:55:49 +01:00
|
|
|
AND pool_id = ?
|
2022-03-08 13:54:04 +01:00
|
|
|
ORDER by hashrate_timestamp`;
|
2022-03-08 12:50:47 +01:00
|
|
|
|
|
|
|
try {
|
2022-04-12 15:15:57 +09:00
|
|
|
const [rows]: any[] = await DB.query(query, [boundaries.firstTimestamp, boundaries.lastTimestamp, pool.id]);
|
2022-03-08 12:50:47 +01:00
|
|
|
return rows;
|
|
|
|
} catch (e) {
|
2022-04-13 16:29:52 +09:00
|
|
|
logger.err('Cannot fetch pool hashrate history for this pool. Reason: ' + (e instanceof Error ? e.message : e));
|
2022-03-08 12:50:47 +01:00
|
|
|
throw e;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-03-15 13:07:06 +01:00
|
|
|
/**
|
|
|
|
* Set latest run timestamp
|
|
|
|
*/
|
2022-04-30 17:54:49 +09:00
|
|
|
public async $setLatestRun(key: string, val: number) {
|
2022-03-06 12:32:16 +01:00
|
|
|
const query = `UPDATE state SET number = ? WHERE name = ?`;
|
2022-02-24 20:20:18 +09:00
|
|
|
|
2022-03-06 16:44:09 +01:00
|
|
|
try {
|
2022-04-30 17:54:49 +09:00
|
|
|
await DB.query(query, [val, key]);
|
2022-03-06 16:44:09 +01:00
|
|
|
} catch (e) {
|
2022-04-30 17:54:49 +09:00
|
|
|
logger.err(`Cannot set last indexing run for ${key}. Reason: ` + (e instanceof Error ? e.message : e));
|
2022-04-13 16:29:52 +09:00
|
|
|
throw e;
|
2022-03-06 16:44:09 +01:00
|
|
|
}
|
2022-02-21 12:22:20 +09:00
|
|
|
}
|
|
|
|
|
2022-03-15 13:07:06 +01:00
|
|
|
/**
|
|
|
|
* Get latest run timestamp
|
|
|
|
*/
|
2022-04-30 17:54:49 +09:00
|
|
|
public async $getLatestRun(key: string): Promise<number> {
|
2022-03-06 12:32:16 +01:00
|
|
|
const query = `SELECT number FROM state WHERE name = ?`;
|
2022-03-06 16:44:09 +01:00
|
|
|
|
|
|
|
try {
|
2022-04-12 15:15:57 +09:00
|
|
|
const [rows]: any[] = await DB.query(query, [key]);
|
2022-03-06 16:44:09 +01:00
|
|
|
|
2022-03-08 16:55:49 +01:00
|
|
|
if (rows.length === 0) {
|
|
|
|
return 0;
|
|
|
|
}
|
2022-03-06 16:44:09 +01:00
|
|
|
return rows[0]['number'];
|
|
|
|
} catch (e) {
|
2022-04-30 17:54:49 +09:00
|
|
|
logger.err(`Cannot retrieve last indexing run for ${key}. Reason: ` + (e instanceof Error ? e.message : e));
|
2022-03-06 16:44:09 +01:00
|
|
|
throw e;
|
|
|
|
}
|
2022-02-21 12:22:20 +09:00
|
|
|
}
|
2022-03-15 13:07:06 +01:00
|
|
|
|
|
|
|
/**
|
|
|
|
* Delete most recent data points for re-indexing
|
|
|
|
*/
|
|
|
|
public async $deleteLastEntries() {
|
2022-04-13 16:29:52 +09:00
|
|
|
logger.info(`Delete latest hashrates data points from the database`);
|
2022-03-15 13:07:06 +01:00
|
|
|
|
|
|
|
try {
|
2022-04-12 15:15:57 +09:00
|
|
|
const [rows]: any[] = await DB.query(`SELECT MAX(hashrate_timestamp) as timestamp FROM hashrates GROUP BY type`);
|
2022-03-15 13:07:06 +01:00
|
|
|
for (const row of rows) {
|
2022-04-12 15:15:57 +09:00
|
|
|
await DB.query(`DELETE FROM hashrates WHERE hashrate_timestamp = ?`, [row.timestamp]);
|
2022-03-15 13:07:06 +01:00
|
|
|
}
|
|
|
|
// Re-run the hashrate indexing to fill up missing data
|
2022-04-30 17:54:49 +09:00
|
|
|
await this.$setLatestRun('last_hashrates_indexing', 0);
|
|
|
|
await this.$setLatestRun('last_weekly_hashrates_indexing', 0);
|
2022-03-15 13:07:06 +01:00
|
|
|
} catch (e) {
|
2022-04-13 16:29:52 +09:00
|
|
|
logger.err('Cannot delete latest hashrates data points. Reason: ' + (e instanceof Error ? e.message : e));
|
2022-03-15 13:07:06 +01:00
|
|
|
}
|
|
|
|
}
|
2022-04-18 17:49:22 +09:00
|
|
|
|
|
|
|
/**
|
|
|
|
* Delete hashrates from the database from timestamp
|
|
|
|
*/
|
|
|
|
public async $deleteHashratesFromTimestamp(timestamp: number) {
|
|
|
|
logger.info(`Delete newer hashrates from timestamp ${new Date(timestamp * 1000).toUTCString()} from the database`);
|
|
|
|
|
|
|
|
try {
|
|
|
|
await DB.query(`DELETE FROM hashrates WHERE hashrate_timestamp >= FROM_UNIXTIME(?)`, [timestamp]);
|
|
|
|
// Re-run the hashrate indexing to fill up missing data
|
2022-04-30 17:54:49 +09:00
|
|
|
await this.$setLatestRun('last_hashrates_indexing', 0);
|
|
|
|
await this.$setLatestRun('last_weekly_hashrates_indexing', 0);
|
2022-04-18 17:49:22 +09:00
|
|
|
} catch (e) {
|
|
|
|
logger.err('Cannot delete latest hashrates data points. Reason: ' + (e instanceof Error ? e.message : e));
|
|
|
|
}
|
|
|
|
}
|
2022-02-19 20:45:02 +09:00
|
|
|
}
// Shared singleton instance used by the rest of the backend.
export default new HashratesRepository();