Backfill node and capacity
parent 1f6008f269
commit faafa6db3b
@@ -1,13 +1,9 @@
-import config from './config';
-import logger from './logger';
 import DB from './database';
 import databaseMigration from './database-migration';
 import statsUpdater from './tasks/stats-updater.service';
 import nodeSyncService from './tasks/node-sync.service';
 import server from './server';
 
-logger.notice(`Mempool Server is running on port ${config.MEMPOOL.HTTP_PORT}`);
-
 class LightningServer {
   constructor() {
     this.init();
@@ -17,8 +13,8 @@ class LightningServer {
     await DB.checkDbConnection();
     await databaseMigration.$initializeOrMigrateDatabase();
 
-    statsUpdater.startService();
-    nodeSyncService.startService();
+    nodeSyncService.$startService();
+    statsUpdater.$startService();
 
     server.startServer();
   }
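Both call sites are renamed to the $-prefixed form used throughout the rest of this diff ($initializeOrMigrateDatabase, $startService, $updateNodes), and the node sync service is now started before the stats updater. A minimal sketch of that naming convention, with hypothetical names, assuming the prefix simply marks Promise-returning methods:

// Sketch only: a hypothetical service illustrating the `$` prefix that this
// diff applies to Promise-returning methods.
class ExampleService {
  public async $startService(): Promise<void> {
    await this.$loadInitialState();
  }

  private async $loadInitialState(): Promise<void> {
    // asynchronous setup work would go here
  }
}

new ExampleService().$startService();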
@@ -8,15 +8,14 @@ import bitcoinClient from '../api/bitcoin/bitcoin-client';
 import bitcoinApi from '../api/bitcoin/bitcoin-api-factory';
 import config from '../config';
 import { IEsploraApi } from '../api/bitcoin/esplora-api.interface';
-import e from 'express';
 
 class NodeSyncService {
   constructor() {}
 
-  public async startService() {
+  public async $startService() {
     logger.info('Starting node sync service');
 
-    this.$updateNodes();
+    await this.$updateNodes();
 
     setInterval(async () => {
       await this.$updateNodes();
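Besides the rename and the removal of the unused express import, the first $updateNodes() run is now awaited, so the initial sync finishes before the hourly interval is armed. A self-contained sketch of this run-once-then-repeat pattern, using a hypothetical $syncOnce task in place of $updateNodes():

// Sketch of the "run immediately, then repeat on an interval" pattern from
// the node sync service. `$syncOnce` is a hypothetical stand-in for `$updateNodes`.
async function $syncOnce(): Promise<void> {
  console.log(`sync ran at ${new Date().toISOString()}`);
}

async function $startService(): Promise<void> {
  await $syncOnce();               // first run completes before the loop is armed
  setInterval(async () => {
    await $syncOnce();             // then once an hour, matching the diff's interval
  }, 1000 * 60 * 60);
}

$startService();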
@@ -6,22 +6,26 @@ import lightningApi from '../api/lightning/lightning-api-factory';
 class LightningStatsUpdater {
   constructor() {}
 
-  public async startService() {
+  public async $startService() {
     logger.info('Starting Stats service');
 
     const now = new Date();
     const nextHourInterval = new Date(now.getFullYear(), now.getMonth(), now.getDate(), Math.floor(now.getHours() / 1) + 1, 0, 0, 0);
     const difference = nextHourInterval.getTime() - now.getTime();
 
-    setTimeout(() => {
-      this.$logLightningStats();
-      setInterval(() => {
-        this.$logLightningStats();
-        this.$logNodeStatsDaily();
+    // setTimeout(() => {
+      setInterval(async () => {
+        await this.$runTasks();
       }, 1000 * 60 * 60);
-    }, difference);
+    //}, difference);
 
-    this.$logNodeStatsDaily();
+    await this.$runTasks();
+  }
+
+  private async $runTasks() {
+    await this.$populateHistoricalData();
+    await this.$logLightningStatsDaily();
+    await this.$logNodeStatsDaily();
   }
 
   private async $logNodeStatsDaily() {
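The now / nextHourInterval / difference block computes the delay until the top of the next hour (Math.floor(now.getHours() / 1) is simply now.getHours()). With the setTimeout commented out in this commit, $runTasks() runs once immediately and then hourly from process start rather than aligned to the clock, and difference is computed but no longer used. A standalone sketch of that delay calculation, not part of the diff itself:

// Standalone sketch of the "milliseconds until the next full hour" math that
// the commented-out setTimeout in $startService() relied on.
const now = new Date();
const nextHourInterval = new Date(
  now.getFullYear(), now.getMonth(), now.getDate(), now.getHours() + 1, 0, 0, 0);
const difference = nextHourInterval.getTime() - now.getTime();
console.log(`Next full hour starts in ${Math.round(difference / 1000)} seconds`);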
@@ -54,8 +58,91 @@ class LightningStatsUpdater {
     }
   }
 
-  private async $logLightningStats() {
+  // We only run this on first launch
+  private async $populateHistoricalData() {
+    const startTime = '2018-01-13';
     try {
+      const [rows]: any = await DB.query(`SELECT COUNT(*) FROM statistics`);
+      // Only store once per day
+      if (rows[0]['COUNT(*)'] > 0) {
+        return;
+      }
+      const [channels]: any = await DB.query(`SELECT capacity, created, closing_date FROM channels ORDER BY created ASC`);
+
+      let date: Date = new Date(startTime);
+      const currentDate = new Date();
+
+      while (date < currentDate) {
+        let totalCapacity = 0;
+        let channelsCount = 0;
+        for (const channel of channels) {
+          if (new Date(channel.created) > date) {
+            break;
+          }
+          if (channel.closing_date !== null && new Date(channel.closing_date) < date) {
+            continue;
+          }
+          totalCapacity += channel.capacity;
+          channelsCount++;
+        }
+
+        const query = `INSERT INTO statistics(
+          added,
+          channel_count,
+          node_count,
+          total_capacity
+        )
+        VALUES (FROM_UNIXTIME(?), ?, ?, ?)`;
+
+        await DB.query(query, [
+          date.getTime() / 1000,
+          channelsCount,
+          0,
+          totalCapacity,
+        ]);
+
+        // Add one day and continue
+        date.setDate(date.getDate() + 1);
+      }
+
+      const [nodes]: any = await DB.query(`SELECT first_seen FROM nodes ORDER BY first_seen ASC`);
+      date = new Date(startTime);
+
+      while (date < currentDate) {
+        let nodeCount = 0;
+        for (const node of nodes) {
+          if (new Date(node.first_seen) > date) {
+            break;
+          }
+          nodeCount++;
+        }
+
+        const query = `UPDATE statistics SET node_count = ? WHERE added = FROM_UNIXTIME(?)`;
+
+        await DB.query(query, [
+          nodeCount,
+          date.getTime() / 1000,
+        ]);
+
+        // Add one day and continue
+        date.setDate(date.getDate() + 1);
+      }
+
+      logger.debug('Historical stats populated.');
+    } catch (e) {
+      logger.err('$populateHistoricalData() error: ' + (e instanceof Error ? e.message : e));
+    }
+  }
+
+  private async $logLightningStatsDaily() {
+    const currentDate = new Date().toISOString().split('T')[0];
+    try {
+      const [state]: any = await DB.query(`SELECT string FROM state WHERE name = 'last_node_stats'`);
+      // Only store once per day
+      if (state[0].string === currentDate) {
+        return;
+      }
+
       const networkGraph = await lightningApi.$getNetworkGraph();
       let total_capacity = 0;
       for (const channel of networkGraph.channels) {
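$populateHistoricalData() backfills the statistics table one day at a time from 2018-01-13: for each day it counts the channels that were already created but not yet closed and sums their capacity, then a second pass fills in node_count from nodes.first_seen by matching on the same FROM_UNIXTIME(added) timestamp (date.getTime() / 1000 converts the JavaScript millisecond timestamp to the seconds MySQL expects). A self-contained sketch of the per-day accumulation, detached from the database and using the same column names as the diff; the sample rows and the short date range are hypothetical:

// Sketch of the backfill's per-day channel accumulation, without the DB writes.
// Row shape mirrors the columns selected from the `channels` table in the diff.
interface ChannelRow {
  capacity: number;
  created: string;              // creation date; rows are ordered by this column
  closing_date: string | null;  // null while the channel is still open
}

function statsForDay(channels: ChannelRow[], day: Date): { channelsCount: number; totalCapacity: number } {
  let totalCapacity = 0;
  let channelsCount = 0;
  for (const channel of channels) {
    if (new Date(channel.created) > day) {
      break;                    // ordered by `created`, so nothing later can match
    }
    if (channel.closing_date !== null && new Date(channel.closing_date) < day) {
      continue;                 // already closed before this day
    }
    totalCapacity += channel.capacity;
    channelsCount++;
  }
  return { channelsCount, totalCapacity };
}

// Hypothetical sample rows; the real code loads these from the `channels` table.
const channels: ChannelRow[] = [
  { capacity: 1_000_000, created: '2018-01-10', closing_date: null },
  { capacity: 500_000, created: '2018-01-14', closing_date: '2018-01-20' },
];

// Walk one day at a time, as the real backfill does before INSERTing into
// `statistics` with FROM_UNIXTIME(?). The real loop runs up to the current date.
for (let day = new Date('2018-01-13'); day < new Date('2018-01-22'); day.setDate(day.getDate() + 1)) {
  const { channelsCount, totalCapacity } = statsForDay(channels, day);
  console.log(`${day.toISOString().split('T')[0]}: ${channelsCount} channels, capacity ${totalCapacity}`);
}

The COUNT(*) guard at the top of $populateHistoricalData() means the backfill only runs against an empty statistics table, matching the "We only run this on first launch" comment.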