Merge pull request #2255 from mempool/nymkappa/feature/update-node-stats-often
Update latest stats every 10 minutes
Commit 76f261eb38
@@ -32,6 +32,7 @@ interface IConfig {
     ENABLED: boolean;
     BACKEND: 'lnd' | 'cln' | 'ldk';
     TOPOLOGY_FOLDER: string;
+    NODE_STATS_REFRESH_INTERVAL: number;
   };
   LND: {
     TLS_CERT_PATH: string;
@@ -183,6 +184,7 @@ const defaults: IConfig = {
     'ENABLED': false,
     'BACKEND': 'lnd',
     'TOPOLOGY_FOLDER': '',
+    'NODE_STATS_REFRESH_INTERVAL': 600,
   },
   'LND': {
     'TLS_CERT_PATH': '',
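For operators, the new option sits alongside the existing LIGHTNING settings in the backend configuration. A sketch of the corresponding JSON entry, with the surrounding keys taken from the defaults above (the config file itself is not part of this diff, so the exact layout is assumed; the values are illustrative). The default of 600 seconds gives the 10-minute cadence in the PR title:

    "LIGHTNING": {
      "ENABLED": true,
      "BACKEND": "lnd",
      "TOPOLOGY_FOLDER": "",
      "NODE_STATS_REFRESH_INTERVAL": 600
    }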
@@ -2,25 +2,14 @@ import DB from '../../database';
 import logger from '../../logger';
 import lightningApi from '../../api/lightning/lightning-api-factory';
 import LightningStatsImporter from './sync-tasks/stats-importer';
+import config from '../../config';

 class LightningStatsUpdater {
-  hardCodedStartTime = '2018-01-12';
-
   public async $startService(): Promise<void> {
     logger.info('Starting Lightning Stats service');

-    LightningStatsImporter.$run();
-
-    setTimeout(() => {
-      this.$runTasks();
-    }, this.timeUntilMidnight());
-  }
-
-  private timeUntilMidnight(): number {
-    const date = new Date();
-    this.setDateMidnight(date);
-    date.setUTCHours(24);
-    return date.getTime() - new Date().getTime();
+    // LightningStatsImporter.$run();
+    this.$runTasks();
   }

   private setDateMidnight(date: Date): void {
@@ -35,20 +24,18 @@ class LightningStatsUpdater {

     setTimeout(() => {
       this.$runTasks();
-    }, this.timeUntilMidnight());
+    }, 1000 * config.LIGHTNING.NODE_STATS_REFRESH_INTERVAL);
   }

+  /**
+   * Update the latest entry for each node every config.LIGHTNING.NODE_STATS_REFRESH_INTERVAL seconds
+   */
   private async $logStatsDaily(): Promise<void> {
     const date = new Date();
     this.setDateMidnight(date);
     date.setUTCHours(24);

-    const [rows] = await DB.query(`SELECT UNIX_TIMESTAMP(MAX(added)) as lastAdded from lightning_stats`);
-    if ((rows[0].lastAdded ?? 0) === date.getTime() / 1000) {
-      return;
-    }
-
-    logger.info(`Running lightning daily stats log...`);
+    logger.info(`Updating latest node stats`);
     const networkGraph = await lightningApi.$getNetworkGraph();
     LightningStatsImporter.computeNetworkStats(date.getTime() / 1000, networkGraph);
   }
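Taken together, the two stats-updater hunks replace the once-a-day midnight schedule (and the hard-coded start date) with a self-rescheduling loop driven by the config value. A minimal standalone TypeScript sketch of the resulting pattern, with simplified names; updateLatestStats stands in for the PR's $logStatsDaily:

    // Each run arms the next timer, so runs never overlap: a slow update
    // simply pushes the next run back by the full interval.
    const REFRESH_INTERVAL = 600; // config.LIGHTNING.NODE_STATS_REFRESH_INTERVAL

    async function runTasks(): Promise<void> {
      try {
        await updateLatestStats();
      } finally {
        setTimeout(runTasks, 1000 * REFRESH_INTERVAL);
      }
    }

    async function updateLatestStats(): Promise<void> {
      // placeholder for the real work ($logStatsDaily in the PR)
    }

    runTasks();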
@@ -41,7 +41,7 @@ class LightningStatsImporter {
     const [channels]: any[] = await DB.query('SELECT short_id from channels;');
     logger.info('Caching funding txs for currently existing channels');
     await fundingTxFetcher.$fetchChannelsFundingTxs(channels.map(channel => channel.short_id));

     await this.$importHistoricalLightningStats();
   }

@@ -114,15 +114,15 @@ class LightningStatsImporter {
         };
       }

-      nodeStats[channel.node1_pub].capacity += Math.round(tx.value * 100000000);
-      nodeStats[channel.node1_pub].channels++;
-      nodeStats[channel.node2_pub].capacity += Math.round(tx.value * 100000000);
-      nodeStats[channel.node2_pub].channels++;
-
       if (!alreadyCountedChannels[short_id]) {
         capacity += Math.round(tx.value * 100000000);
         capacities.push(Math.round(tx.value * 100000000));
         alreadyCountedChannels[short_id] = true;
+
+        nodeStats[channel.node1_pub].capacity += Math.round(tx.value * 100000000);
+        nodeStats[channel.node1_pub].channels++;
+        nodeStats[channel.node2_pub].capacity += Math.round(tx.value * 100000000);
+        nodeStats[channel.node2_pub].channels++;
       }

       if (channel.node1_policy !== undefined) { // Coming from the node
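This hunk also fixes a double-counting bug: the per-node capacity and channel counters were previously incremented on every appearance of a channel, so a channel seen more than once in the graph inflated both endpoints' totals. Moving the increments inside the alreadyCountedChannels guard counts each channel exactly once. The guard pattern in isolation, as a standalone sketch with simplified names (not the project's actual types):

    // Count each channel once per import run, no matter how often the
    // same short_id shows up in the graph dump.
    const alreadyCounted: { [shortId: string]: boolean } = {};
    const nodeCapacity: { [pubkey: string]: number } = {};

    function countChannel(shortId: string, node1: string, node2: string, sats: number): void {
      if (alreadyCounted[shortId]) {
        return; // duplicate edge: counting again would inflate both node totals
      }
      alreadyCounted[shortId] = true;
      nodeCapacity[node1] = (nodeCapacity[node1] ?? 0) + sats;
      nodeCapacity[node2] = (nodeCapacity[node2] ?? 0) + sats;
    }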
@@ -154,24 +154,40 @@ class LightningStatsImporter {
     const medFeeRate = feeRates.sort((a, b) => b - a)[Math.round(feeRates.length / 2 - 1)];
     const medBaseFee = baseFees.sort((a, b) => b - a)[Math.round(baseFees.length / 2 - 1)];
     const avgCapacity = Math.round(capacity / capacities.length);

     let query = `INSERT INTO lightning_stats(
       added,
       channel_count,
       node_count,
       total_capacity,
       tor_nodes,
       clearnet_nodes,
       unannounced_nodes,
       clearnet_tor_nodes,
       avg_capacity,
       avg_fee_rate,
       avg_base_fee_mtokens,
       med_capacity,
       med_fee_rate,
       med_base_fee_mtokens
     )
-      VALUES (FROM_UNIXTIME(?), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`;
+      VALUES (FROM_UNIXTIME(?), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+      ON DUPLICATE KEY UPDATE
+        added = FROM_UNIXTIME(?),
+        channel_count = ?,
+        node_count = ?,
+        total_capacity = ?,
+        tor_nodes = ?,
+        clearnet_nodes = ?,
+        unannounced_nodes = ?,
+        clearnet_tor_nodes = ?,
+        avg_capacity = ?,
+        avg_fee_rate = ?,
+        avg_base_fee_mtokens = ?,
+        med_capacity = ?,
+        med_fee_rate = ?,
+        med_base_fee_mtokens = ?
+    `;

     await DB.query(query, [
       timestamp,
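The plain INSERT becomes an upsert so that re-running within the same stats period refreshes the existing row instead of failing on a duplicate key. MySQL only takes the ON DUPLICATE KEY UPDATE branch when the new row would collide with a PRIMARY KEY or UNIQUE index, so this presumes lightning_stats carries such a constraint on added (the schema is not part of this diff). The mechanics on a minimal, hypothetical two-column table, as a sketch in the same style as the code above:

    // Hypothetical table: CREATE TABLE kv (k VARCHAR(32) PRIMARY KEY, v INT);
    // The first call inserts a row; repeating the same key updates it in place.
    async function upsert(
      query: (sql: string, params: unknown[]) => Promise<void>,
      k: string,
      v: number,
    ): Promise<void> {
      await query(
        'INSERT INTO kv (k, v) VALUES (?, ?) ON DUPLICATE KEY UPDATE v = ?',
        [k, v, v], // the '?' in the UPDATE clause binds its own parameter
      );
    }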
@@ -188,22 +204,44 @@ class LightningStatsImporter {
       medCapacity,
       medFeeRate,
       medBaseFee,
+      timestamp,
+      capacities.length,
+      networkGraph.nodes.length,
+      capacity,
+      torNodes,
+      clearnetNodes,
+      unannouncedNodes,
+      clearnetTorNodes,
+      avgCapacity,
+      avgFeeRate,
+      avgBaseFee,
+      medCapacity,
+      medFeeRate,
+      medBaseFee,
     ]);

     for (const public_key of Object.keys(nodeStats)) {
       query = `INSERT INTO node_stats(
         public_key,
         added,
         capacity,
         channels
       )
-        VALUES (?, FROM_UNIXTIME(?), ?, ?)`;
+        VALUES (?, FROM_UNIXTIME(?), ?, ?)
+        ON DUPLICATE KEY UPDATE
+          added = FROM_UNIXTIME(?),
+          capacity = ?,
+          channels = ?
+      `;

       await DB.query(query, [
         public_key,
         timestamp,
         nodeStats[public_key].capacity,
         nodeStats[public_key].channels,
+        timestamp,
+        nodeStats[public_key].capacity,
+        nodeStats[public_key].channels,
       ]);
     }

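Because the ? placeholders are purely positional, every value bound in the VALUES clause has to be bound again for the ON DUPLICATE KEY UPDATE clause, which is why both parameter arrays in this hunk repeat their fields. A hypothetical refactor sketch that derives the duplicated bindings instead of spelling them out, reusing the names from the hunk above:

    // INSERT binds (public_key, added, capacity, channels); the UPDATE
    // clause re-binds everything except the key column.
    const insertValues = [
      public_key,
      timestamp,
      nodeStats[public_key].capacity,
      nodeStats[public_key].channels,
    ];
    await DB.query(query, [...insertValues, ...insertValues.slice(1)]);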
@@ -278,7 +316,7 @@ class LightningStatsImporter {
       }
     }
     latestNodeCount = graph.nodes.length;

     const datestr = `${new Date(timestamp * 1000).toUTCString()} (${timestamp})`;
     logger.debug(`${datestr}: Found ${graph.nodes.length} nodes and ${graph.edges.length} channels`);

@@ -367,4 +405,4 @@ class LightningStatsImporter {
   }
 }

 export default new LightningStatsImporter;