Merge branch 'master' into nymkappa/bugfix/missing-data-node-page
commit f7cbe30a16
@@ -116,7 +116,7 @@ class NodesApi {
       const latestDate = rows[0].maxAdded;

       const query = `
-        SELECT nodes.public_key, nodes.alias, node_stats.capacity, node_stats.channels
+        SELECT nodes.public_key, IF(nodes.alias = '', SUBSTRING(nodes.public_key, 1, 20), alias) as alias, node_stats.capacity, node_stats.channels
         FROM node_stats
         JOIN nodes ON nodes.public_key = node_stats.public_key
         WHERE added = FROM_UNIXTIME(${latestDate})
@@ -138,7 +138,7 @@ class NodesApi {
       const latestDate = rows[0].maxAdded;

       const query = `
-        SELECT nodes.public_key, nodes.alias, node_stats.capacity, node_stats.channels
+        SELECT nodes.public_key, IF(nodes.alias = '', SUBSTRING(nodes.public_key, 1, 20), alias) as alias, node_stats.capacity, node_stats.channels
         FROM node_stats
         JOIN nodes ON nodes.public_key = node_stats.public_key
         WHERE added = FROM_UNIXTIME(${latestDate})
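Note: both hunks above apply the same fix to two nearly identical queries in NodesApi. Nodes that never announced an alias are stored with an empty string, so the query now falls back to the first 20 characters of the public key. The equivalent logic in TypeScript, for illustration only (the function name is hypothetical, not from this commit):

    // Mirrors the SQL IF(nodes.alias = '', SUBSTRING(nodes.public_key, 1, 20), alias)
    function displayAlias(publicKey: string, alias: string): string {
      return alias === '' ? publicKey.substring(0, 20) : alias;
    }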
@@ -1,5 +1,6 @@
 import { ILightningApi } from '../lightning-api.interface';
 import FundingTxFetcher from '../../../tasks/lightning/sync-tasks/funding-tx-fetcher';
+import logger from '../../../logger';

 /**
  * Convert a clightning "listnode" entry to a lnd node entry
@@ -23,12 +24,17 @@ export function convertNode(clNode: any): ILightningApi.Node {
 /**
  * Convert clightning "listchannels" response to lnd "describegraph.edges" format
  */
 export async function convertAndmergeBidirectionalChannels(clChannels: any[]): Promise<ILightningApi.Channel[]> {
+  logger.info('Converting clightning nodes and channels to lnd graph format');
+
+  let loggerTimer = new Date().getTime() / 1000;
+  let channelProcessed = 0;
+
   const consolidatedChannelList: ILightningApi.Channel[] = [];
   const clChannelsDict = {};
   const clChannelsDictCount = {};

   for (const clChannel of clChannels) {
     if (!clChannelsDict[clChannel.short_channel_id]) {
       clChannelsDict[clChannel.short_channel_id] = clChannel;
       clChannelsDictCount[clChannel.short_channel_id] = 1;
@@ -39,9 +45,26 @@ export function convertNode(clNode: any): ILightningApi.Node {
       delete clChannelsDict[clChannel.short_channel_id];
       clChannelsDictCount[clChannel.short_channel_id]++;
     }
+
+    const elapsedSeconds = Math.round((new Date().getTime() / 1000) - loggerTimer);
+    if (elapsedSeconds > 10) {
+      logger.info(`Building complete channels from clightning output. Channels processed: ${channelProcessed + 1} of ${clChannels.length}`);
+      loggerTimer = new Date().getTime() / 1000;
+    }
+
+    ++channelProcessed;
   }
-  for (const short_channel_id of Object.keys(clChannelsDict)) {
+
+  channelProcessed = 0;
+  const keys = Object.keys(clChannelsDict);
+  for (const short_channel_id of keys) {
     consolidatedChannelList.push(await buildIncompleteChannel(clChannelsDict[short_channel_id]));
+
+    const elapsedSeconds = Math.round((new Date().getTime() / 1000) - loggerTimer);
+    if (elapsedSeconds > 10) {
+      logger.info(`Building partial channels from clightning output. Channels processed: ${channelProcessed + 1} of ${keys.length}`);
+      loggerTimer = new Date().getTime() / 1000;
+    }
   }

   return consolidatedChannelList;
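Note: clightning's "listchannels" reports each channel once per direction, so the dictionary above collects entries by short_channel_id: an id seen twice has both policies and becomes a full channel, while ids still left in clChannelsDict after the loop were only seen once and go through buildIncompleteChannel. A minimal sketch of that grouping idea (illustrative, not the committed code):

    // Group per-direction entries, then branch on how many directions were seen.
    function groupByScid(clChannels: { short_channel_id: string }[]): Map<string, object[]> {
      const byId = new Map<string, object[]>();
      for (const ch of clChannels) {
        byId.set(ch.short_channel_id, [...(byId.get(ch.short_channel_id) ?? []), ch]);
      }
      return byId; // length 2 -> buildFullChannel(a, b); length 1 -> buildIncompleteChannel(a)
    }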
@@ -79,7 +102,7 @@ async function buildFullChannel(clChannelA: any, clChannelB: any): Promise<ILigh
  * Convert one clightning "getchannels" entry into a full a lnd "describegraph.edges" format
  * In this case, clightning knows the channel policy of only one node
  */
 async function buildIncompleteChannel(clChannel: any): Promise<ILightningApi.Channel> {
   const tx = await FundingTxFetcher.$fetchChannelOpenTx(clChannel.short_channel_id);
   const parts = clChannel.short_channel_id.split('x');
   const outputIdx = parts[2];
@@ -99,7 +122,7 @@ async function buildFullChannel(clChannelA: any, clChannelB: any): Promise<ILigh
 /**
  * Convert a clightning "listnode" response to a lnd channel policy format
  */
 function convertPolicy(clChannel: any): ILightningApi.RoutingPolicy {
   return {
     time_lock_delta: 0, // TODO
     min_htlc: clChannel.htlc_minimum_msat.slice(0, -4),
@@ -32,6 +32,7 @@ interface IConfig {
     ENABLED: boolean;
     BACKEND: 'lnd' | 'cln' | 'ldk';
     TOPOLOGY_FOLDER: string;
+    NODE_STATS_REFRESH_INTERVAL: number;
   };
   LND: {
     TLS_CERT_PATH: string;
@@ -183,6 +184,7 @@ const defaults: IConfig = {
     'ENABLED': false,
     'BACKEND': 'lnd',
     'TOPOLOGY_FOLDER': '',
+    'NODE_STATS_REFRESH_INTERVAL': 600,
   },
   'LND': {
     'TLS_CERT_PATH': '',
@@ -137,9 +137,7 @@ class Server {
     }

     if (config.LIGHTNING.ENABLED) {
-      fundingTxFetcher.$init()
-        .then(() => networkSyncService.$startService())
-        .then(() => lightningStatsUpdater.$startService());
+      this.$runLightningBackend();
     }

     this.server.listen(config.MEMPOOL.HTTP_PORT, () => {
@@ -185,6 +183,18 @@ class Server {
     }
   }

+  async $runLightningBackend() {
+    try {
+      await fundingTxFetcher.$init();
+      await networkSyncService.$startService();
+      await lightningStatsUpdater.$startService();
+    } catch(e) {
+      logger.err(`Lightning backend crashed. Restarting in 1 minute. Reason: ${(e instanceof Error ? e.message : e)}`);
+      await Common.sleep$(1000 * 60);
+      this.$runLightningBackend();
+    };
+  }
+
   setUpWebsocketHandling() {
     if (this.wss) {
       websocketHandler.setWebsocketServer(this.wss);
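Note: the new $runLightningBackend() replaces the unguarded promise chain so a crash in any lightning service is caught, logged, and retried after a minute instead of going unhandled. This relies on Common.sleep$ being a promise-based timer; a sketch of the assumed shape (not part of this diff):

    class Common {
      // Promise wrapper around setTimeout, awaited between restart attempts.
      static sleep$(ms: number): Promise<void> {
        return new Promise(resolve => setTimeout(resolve, ms));
      }
    }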
@@ -1,25 +1,15 @@
+import DB from '../../database';
 import logger from '../../logger';
 import lightningApi from '../../api/lightning/lightning-api-factory';
 import LightningStatsImporter from './sync-tasks/stats-importer';
+import config from '../../config';

 class LightningStatsUpdater {
-  hardCodedStartTime = '2018-01-12';
-
   public async $startService(): Promise<void> {
     logger.info('Starting Lightning Stats service');

-    LightningStatsImporter.$run();
-    setTimeout(() => {
-      this.$runTasks();
-    }, this.timeUntilMidnight());
-  }
-
-  private timeUntilMidnight(): number {
-    const date = new Date();
-    this.setDateMidnight(date);
-    date.setUTCHours(24);
-    return date.getTime() - new Date().getTime();
+    // LightningStatsImporter.$run();
+    this.$runTasks();
   }

   private setDateMidnight(date: Date): void {
@@ -34,17 +24,20 @@ class LightningStatsUpdater {

     setTimeout(() => {
       this.$runTasks();
-    }, this.timeUntilMidnight());
+    }, 1000 * config.LIGHTNING.NODE_STATS_REFRESH_INTERVAL);
   }

+  /**
+   * Update the latest entry for each node every config.LIGHTNING.NODE_STATS_REFRESH_INTERVAL seconds
+   */
   private async $logStatsDaily(): Promise<void> {
     const date = new Date();
     this.setDateMidnight(date);
     date.setUTCHours(24);

-    logger.info(`Running lightning daily stats log...`);
+    logger.info(`Updating latest node stats`);
     const networkGraph = await lightningApi.$getNetworkGraph();
-    LightningStatsImporter.computeNetworkStats(date.getTime(), networkGraph);
+    LightningStatsImporter.computeNetworkStats(date.getTime() / 1000, networkGraph);
   }
 }

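Note: the updater no longer waits until midnight UTC; it refreshes the latest per-node entry every config.LIGHTNING.NODE_STATS_REFRESH_INTERVAL seconds (default 600, per the config hunk above). The timestamp passed to computeNetworkStats is also now epoch seconds rather than milliseconds, since the SQL side wraps it in FROM_UNIXTIME. The self-rescheduling pattern in isolation (names illustrative):

    // Minimal self-rescheduling runner, the same shape as $runTasks() above.
    const NODE_STATS_REFRESH_INTERVAL = 600; // seconds, matching the new default

    function runTasks(): void {
      // ...do the periodic work, then schedule the next run
      setTimeout(runTasks, 1000 * NODE_STATS_REFRESH_INTERVAL);
    }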
@@ -45,7 +45,7 @@ class FundingTxFetcher {
       let elapsedSeconds = Math.round((new Date().getTime() / 1000) - loggerTimer);
       if (elapsedSeconds > 10) {
         elapsedSeconds = Math.round((new Date().getTime() / 1000) - globalTimer);
-        logger.debug(`Indexing channels funding tx ${channelProcessed + 1} of ${channelIds.length} ` +
+        logger.info(`Indexing channels funding tx ${channelProcessed + 1} of ${channelIds.length} ` +
           `(${Math.floor(channelProcessed / channelIds.length * 10000) / 100}%) | ` +
           `elapsed: ${elapsedSeconds} seconds`
         );
@@ -13,19 +13,19 @@ interface Node {
   features: string;
   rgb_color: string;
   alias: string;
-  addresses: string;
+  addresses: unknown[];
   out_degree: number;
   in_degree: number;
 }

 interface Channel {
-  scid: string;
-  source: string;
-  destination: string;
+  channel_id: string;
+  node1_pub: string;
+  node2_pub: string;
   timestamp: number;
   features: string;
   fee_base_msat: number;
-  fee_proportional_millionths: number;
+  fee_rate_milli_msat: number;
   htlc_minimim_msat: number;
   cltv_expiry_delta: number;
   htlc_maximum_msat: number;
@@ -41,7 +41,7 @@ class LightningStatsImporter {
     const [channels]: any[] = await DB.query('SELECT short_id from channels;');
     logger.info('Caching funding txs for currently existing channels');
     await fundingTxFetcher.$fetchChannelsFundingTxs(channels.map(channel => channel.short_id));

     await this.$importHistoricalLightningStats();
   }

@@ -60,10 +60,9 @@ class LightningStatsImporter {
       let hasClearnet = false;
       let isUnnanounced = true;

-      const sockets = node.addresses.split(',');
-      for (const socket of sockets) {
-        hasOnion = hasOnion || (socket.indexOf('torv3://') !== -1);
-        hasClearnet = hasClearnet || (socket.indexOf('ipv4://') !== -1 || socket.indexOf('ipv6://') !== -1);
+      for (const socket of (node.addresses ?? [])) {
+        hasOnion = hasOnion || ['torv2', 'torv3'].includes(socket.network);
+        hasClearnet = hasClearnet || ['ipv4', 'ipv6'].includes(socket.network);
       }
       if (hasOnion && hasClearnet) {
         clearnetTorNodes++;
@@ -90,8 +89,11 @@ class LightningStatsImporter {
     const baseFees: number[] = [];
     const alreadyCountedChannels = {};

-    for (const channel of networkGraph.channels) {
-      const short_id = channel.scid.slice(0, -2);
+    for (const channel of networkGraph.edges) {
+      let short_id = channel.channel_id;
+      if (short_id.indexOf('/') !== -1) {
+        short_id = short_id.slice(0, -2);
+      }

       const tx = await fundingTxFetcher.$fetchChannelOpenTx(short_id);
       if (!tx) {
@@ -99,65 +101,93 @@ class LightningStatsImporter {
         continue;
       }

-      if (!nodeStats[channel.source]) {
-        nodeStats[channel.source] = {
+      if (!nodeStats[channel.node1_pub]) {
+        nodeStats[channel.node1_pub] = {
           capacity: 0,
           channels: 0,
         };
       }
-      if (!nodeStats[channel.destination]) {
-        nodeStats[channel.destination] = {
+      if (!nodeStats[channel.node2_pub]) {
+        nodeStats[channel.node2_pub] = {
           capacity: 0,
           channels: 0,
         };
       }

-      nodeStats[channel.source].capacity += Math.round(tx.value * 100000000);
-      nodeStats[channel.source].channels++;
-      nodeStats[channel.destination].capacity += Math.round(tx.value * 100000000);
-      nodeStats[channel.destination].channels++;

       if (!alreadyCountedChannels[short_id]) {
         capacity += Math.round(tx.value * 100000000);
         capacities.push(Math.round(tx.value * 100000000));
         alreadyCountedChannels[short_id] = true;
+
+        nodeStats[channel.node1_pub].capacity += Math.round(tx.value * 100000000);
+        nodeStats[channel.node1_pub].channels++;
+        nodeStats[channel.node2_pub].capacity += Math.round(tx.value * 100000000);
+        nodeStats[channel.node2_pub].channels++;
       }

-      if (channel.fee_proportional_millionths < 5000) {
-        avgFeeRate += channel.fee_proportional_millionths;
-        feeRates.push(channel.fee_proportional_millionths);
-      }
-      if (channel.fee_base_msat < 5000) {
-        avgBaseFee += channel.fee_base_msat;
-        baseFees.push(channel.fee_base_msat);
+      if (channel.node1_policy !== undefined) { // Coming from the node
+        for (const policy of [channel.node1_policy, channel.node2_policy]) {
+          if (policy && policy.fee_rate_milli_msat < 5000) {
+            avgFeeRate += policy.fee_rate_milli_msat;
+            feeRates.push(policy.fee_rate_milli_msat);
+          }
+          if (policy && policy.fee_base_msat < 5000) {
+            avgBaseFee += policy.fee_base_msat;
+            baseFees.push(policy.fee_base_msat);
+          }
+        }
+      } else { // Coming from the historical import
+        if (channel.fee_rate_milli_msat < 5000) {
+          avgFeeRate += channel.fee_rate_milli_msat;
+          feeRates.push(channel.fee_rate_milli_msat);
+        }
+        if (channel.fee_base_msat < 5000) {
+          avgBaseFee += channel.fee_base_msat;
+          baseFees.push(channel.fee_base_msat);
+        }
       }
     }

-    avgFeeRate /= networkGraph.channels.length;
-    avgBaseFee /= networkGraph.channels.length;
+    avgFeeRate /= networkGraph.edges.length;
+    avgBaseFee /= networkGraph.edges.length;
     const medCapacity = capacities.sort((a, b) => b - a)[Math.round(capacities.length / 2 - 1)];
     const medFeeRate = feeRates.sort((a, b) => b - a)[Math.round(feeRates.length / 2 - 1)];
     const medBaseFee = baseFees.sort((a, b) => b - a)[Math.round(baseFees.length / 2 - 1)];
     const avgCapacity = Math.round(capacity / capacities.length);

     let query = `INSERT INTO lightning_stats(
       added,
       channel_count,
       node_count,
       total_capacity,
       tor_nodes,
       clearnet_nodes,
       unannounced_nodes,
       clearnet_tor_nodes,
       avg_capacity,
       avg_fee_rate,
       avg_base_fee_mtokens,
       med_capacity,
       med_fee_rate,
       med_base_fee_mtokens
     )
-    VALUES (FROM_UNIXTIME(?), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`;
+    VALUES (FROM_UNIXTIME(?), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+    ON DUPLICATE KEY UPDATE
+      added = FROM_UNIXTIME(?),
+      channel_count = ?,
+      node_count = ?,
+      total_capacity = ?,
+      tor_nodes = ?,
+      clearnet_nodes = ?,
+      unannounced_nodes = ?,
+      clearnet_tor_nodes = ?,
+      avg_capacity = ?,
+      avg_fee_rate = ?,
+      avg_base_fee_mtokens = ?,
+      med_capacity = ?,
+      med_fee_rate = ?,
+      med_base_fee_mtokens = ?
+    `;

     await DB.query(query, [
       timestamp,
@@ -174,22 +204,44 @@ class LightningStatsImporter {
       medCapacity,
       medFeeRate,
       medBaseFee,
+      timestamp,
+      capacities.length,
+      networkGraph.nodes.length,
+      capacity,
+      torNodes,
+      clearnetNodes,
+      unannouncedNodes,
+      clearnetTorNodes,
+      avgCapacity,
+      avgFeeRate,
+      avgBaseFee,
+      medCapacity,
+      medFeeRate,
+      medBaseFee,
     ]);

     for (const public_key of Object.keys(nodeStats)) {
       query = `INSERT INTO node_stats(
         public_key,
         added,
         capacity,
         channels
       )
-      VALUES (?, FROM_UNIXTIME(?), ?, ?)`;
+      VALUES (?, FROM_UNIXTIME(?), ?, ?)
+      ON DUPLICATE KEY UPDATE
+        added = FROM_UNIXTIME(?),
+        capacity = ?,
+        channels = ?
+      `;

       await DB.query(query, [
         public_key,
         timestamp,
         nodeStats[public_key].capacity,
         nodeStats[public_key].channels,
+        timestamp,
+        nodeStats[public_key].capacity,
+        nodeStats[public_key].channels,
       ]);
     }

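Note: with ON DUPLICATE KEY UPDATE, every column appears twice in the statement (once in VALUES, once in the UPDATE list), which is why the parameter arrays above bind each value twice in the same order. The pattern in isolation, with a hypothetical table:

    // Hypothetical upsert showing the doubled bindings; 'example' is not a real table here.
    async function upsertExample(
      db: { query(sql: string, params: unknown[]): Promise<unknown> },
      id: number, val: number
    ): Promise<void> {
      const sql = `INSERT INTO example(id, val) VALUES (?, ?)
        ON DUPLICATE KEY UPDATE val = ?`;
      await db.query(sql, [id, val, val]); // val bound for the INSERT, then again for the UPDATE
    }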
@@ -203,15 +255,28 @@ class LightningStatsImporter {
     let latestNodeCount = 1;

     const fileList = await fsPromises.readdir(this.topologiesFolder);
+    // Insert history from the most recent to the oldest
+    // This also put the .json cached files first
     fileList.sort().reverse();

-    const [rows]: any[] = await DB.query('SELECT UNIX_TIMESTAMP(added) as added, node_count FROM lightning_stats');
+    const [rows]: any[] = await DB.query(`
+      SELECT UNIX_TIMESTAMP(added) AS added, node_count
+      FROM lightning_stats
+      ORDER BY added DESC
+    `);
     const existingStatsTimestamps = {};
     for (const row of rows) {
-      existingStatsTimestamps[row.added] = rows[0];
+      existingStatsTimestamps[row.added] = row;
     }

+    // For logging purpose
+    let processed = 10;
+    let totalProcessed = -1;
+
     for (const filename of fileList) {
+      processed++;
+      totalProcessed++;
+
       const timestamp = parseInt(filename.split('_')[1], 10);

       // Stats exist already, don't calculate/insert them
@@ -220,7 +285,7 @@ class LightningStatsImporter {
         continue;
       }

-      logger.debug(`Processing ${this.topologiesFolder}/${filename}`);
+      logger.debug(`Reading ${this.topologiesFolder}/${filename}`);
       const fileContent = await fsPromises.readFile(`${this.topologiesFolder}/${filename}`, 'utf8');

       let graph;
@@ -228,12 +293,13 @@ class LightningStatsImporter {
         try {
           graph = JSON.parse(fileContent);
         } catch (e) {
-          logger.debug(`Invalid topology file, cannot parse the content`);
+          logger.debug(`Invalid topology file ${this.topologiesFolder}/${filename}, cannot parse the content`);
+          continue;
         }
       } else {
         graph = this.parseFile(fileContent);
         if (!graph) {
-          logger.debug(`Invalid topology file, cannot parse the content`);
+          logger.debug(`Invalid topology file ${this.topologiesFolder}/${filename}, cannot parse the content`);
           continue;
         }
         await fsPromises.writeFile(`${this.topologiesFolder}/${filename}.json`, JSON.stringify(graph));
@@ -245,19 +311,22 @@ class LightningStatsImporter {
         const diffRatio = graph.nodes.length / latestNodeCount;
         if (diffRatio < 0.9) {
           // Ignore drop of more than 90% of the node count as it's probably a missing data point
+          logger.debug(`Nodes count diff ratio threshold reached, ignore the data for this day ${graph.nodes.length} nodes vs ${latestNodeCount}`);
           continue;
         }
       }
       latestNodeCount = graph.nodes.length;

       const datestr = `${new Date(timestamp * 1000).toUTCString()} (${timestamp})`;
-      logger.debug(`${datestr}: Found ${graph.nodes.length} nodes and ${graph.channels.length} channels`);
+      logger.debug(`${datestr}: Found ${graph.nodes.length} nodes and ${graph.edges.length} channels`);

-      // Cache funding txs
-      logger.debug(`Caching funding txs for ${datestr}`);
-      await fundingTxFetcher.$fetchChannelsFundingTxs(graph.channels.map(channel => channel.scid.slice(0, -2)));
-
-      logger.debug(`Generating LN network stats for ${datestr}`);
+      if (processed > 10) {
+        logger.info(`Generating LN network stats for ${datestr}. Processed ${totalProcessed}/${fileList.length} files`);
+        processed = 0;
+      } else {
+        logger.debug(`Generating LN network stats for ${datestr}. Processed ${totalProcessed}/${fileList.length} files`);
+      }
+
+      await fundingTxFetcher.$fetchChannelsFundingTxs(graph.edges.map(channel => channel.channel_id.slice(0, -2)));
       const stat = await this.computeNetworkStats(timestamp, graph);

       existingStatsTimestamps[timestamp] = stat;
@@ -290,13 +359,22 @@ class LightningStatsImporter {
       if (!node.data) {
         continue;
       }
+      const addresses: unknown[] = [];
+      const sockets = node.data[5].split(',');
+      for (const socket of sockets) {
+        const parts = socket.split('://');
+        addresses.push({
+          network: parts[0],
+          addr: parts[1],
+        });
+      }
       nodes.push({
         id: node.data[0],
         timestamp: node.data[1],
         features: node.data[2],
         rgb_color: node.data[3],
         alias: node.data[4],
-        addresses: node.data[5],
+        addresses: addresses,
         out_degree: node.data[6],
         in_degree: node.data[7],
       });
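Note: the historical topology files store a node's sockets as one comma-separated string in node.data[5]; the loop above splits each network://addr entry into an object so computeNetworkStats can check socket.network the same way for both data sources. For illustration (values hypothetical):

    // Hypothetical input and the structure the parsing loop produces.
    const raw = 'ipv4://203.0.113.5:9735,torv3://example1234.onion:9735';
    const parsed = raw.split(',').map(socket => {
      const [network, addr] = socket.split('://');
      return { network, addr };
    });
    // parsed[0] -> { network: 'ipv4', addr: '203.0.113.5:9735' }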
@@ -307,13 +385,13 @@ class LightningStatsImporter {
         continue;
       }
       channels.push({
-        scid: channel.data[0],
-        source: channel.data[1],
-        destination: channel.data[2],
+        channel_id: channel.data[0],
+        node1_pub: channel.data[1],
+        node2_pub: channel.data[2],
         timestamp: channel.data[3],
         features: channel.data[4],
         fee_base_msat: channel.data[5],
-        fee_proportional_millionths: channel.data[6],
+        fee_rate_milli_msat: channel.data[6],
         htlc_minimim_msat: channel.data[7],
         cltv_expiry_delta: channel.data[8],
         htlc_maximum_msat: channel.data[9],
@@ -322,9 +400,9 @@ class LightningStatsImporter {

     return {
       nodes: nodes,
-      channels: channels,
+      edges: channels,
     };
   }
 }

 export default new LightningStatsImporter;