diff --git a/backend/mempool-config.sample.json b/backend/mempool-config.sample.json index e636c9e2d..86d226154 100644 --- a/backend/mempool-config.sample.json +++ b/backend/mempool-config.sample.json @@ -77,13 +77,19 @@ }, "LIGHTNING": { "ENABLED": false, - "BACKEND": "lnd" + "BACKEND": "lnd", + "STATS_REFRESH_INTERVAL": 600, + "GRAPH_REFRESH_INTERVAL": 600, + "LOGGER_UPDATE_INTERVAL": 30 }, "LND": { "TLS_CERT_PATH": "tls.cert", "MACAROON_PATH": "readonly.macaroon", "REST_API_URL": "https://localhost:8080" }, + "CLIGHTNING": { + "SOCKET": "lightning-rpc" + }, "SOCKS5PROXY": { "ENABLED": false, "USE_ONION": true, diff --git a/backend/src/__fixtures__/mempool-config.template.json b/backend/src/__fixtures__/mempool-config.template.json index e9c5c3576..a42426249 100644 --- a/backend/src/__fixtures__/mempool-config.template.json +++ b/backend/src/__fixtures__/mempool-config.template.json @@ -88,5 +88,21 @@ "LIQUID_ONION": "__EXTERNAL_DATA_SERVER_LIQUID_ONION__", "BISQ_URL": "__EXTERNAL_DATA_SERVER_BISQ_URL__", "BISQ_ONION": "__EXTERNAL_DATA_SERVER_BISQ_ONION__" + }, + "LIGHTNING": { + "ENABLED": "__LIGHTNING_ENABLED__", + "BACKEND": "__LIGHTNING_BACKEND__", + "TOPOLOGY_FOLDER": "__LIGHTNING_TOPOLOGY_FOLDER__", + "STATS_REFRESH_INTERVAL": 600, + "GRAPH_REFRESH_INTERVAL": 600, + "LOGGER_UPDATE_INTERVAL": 30 + }, + "LND": { + "TLS_CERT_PATH": "", + "MACAROON_PATH": "", + "REST_API_URL": "https://localhost:8080" + }, + "CLIGHTNING": { + "SOCKET": "__CLIGHTNING_SOCKET__" } } diff --git a/backend/src/api/database-migration.ts b/backend/src/api/database-migration.ts index 46b7ded6e..1d6f10a2c 100644 --- a/backend/src/api/database-migration.ts +++ b/backend/src/api/database-migration.ts @@ -4,7 +4,7 @@ import logger from '../logger'; import { Common } from './common'; class DatabaseMigration { - private static currentVersion = 38; + private static currentVersion = 39; private queryTimeout = 120000; private statisticsAddedIndexed = false; private uniqueLogs: string[] = []; @@ -248,7 +248,6 @@ class DatabaseMigration { } if (databaseSchemaVersion < 25 && isBitcoin === true) { - await this.$executeQuery(`INSERT INTO state VALUES('last_node_stats', 0, '1970-01-01');`); await this.$executeQuery(this.getCreateLightningStatisticsQuery(), await this.$checkIfTableExists('lightning_stats')); await this.$executeQuery(this.getCreateNodesQuery(), await this.$checkIfTableExists('nodes')); await this.$executeQuery(this.getCreateChannelsQuery(), await this.$checkIfTableExists('channels')); @@ -338,6 +337,11 @@ class DatabaseMigration { await this.$executeQuery('ALTER TABLE `lightning_stats` CHANGE `added` `added` timestamp NULL'); await this.$executeQuery('ALTER TABLE `node_stats` CHANGE `added` `added` timestamp NULL'); } + + if (databaseSchemaVersion < 39 && isBitcoin === true) { + await this.$executeQuery('ALTER TABLE `nodes` ADD alias_search TEXT NULL DEFAULT NULL AFTER `alias`'); + await this.$executeQuery('ALTER TABLE nodes ADD FULLTEXT(alias_search)'); + } } /** diff --git a/backend/src/api/explorer/channels.api.ts b/backend/src/api/explorer/channels.api.ts index 67003be57..a0a617e43 100644 --- a/backend/src/api/explorer/channels.api.ts +++ b/backend/src/api/explorer/channels.api.ts @@ -17,32 +17,60 @@ class ChannelsApi { } } - public async $getAllChannelsGeo(publicKey?: string): Promise { + public async $getAllChannelsGeo(publicKey?: string, style?: string): Promise { try { + let select: string; + if (style === 'widget') { + select = ` + nodes_1.latitude AS node1_latitude, nodes_1.longitude AS 
node1_longitude, + nodes_2.latitude AS node2_latitude, nodes_2.longitude AS node2_longitude + `; + } else { + select = ` + nodes_1.public_key as node1_public_key, nodes_1.alias AS node1_alias, + nodes_1.latitude AS node1_latitude, nodes_1.longitude AS node1_longitude, + nodes_2.public_key as node2_public_key, nodes_2.alias AS node2_alias, + nodes_2.latitude AS node2_latitude, nodes_2.longitude AS node2_longitude + `; + } + const params: string[] = []; - let query = `SELECT nodes_1.public_key as node1_public_key, nodes_1.alias AS node1_alias, - nodes_1.latitude AS node1_latitude, nodes_1.longitude AS node1_longitude, - nodes_2.public_key as node2_public_key, nodes_2.alias AS node2_alias, - nodes_2.latitude AS node2_latitude, nodes_2.longitude AS node2_longitude, - channels.capacity - FROM channels - JOIN nodes AS nodes_1 on nodes_1.public_key = channels.node1_public_key - JOIN nodes AS nodes_2 on nodes_2.public_key = channels.node2_public_key - WHERE nodes_1.latitude IS NOT NULL AND nodes_1.longitude IS NOT NULL - AND nodes_2.latitude IS NOT NULL AND nodes_2.longitude IS NOT NULL + let query = `SELECT ${select} + FROM channels + JOIN nodes AS nodes_1 on nodes_1.public_key = channels.node1_public_key + JOIN nodes AS nodes_2 on nodes_2.public_key = channels.node2_public_key + WHERE nodes_1.latitude IS NOT NULL AND nodes_1.longitude IS NOT NULL + AND nodes_2.latitude IS NOT NULL AND nodes_2.longitude IS NOT NULL `; if (publicKey !== undefined) { query += ' AND (nodes_1.public_key = ? OR nodes_2.public_key = ?)'; params.push(publicKey); params.push(publicKey); + } else { + query += ` AND channels.capacity > 1000000 + GROUP BY nodes_1.public_key, nodes_2.public_key + ORDER BY channels.capacity DESC + LIMIT 10000 + `; } const [rows]: any = await DB.query(query, params); - return rows.map((row) => [ - row.node1_public_key, row.node1_alias, row.node1_longitude, row.node1_latitude, - row.node2_public_key, row.node2_alias, row.node2_longitude, row.node2_latitude, - row.capacity]); + return rows.map((row) => { + if (style === 'widget') { + return [ + row.node1_longitude, row.node1_latitude, + row.node2_longitude, row.node2_latitude, + ]; + } else { + return [ + row.node1_public_key, row.node1_alias, + row.node1_longitude, row.node1_latitude, + row.node2_public_key, row.node2_alias, + row.node2_longitude, row.node2_latitude, + ]; + } + }); } catch (e) { logger.err('$getAllChannelsGeo error: ' + (e instanceof Error ? 
e.message : e)); throw e; @@ -61,9 +89,14 @@ class ChannelsApi { } } - public async $getChannelsByStatus(status: number): Promise { + public async $getChannelsByStatus(status: number | number[]): Promise { try { - const query = `SELECT * FROM channels WHERE status = ?`; + let query: string; + if (Array.isArray(status)) { + query = `SELECT * FROM channels WHERE status IN (${status.join(',')})`; + } else { + query = `SELECT * FROM channels WHERE status = ?`; + } const [rows]: any = await DB.query(query, [status]); return rows; } catch (e) { @@ -212,41 +245,53 @@ class ChannelsApi { let channelStatusFilter; if (status === 'open') { channelStatusFilter = '< 2'; + } else if (status === 'active') { + channelStatusFilter = '= 1'; } else if (status === 'closed') { channelStatusFilter = '= 2'; + } else { + throw new Error('getChannelsForNode: Invalid status requested'); } // Channels originating from node let query = ` - SELECT node2.alias, node2.public_key, channels.status, channels.node1_fee_rate, - channels.capacity, channels.short_id, channels.id + SELECT COALESCE(node2.alias, SUBSTRING(node2_public_key, 0, 20)) AS alias, COALESCE(node2.public_key, node2_public_key) AS public_key, + channels.status, channels.node1_fee_rate, + channels.capacity, channels.short_id, channels.id, channels.closing_reason FROM channels - JOIN nodes AS node2 ON node2.public_key = channels.node2_public_key + LEFT JOIN nodes AS node2 ON node2.public_key = channels.node2_public_key WHERE node1_public_key = ? AND channels.status ${channelStatusFilter} `; - const [channelsFromNode]: any = await DB.query(query, [public_key, index, length]); + const [channelsFromNode]: any = await DB.query(query, [public_key]); // Channels incoming to node query = ` - SELECT node1.alias, node1.public_key, channels.status, channels.node2_fee_rate, - channels.capacity, channels.short_id, channels.id + SELECT COALESCE(node1.alias, SUBSTRING(node1_public_key, 0, 20)) AS alias, COALESCE(node1.public_key, node1_public_key) AS public_key, + channels.status, channels.node2_fee_rate, + channels.capacity, channels.short_id, channels.id, channels.closing_reason FROM channels - JOIN nodes AS node1 ON node1.public_key = channels.node1_public_key + LEFT JOIN nodes AS node1 ON node1.public_key = channels.node1_public_key WHERE node2_public_key = ? AND channels.status ${channelStatusFilter} `; - const [channelsToNode]: any = await DB.query(query, [public_key, index, length]); + const [channelsToNode]: any = await DB.query(query, [public_key]); let allChannels = channelsFromNode.concat(channelsToNode); allChannels.sort((a, b) => { return b.capacity - a.capacity; }); - allChannels = allChannels.slice(index, index + length); + + if (index >= 0) { + allChannels = allChannels.slice(index, index + length); + } else if (index === -1) { // Node channels tree chart + allChannels = allChannels.slice(0, 1000); + } const channels: any[] = [] for (const row of allChannels) { const activeChannelsStats: any = await nodesApi.$getActiveChannelsStats(row.public_key); channels.push({ status: row.status, + closing_reason: row.closing_reason, capacity: row.capacity ?? 
0, short_id: row.short_id, id: row.id, @@ -337,7 +382,7 @@ class ChannelsApi { /** * Save or update a channel present in the graph */ - public async $saveChannel(channel: ILightningApi.Channel): Promise { + public async $saveChannel(channel: ILightningApi.Channel, status = 1): Promise { const [ txid, vout ] = channel.chan_point.split(':'); const policy1: Partial = channel.node1_policy || {}; @@ -369,11 +414,11 @@ class ChannelsApi { node2_min_htlc_mtokens, node2_updated_at ) - VALUES (?, ?, ?, ?, ?, ?, 1, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) + VALUES (?, ?, ?, ?, ?, ?, ${status}, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) ON DUPLICATE KEY UPDATE capacity = ?, updated_at = ?, - status = 1, + status = ${status}, node1_public_key = ?, node1_base_fee_mtokens = ?, node1_cltv_delta = ?, diff --git a/backend/src/api/explorer/channels.routes.ts b/backend/src/api/explorer/channels.routes.ts index 09c3da668..0fa91db92 100644 --- a/backend/src/api/explorer/channels.routes.ts +++ b/backend/src/api/explorer/channels.routes.ts @@ -102,7 +102,11 @@ class ChannelsRoutes { private async $getAllChannelsGeo(req: Request, res: Response) { try { - const channels = await channelsApi.$getAllChannelsGeo(req.params?.publicKey); + const style: string = typeof req.query.style === 'string' ? req.query.style : ''; + const channels = await channelsApi.$getAllChannelsGeo(req.params?.publicKey, style); + res.header('Pragma', 'public'); + res.header('Cache-control', 'public'); + res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString()); res.json(channels); } catch (e) { res.status(500).send(e instanceof Error ? e.message : e); diff --git a/backend/src/api/explorer/nodes.api.ts b/backend/src/api/explorer/nodes.api.ts index 88b434711..379df4213 100644 --- a/backend/src/api/explorer/nodes.api.ts +++ b/backend/src/api/explorer/nodes.api.ts @@ -169,7 +169,7 @@ class NodesApi { let query: string; if (full === false) { query = ` - SELECT nodes.public_key, IF(nodes.alias = '', SUBSTRING(nodes.public_key, 1, 20), alias) as alias, + SELECT nodes.public_key as publicKey, IF(nodes.alias = '', SUBSTRING(nodes.public_key, 1, 20), alias) as alias, node_stats.channels FROM node_stats JOIN nodes ON nodes.public_key = node_stats.public_key @@ -259,9 +259,10 @@ class NodesApi { public async $searchNodeByPublicKeyOrAlias(search: string) { try { - const searchStripped = search.replace('%', '') + '%'; - const query = `SELECT nodes.public_key, nodes.alias, node_stats.capacity FROM nodes LEFT JOIN node_stats ON node_stats.public_key = nodes.public_key WHERE nodes.public_key LIKE ? OR nodes.alias LIKE ? GROUP BY nodes.public_key ORDER BY node_stats.capacity DESC LIMIT 10`; - const [rows]: any = await DB.query(query, [searchStripped, searchStripped]); + const publicKeySearch = search.replace('%', '') + '%'; + const aliasSearch = search.replace(/[-_.]/g, ' ').replace(/[^a-zA-Z ]/g, '').split(' ').map((search) => '+' + search + '*').join(' '); + const query = `SELECT nodes.public_key, nodes.alias, node_stats.capacity FROM nodes LEFT JOIN node_stats ON node_stats.public_key = nodes.public_key WHERE nodes.public_key LIKE ? OR MATCH nodes.alias_search AGAINST (? IN BOOLEAN MODE) GROUP BY nodes.public_key ORDER BY node_stats.capacity DESC LIMIT 10`; + const [rows]: any = await DB.query(query, [publicKeySearch, aliasSearch]); return rows; } catch (e) { logger.err('$searchNodeByPublicKeyOrAlias error: ' + (e instanceof Error ? 
e.message : e)); @@ -296,19 +297,24 @@ class NodesApi { if (!ispList[isp1]) { ispList[isp1] = { - id: channel.isp1ID, + id: channel.isp1ID.toString(), capacity: 0, channels: 0, nodes: {}, }; + } else if (ispList[isp1].id.indexOf(channel.isp1ID) === -1) { + ispList[isp1].id += ',' + channel.isp1ID.toString(); } + if (!ispList[isp2]) { ispList[isp2] = { - id: channel.isp2ID, + id: channel.isp2ID.toString(), capacity: 0, channels: 0, nodes: {}, }; + } else if (ispList[isp2].id.indexOf(channel.isp2ID) === -1) { + ispList[isp2].id += ',' + channel.isp2ID.toString(); } ispList[isp1].capacity += channel.capacity; @@ -385,9 +391,10 @@ class NodesApi { public async $getNodesPerCountry(countryId: string) { try { const query = ` - SELECT nodes.public_key, CAST(COALESCE(node_stats.capacity, 0) as INT) as capacity, CAST(COALESCE(node_stats.channels, 0) as INT) as channels, - nodes.alias, UNIX_TIMESTAMP(nodes.first_seen) as first_seen, UNIX_TIMESTAMP(nodes.updated_at) as updated_at, - geo_names_city.names as city + SELECT nodes.public_key, CAST(COALESCE(node_stats.capacity, 0) as INT) as capacity, CAST(COALESCE(node_stats.channels, 0) as INT) as channels, + nodes.alias, UNIX_TIMESTAMP(nodes.first_seen) as first_seen, UNIX_TIMESTAMP(nodes.updated_at) as updated_at, + geo_names_city.names as city, geo_names_country.names as country, + geo_names_iso.names as iso_code, geo_names_subdivision.names as subdivision FROM node_stats JOIN ( SELECT public_key, MAX(added) as last_added @@ -395,15 +402,19 @@ class NodesApi { GROUP BY public_key ) as b ON b.public_key = node_stats.public_key AND b.last_added = node_stats.added RIGHT JOIN nodes ON nodes.public_key = node_stats.public_key - JOIN geo_names geo_names_country ON geo_names_country.id = nodes.country_id AND geo_names_country.type = 'country' + LEFT JOIN geo_names geo_names_country ON geo_names_country.id = nodes.country_id AND geo_names_country.type = 'country' LEFT JOIN geo_names geo_names_city ON geo_names_city.id = nodes.city_id AND geo_names_city.type = 'city' + LEFT JOIN geo_names geo_names_iso ON geo_names_iso.id = nodes.country_id AND geo_names_iso.type = 'country_iso_code' + LEFT JOIN geo_names geo_names_subdivision on geo_names_subdivision.id = nodes.subdivision_id AND geo_names_subdivision.type = 'division' WHERE geo_names_country.id = ? 
ORDER BY capacity DESC `; const [rows]: any = await DB.query(query, [countryId]); for (let i = 0; i < rows.length; ++i) { + rows[i].country = JSON.parse(rows[i].country); rows[i].city = JSON.parse(rows[i].city); + rows[i].subdivision = JSON.parse(rows[i].subdivision); } return rows; } catch (e) { @@ -417,7 +428,8 @@ class NodesApi { const query = ` SELECT nodes.public_key, CAST(COALESCE(node_stats.capacity, 0) as INT) as capacity, CAST(COALESCE(node_stats.channels, 0) as INT) as channels, nodes.alias, UNIX_TIMESTAMP(nodes.first_seen) as first_seen, UNIX_TIMESTAMP(nodes.updated_at) as updated_at, - geo_names_city.names as city, geo_names_country.names as country + geo_names_city.names as city, geo_names_country.names as country, + geo_names_iso.names as iso_code, geo_names_subdivision.names as subdivision FROM node_stats JOIN ( SELECT public_key, MAX(added) as last_added @@ -425,8 +437,10 @@ class NodesApi { GROUP BY public_key ) as b ON b.public_key = node_stats.public_key AND b.last_added = node_stats.added RIGHT JOIN nodes ON nodes.public_key = node_stats.public_key - JOIN geo_names geo_names_country ON geo_names_country.id = nodes.country_id AND geo_names_country.type = 'country' + LEFT JOIN geo_names geo_names_country ON geo_names_country.id = nodes.country_id AND geo_names_country.type = 'country' LEFT JOIN geo_names geo_names_city ON geo_names_city.id = nodes.city_id AND geo_names_city.type = 'city' + LEFT JOIN geo_names geo_names_iso ON geo_names_iso.id = nodes.country_id AND geo_names_iso.type = 'country_iso_code' + LEFT JOIN geo_names geo_names_subdivision on geo_names_subdivision.id = nodes.subdivision_id AND geo_names_subdivision.type = 'division' WHERE nodes.as_number IN (?) ORDER BY capacity DESC `; @@ -435,6 +449,7 @@ class NodesApi { for (let i = 0; i < rows.length; ++i) { rows[i].country = JSON.parse(rows[i].country); rows[i].city = JSON.parse(rows[i].city); + rows[i].subdivision = JSON.parse(rows[i].subdivision); } return rows; } catch (e) { @@ -487,21 +502,24 @@ class NodesApi { first_seen, updated_at, alias, + alias_search, color, sockets, status ) - VALUES (?, NOW(), FROM_UNIXTIME(?), ?, ?, ?, 1) - ON DUPLICATE KEY UPDATE updated_at = FROM_UNIXTIME(?), alias = ?, color = ?, sockets = ?, status = 1`; + VALUES (?, NOW(), FROM_UNIXTIME(?), ?, ?, ?, ?, 1) + ON DUPLICATE KEY UPDATE updated_at = FROM_UNIXTIME(?), alias = ?, alias_search = ?, color = ?, sockets = ?, status = 1`; await DB.query(query, [ node.pub_key, node.last_update, node.alias, + this.aliasToSearchText(node.alias), node.color, sockets, node.last_update, node.alias, + this.aliasToSearchText(node.alias), node.color, sockets, ]); @@ -535,6 +553,10 @@ class NodesApi { logger.err('$setNodesInactive() error: ' + (e instanceof Error ? 
e.message : e)); } } + + private aliasToSearchText(str: string): string { + return str.replace(/[-_.]/g, ' ').replace(/[^a-zA-Z ]/g, ''); + } } export default new NodesApi(); diff --git a/backend/src/api/explorer/nodes.routes.ts b/backend/src/api/explorer/nodes.routes.ts index c5b442723..7a5ff880a 100644 --- a/backend/src/api/explorer/nodes.routes.ts +++ b/backend/src/api/explorer/nodes.routes.ts @@ -15,8 +15,8 @@ class NodesRoutes { .get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/isp/:isp', this.$getNodesPerISP) .get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/countries', this.$getNodesCountries) .get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/rankings', this.$getNodesRanking) - .get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/rankings/capacity', this.$getTopNodesByCapacity) - .get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/rankings/channels', this.$getTopNodesByChannels) + .get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/rankings/liquidity', this.$getTopNodesByCapacity) + .get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/rankings/connectivity', this.$getTopNodesByChannels) .get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/rankings/age', this.$getOldestNodes) .get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/:public_key/statistics', this.$getHistoricalNodeStats) .get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/:public_key', this.$getNode) diff --git a/backend/src/config.ts b/backend/src/config.ts index 6f6301a65..8c91e104b 100644 --- a/backend/src/config.ts +++ b/backend/src/config.ts @@ -36,6 +36,7 @@ interface IConfig { TOPOLOGY_FOLDER: string; STATS_REFRESH_INTERVAL: number; GRAPH_REFRESH_INTERVAL: number; + LOGGER_UPDATE_INTERVAL: number; }; LND: { TLS_CERT_PATH: string; @@ -191,6 +192,7 @@ const defaults: IConfig = { 'TOPOLOGY_FOLDER': '', 'STATS_REFRESH_INTERVAL': 600, 'GRAPH_REFRESH_INTERVAL': 600, + 'LOGGER_UPDATE_INTERVAL': 30, }, 'LND': { 'TLS_CERT_PATH': '', diff --git a/backend/src/index.ts b/backend/src/index.ts index 683f964f0..d1e3cee8d 100644 --- a/backend/src/index.ts +++ b/backend/src/index.ts @@ -189,7 +189,7 @@ class Server { await networkSyncService.$startService(); await lightningStatsUpdater.$startService(); } catch(e) { - logger.err(`Lightning backend crashed. Restarting in 1 minute. Reason: ${(e instanceof Error ? e.message : e)}`); + logger.err(`Nodejs lightning backend crashed. Restarting in 1 minute. Reason: ${(e instanceof Error ? e.message : e)}`); await Common.sleep$(1000 * 60); this.$runLightningBackend(); }; diff --git a/backend/src/tasks/lightning/network-sync.service.ts b/backend/src/tasks/lightning/network-sync.service.ts index f0122c5ca..8d6d64d66 100644 --- a/backend/src/tasks/lightning/network-sync.service.ts +++ b/backend/src/tasks/lightning/network-sync.service.ts @@ -83,7 +83,7 @@ class NetworkSyncService { logger.info(`${progress} nodes updated. 
${deletedSockets} sockets deleted`); // If a channel if not present in the graph, mark it as inactive - nodesApi.$setNodesInactive(graphNodesPubkeys); + await nodesApi.$setNodesInactive(graphNodesPubkeys); if (config.MAXMIND.ENABLED) { $lookupNodeLocation(); @@ -95,11 +95,19 @@ class NetworkSyncService { */ private async $updateChannelsList(channels: ILightningApi.Channel[]): Promise { try { + const [closedChannelsRaw]: any[] = await DB.query(`SELECT id FROM channels WHERE status = 2`); + const closedChannels = {}; + for (const closedChannel of closedChannelsRaw) { + closedChannels[closedChannel.id] = true; + } + let progress = 0; const graphChannelsIds: string[] = []; for (const channel of channels) { - await channelsApi.$saveChannel(channel); + if (!closedChannels[channel.channel_id]) { + await channelsApi.$saveChannel(channel); + } graphChannelsIds.push(channel.channel_id); ++progress; @@ -113,7 +121,7 @@ class NetworkSyncService { logger.info(`${progress} channels updated`); // If a channel if not present in the graph, mark it as inactive - channelsApi.$setChannelsInactive(graphChannelsIds); + await channelsApi.$setChannelsInactive(graphChannelsIds); } catch (e) { logger.err(`Cannot update channel list. Reason: ${(e instanceof Error ? e.message : e)}`); } @@ -232,8 +240,8 @@ class NetworkSyncService { let progress = 0; try { - logger.info(`Starting closed channels scan...`); - const channels = await channelsApi.$getChannelsByStatus(0); + logger.info(`Starting closed channels scan`); + const channels = await channelsApi.$getChannelsByStatus([0, 1]); for (const channel of channels) { const spendingTx = await bitcoinApi.$getOutspend(channel.transaction_id, channel.transaction_vout); if (spendingTx.spent === true && spendingTx.status?.confirmed === true) { @@ -277,44 +285,66 @@ class NetworkSyncService { for (const channel of channels) { let reason = 0; // Only Esplora backend can retrieve spent transaction outputs - const outspends = await bitcoinApi.$getOutspends(channel.closing_transaction_id); - const lightningScriptReasons: number[] = []; - for (const outspend of outspends) { - if (outspend.spent && outspend.txid) { - const spendingTx = await bitcoinApi.$getRawTransaction(outspend.txid); - const lightningScript = this.findLightningScript(spendingTx.vin[outspend.vin || 0]); - lightningScriptReasons.push(lightningScript); + try { + let outspends: IEsploraApi.Outspend[] | undefined; + try { + outspends = await bitcoinApi.$getOutspends(channel.closing_transaction_id); + } catch (e) { + logger.err(`Failed to call ${config.ESPLORA.REST_API_URL + '/tx/' + channel.closing_transaction_id + '/outspends'}. Reason ${e instanceof Error ? e.message : e}`); + continue; } - } - if (lightningScriptReasons.length === outspends.length - && lightningScriptReasons.filter((r) => r === 1).length === outspends.length) { - reason = 1; - } else { - const filteredReasons = lightningScriptReasons.filter((r) => r !== 1); - if (filteredReasons.length) { - if (filteredReasons.some((r) => r === 2 || r === 4)) { - reason = 3; - } else { - reason = 2; + const lightningScriptReasons: number[] = []; + for (const outspend of outspends) { + if (outspend.spent && outspend.txid) { + let spendingTx: IEsploraApi.Transaction | undefined; + try { + spendingTx = await bitcoinApi.$getRawTransaction(outspend.txid); + } catch (e) { + logger.err(`Failed to call ${config.ESPLORA.REST_API_URL + '/tx/' + outspend.txid}. Reason ${e instanceof Error ? 
e.message : e}`); + continue; + } + const lightningScript = this.findLightningScript(spendingTx.vin[outspend.vin || 0]); + lightningScriptReasons.push(lightningScript); } + } + if (lightningScriptReasons.length === outspends.length + && lightningScriptReasons.filter((r) => r === 1).length === outspends.length) { + reason = 1; } else { - /* - We can detect a commitment transaction (force close) by reading Sequence and Locktime - https://github.com/lightning/bolts/blob/master/03-transactions.md#commitment-transaction - */ - const closingTx = await bitcoinApi.$getRawTransaction(channel.closing_transaction_id); - const sequenceHex: string = closingTx.vin[0].sequence.toString(16); - const locktimeHex: string = closingTx.locktime.toString(16); - if (sequenceHex.substring(0, 2) === '80' && locktimeHex.substring(0, 2) === '20') { - reason = 2; // Here we can't be sure if it's a penalty or not + const filteredReasons = lightningScriptReasons.filter((r) => r !== 1); + if (filteredReasons.length) { + if (filteredReasons.some((r) => r === 2 || r === 4)) { + reason = 3; + } else { + reason = 2; + } } else { - reason = 1; + /* + We can detect a commitment transaction (force close) by reading Sequence and Locktime + https://github.com/lightning/bolts/blob/master/03-transactions.md#commitment-transaction + */ + let closingTx: IEsploraApi.Transaction | undefined; + try { + closingTx = await bitcoinApi.$getRawTransaction(channel.closing_transaction_id); + } catch (e) { + logger.err(`Failed to call ${config.ESPLORA.REST_API_URL + '/tx/' + channel.closing_transaction_id}. Reason ${e instanceof Error ? e.message : e}`); + continue; + } + const sequenceHex: string = closingTx.vin[0].sequence.toString(16); + const locktimeHex: string = closingTx.locktime.toString(16); + if (sequenceHex.substring(0, 2) === '80' && locktimeHex.substring(0, 2) === '20') { + reason = 2; // Here we can't be sure if it's a penalty or not + } else { + reason = 1; + } } } - } - if (reason) { - logger.debug('Setting closing reason ' + reason + ' for channel: ' + channel.id + '.'); - await DB.query(`UPDATE channels SET closing_reason = ? WHERE id = ?`, [reason, channel.id]); + if (reason) { + logger.debug('Setting closing reason ' + reason + ' for channel: ' + channel.id + '.'); + await DB.query(`UPDATE channels SET closing_reason = ? WHERE id = ?`, [reason, channel.id]); + } + } catch (e) { + logger.err(`$runClosedChannelsForensics() failed for channel ${channel.short_id}. Reason: ${e instanceof Error ? 
e.message : e}`); } ++progress; diff --git a/backend/src/tasks/lightning/sync-tasks/stats-importer.ts b/backend/src/tasks/lightning/sync-tasks/stats-importer.ts index 20d5d65d9..e05ba4ab3 100644 --- a/backend/src/tasks/lightning/sync-tasks/stats-importer.ts +++ b/backend/src/tasks/lightning/sync-tasks/stats-importer.ts @@ -5,6 +5,8 @@ import fundingTxFetcher from './funding-tx-fetcher'; import config from '../../../config'; import { ILightningApi } from '../../../api/lightning/lightning-api.interface'; import { isIP } from 'net'; +import { Common } from '../../../api/common'; +import channelsApi from '../../../api/explorer/channels.api'; const fsPromises = promises; @@ -22,7 +24,8 @@ class LightningStatsImporter { /** * Generate LN network stats for one day */ - public async computeNetworkStats(timestamp: number, networkGraph: ILightningApi.NetworkGraph): Promise { + public async computeNetworkStats(timestamp: number, + networkGraph: ILightningApi.NetworkGraph, isHistorical: boolean = false): Promise { // Node counts and network shares let clearnetNodes = 0; let torNodes = 0; @@ -66,11 +69,14 @@ class LightningStatsImporter { const baseFees: number[] = []; const alreadyCountedChannels = {}; + const [channelsInDbRaw]: any[] = await DB.query(`SELECT short_id, created FROM channels`); + const channelsInDb = {}; + for (const channel of channelsInDbRaw) { + channelsInDb[channel.short_id] = channel; + } + for (const channel of networkGraph.edges) { - let short_id = channel.channel_id; - if (short_id.indexOf('/') !== -1) { - short_id = short_id.slice(0, -2); - } + const short_id = Common.channelIntegerIdToShortId(channel.channel_id); const tx = await fundingTxFetcher.$fetchChannelOpenTx(short_id); if (!tx) { @@ -78,6 +84,31 @@ class LightningStatsImporter { continue; } + // Channel is already in db, check if we need to update 'created' field + if (isHistorical === true) { + //@ts-ignore + if (channelsInDb[short_id] && channel.timestamp < channel.created) { + await DB.query(` + UPDATE channels SET created = FROM_UNIXTIME(?) 
WHERE channels.short_id = ?`, + //@ts-ignore + [channel.timestamp, short_id] + ); + } else if (!channelsInDb[short_id]) { + await channelsApi.$saveChannel({ + channel_id: short_id, + chan_point: `${tx.txid}:${short_id.split('x')[2]}`, + //@ts-ignore + last_update: channel.timestamp, + node1_pub: channel.node1_pub, + node2_pub: channel.node2_pub, + capacity: (tx.value * 100000000).toString(), + node1_policy: null, + node2_policy: null, + }, 0); + channelsInDb[channel.channel_id] = channel; + } + } + if (!nodeStats[channel.node1_pub]) { nodeStats[channel.node1_pub] = { capacity: 0, @@ -102,7 +133,7 @@ class LightningStatsImporter { nodeStats[channel.node2_pub].channels++; } - if (channel.node1_policy !== undefined) { // Coming from the node + if (isHistorical === false) { // Coming from the node for (const policy of [channel.node1_policy, channel.node2_policy]) { if (policy && parseInt(policy.fee_rate_milli_msat, 10) < 5000) { avgFeeRate += parseInt(policy.fee_rate_milli_msat, 10); @@ -113,30 +144,42 @@ class LightningStatsImporter { baseFees.push(parseInt(policy.fee_base_msat, 10)); } } - } else { // Coming from the historical import + } else { // @ts-ignore - if (channel.fee_rate_milli_msat < 5000) { + if (channel.node1_policy.fee_rate_milli_msat < 5000) { // @ts-ignore - avgFeeRate += parseInt(channel.fee_rate_milli_msat, 10); + avgFeeRate += parseInt(channel.node1_policy.fee_rate_milli_msat, 10); // @ts-ignore - feeRates.push(parseInt(channel.fee_rate_milli_msat), 10); + feeRates.push(parseInt(channel.node1_policy.fee_rate_milli_msat), 10); } // @ts-ignore - if (channel.fee_base_msat < 5000) { + if (channel.node1_policy.fee_base_msat < 5000) { // @ts-ignore - avgBaseFee += parseInt(channel.fee_base_msat, 10); + avgBaseFee += parseInt(channel.node1_policy.fee_base_msat, 10); // @ts-ignore - baseFees.push(parseInt(channel.fee_base_msat), 10); + baseFees.push(parseInt(channel.node1_policy.fee_base_msat), 10); } } } + let medCapacity = 0; + let medFeeRate = 0; + let medBaseFee = 0; + let avgCapacity = 0; + avgFeeRate /= Math.max(networkGraph.edges.length, 1); avgBaseFee /= Math.max(networkGraph.edges.length, 1); - const medCapacity = capacities.sort((a, b) => b - a)[Math.round(capacities.length / 2 - 1)]; - const medFeeRate = feeRates.sort((a, b) => b - a)[Math.round(feeRates.length / 2 - 1)]; - const medBaseFee = baseFees.sort((a, b) => b - a)[Math.round(baseFees.length / 2 - 1)]; - const avgCapacity = Math.round(capacity / Math.max(capacities.length, 1)); + + if (capacities.length > 0) { + medCapacity = capacities.sort((a, b) => b - a)[Math.round(capacities.length / 2 - 1)]; + avgCapacity = Math.round(capacity / Math.max(capacities.length, 1)); + } + if (feeRates.length > 0) { + medFeeRate = feeRates.sort((a, b) => b - a)[Math.round(feeRates.length / 2 - 1)]; + } + if (baseFees.length > 0) { + medBaseFee = baseFees.sort((a, b) => b - a)[Math.round(baseFees.length / 2 - 1)]; + } let query = `INSERT INTO lightning_stats( added, @@ -319,7 +362,7 @@ class LightningStatsImporter { logger.debug(`Generating LN network stats for ${datestr}. 
Processed ${totalProcessed}/${fileList.length} files`); } await fundingTxFetcher.$fetchChannelsFundingTxs(graph.edges.map(channel => channel.channel_id.slice(0, -2))); - const stat = await this.computeNetworkStats(timestamp, graph); + const stat = await this.computeNetworkStats(timestamp, graph, true); existingStatsTimestamps[timestamp] = stat; } diff --git a/docker/README.md b/docker/README.md index dd42462a7..affe1d460 100644 --- a/docker/README.md +++ b/docker/README.md @@ -350,3 +350,68 @@ Corresponding `docker-compose.yml` overrides: PRICE_DATA_SERVER_CLEARNET_URL: "" ... ``` + +
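The stats importer above now derives each channel's short id with `Common.channelIntegerIdToShortId()` instead of stripping a trailing `/0`-style suffix off the raw id. The helper itself is not part of this diff; the sketch below is only an illustration of the conversion it presumably performs, assuming LND's integer channel id uses the standard BOLT #7 packing (3 bytes block height, 3 bytes transaction index, 2 bytes output index).

```typescript
// Hypothetical stand-in for Common.channelIntegerIdToShortId(); assumes BOLT #7 packing.
function integerIdToShortId(channelId: string): string {
  const id = BigInt(channelId);
  const block = id >> 40n;                 // upper 3 bytes: block height
  const txIndex = (id >> 16n) & 0xFFFFFFn; // middle 3 bytes: transaction index in the block
  const output = id & 0xFFFFn;             // lower 2 bytes: output index
  return `${block}x${txIndex}x${output}`;
}

// Example: integerIdToShortId('769658139524071425') === '700000x1234x1'
```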
+
+`mempool-config.json`:
+```
+  "LIGHTNING": {
+    "ENABLED": false,
+    "BACKEND": "lnd",
+    "TOPOLOGY_FOLDER": "",
+    "STATS_REFRESH_INTERVAL": 600,
+    "GRAPH_REFRESH_INTERVAL": 600,
+    "LOGGER_UPDATE_INTERVAL": 30
+  }
+```
+
+Corresponding `docker-compose.yml` overrides:
+```
+  api:
+    environment:
+      LIGHTNING_ENABLED: false
+      LIGHTNING_BACKEND: "lnd"
+      LIGHTNING_TOPOLOGY_FOLDER: ""
+      LIGHTNING_STATS_REFRESH_INTERVAL: 600
+      LIGHTNING_GRAPH_REFRESH_INTERVAL: 600
+      LIGHTNING_LOGGER_UPDATE_INTERVAL: 30
+      ...
+```
+
+
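The three new `LIGHTNING` intervals are plain numbers; judging from the defaults (600, 600, 30) they are seconds. The sketch below is not the backend's actual scheduler — it only illustrates the presumed relationship between the stats refresh cadence and the throttled progress logging that `LOGGER_UPDATE_INTERVAL` controls.

```typescript
// Illustrative loop only; interval constants mirror the config defaults shown above.
const STATS_REFRESH_INTERVAL = 600; // seconds (LIGHTNING.STATS_REFRESH_INTERVAL)
const LOGGER_UPDATE_INTERVAL = 30;  // seconds (LIGHTNING.LOGGER_UPDATE_INTERVAL)

const sleep = (ms: number): Promise<void> => new Promise((resolve) => setTimeout(resolve, ms));

async function statsLoop(computeStats: () => Promise<void>): Promise<void> {
  let lastLog = 0;
  for (;;) {
    await computeStats();
    // Progress messages are rate-limited so long runs do not flood the log
    if (Date.now() - lastLog >= LOGGER_UPDATE_INTERVAL * 1000) {
      console.log('lightning stats refreshed');
      lastLog = Date.now();
    }
    await sleep(STATS_REFRESH_INTERVAL * 1000);
  }
}
```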
+
+`mempool-config.json`:
+```
+  "LND": {
+    "TLS_CERT_PATH": "",
+    "MACAROON_PATH": "",
+    "REST_API_URL": "https://localhost:8080"
+  }
+```
+
+Corresponding `docker-compose.yml` overrides:
+```
+  api:
+    environment:
+      LND_TLS_CERT_PATH: ""
+      LND_MACAROON_PATH: ""
+      LND_REST_API_URL: "https://localhost:8080"
+      ...
+```
+
+
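The `LND` block above carries the usual ingredients for LND's REST API: a TLS certificate, a read-only macaroon, and the REST endpoint. A minimal sketch of how they are typically wired together — this is not the project's actual LND client, but it follows LND's documented REST conventions (hex-encoded macaroon in the `Grpc-Metadata-macaroon` header, the TLS cert pinned on the HTTPS agent).

```typescript
import { readFileSync } from 'fs';
import { Agent } from 'https';
import axios from 'axios';

// Values correspond to LND.TLS_CERT_PATH, LND.MACAROON_PATH and LND.REST_API_URL above.
const TLS_CERT_PATH = 'tls.cert';
const MACAROON_PATH = 'readonly.macaroon';
const REST_API_URL = 'https://localhost:8080';

const macaroon = readFileSync(MACAROON_PATH).toString('hex');
const httpsAgent = new Agent({ ca: readFileSync(TLS_CERT_PATH) });

// DescribeGraph over REST: GET /v1/graph, authenticated with the hex-encoded macaroon.
async function describeGraph(): Promise<unknown> {
  const res = await axios.get(`${REST_API_URL}/v1/graph`, {
    headers: { 'Grpc-Metadata-macaroon': macaroon },
    httpsAgent,
  });
  return res.data;
}
```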
+ +`mempool-config.json`: +``` + "CLN": { + "SOCKET": "" + } +``` + +Corresponding `docker-compose.yml` overrides: +``` + api: + environment: + CLN_SOCKET: "" + ... +``` diff --git a/docker/backend/start.sh b/docker/backend/start.sh index d56b4bfeb..8e6a34013 100644 --- a/docker/backend/start.sh +++ b/docker/backend/start.sh @@ -91,6 +91,22 @@ __EXTERNAL_DATA_SERVER_LIQUID_ONION__=${EXTERNAL_DATA_SERVER_LIQUID_ONION:=http: __EXTERNAL_DATA_SERVER_BISQ_URL__=${EXTERNAL_DATA_SERVER_BISQ_URL:=https://bisq.markets/api} __EXTERNAL_DATA_SERVER_BISQ_ONION__=${EXTERNAL_DATA_SERVER_BISQ_ONION:=http://bisqmktse2cabavbr2xjq7xw3h6g5ottemo5rolfcwt6aly6tp5fdryd.onion/api} +# LIGHTNING +__LIGHTNING_ENABLED__=${LIGHTNING_ENABLED:=false} +__LIGHTNING_BACKEND__=${LIGHTNING_BACKEND:="lnd"} +__LIGHTNING_TOPOLOGY_FOLDER__=${LIGHTNING_TOPOLOGY_FOLDER:=""} +__LIGHTNING_STATS_REFRESH_INTERVAL__=${LIGHTNING_STATS_REFRESH_INTERVAL:=600} +__LIGHTNING_GRAPH_REFRESH_INTERVAL__=${LIGHTNING_GRAPH_REFRESH_INTERVAL:=600} +__LIGHTNING_LOGGER_UPDATE_INTERVAL__=${LIGHTNING_LOGGER_UPDATE_INTERVAL:=30} + +# LND +__LND_TLS_CERT_PATH__=${LND_TLS_CERT_PATH:=""} +__LND_MACAROON_PATH__=${LND_MACAROON_PATH:=""} +__LND_REST_API_URL__=${LND_REST_API_URL:="https://localhost:8080"} + +# CLN +__CLN_SOCKET__=${CLN_SOCKET:=""} + mkdir -p "${__MEMPOOL_CACHE_DIR__}" sed -i "s/__MEMPOOL_NETWORK__/${__MEMPOOL_NETWORK__}/g" mempool-config.json @@ -173,4 +189,20 @@ sed -i "s!__EXTERNAL_DATA_SERVER_LIQUID_ONION__!${__EXTERNAL_DATA_SERVER_LIQUID_ sed -i "s!__EXTERNAL_DATA_SERVER_BISQ_URL__!${__EXTERNAL_DATA_SERVER_BISQ_URL__}!g" mempool-config.json sed -i "s!__EXTERNAL_DATA_SERVER_BISQ_ONION__!${__EXTERNAL_DATA_SERVER_BISQ_ONION__}!g" mempool-config.json +# LIGHTNING +sed -i "s!__LIGHTNING_ENABLED__!${__LIGHTNING_ENABLED__}!g" mempool-config.json +sed -i "s!__LIGHTNING_BACKEND__!${__LIGHTNING_BACKEND__}!g" mempool-config.json +sed -i "s!__LIGHTNING_TOPOLOGY_FOLDER__!${__LIGHTNING_TOPOLOGY_FOLDER__}!g" mempool-config.json +sed -i "s!__LIGHTNING_STATS_REFRESH_INTERVAL__!${__LIGHTNING_STATS_REFRESH_INTERVAL__}!g" mempool-config.json +sed -i "s!__LIGHTNING_GRAPH_REFRESH_INTERVAL__!${__LIGHTNING_GRAPH_REFRESH_INTERVAL__}!g" mempool-config.json +sed -i "s!__LIGHTNING_LOGGER_UPDATE_INTERVAL__!${__LIGHTNING_LOGGER_UPDATE_INTERVAL__}!g" mempool-config.json + +# LND +sed -i "s!__LND_TLS_CERT_PATH__!${__LND_TLS_CERT_PATH__}!g" mempool-config.json +sed -i "s!__LND_MACAROON_PATH__!${__LND_MACAROON_PATH__}!g" mempool-config.json +sed -i "s!__LND_REST_API_URL__!${__LND_REST_API_URL__}!g" mempool-config.json + +# CLN +sed -i "s!__CLN_SOCKET__!${__CLN_SOCKET__}!g" mempool-config.json + node /backend/dist/index.js diff --git a/frontend/src/app/components/master-page/master-page.component.html b/frontend/src/app/components/master-page/master-page.component.html index 39acf122d..90bfa3828 100644 --- a/frontend/src/app/components/master-page/master-page.component.html +++ b/frontend/src/app/components/master-page/master-page.component.html @@ -41,7 +41,9 @@
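Since the node ranking routes are renamed earlier in this diff (`rankings/capacity` → `rankings/liquidity`, `rankings/channels` → `rankings/connectivity`), existing consumers have to move to the new paths. A quick usage sketch — the host, port and `/api/v1/` prefix are assumptions about a typical local deployment, not values taken from this diff.

```typescript
// Hypothetical base URL; adjust host, port and prefix for your own instance.
const base = 'http://localhost:8999/api/v1/lightning/nodes/rankings';

async function fetchTopNodes(): Promise<void> {
  // Each endpoint is expected to return an array of top nodes.
  const byLiquidity = await fetch(`${base}/liquidity`).then((r) => r.json());
  const byConnectivity = await fetch(`${base}/connectivity`).then((r) => r.json());
  console.log(byLiquidity.length, byConnectivity.length);
}
```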