Merge branch 'master' into nymkappa/feature/config-pools-json

commit 8c21cc56d4

@@ -22,6 +22,8 @@ import poolsParser from './pools-parser';
import BlocksSummariesRepository from '../repositories/BlocksSummariesRepository';
import mining from './mining/mining';
import DifficultyAdjustmentsRepository from '../repositories/DifficultyAdjustmentsRepository';
import PricesRepository from '../repositories/PricesRepository';
import priceUpdater from '../tasks/price-updater';

class Blocks {
  private blocks: BlockExtended[] = [];
@@ -457,6 +459,19 @@ class Blocks {
          }
          await blocksRepository.$saveBlockInDatabase(blockExtended);

          const lastestPriceId = await PricesRepository.$getLatestPriceId();
          if (priceUpdater.historyInserted === true && lastestPriceId !== null) {
            await blocksRepository.$saveBlockPrices([{
              height: blockExtended.height,
              priceId: lastestPriceId,
            }]);
          } else {
            logger.info(`Cannot save block price for ${blockExtended.height} because the price updater hasn't completed yet. Trying again in 10 seconds.`)
            setTimeout(() => {
              indexer.runSingleTask('blocksPrices');
            }, 10000);
          }

          // Save blocks summary for visualization if it's enabled
          if (Common.blocksSummariesIndexingEnabled() === true) {
            await this.$getStrippedBlockTransactions(blockExtended.id, true);

@@ -1,5 +1,6 @@
import { CpfpInfo, TransactionExtended, TransactionStripped } from '../mempool.interfaces';
import config from '../config';
import { convertChannelId } from './lightning/clightning/clightning-convert';
export class Common {
  static nativeAssetId = config.MEMPOOL.NETWORK === 'liquidtestnet' ?
    '144c654344aa716d6f3abcc1ca90e5641e4e2a7f633bc09fe3baf64585819a49'
@@ -184,4 +185,37 @@ export class Common {
      config.MEMPOOL.BLOCKS_SUMMARIES_INDEXING === true
    );
  }

  static setDateMidnight(date: Date): void {
    date.setUTCHours(0);
    date.setUTCMinutes(0);
    date.setUTCSeconds(0);
    date.setUTCMilliseconds(0);
  }

  static channelShortIdToIntegerId(id: string): string {
    if (config.LIGHTNING.BACKEND === 'lnd') {
      return id;
    }
    return convertChannelId(id);
  }

  /** Decodes a channel id returned by lnd as uint64 to a short channel id */
  static channelIntegerIdToShortId(id: string): string {
    if (config.LIGHTNING.BACKEND === 'cln') {
      return id;
    }

    const n = BigInt(id);
    return [
      n >> 40n, // nth block
      (n >> 16n) & 0xffffffn, // nth tx of the block
      n & 0xffffn // nth output of the tx
    ].join('x');
  }

  static utcDateToMysql(date?: number): string {
    const d = new Date((date || 0) * 1000);
    return d.toISOString().split('T')[0] + ' ' + d.toTimeString().split(' ')[0];
  }
}
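As a quick illustration of the two new id helpers (the numeric value below is a made-up lnd-style uint64 channel id, and config.LIGHTNING.BACKEND is assumed to be 'lnd' so the decode branch is taken):

// 769658139443265536 === (700000n << 40n) | (1n << 16n) | 0n
Common.channelIntegerIdToShortId('769658139443265536'); // '700000x1x0' -> block 700000, tx 1, output 0
Common.channelShortIdToIntegerId('700000x1x0');         // pass-through on lnd; convertChannelId() on cln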

@@ -4,7 +4,7 @@ import logger from '../logger';
import { Common } from './common';

class DatabaseMigration {
  private static currentVersion = 35;
  private static currentVersion = 36;
  private queryTimeout = 120000;
  private statisticsAddedIndexed = false;
  private uniqueLogs: string[] = [];
@@ -320,6 +320,10 @@ class DatabaseMigration {
      await this.$executeQuery('DELETE from `lightning_stats` WHERE added > "2021-09-19"');
      await this.$executeQuery('ALTER TABLE `lightning_stats` ADD CONSTRAINT added_unique UNIQUE (added);');
    }

    if (databaseSchemaVersion < 36 && isBitcoin == true) {
      await this.$executeQuery('ALTER TABLE `nodes` ADD status TINYINT NOT NULL DEFAULT "1"');
    }
  }

  /**

@@ -1,6 +1,9 @@
import logger from '../../logger';
import DB from '../../database';
import nodesApi from './nodes.api';
import { ResultSetHeader } from 'mysql2';
import { ILightningApi } from '../lightning/lightning-api.interface';
import { Common } from '../common';

class ChannelsApi {
  public async $getAllChannels(): Promise<any[]> {
@@ -302,6 +305,135 @@ class ChannelsApi {
      },
    };
  }

  /**
   * Save or update a channel present in the graph
   */
  public async $saveChannel(channel: ILightningApi.Channel): Promise<void> {
    const [ txid, vout ] = channel.chan_point.split(':');

    const policy1: Partial<ILightningApi.RoutingPolicy> = channel.node1_policy || {};
    const policy2: Partial<ILightningApi.RoutingPolicy> = channel.node2_policy || {};

    const query = `INSERT INTO channels
      (
        id,
        short_id,
        capacity,
        transaction_id,
        transaction_vout,
        updated_at,
        status,
        node1_public_key,
        node1_base_fee_mtokens,
        node1_cltv_delta,
        node1_fee_rate,
        node1_is_disabled,
        node1_max_htlc_mtokens,
        node1_min_htlc_mtokens,
        node1_updated_at,
        node2_public_key,
        node2_base_fee_mtokens,
        node2_cltv_delta,
        node2_fee_rate,
        node2_is_disabled,
        node2_max_htlc_mtokens,
        node2_min_htlc_mtokens,
        node2_updated_at
      )
      VALUES (?, ?, ?, ?, ?, ?, 1, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
      ON DUPLICATE KEY UPDATE
        capacity = ?,
        updated_at = ?,
        status = 1,
        node1_public_key = ?,
        node1_base_fee_mtokens = ?,
        node1_cltv_delta = ?,
        node1_fee_rate = ?,
        node1_is_disabled = ?,
        node1_max_htlc_mtokens = ?,
        node1_min_htlc_mtokens = ?,
        node1_updated_at = ?,
        node2_public_key = ?,
        node2_base_fee_mtokens = ?,
        node2_cltv_delta = ?,
        node2_fee_rate = ?,
        node2_is_disabled = ?,
        node2_max_htlc_mtokens = ?,
        node2_min_htlc_mtokens = ?,
        node2_updated_at = ?
      ;`;

    await DB.query(query, [
      Common.channelShortIdToIntegerId(channel.channel_id),
      Common.channelIntegerIdToShortId(channel.channel_id),
      channel.capacity,
      txid,
      vout,
      Common.utcDateToMysql(channel.last_update),
      channel.node1_pub,
      policy1.fee_base_msat,
      policy1.time_lock_delta,
      policy1.fee_rate_milli_msat,
      policy1.disabled,
      policy1.max_htlc_msat,
      policy1.min_htlc,
      Common.utcDateToMysql(policy1.last_update),
      channel.node2_pub,
      policy2.fee_base_msat,
      policy2.time_lock_delta,
      policy2.fee_rate_milli_msat,
      policy2.disabled,
      policy2.max_htlc_msat,
      policy2.min_htlc,
      Common.utcDateToMysql(policy2.last_update),
      channel.capacity,
      Common.utcDateToMysql(channel.last_update),
      channel.node1_pub,
      policy1.fee_base_msat,
      policy1.time_lock_delta,
      policy1.fee_rate_milli_msat,
      policy1.disabled,
      policy1.max_htlc_msat,
      policy1.min_htlc,
      Common.utcDateToMysql(policy1.last_update),
      channel.node2_pub,
      policy2.fee_base_msat,
      policy2.time_lock_delta,
      policy2.fee_rate_milli_msat,
      policy2.disabled,
      policy2.max_htlc_msat,
      policy2.min_htlc,
      Common.utcDateToMysql(policy2.last_update)
    ]);
  }

  /**
   * Set all channels not in `graphChannelsIds` as inactive (status = 0)
   */
  public async $setChannelsInactive(graphChannelsIds: string[]): Promise<void> {
    if (graphChannelsIds.length === 0) {
      return;
    }

    try {
      const result = await DB.query<ResultSetHeader>(`
        UPDATE channels
        SET status = 0
        WHERE short_id NOT IN (
          ${graphChannelsIds.map(id => `"${id}"`).join(',')}
        )
        AND status != 2
      `);
      if (result[0].changedRows ?? 0 > 0) {
        logger.info(`Marked ${result[0].changedRows} channels as inactive because they are not in the graph`);
      } else {
        logger.debug(`Marked ${result[0].changedRows} channels as inactive because they are not in the graph`);
      }
    } catch (e) {
      logger.err('$setChannelsInactive() error: ' + (e instanceof Error ? e.message : e));
    }
  }
}

export default new ChannelsApi();
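A side note on the NOT IN (...) construction in $setChannelsInactive(): the short ids are stringified straight into the SQL text. An equivalent placeholder-based variant (purely illustrative, not part of this commit) would let mysql2 handle the quoting instead:

// Sketch: one "?" per id, values passed as parameters rather than inlined
const placeholders = graphChannelsIds.map(() => '?').join(',');
await DB.query<ResultSetHeader>(
  `UPDATE channels SET status = 0 WHERE short_id NOT IN (${placeholders}) AND status != 2`,
  graphChannelsIds
);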

@@ -1,5 +1,7 @@
import logger from '../../logger';
import DB from '../../database';
import { ResultSetHeader } from 'mysql2';
import { ILightningApi } from '../lightning/lightning-api.interface';

class NodesApi {
  public async $getNode(public_key: string): Promise<any> {
@@ -321,6 +323,66 @@ class NodesApi {
      throw e;
    }
  }

  /**
   * Save or update a node present in the graph
   */
  public async $saveNode(node: ILightningApi.Node): Promise<void> {
    try {
      const sockets = (node.addresses?.map(a => a.addr).join(',')) ?? '';
      const query = `INSERT INTO nodes(
          public_key,
          first_seen,
          updated_at,
          alias,
          color,
          sockets,
          status
        )
        VALUES (?, NOW(), FROM_UNIXTIME(?), ?, ?, ?, 1)
        ON DUPLICATE KEY UPDATE updated_at = FROM_UNIXTIME(?), alias = ?, color = ?, sockets = ?, status = 1`;

      await DB.query(query, [
        node.pub_key,
        node.last_update,
        node.alias,
        node.color,
        sockets,
        node.last_update,
        node.alias,
        node.color,
        sockets,
      ]);
    } catch (e) {
      logger.err('$saveNode() error: ' + (e instanceof Error ? e.message : e));
    }
  }

  /**
   * Set all nodes not in `nodesPubkeys` as inactive (status = 0)
   */
   public async $setNodesInactive(graphNodesPubkeys: string[]): Promise<void> {
    if (graphNodesPubkeys.length === 0) {
      return;
    }

    try {
      const result = await DB.query<ResultSetHeader>(`
        UPDATE nodes
        SET status = 0
        WHERE public_key NOT IN (
          ${graphNodesPubkeys.map(pubkey => `"${pubkey}"`).join(',')}
        )
      `);
      if (result[0].changedRows ?? 0 > 0) {
        logger.info(`Marked ${result[0].changedRows} nodes as inactive because they are not in the graph`);
      } else {
        logger.debug(`Marked ${result[0].changedRows} nodes as inactive because they are not in the graph`);
      }
    } catch (e) {
      logger.err('$setNodesInactive() error: ' + (e instanceof Error ? e.message : e));
    }
  }
}

export default new NodesApi();

@@ -473,7 +473,7 @@ class Mining {

      for (const block of blocksWithoutPrices) {
        // Quick optimisation, our mtgox feed only goes back to 2010-07-19 02:00:00, so skip the first 68951 blocks
        if (block.height < 68951) {
        if (['mainnet', 'testnet'].includes(config.MEMPOOL.NETWORK) && block.height < 68951) {
          blocksPrices.push({
            height: block.height,
            priceId: prices[0].id,
@@ -492,11 +492,11 @@ class Mining {

        if (blocksPrices.length >= 100000) {
          totalInserted += blocksPrices.length;
          let logStr = `Linking ${blocksPrices.length} blocks to their closest price`;
          if (blocksWithoutPrices.length > 200000) {
            logger.debug(`Linking ${blocksPrices.length} newly indexed blocks to their closest price | Progress ${Math.round(totalInserted / blocksWithoutPrices.length * 100)}%`);
          } else {
            logger.debug(`Linking ${blocksPrices.length} newly indexed blocks to their closest price`);
            logStr += ` | Progress ${Math.round(totalInserted / blocksWithoutPrices.length * 100)}%`;
          }
          logger.debug(logStr);
          await BlocksRepository.$saveBlockPrices(blocksPrices);
          blocksPrices.length = 0;
        }
@@ -504,11 +504,11 @@ class Mining {

      if (blocksPrices.length > 0) {
        totalInserted += blocksPrices.length;
        let logStr = `Linking ${blocksPrices.length} blocks to their closest price`;
        if (blocksWithoutPrices.length > 200000) {
          logger.debug(`Linking ${blocksPrices.length} newly indexed blocks to their closest price | Progress ${Math.round(totalInserted / blocksWithoutPrices.length * 100)}%`);
        } else {
          logger.debug(`Linking ${blocksPrices.length} newly indexed blocks to their closest price`);
          logStr += ` | Progress ${Math.round(totalInserted / blocksWithoutPrices.length * 100)}%`;
        }
        logger.debug(logStr);
        await BlocksRepository.$saveBlockPrices(blocksPrices);
      }
    } catch (e) {

@@ -34,7 +34,8 @@ interface IConfig {
    ENABLED: boolean;
    BACKEND: 'lnd' | 'cln' | 'ldk';
    TOPOLOGY_FOLDER: string;
    NODE_STATS_REFRESH_INTERVAL: number;
    STATS_REFRESH_INTERVAL: number;
    GRAPH_REFRESH_INTERVAL: number;
  };
  LND: {
    TLS_CERT_PATH: string;
@@ -188,7 +189,8 @@ const defaults: IConfig = {
    'ENABLED': false,
    'BACKEND': 'lnd',
    'TOPOLOGY_FOLDER': '',
    'NODE_STATS_REFRESH_INTERVAL': 600,
    'STATS_REFRESH_INTERVAL': 600,
    'GRAPH_REFRESH_INTERVAL': 600,
  },
  'LND': {
    'TLS_CERT_PATH': '',

@@ -1,7 +1,7 @@
import config from './config';
import { createPool, Pool, PoolConnection } from 'mysql2/promise';
import logger from './logger';
import { PoolOptions } from 'mysql2/typings/mysql';
import { FieldPacket, OkPacket, PoolOptions, ResultSetHeader, RowDataPacket } from 'mysql2/typings/mysql';

 class DB {
  constructor() {
@@ -28,7 +28,9 @@ import { PoolOptions } from 'mysql2/typings/mysql';
    }
  }

  public async query(query, params?) {
  public async query<T extends RowDataPacket[][] | RowDataPacket[] | OkPacket |
    OkPacket[] | ResultSetHeader>(query, params?): Promise<[T, FieldPacket[]]>
  {
    this.checkDBFlag();
    const pool = await this.getPool();
    return pool.query(query, params);
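The widened query<T>() signature mirrors the row types mysql2 itself exposes, so call sites can state what they expect back. A rough usage sketch (identifiers such as pubkey, DB and logger are assumed to be in scope, not taken from the commit):

import { ResultSetHeader, RowDataPacket } from 'mysql2';

// Write path: ResultSetHeader carries affectedRows / changedRows
const [header] = await DB.query<ResultSetHeader>(
  'UPDATE nodes SET status = 0 WHERE public_key = ?', [pubkey]);
logger.debug(`${header.changedRows} node(s) updated`);

// Read path: plain rows for a SELECT
const [rows] = await DB.query<RowDataPacket[]>(
  'SELECT id FROM prices ORDER BY time DESC LIMIT 1');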

@@ -6,13 +6,12 @@ import logger from './logger';
import HashratesRepository from './repositories/HashratesRepository';
import bitcoinClient from './api/bitcoin/bitcoin-client';
import priceUpdater from './tasks/price-updater';
import PricesRepository from './repositories/PricesRepository';

class Indexer {
  runIndexer = true;
  indexerRunning = false;

  constructor() {
  }
  tasksRunning: string[] = [];

  public reindex() {
    if (Common.indexingEnabled()) {
@@ -20,6 +19,28 @@ class Indexer {
    }
  }

  public async runSingleTask(task: 'blocksPrices') {
    if (!Common.indexingEnabled()) {
      return;
    }

    if (task === 'blocksPrices' && !this.tasksRunning.includes(task)) {
      this.tasksRunning.push(task);
      const lastestPriceId = await PricesRepository.$getLatestPriceId();
      if (priceUpdater.historyInserted === false || lastestPriceId === null) {
        logger.debug(`Blocks prices indexer is waiting for the price updater to complete`)
        setTimeout(() => {
          this.tasksRunning = this.tasksRunning.filter(runningTask => runningTask != task)
          this.runSingleTask('blocksPrices');
        }, 10000);
      } else {
        logger.debug(`Blocks prices indexer will run now`)
        await mining.$indexBlockPrices();
        this.tasksRunning = this.tasksRunning.filter(runningTask => runningTask != task)
      }
    }
  }
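The new runSingleTask() is effectively a single-flight guard with a retry: the task name stays in tasksRunning until the indexing completes (or the 10-second retry re-enters), so overlapping triggers collapse into one run. The intended call pattern, illustratively:

// One call comes from the block-saving path shown earlier, another from $run();
// whichever arrives second is a no-op while 'blocksPrices' is still in tasksRunning.
indexer.runSingleTask('blocksPrices');
indexer.runSingleTask('blocksPrices');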

  public async $run() {
    if (!Common.indexingEnabled() || this.runIndexer === false ||
      this.indexerRunning === true || mempool.hasPriority()
@@ -50,7 +71,7 @@ class Indexer {
        return;
      }

      await mining.$indexBlockPrices();
      this.runSingleTask('blocksPrices');
      await mining.$indexDifficultyAdjustments();
      await this.$resetHashratesIndexingState(); // TODO - Remove this as it's not efficient
      await mining.$generateNetworkHashrateHistory();

@@ -27,6 +27,11 @@ class PricesRepository {
    return oldestRow[0] ? oldestRow[0].time : 0;
  }

  public async $getLatestPriceId(): Promise<number | null> {
    const [oldestRow] = await DB.query(`SELECT id from prices WHERE USD != -1 ORDER BY time DESC LIMIT 1`);
    return oldestRow[0] ? oldestRow[0].id : null;
  }

  public async $getLatestPriceTime(): Promise<number> {
    const [oldestRow] = await DB.query(`SELECT UNIX_TIMESTAMP(time) as time from prices WHERE USD != -1 ORDER BY time DESC LIMIT 1`);
    return oldestRow[0] ? oldestRow[0].time : 0;

@@ -1,60 +1,43 @@
import DB from '../../database';
import logger from '../../logger';
import channelsApi from '../../api/explorer/channels.api';
import bitcoinClient from '../../api/bitcoin/bitcoin-client';
import bitcoinApi from '../../api/bitcoin/bitcoin-api-factory';
import config from '../../config';
import { IEsploraApi } from '../../api/bitcoin/esplora-api.interface';
import { ILightningApi } from '../../api/lightning/lightning-api.interface';
import { $lookupNodeLocation } from './sync-tasks/node-locations';
import lightningApi from '../../api/lightning/lightning-api-factory';
import { convertChannelId } from '../../api/lightning/clightning/clightning-convert';
import { Common } from '../../api/common';
import nodesApi from '../../api/explorer/nodes.api';
import { ResultSetHeader } from 'mysql2';
import fundingTxFetcher from './sync-tasks/funding-tx-fetcher';

class NetworkSyncService {
  loggerTimer = 0;

  constructor() {}

  public async $startService() {
    logger.info('Starting node sync service');
  public async $startService(): Promise<void> {
    logger.info('Starting lightning network sync service');

    await this.$runUpdater();
    this.loggerTimer = new Date().getTime() / 1000;

    setInterval(async () => {
      await this.$runUpdater();
    }, 1000 * 60 * 60);
    await this.$runTasks();
  }

  private async $runUpdater(): Promise<void> {
  private async $runTasks(): Promise<void> {
    try {
      logger.info(`Updating nodes and channels...`);
      logger.info(`Updating nodes and channels`);

      const networkGraph = await lightningApi.$getNetworkGraph();
      if (networkGraph.nodes.length === 0 || networkGraph.edges.length === 0) {
        logger.info(`LN Network graph is empty, retrying in 10 seconds`);
        await Common.sleep$(10000);
        this.$runUpdater();
        setTimeout(() => { this.$runTasks(); }, 10000);
        return;
      }

      for (const node of networkGraph.nodes) {
        await this.$saveNode(node);
      }
      logger.info(`Nodes updated.`);

      if (config.MAXMIND.ENABLED) {
        await $lookupNodeLocation();
      }

      const graphChannelsIds: string[] = [];
      for (const channel of networkGraph.edges) {
        await this.$saveChannel(channel);
        graphChannelsIds.push(channel.channel_id);
      }
      await this.$setChannelsInactive(graphChannelsIds);

      logger.info(`Channels updated.`);

      await this.$findInactiveNodesAndChannels();
      await this.$updateNodesList(networkGraph.nodes);
      await this.$updateChannelsList(networkGraph.edges);
      await this.$deactivateChannelsWithoutActiveNodes();
      await this.$lookUpCreationDateFromChain();
      await this.$updateNodeFirstSeen();
      await this.$scanForClosedChannels();
@@ -63,84 +46,183 @@ class NetworkSyncService {
      }

    } catch (e) {
      logger.err('$runUpdater() error: ' + (e instanceof Error ? e.message : e));
      logger.err('$runTasks() error: ' + (e instanceof Error ? e.message : e));
    }

    setTimeout(() => { this.$runTasks(); }, 1000 * config.LIGHTNING.GRAPH_REFRESH_INTERVAL);
  }

  /**
   * Update the `nodes` table to reflect the current network graph state
   */
  private async $updateNodesList(nodes: ILightningApi.Node[]): Promise<void> {
    let progress = 0;

    const graphNodesPubkeys: string[] = [];
    for (const node of nodes) {
      await nodesApi.$saveNode(node);
      graphNodesPubkeys.push(node.pub_key);
      ++progress;

      const elapsedSeconds = Math.round((new Date().getTime() / 1000) - this.loggerTimer);
      if (elapsedSeconds > 10) {
        logger.info(`Updating node ${progress}/${nodes.length}`);
        this.loggerTimer = new Date().getTime() / 1000;
      }
    }
    logger.info(`${progress} nodes updated`);

    // If a node is not present in the graph, mark it as inactive
    nodesApi.$setNodesInactive(graphNodesPubkeys);

    if (config.MAXMIND.ENABLED) {
      $lookupNodeLocation();
    }
  }

  /**
   * Update the `channels` table to reflect the current network graph state
   */
  private async $updateChannelsList(channels: ILightningApi.Channel[]): Promise<void> {
    try {
      let progress = 0;

      const graphChannelsIds: string[] = [];
      for (const channel of channels) {
        await channelsApi.$saveChannel(channel);
        graphChannelsIds.push(channel.channel_id);
        ++progress;

        const elapsedSeconds = Math.round((new Date().getTime() / 1000) - this.loggerTimer);
        if (elapsedSeconds > 10) {
          logger.info(`Updating channel ${progress}/${channels.length}`);
          this.loggerTimer = new Date().getTime() / 1000;
        }
      }

      logger.info(`${progress} channels updated`);

      // If a channel is not present in the graph, mark it as inactive
      channelsApi.$setChannelsInactive(graphChannelsIds);
    } catch (e) {
      logger.err(`Cannot update channel list. Reason: ${(e instanceof Error ? e.message : e)}`);
    }

    setTimeout(() => { this.$runTasks(); }, 1000 * config.LIGHTNING.STATS_REFRESH_INTERVAL);
  }

  // This method looks up the creation date of the earliest channel of the node
  // and updates the node to that date in order to get the earliest first seen date
  private async $updateNodeFirstSeen() {
  private async $updateNodeFirstSeen(): Promise<void> {
    let progress = 0;
    let updated = 0;

    try {
      const [nodes]: any[] = await DB.query(`SELECT nodes.public_key, UNIX_TIMESTAMP(nodes.first_seen) AS first_seen, (SELECT UNIX_TIMESTAMP(created) FROM channels WHERE channels.node1_public_key = nodes.public_key ORDER BY created ASC LIMIT 1) AS created1, (SELECT UNIX_TIMESTAMP(created) FROM channels WHERE channels.node2_public_key = nodes.public_key ORDER BY created ASC LIMIT 1) AS created2 FROM nodes`);
      const [nodes]: any[] = await DB.query(`
        SELECT nodes.public_key, UNIX_TIMESTAMP(nodes.first_seen) AS first_seen,
        (
          SELECT MIN(UNIX_TIMESTAMP(created))
          FROM channels
          WHERE channels.node1_public_key = nodes.public_key
        ) AS created1,
        (
          SELECT MIN(UNIX_TIMESTAMP(created))
          FROM channels
          WHERE channels.node2_public_key = nodes.public_key
        ) AS created2
        FROM nodes
      `);

      for (const node of nodes) {
        let lowest = 0;
        if (node.created1) {
          if (node.created2 && node.created2 < node.created1) {
            lowest = node.created2;
          } else {
            lowest = node.created1;
          }
        } else if (node.created2) {
          lowest = node.created2;
        }
        if (lowest && lowest < node.first_seen) {
        const lowest = Math.min(
          node.created1 ?? Number.MAX_SAFE_INTEGER,
          node.created2 ?? Number.MAX_SAFE_INTEGER,
          node.first_seen ?? Number.MAX_SAFE_INTEGER
        );
        if (lowest < node.first_seen) {
          const query = `UPDATE nodes SET first_seen = FROM_UNIXTIME(?) WHERE public_key = ?`;
          const params = [lowest, node.public_key];
          await DB.query(query, params);
        }
        ++progress;
        const elapsedSeconds = Math.round((new Date().getTime() / 1000) - this.loggerTimer);
        if (elapsedSeconds > 10) {
          logger.info(`Updating node first seen date ${progress}/${nodes.length}`);
          this.loggerTimer = new Date().getTime() / 1000;
          ++updated;
        }
      }
      logger.info(`Node first seen dates scan complete.`);
      logger.info(`Updated ${updated} node first seen dates`);
    } catch (e) {
      logger.err('$updateNodeFirstSeen() error: ' + (e instanceof Error ? e.message : e));
    }
  }
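The rewritten `lowest` computation folds the old if/else ladder into a single Math.min, using Number.MAX_SAFE_INTEGER as a neutral element for missing values, so a node with no channels can never win the comparison. A small illustration (timestamps are made up):

const created1 = undefined;      // node has no channels as node1
const created2 = 1600000000;     // oldest channel where it is node2
const firstSeen = 1650000000;
Math.min(created1 ?? Number.MAX_SAFE_INTEGER, created2 ?? Number.MAX_SAFE_INTEGER, firstSeen);
// -> 1600000000, so first_seen gets pushed back; with no channels at all the
//    result is firstSeen itself and the UPDATE is skipped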

  private async $lookUpCreationDateFromChain() {
    logger.info(`Running channel creation date lookup...`);
  private async $lookUpCreationDateFromChain(): Promise<void> {
    let progress = 0;

    logger.info(`Running channel creation date lookup`);
    try {
      const channels = await channelsApi.$getChannelsWithoutCreatedDate();
      for (const channel of channels) {
        const transaction = await bitcoinClient.getRawTransaction(channel.transaction_id, 1);
        await DB.query(`UPDATE channels SET created = FROM_UNIXTIME(?) WHERE channels.id = ?`, [transaction.blocktime, channel.id]);
        const transaction = await fundingTxFetcher.$fetchChannelOpenTx(channel.short_id);
        await DB.query(`
          UPDATE channels SET created = FROM_UNIXTIME(?) WHERE channels.id = ?`,
          [transaction.timestamp, channel.id]
        );
        ++progress;
        const elapsedSeconds = Math.round((new Date().getTime() / 1000) - this.loggerTimer);
        if (elapsedSeconds > 10) {
          logger.info(`Updating channel creation date ${progress}/${channels.length}`);
          this.loggerTimer = new Date().getTime() / 1000;
        }
      }
      logger.info(`Channel creation dates scan complete.`);
      logger.info(`Updated ${channels.length} channels' creation date`);
    } catch (e) {
      logger.err('$setCreationDateFromChain() error: ' + (e instanceof Error ? e.message : e));
      logger.err('$lookUpCreationDateFromChain() error: ' + (e instanceof Error ? e.message : e));
    }
  }

  // Looking for channels whos nodes are inactive
  private async $findInactiveNodesAndChannels(): Promise<void> {
    logger.info(`Running inactive channels scan...`);
  /**
   * If a channel does not have any active node linked to it, then also
   * mark that channel as inactive
   */
  private async $deactivateChannelsWithoutActiveNodes(): Promise<void> {
    logger.info(`Find channels whose nodes are offline`);

    try {
      const [channels]: [{ id: string }[]] = await <any>DB.query(`
        SELECT channels.id
        FROM channels
      const result = await DB.query<ResultSetHeader>(`
        UPDATE channels
        SET status = 0
        WHERE channels.status = 1
        AND (
          (
            SELECT COUNT(*)
            FROM nodes
            WHERE nodes.public_key = channels.node1_public_key
            AND nodes.status = 1
          ) = 0
        OR (
            SELECT COUNT(*)
            FROM nodes
            WHERE nodes.public_key = channels.node2_public_key
            AND nodes.status = 1
          ) = 0)
        `);

      for (const channel of channels) {
        await this.$updateChannelStatus(channel.id, 0);
      if (result[0].changedRows ?? 0 > 0) {
        logger.info(`Marked ${result[0].changedRows} channels as inactive because they are not linked to any active node`);
      } else {
        logger.debug(`Marked ${result[0].changedRows} channels as inactive because they are not linked to any active node`);
      }
      logger.info(`Inactive channels scan complete.`);
    } catch (e) {
      logger.err('$findInactiveNodesAndChannels() error: ' + (e instanceof Error ? e.message : e));
      logger.err('$deactivateChannelsWithoutActiveNodes() error: ' + (e instanceof Error ? e.message : e));
    }
  }

  private async $scanForClosedChannels(): Promise<void> {
    let progress = 0;

    try {
      logger.info(`Starting closed channels scan...`);
      const channels = await channelsApi.$getChannelsByStatus(0);
@@ -154,6 +236,13 @@ class NetworkSyncService {
            await DB.query(`UPDATE channels SET closing_transaction_id = ? WHERE id = ?`, [spendingTx.txid, channel.id]);
          }
        }

        ++progress;
        const elapsedSeconds = Math.round((new Date().getTime() / 1000) - this.loggerTimer);
        if (elapsedSeconds > 10) {
          logger.info(`Checking if channel has been closed ${progress}/${channels.length}`);
          this.loggerTimer = new Date().getTime() / 1000;
        }
      }
      logger.info(`Closed channels scan complete.`);
    } catch (e) {
@@ -171,6 +260,9 @@ class NetworkSyncService {
    if (!config.ESPLORA.REST_API_URL) {
      return;
    }

    let progress = 0;

    try {
      logger.info(`Started running closed channel forensics...`);
      const channels = await channelsApi.$getClosedChannelsWithoutReason();
@@ -216,6 +308,13 @@ class NetworkSyncService {
          logger.debug('Setting closing reason ' + reason + ' for channel: ' + channel.id + '.');
          await DB.query(`UPDATE channels SET closing_reason = ? WHERE id = ?`, [reason, channel.id]);
        }

        ++progress;
        const elapsedSeconds = Math.round((new Date().getTime() / 1000) - this.loggerTimer);
        if (elapsedSeconds > 10) {
          logger.info(`Updating channel closed channel forensics ${progress}/${channels.length}`);
          this.loggerTimer = new Date().getTime() / 1000;
        }
      }
      logger.info(`Closed channels forensics scan complete.`);
    } catch (e) {
@@ -270,195 +369,6 @@ class NetworkSyncService {
      }
      return 1;
  }

  private async $saveChannel(channel: ILightningApi.Channel): Promise<void> {
    const [ txid, vout ] = channel.chan_point.split(':');

    const policy1: Partial<ILightningApi.RoutingPolicy> = channel.node1_policy || {};
    const policy2: Partial<ILightningApi.RoutingPolicy> = channel.node2_policy || {};

    try {
      const query = `INSERT INTO channels
        (
          id,
          short_id,
          capacity,
          transaction_id,
          transaction_vout,
          updated_at,
          status,
          node1_public_key,
          node1_base_fee_mtokens,
          node1_cltv_delta,
          node1_fee_rate,
          node1_is_disabled,
          node1_max_htlc_mtokens,
          node1_min_htlc_mtokens,
          node1_updated_at,
          node2_public_key,
          node2_base_fee_mtokens,
          node2_cltv_delta,
          node2_fee_rate,
          node2_is_disabled,
          node2_max_htlc_mtokens,
          node2_min_htlc_mtokens,
          node2_updated_at
        )
        VALUES (?, ?, ?, ?, ?, ?, 1, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
        ON DUPLICATE KEY UPDATE
          capacity = ?,
          updated_at = ?,
          status = 1,
          node1_public_key = ?,
          node1_base_fee_mtokens = ?,
          node1_cltv_delta = ?,
          node1_fee_rate = ?,
          node1_is_disabled = ?,
          node1_max_htlc_mtokens = ?,
          node1_min_htlc_mtokens = ?,
          node1_updated_at = ?,
          node2_public_key = ?,
          node2_base_fee_mtokens = ?,
          node2_cltv_delta = ?,
          node2_fee_rate = ?,
          node2_is_disabled = ?,
          node2_max_htlc_mtokens = ?,
          node2_min_htlc_mtokens = ?,
          node2_updated_at = ?
        ;`;

      await DB.query(query, [
        this.toIntegerId(channel.channel_id),
        this.toShortId(channel.channel_id),
        channel.capacity,
        txid,
        vout,
        this.utcDateToMysql(channel.last_update),
        channel.node1_pub,
        policy1.fee_base_msat,
        policy1.time_lock_delta,
        policy1.fee_rate_milli_msat,
        policy1.disabled,
        policy1.max_htlc_msat,
        policy1.min_htlc,
        this.utcDateToMysql(policy1.last_update),
        channel.node2_pub,
        policy2.fee_base_msat,
        policy2.time_lock_delta,
        policy2.fee_rate_milli_msat,
        policy2.disabled,
        policy2.max_htlc_msat,
        policy2.min_htlc,
        this.utcDateToMysql(policy2.last_update),
        channel.capacity,
        this.utcDateToMysql(channel.last_update),
        channel.node1_pub,
        policy1.fee_base_msat,
        policy1.time_lock_delta,
        policy1.fee_rate_milli_msat,
        policy1.disabled,
        policy1.max_htlc_msat,
        policy1.min_htlc,
        this.utcDateToMysql(policy1.last_update),
        channel.node2_pub,
        policy2.fee_base_msat,
        policy2.time_lock_delta,
        policy2.fee_rate_milli_msat,
        policy2.disabled,
        policy2.max_htlc_msat,
        policy2.min_htlc,
        this.utcDateToMysql(policy2.last_update)
      ]);
    } catch (e) {
      logger.err('$saveChannel() error: ' + (e instanceof Error ? e.message : e));
    }
  }

  private async $updateChannelStatus(channelId: string, status: number): Promise<void> {
    try {
      await DB.query(`UPDATE channels SET status = ? WHERE id = ?`, [status, channelId]);
    } catch (e) {
      logger.err('$updateChannelStatus() error: ' + (e instanceof Error ? e.message : e));
    }
  }

  private async $setChannelsInactive(graphChannelsIds: string[]): Promise<void> {
    if (graphChannelsIds.length === 0) {
      return;
    }

    try {
      await DB.query(`
        UPDATE channels
        SET status = 0
        WHERE short_id NOT IN (
          ${graphChannelsIds.map(id => `"${id}"`).join(',')}
        )
        AND status != 2
      `);
    } catch (e) {
      logger.err('$setChannelsInactive() error: ' + (e instanceof Error ? e.message : e));
    }
  }

  private async $saveNode(node: ILightningApi.Node): Promise<void> {
    try {
      const sockets = (node.addresses?.map(a => a.addr).join(',')) ?? '';
      const query = `INSERT INTO nodes(
          public_key,
          first_seen,
          updated_at,
          alias,
          color,
          sockets
        )
        VALUES (?, NOW(), FROM_UNIXTIME(?), ?, ?, ?)
        ON DUPLICATE KEY UPDATE updated_at = FROM_UNIXTIME(?), alias = ?, color = ?, sockets = ?`;

      await DB.query(query, [
        node.pub_key,
        node.last_update,
        node.alias,
        node.color,
        sockets,
        node.last_update,
        node.alias,
        node.color,
        sockets,
      ]);
    } catch (e) {
      logger.err('$saveNode() error: ' + (e instanceof Error ? e.message : e));
    }
  }

  private toIntegerId(id: string): string {
    if (config.LIGHTNING.BACKEND === 'cln') {
      return convertChannelId(id);
    }
    else if (config.LIGHTNING.BACKEND === 'lnd') {
      return id;
    }
    return '';
  }

  /** Decodes a channel id returned by lnd as uint64 to a short channel id */
  private toShortId(id: string): string {
    if (config.LIGHTNING.BACKEND === 'cln') {
      return id;
    }

    const n = BigInt(id);
    return [
      n >> 40n, // nth block
      (n >> 16n) & 0xffffffn, // nth tx of the block
      n & 0xffffn // nth output of the tx
    ].join('x');
  }

  private utcDateToMysql(date?: number): string {
    const d = new Date((date || 0) * 1000);
    return d.toISOString().split('T')[0] + ' ' + d.toTimeString().split(' ')[0];
  }
}

export default new NetworkSyncService();

@@ -1,8 +1,8 @@
import DB from '../../database';
import logger from '../../logger';
import lightningApi from '../../api/lightning/lightning-api-factory';
import LightningStatsImporter from './sync-tasks/stats-importer';
import config from '../../config';
import { Common } from '../../api/common';

class LightningStatsUpdater {
  public async $startService(): Promise<void> {
@@ -12,31 +12,22 @@ class LightningStatsUpdater {
    LightningStatsImporter.$run();
  }

  private setDateMidnight(date: Date): void {
    date.setUTCHours(0);
    date.setUTCMinutes(0);
    date.setUTCSeconds(0);
    date.setUTCMilliseconds(0);
  }

  private async $runTasks(): Promise<void> {
    await this.$logStatsDaily();

    setTimeout(() => {
      this.$runTasks();
    }, 1000 * config.LIGHTNING.NODE_STATS_REFRESH_INTERVAL);
    setTimeout(() => { this.$runTasks(); }, 1000 * config.LIGHTNING.STATS_REFRESH_INTERVAL);
  }

  /**
   * Update the latest entry for each node every config.LIGHTNING.NODE_STATS_REFRESH_INTERVAL seconds
   * Update the latest entry for each node every config.LIGHTNING.STATS_REFRESH_INTERVAL seconds
   */
  private async $logStatsDaily(): Promise<void> {
    const date = new Date();
    this.setDateMidnight(date);

    logger.info(`Updating latest network stats`);
    Common.setDateMidnight(date);
    const networkGraph = await lightningApi.$getNetworkGraph();
    LightningStatsImporter.computeNetworkStats(date.getTime() / 1000, networkGraph);

    logger.info(`Updated latest network stats`);
  }
}
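Since setDateMidnight() now lives on Common and mutates the date in place, the timestamp handed to computeNetworkStats() is always the UTC midnight of the current day. For example:

const date = new Date('2022-07-01T17:35:10Z');
Common.setDateMidnight(date);
date.toISOString();     // '2022-07-01T00:00:00.000Z'
date.getTime() / 1000;  // 1656633600 - the value passed to computeNetworkStats()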

@@ -1,5 +1,6 @@
import { existsSync, promises } from 'fs';
import bitcoinClient from '../../../api/bitcoin/bitcoin-client';
import { Common } from '../../../api/common';
import config from '../../../config';
import logger from '../../../logger';

@@ -69,7 +70,11 @@ class FundingTxFetcher {
    this.running = false;
  }

  public async $fetchChannelOpenTx(channelId: string): Promise<any> {
  public async $fetchChannelOpenTx(channelId: string): Promise<{timestamp: number, txid: string, value: number}> {
    if (channelId.indexOf('x') === -1) {
      channelId = Common.channelIntegerIdToShortId(channelId);
    }

    if (this.fundingTxCache[channelId]) {
      return this.fundingTxCache[channelId];
    }
@@ -110,4 +115,4 @@ class FundingTxFetcher {
  }
}

export default new FundingTxFetcher;
export default new FundingTxFetcher;
 | 
			
		||||
 | 
			
		||||
@ -6,7 +6,10 @@ import DB from '../../../database';
 | 
			
		||||
import logger from '../../../logger';
 | 
			
		||||
 | 
			
		||||
export async function $lookupNodeLocation(): Promise<void> {
 | 
			
		||||
  logger.info(`Running node location updater using Maxmind...`);
 | 
			
		||||
  let loggerTimer = new Date().getTime() / 1000;
 | 
			
		||||
  let progress = 0;
 | 
			
		||||
 | 
			
		||||
  logger.info(`Running node location updater using Maxmind`);
 | 
			
		||||
  try {
 | 
			
		||||
    const nodes = await nodesApi.$getAllNodes();
 | 
			
		||||
    const lookupCity = await maxmind.open<CityResponse>(config.MAXMIND.GEOLITE2_CITY);
 | 
			
		||||
@ -18,21 +21,24 @@ export async function $lookupNodeLocation(): Promise<void> {
 | 
			
		||||
      for (const socket of sockets) {
        const ip = socket.substring(0, socket.lastIndexOf(':')).replace('[', '').replace(']', '');
        const hasClearnet = [4, 6].includes(net.isIP(ip));

        if (hasClearnet && ip !== '127.0.1.1' && ip !== '127.0.0.1') {
          const city = lookupCity.get(ip);
          const asn = lookupAsn.get(ip);
          const isp = lookupIsp.get(ip);

          if (city && (asn || isp)) {
            const query = `UPDATE nodes SET
              as_number = ?,
              city_id = ?,
              country_id = ?,
              subdivision_id = ?,
              longitude = ?,
              latitude = ?,
              accuracy_radius = ?
            WHERE public_key = ?`;
            const query = `
              UPDATE nodes SET
                as_number = ?,
                city_id = ?,
                country_id = ?,
                subdivision_id = ?,
                longitude = ?,
                latitude = ?,
                accuracy_radius = ?
              WHERE public_key = ?
            `;

            const params = [
              isp?.autonomous_system_number ?? asn?.autonomous_system_number,
@ -46,25 +52,25 @@ export async function $lookupNodeLocation(): Promise<void> {
            ];
            await DB.query(query, params);

             // Store Continent
             if (city.continent?.geoname_id) {
               await DB.query(
            // Store Continent
            if (city.continent?.geoname_id) {
              await DB.query(
                `INSERT IGNORE INTO geo_names (id, type, names) VALUES (?, 'continent', ?)`,
                [city.continent?.geoname_id, JSON.stringify(city.continent?.names)]);
             }
            }

             // Store Country
             if (city.country?.geoname_id) {
               await DB.query(
            // Store Country
            if (city.country?.geoname_id) {
              await DB.query(
                `INSERT IGNORE INTO geo_names (id, type, names) VALUES (?, 'country', ?)`,
                [city.country?.geoname_id, JSON.stringify(city.country?.names)]);
             }
            }

            // Store Country ISO code
            if (city.country?.iso_code) {
              await DB.query(
               `INSERT IGNORE INTO geo_names (id, type, names) VALUES (?, 'country_iso_code', ?)`,
               [city.country?.geoname_id, city.country?.iso_code]);
                `INSERT IGNORE INTO geo_names (id, type, names) VALUES (?, 'country_iso_code', ?)`,
                [city.country?.geoname_id, city.country?.iso_code]);
            }

            // Store Division
@ -88,10 +94,17 @@ export async function $lookupNodeLocation(): Promise<void> {
                [isp?.autonomous_system_number ?? asn?.autonomous_system_number, JSON.stringify(isp?.isp ?? asn?.autonomous_system_organization)]);
            }
          }

          ++progress;
          const elapsedSeconds = Math.round((new Date().getTime() / 1000) - loggerTimer);
          if (elapsedSeconds > 10) {
            logger.info(`Updating node location data ${progress}/${nodes.length}`);
            loggerTimer = new Date().getTime() / 1000;
          }
        }
      }
    }
    logger.info(`Node location data updated.`);
    logger.info(`${progress} nodes location data updated`);
  } catch (e) {
    logger.err('$lookupNodeLocation() error: ' + (e instanceof Error ? e.message : e));
  }
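For reference, the geo_names rows written above store each entity's localized display names as a JSON string keyed by (id, type); the country_iso_code rows store the plain ISO string instead. A minimal sketch of how one of the JSON-backed rows might be read back and localized, assuming the same mysql2-style DB wrapper used above; the import path, the getGeoNameLabel helper and the 'en' fallback are illustrative, not part of this change:

import DB from '../database'; // assumed path to the DB wrapper used above

// Hypothetical helper: resolve a geo_names row of a JSON-backed type
// ('continent', 'country', 'division', ...) to a display string.
async function getGeoNameLabel(id: number, type: string, lang = 'en'): Promise<string | null> {
  const [rows]: any = await DB.query(
    'SELECT names FROM geo_names WHERE id = ? AND type = ?',
    [id, type]
  );
  if (!rows || rows.length === 0) {
    return null;
  }
  // The column was written with JSON.stringify(...), so parse it back and
  // fall back to any available translation when the requested one is missing.
  const names = JSON.parse(rows[0].names);
  return names[lang] ?? (Object.values(names)[0] as string) ?? null;
}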

@ -128,32 +128,32 @@ class LightningStatsImporter
      if (channel.node1_policy !== undefined) { // Coming from the node
        for (const policy of [channel.node1_policy, channel.node2_policy]) {
          if (policy && policy.fee_rate_milli_msat < 5000) {
            avgFeeRate += policy.fee_rate_milli_msat;
            feeRates.push(policy.fee_rate_milli_msat);
            avgFeeRate += parseInt(policy.fee_rate_milli_msat, 10);
            feeRates.push(parseInt(policy.fee_rate_milli_msat, 10));
          }
          if (policy && policy.fee_base_msat < 5000) {
            avgBaseFee += policy.fee_base_msat;
            baseFees.push(policy.fee_base_msat);
            avgBaseFee += parseInt(policy.fee_base_msat, 10);
            baseFees.push(parseInt(policy.fee_base_msat, 10));
          }
        }
      } else { // Coming from the historical import
        if (channel.fee_rate_milli_msat < 5000) {
          avgFeeRate += channel.fee_rate_milli_msat;
          feeRates.push(channel.fee_rate_milli_msat);
          avgFeeRate += parseInt(channel.fee_rate_milli_msat, 10);
          feeRates.push(parseInt(channel.fee_rate_milli_msat, 10));
        }
        if (channel.fee_base_msat < 5000) {
          avgBaseFee += channel.fee_base_msat;
          baseFees.push(channel.fee_base_msat);
          avgBaseFee += parseInt(channel.fee_base_msat, 10);
          baseFees.push(parseInt(channel.fee_base_msat, 10));
        }
      }
    }

    avgFeeRate /= networkGraph.edges.length;
    avgBaseFee /= networkGraph.edges.length;

    avgFeeRate /= Math.max(networkGraph.edges.length, 1);
    avgBaseFee /= Math.max(networkGraph.edges.length, 1);
    const medCapacity = capacities.sort((a, b) => b - a)[Math.round(capacities.length / 2 - 1)];
    const medFeeRate = feeRates.sort((a, b) => b - a)[Math.round(feeRates.length / 2 - 1)];
    const medBaseFee = baseFees.sort((a, b) => b - a)[Math.round(baseFees.length / 2 - 1)];
    const avgCapacity = Math.round(capacity / capacities.length);
    const avgCapacity = Math.round(capacity / Math.max(capacities.length, 1));
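The guarded divisions and the median indexing above follow one pattern: tolerate an empty edge list and pick the middle element of a descending sort. A minimal standalone sketch of the same computation (the helper names are illustrative, not part of this change):

// Average that tolerates an empty sample, mirroring the Math.max(length, 1) guard above.
function safeAverage(values: number[]): number {
  return values.reduce((sum, v) => sum + v, 0) / Math.max(values.length, 1);
}

// Median using the same index convention as medCapacity/medFeeRate/medBaseFee above
// (descending sort, element at round(n / 2 - 1)); returns undefined for an empty array.
function median(values: number[]): number | undefined {
  return [...values].sort((a, b) => b - a)[Math.round(values.length / 2 - 1)];
}

// e.g. safeAverage([]) === 0, median([1, 2, 3]) === 2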

    let query = `INSERT INTO lightning_stats(
        added,
@ -251,6 +251,9 @@ class LightningStatsImporter
    };
  }

  /**
   * Import topology files LN historical data into the database
   */
  async $importHistoricalLightningStats(): Promise<void> {
    let latestNodeCount = 1;


@ -1,4 +1,5 @@
import * as fs from 'fs';
import { Common } from '../api/common';
import config from '../config';
import logger from '../logger';
import PricesRepository from '../repositories/PricesRepository';
@ -34,10 +35,10 @@ export interface Prices {
}

class PriceUpdater {
  historyInserted: boolean = false;
  lastRun: number = 0;
  lastHistoricalRun: number = 0;
  running: boolean = false;
  public historyInserted = false;
  lastRun = 0;
  lastHistoricalRun = 0;
  running = false;
  feeds: PriceFeed[] = [];
  currencies: string[] = ['USD', 'EUR', 'GBP', 'CAD', 'CHF', 'AUD', 'JPY'];
  latestPrices: Prices;

@ -20,7 +20,11 @@ export class NodesChannelsMap implements OnInit, OnDestroy {
  @Input() publicKey: string | undefined;

  observable$: Observable<any>;
  center: number[] | undefined = undefined;

  center: number[] | undefined;
  zoom: number | undefined;
  channelWidth = 0.6;
  channelOpacity = 0.1;

  chartInstance = undefined;
  chartOptions: EChartsOption = {};
@ -42,7 +46,8 @@ export class NodesChannelsMap implements OnInit, OnDestroy {
  ngOnDestroy(): void {}

  ngOnInit(): void {
    this.center = this.style === 'widget' ? [0, 0, -10] : undefined;
    this.center = this.style === 'widget' ? [0, 40] : [0, 5];
    this.zoom = this.style === 'widget' ? 3.5 : 1.3;

    if (this.style === 'graph') {
      this.seoService.setTitle($localize`Lightning nodes channels world map`);
@ -69,29 +74,46 @@ export class NodesChannelsMap implements OnInit, OnDestroy {
              thisNodeGPS = [channel[6], channel[7]];
            }

            channelsLoc.push([[channel[2], channel[3]], [channel[6], channel[7]]]);
            // We add a bit of noise so nodes at the same location are not all
            // on top of each other
            let random = Math.random() * 2 * Math.PI;
            let random2 = Math.random() * 0.01;

            if (!nodesPubkeys[channel[0]]) {
              nodes.push({
                publicKey: channel[0],
                name: channel[1],
                value: [channel[2], channel[3]],
              });
              nodesPubkeys[channel[0]] = true;
              nodes.push([
                channel[2] + random2 * Math.cos(random),
                channel[3] + random2 * Math.sin(random),
                1,
                channel[0],
                channel[1]
              ]);
              nodesPubkeys[channel[0]] = nodes[nodes.length - 1];
            }

            random = Math.random() * 2 * Math.PI;
            random2 = Math.random() * 0.01;

            if (!nodesPubkeys[channel[4]]) {
              nodes.push({
                publicKey: channel[4],
                name: channel[5],
                value: [channel[6], channel[7]],
              });
              nodesPubkeys[channel[4]] = true;
              nodes.push([
                channel[6] + random2 * Math.cos(random),
                channel[7] + random2 * Math.sin(random),
                1,
                channel[4],
                channel[5]
              ]);
              nodesPubkeys[channel[4]] = nodes[nodes.length - 1];
            }

            const channelLoc = [];
            channelLoc.push(nodesPubkeys[channel[0]].slice(0, 2));
            channelLoc.push(nodesPubkeys[channel[4]].slice(0, 2));
            channelsLoc.push(channelLoc);
          }
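The noise added above spreads nodes that report identical coordinates onto a small random offset around their true position, so they do not render as a single indistinguishable point. A standalone sketch of the same idea (the function name and default radius are illustrative, not part of this component):

// Offset a [longitude, latitude] pair by a random point on a disc of radius
// `maxOffset` degrees, matching the Math.cos/Math.sin jitter used above.
function jitterCoordinates([lng, lat]: [number, number], maxOffset = 0.01): [number, number] {
  const angle = Math.random() * 2 * Math.PI;
  const radius = Math.random() * maxOffset;
  return [lng + radius * Math.cos(angle), lat + radius * Math.sin(angle)];
}

// e.g. two nodes reported at the same position end up a fraction of a degree apart:
// jitterCoordinates([-74.0068, 40.7123]);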
          if (this.style === 'nodepage' && thisNodeGPS) {
            // 1ML 0217890e3aad8d35bc054f43acc00084b25229ecff0ab68debd82883ad65ee8266
            // New York GPS [-74.0068, 40.7123]
            // Map center [-20.55, 0, -9.85]
            this.center = [thisNodeGPS[0] * -20.55 / -74.0068, 0, thisNodeGPS[1] * -9.85 / 40.7123];
            this.center = [thisNodeGPS[0], thisNodeGPS[1]];
            this.zoom = 10;
            this.channelWidth = 1;
            this.channelOpacity = 1;
          }

          this.prepareChartOptions(nodes, channelsLoc);
@ -115,87 +137,84 @@ export class NodesChannelsMap implements OnInit, OnDestroy {
    }

    this.chartOptions = {
      silent: this.style === 'widget' ? true : false,
      silent: this.style === 'widget',
      title: title ?? undefined,
      geo3D: {
        map: 'world',
        shading: 'color',
      tooltip: {},
      geo: {
        animation: false,
        silent: true,
        postEffect: {
          enable: true,
          bloom: {
            intensity: 0.1,
          }
        },
        viewControl: {
          center: this.center,
          minDistance: 1,
          maxDistance: 60,
          distance: this.style === 'widget' ? 22 : this.style === 'nodepage' ? 22 : 60,
          alpha: 90,
          rotateSensitivity: 0,
          panSensitivity: this.style === 'widget' ? 0 : 1,
          zoomSensitivity: this.style === 'widget' ? 0 : 0.5,
          panMouseButton: this.style === 'widget' ? null : 'left',
          rotateMouseButton: undefined,
        center: this.center,
        zoom: this.zoom,
        tooltip: {
          show: true
        },
        map: 'world',
        roam: this.style === 'widget' ? false : true,
        itemStyle: {
          color: 'white',
          opacity: 0.02,
          borderWidth: 1,
          borderColor: 'black',
          color: '#ffffff44'
        },
        regionHeight: 0.01,
        scaleLimit: {
          min: 1.3,
          max: 100000,
        }
      },
      series: [
        {
          // @ts-ignore
          type: 'lines3D',
          coordinateSystem: 'geo3D',
          blendMode: 'lighter',
          lineStyle: {
            width: 1,
            opacity: ['widget', 'graph'].includes(this.style) ? 0.025 : 1,
          large: true,
          progressive: 200,
          type: 'scatter',
          data: nodes,
          coordinateSystem: 'geo',
          geoIndex: 0,
          symbolSize: 4,
          tooltip: {
            backgroundColor: 'rgba(17, 19, 31, 1)',
            borderRadius: 4,
            shadowColor: 'rgba(0, 0, 0, 0.5)',
            textStyle: {
              color: '#b1b1b1',
              align: 'left',
            },
            borderColor: '#000',
            formatter: (value) => {
              const data = value.data;
              const alias = data[4].length > 0 ? data[4] : data[3].slice(0, 20);
              return `<b style="color: white">${alias}</b>`;
            }
          },
          data: channels
          itemStyle: {
            color: 'white',
            borderColor: 'black',
            borderWidth: 2,
            opacity: 1,
          },
          blendMode: 'lighter',
          zlevel: 1,
        },
        {
          // @ts-ignore
          type: 'scatter3D',
          symbol: 'circle',
          blendMode: 'lighter',
          coordinateSystem: 'geo3D',
          symbolSize: 3,
          itemStyle: {
            color: '#BBFFFF',
            opacity: 1,
            borderColor: '#FFFFFF00',
          large: true,
          progressive: 200,
          silent: true,
          type: 'lines',
          coordinateSystem: 'geo',
          data: channels,
          lineStyle: {
            opacity: this.channelOpacity,
            width: this.channelWidth,
            curveness: 0,
            color: '#466d9d',
          },
          data: nodes,
          emphasis: {
            label: {
              position: 'top',
              color: 'white',
              fontSize: 16,
              formatter: function(value) {
                return value.name;
              },
              show: true,
            }
          }
        },
          blendMode: 'lighter',
          tooltip: {
            show: false,
          },
          zlevel: 2,
        }
      ]
    };
  }

  @HostListener('window:wheel', ['$event'])
  onWindowScroll(e): void {
    // Not very smooth when using the mouse
    if (this.style === 'widget' && e.target.tagName === 'CANVAS') {
      window.scrollBy({left: 0, top: e.deltaY, behavior: 'auto'});
    }
  }

  onChartInit(ec) {
    if (this.chartInstance !== undefined) {
      return;
@ -211,14 +230,34 @@ export class NodesChannelsMap implements OnInit, OnDestroy {
        });
      });
    }

    this.chartInstance.on('click', (e) => {
      if (e.data && e.data.publicKey) {
      if (e.data) {
        this.zone.run(() => {
          const url = new RelativeUrlPipe(this.stateService).transform(`/lightning/node/${e.data.publicKey}`);
          const url = new RelativeUrlPipe(this.stateService).transform(`/lightning/node/${e.data[3]}`);
          this.router.navigate([url]);
        });
      }
    });
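With this change each entry in nodes is a positional array rather than an object, which is why the click handler reads e.data[3] and the tooltip formatter reads data[3]/data[4]. A small sketch that documents that layout as a tuple type (the type name and helper are assumptions for illustration, not part of this diff):

// Layout of one entry pushed into `nodes` above:
// [longitude, latitude, size, publicKey, alias]
type NodePoint = [number, number, number, string, string];

// Hypothetical helper mirroring the tooltip/click logic: prefer the alias and
// fall back to a truncated public key when the alias is empty.
function nodeLabel(point: NodePoint): string {
  const [, , , publicKey, alias] = point;
  return alias.length > 0 ? alias : publicKey.slice(0, 20);
}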

    this.chartInstance.on('georoam', (e) => {
      if (!e.zoom || this.style === 'nodepage') {
        return;
      }

      const speed = 0.005;
      const chartOptions = {
        series: this.chartOptions.series
      };

      chartOptions.series[1].lineStyle.opacity += e.zoom > 1 ? speed : -speed;
      chartOptions.series[1].lineStyle.width += e.zoom > 1 ? speed : -speed;
      chartOptions.series[0].symbolSize += e.zoom > 1 ? speed * 10 : -speed * 10;
      chartOptions.series[1].lineStyle.opacity = Math.max(0.05, Math.min(0.5, chartOptions.series[1].lineStyle.opacity));
      chartOptions.series[1].lineStyle.width = Math.max(0.5, Math.min(1, chartOptions.series[1].lineStyle.width));
      chartOptions.series[0].symbolSize = Math.max(4, Math.min(5.5, chartOptions.series[0].symbolSize));

      this.chartInstance.setOption(chartOptions);
    });
  }
}
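The georoam handler above nudges line opacity, line width and symbol size in the zoom direction and then clamps each value to a fixed range with Math.max/Math.min. The same clamping could be factored into a small helper (illustrative only, not part of this change):

// Clamp a value into [min, max], as the Math.max/Math.min pairs above do.
function clamp(value: number, min: number, max: number): number {
  return Math.max(min, Math.min(max, value));
}

// e.g. the opacity update would read:
// lineStyle.opacity = clamp(lineStyle.opacity + (e.zoom > 1 ? speed : -speed), 0.05, 0.5);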

@ -34,10 +34,11 @@ esac
TOR_INSTALL=ON
CERTBOT_INSTALL=ON

# install 3 network daemons
# install 4 network daemons
BITCOIN_INSTALL=ON
BISQ_INSTALL=ON
ELEMENTS_INSTALL=ON
CLN_INSTALL=ON

# install UNFURL
UNFURL_INSTALL=ON
@ -191,6 +192,7 @@ case $OS in
        NGINX_ETC_FOLDER=/usr/local/etc/nginx
        NGINX_CONFIGURATION=/usr/local/etc/nginx/nginx.conf
        CERTBOT_PKG=py39-certbot
        CLN_PKG=c-lightning
    ;;

    Debian)
@ -275,6 +277,12 @@ ELECTRS_LIQUID_DATA=${ELECTRS_DATA_ROOT}/liquid
ELECTRS_LIQUIDTESTNET_ZPOOL=${ZPOOL}
ELECTRS_LIQUIDTESTNET_DATA=${ELECTRS_DATA_ROOT}/liquidtestnet

# Core Lightning user/group
CLN_USER=cln
CLN_GROUP=cln
# Core Lightning home folder
CLN_HOME=/cln

# bisq user/group
BISQ_USER=bisq
BISQ_GROUP=bisq
@ -596,6 +604,10 @@ zfsCreateFilesystems()
        done
    fi

    if [ "${CLN_INSTALL}" = ON ];then
        zfs create -o "mountpoint=${CLN_HOME}" "${ZPOOL}/cln"
    fi

    if [ "${BISQ_INSTALL}" = ON ];then
        zfs create -o "mountpoint=${BISQ_HOME}" "${ZPOOL}/bisq"
    fi
@ -675,6 +687,10 @@ ext4CreateDir()
        done
    fi

    if [ "${CLN_INSTALL}" = ON ];then
        mkdir -p "${CLN_HOME}"
    fi

    if [ "${BISQ_INSTALL}" = ON ];then
        mkdir -p "${BISQ_HOME}"
    fi
@ -735,6 +751,7 @@ Testnet:Enable Bitcoin Testnet:ON
Signet:Enable Bitcoin Signet:ON
Liquid:Enable Elements Liquid:ON
Liquidtestnet:Enable Elements Liquidtestnet:ON
CoreLN:Enable Core Lightning:ON
Bisq:Enable Bisq:ON
Unfurl:Enable Unfurl:ON
EOF
@ -810,6 +827,11 @@ else
    ELEMENTS_INSTALL=OFF
fi

if grep CoreLN $tempfile >/dev/null 2>&1;then
    CLN_INSTALL=ON
else
    CLN_INSTALL=OFF
fi

if [ "${BITCOIN_MAINNET_ENABLE}" = ON -o "${BITCOIN_TESTNET_ENABLE}" = ON -o "${BITCOIN_SIGNET_ENABLE}" = ON ];then
    BITCOIN_ELECTRS_INSTALL=ON
else
@ -1234,6 +1256,33 @@ if [ "${ELEMENTS_ELECTRS_INSTALL}" = ON ];then
    osSudo "${ELEMENTS_USER}" sh -c "cd ${ELEMENTS_ELECTRS_HOME} && cargo run --release --features liquid --bin electrs -- --network liquid --version" || true
fi

#####################################
# Core Lightning for Bitcoin Mainnet #
#####################################

echo "[*] Installing Core Lightning"
case $OS in
    FreeBSD)
        echo "[*] Creating Core Lightning user"
        osGroupCreate "${CLN_GROUP}"
        osUserCreate "${CLN_USER}" "${CLN_HOME}" "${CLN_GROUP}"
        osSudo "${ROOT_USER}" chsh -s `which zsh` "${CLN_USER}"
        osSudo "${CLN_USER}" touch "${CLN_HOME}/.zshrc"
        osSudo "${ROOT_USER}" chown -R "${CLN_USER}:${CLN_GROUP}" "${CLN_HOME}"

        echo "[*] Installing Core Lightning package"
        osPackageInstall ${CLN_PKG}

        echo "[*] Installing Core Lightning mainnet Cronjob"
        crontab_cln+='@reboot sleep 30 ; screen -dmS main lightningd --alias `hostname` --bitcoin-datadir /bitcoin\n'
        crontab_cln+='@reboot sleep 60 ; screen -dmS sig lightningd --alias `hostname` --bitcoin-datadir /bitcoin --network signet\n'
        crontab_cln+='@reboot sleep 90 ; screen -dmS tes lightningd --alias `hostname` --bitcoin-datadir /bitcoin --network testnet\n'
        echo "${crontab_cln}" | crontab -u "${CLN_USER}" -
    ;;
    Debian)
    ;;
esac

#####################
# Bisq installation #
#####################
