Cleanup mining related backend logs
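This commit reworks the mining-related backend log messages: log levels are adjusted (debug/info/warn/notice) and error logs are rewritten to the form 'Cannot ... Reason: ' + (e instanceof Error ? e.message : e). As an illustrative sketch only (not part of this commit; the formatError name is hypothetical), the recurring expression could be factored into one helper:

    // Hypothetical helper, for illustration only: it centralizes the
    // `e instanceof Error ? e.message : e` expression that the new log lines inline.
    function formatError(e: unknown): string {
      return e instanceof Error ? e.message : String(e);
    }

    // Usage in the style of the new log messages:
    // logger.err(`Cannot fetch tx ${txId}. Reason: ` + formatError(e));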
parent bc0ea1e999
commit d7d8fda3f6
@@ -75,9 +75,12 @@ class Blocks {
          transactions.push(tx);
          transactionsFetched++;
        } catch (e) {
          logger.debug('Error fetching block tx: ' + (e instanceof Error ? e.message : e));
          if (i === 0) {
            throw new Error('Failed to fetch Coinbase transaction: ' + txIds[i]);
            const msg = `Cannot fetch coinbase tx ${txIds[i]}. Reason: ` + (e instanceof Error ? e.message : e);
            logger.err(msg);
            throw new Error(msg);
          } else {
            logger.err(`Cannot fetch tx ${txIds[i]}. Reason: ` + (e instanceof Error ? e.message : e));
          }
        }
      }
@@ -137,8 +140,8 @@ class Blocks {
        pool = await poolsRepository.$getUnknownPool();
      }

      if (!pool) { // Something is wrong with the pools table, ignore pool indexing
        logger.err('Unable to find pool, nor getting the unknown pool. Is the "pools" table empty?');
      if (!pool) { // We should never have this situation in practice
        logger.warn(`Cannot assign pool to block ${blockExtended.height} and 'unknown' pool does not exist. Check your "pools" table entries`);
        return blockExtended;
      }

@@ -214,11 +217,12 @@ class Blocks {

      const lastBlockToIndex = Math.max(0, currentBlockHeight - indexingBlockAmount + 1);

      logger.info(`Indexing blocks from #${currentBlockHeight} to #${lastBlockToIndex}`);
      logger.debug(`Indexing blocks from #${currentBlockHeight} to #${lastBlockToIndex}`);

      const chunkSize = 10000;
      let totaIndexed = await blocksRepository.$blockCount(null, null);
      let indexedThisRun = 0;
      let newlyIndexed = 0;
      const startedAt = new Date().getTime() / 1000;
      let timer = new Date().getTime() / 1000;

@@ -228,12 +232,11 @@ class Blocks {
        const missingBlockHeights: number[] = await blocksRepository.$getMissingBlocksBetweenHeights(
          currentBlockHeight, endBlock);
        if (missingBlockHeights.length <= 0) {
          logger.debug(`No missing blocks between #${currentBlockHeight} to #${endBlock}`);
          currentBlockHeight -= chunkSize;
          continue;
        }

        logger.debug(`Indexing ${missingBlockHeights.length} blocks from #${currentBlockHeight} to #${endBlock}`);
        logger.info(`Indexing ${missingBlockHeights.length} blocks from #${currentBlockHeight} to #${endBlock}`);

        for (const blockHeight of missingBlockHeights) {
          if (blockHeight < lastBlockToIndex) {
@@ -255,14 +258,16 @@ class Blocks {
          const block = BitcoinApi.convertBlock(await bitcoinClient.getBlock(blockHash));
          const transactions = await this.$getTransactionsExtended(blockHash, block.height, true, true);
          const blockExtended = await this.$getBlockExtended(block, transactions);

          newlyIndexed++;
          await blocksRepository.$saveBlockInDatabase(blockExtended);
        }

        currentBlockHeight -= chunkSize;
      }
      logger.info('Block indexing completed');
      logger.info(`Indexed ${newlyIndexed} blocks`);
    } catch (e) {
      logger.err('An error occured in $generateBlockDatabase(). Trying again later. ' + e);
      logger.err('Block indexing failed. Trying again later. Reason: ' + (e instanceof Error ? e.message : e));
      this.blockIndexingStarted = false;
      return;
    }

@@ -142,8 +142,6 @@ class Mining {
    }

    try {
      logger.info(`Indexing mining pools weekly hashrates`);

      const indexedTimestamp = await HashratesRepository.$getWeeklyHashrateTimestamps();
      const hashrates: any[] = [];
      const genesisTimestamp = 1231006505000; // bitcoin-cli getblock 000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f
@@ -155,6 +153,7 @@ class Mining {
      const totalWeekIndexed = (await BlocksRepository.$blockCount(null, null)) / 1008;
      let indexedThisRun = 0;
      let totalIndexed = 0;
      let newlyIndexed = 0;
      let startedAt = new Date().getTime();

      while (toTimestamp > genesisTimestamp) {
@@ -198,6 +197,7 @@ class Mining {
          });
        }

        newlyIndexed += hashrates.length;
        await HashratesRepository.$saveHashrates(hashrates);
        hashrates.length = 0;

@@ -217,7 +217,9 @@ class Mining {
      }
      this.weeklyHashrateIndexingStarted = false;
      await HashratesRepository.$setLatestRunTimestamp('last_weekly_hashrates_indexing');
      logger.info(`Weekly pools hashrate indexing completed`);
      if (newlyIndexed > 0) {
        logger.info(`Indexed ${newlyIndexed} pools weekly hashrate`);
      }
    } catch (e) {
      this.weeklyHashrateIndexingStarted = false;
      throw e;
@@ -249,8 +251,6 @@ class Mining {
    }

    try {
      logger.info(`Indexing network daily hashrate`);

      const indexedTimestamp = (await HashratesRepository.$getNetworkDailyHashrate(null)).map(hashrate => hashrate.timestamp);
      const genesisTimestamp = 1231006505000; // bitcoin-cli getblock 000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f
      const lastMidnight = this.getDateMidnight(new Date());
@@ -260,6 +260,7 @@ class Mining {
      const totalDayIndexed = (await BlocksRepository.$blockCount(null, null)) / 144;
      let indexedThisRun = 0;
      let totalIndexed = 0;
      let newlyIndexed = 0;
      let startedAt = new Date().getTime();

      while (toTimestamp > genesisTimestamp) {
@@ -294,6 +295,7 @@ class Mining {
        });

        if (hashrates.length > 10) {
          newlyIndexed += hashrates.length;
          await HashratesRepository.$saveHashrates(hashrates);
          hashrates.length = 0;
        }
@@ -303,7 +305,8 @@ class Mining {
          const daysPerSeconds = (indexedThisRun / elapsedSeconds).toFixed(2);
          const formattedDate = new Date(fromTimestamp).toUTCString();
          const daysLeft = Math.round(totalDayIndexed - totalIndexed);
          logger.debug(`Getting network daily hashrate for ${formattedDate} | ~${daysPerSeconds} days/sec | ~${daysLeft} days left to index`);
          logger.debug(`Getting network daily hashrate for ${formattedDate} | ~${daysPerSeconds} days/sec | ` +
            `~${daysLeft} days left to index`);
          startedAt = new Date().getTime();
          indexedThisRun = 0;
        }
@@ -323,11 +326,14 @@ class Mining {
        });
      }

      newlyIndexed += hashrates.length;
      await HashratesRepository.$saveHashrates(hashrates);

      await HashratesRepository.$setLatestRunTimestamp('last_hashrates_indexing');
      this.hashrateIndexingStarted = false;
      logger.info(`Daily network hashrate indexing completed`);
      if (newlyIndexed > 0) {
        logger.info(`Indexed ${newlyIndexed} day of network hashrate`);
      }
    } catch (e) {
      this.hashrateIndexingStarted = false;
      throw e;

@@ -64,7 +64,7 @@ class PoolsParser {
    try {
      [existingPools] = await connection.query<any>({ sql: 'SELECT * FROM pools;', timeout: 120000 });
    } catch (e) {
      logger.err('Unable to get existing pools from the database, skipping pools.json import');
      logger.err('Cannot get existing pools from the database, skipping pools.json import');
      connection.release();
      return;
    }
@@ -97,7 +97,7 @@ class PoolsParser {
      if (slug === undefined) {
        // Only keep alphanumerical
        slug = poolNames[i].replace(/[^a-z0-9]/gi, '').toLowerCase();
        logger.debug(`No slug found for '${poolNames[i]}', generating it => '${slug}'`);
        logger.warn(`No slug found for '${poolNames[i]}', generating it => '${slug}'`);
      }

      if (existingPools.find((pool) => pool.name === poolNames[i]) !== undefined) {
@@ -155,7 +155,7 @@ class PoolsParser {
      logger.info('Mining pools.json import completed');
    } catch (e) {
      connection.release();
      logger.err(`Unable to import pools in the database!`);
      logger.err(`Unable to import pools in the database`);
      throw e;
    }
  }

@@ -93,7 +93,7 @@ class Server {
      try {
        if (process.env.npm_config_reindex != undefined) { // Re-index requests
          const tables = process.env.npm_config_reindex.split(',');
          logger.warn(`Indexed data for "${process.env.npm_config_reindex}" tables will be erased in 5 seconds from now (using '--reindex') ...`);
          logger.warn(`Indexed data for "${process.env.npm_config_reindex}" tables will be erased in 5 seconds (using '--reindex')`);
          await Common.sleep(5000);
          await databaseMigration.$truncateIndexedData(tables);
        }
@@ -169,8 +169,12 @@ class Server {
  }

  async $resetHashratesIndexingState() {
    await HashratesRepository.$setLatestRunTimestamp('last_hashrates_indexing', 0);
    await HashratesRepository.$setLatestRunTimestamp('last_weekly_hashrates_indexing', 0);
    try {
      await HashratesRepository.$setLatestRunTimestamp('last_hashrates_indexing', 0);
      await HashratesRepository.$setLatestRunTimestamp('last_weekly_hashrates_indexing', 0);
    } catch (e) {
      logger.err(`Cannot reset hashrate indexing timestamps. Reason: ` + (e instanceof Error ? e.message : e));
    }
  }

  async $runIndexingWhenReady() {
@@ -188,7 +192,7 @@ class Server {
      await mining.$generateNetworkHashrateHistory();
      await mining.$generatePoolHashrateHistory();
    } catch (e) {
      logger.err(`Unable to run indexing right now, trying again later. ` + e);
      logger.err(`Indexing failed, trying again later. Reason: ` + (e instanceof Error ? e.message : e));
    }
  }

@@ -56,11 +56,11 @@ class BlocksRepository {
      connection.release();
    } catch (e: any) {
      connection.release();
      if (e.errno === 1062) { // ER_DUP_ENTRY
      if (e.errno === 1062) { // ER_DUP_ENTRY - This scenario is possible upon node backend restart
        logger.debug(`$saveBlockInDatabase() - Block ${block.height} has already been indexed, ignoring`);
      } else {
        connection.release();
        logger.err('$saveBlockInDatabase() error: ' + (e instanceof Error ? e.message : e));
        logger.err('Cannot save indexed block into db. Reason: ' + (e instanceof Error ? e.message : e));
        throw e;
      }
    }
@@ -93,7 +93,7 @@ class BlocksRepository {
      return missingBlocksHeights;
    } catch (e) {
      connection.release();
      logger.err('$getMissingBlocksBetweenHeights() error' + (e instanceof Error ? e.message : e));
      logger.err('Cannot retrieve blocks list to index. Reason: ' + (e instanceof Error ? e.message : e));
      throw e;
    }
  }
@@ -130,7 +130,7 @@ class BlocksRepository {
      return rows;
    } catch (e) {
      connection.release();
      logger.err('$getEmptyBlocks() error' + (e instanceof Error ? e.message : e));
      logger.err('Cannot count empty blocks. Reason: ' + (e instanceof Error ? e.message : e));
      throw e;
    }
  }
@@ -168,7 +168,7 @@ class BlocksRepository {
      return <number>rows[0].blockCount;
    } catch (e) {
      connection.release();
      logger.err('$blockCount() error' + (e instanceof Error ? e.message : e));
      logger.err(`Cannot count blocks for this pool (using offset). Reason: ` + (e instanceof Error ? e.message : e));
      throw e;
    }
  }
@@ -208,7 +208,7 @@ class BlocksRepository {
      return <number>rows[0];
    } catch (e) {
      connection.release();
      logger.err('$blockCountBetweenTimestamp() error' + (e instanceof Error ? e.message : e));
      logger.err(`Cannot count blocks for this pool (using timestamps). Reason: ` + (e instanceof Error ? e.message : e));
      throw e;
    }
  }
@@ -235,7 +235,7 @@ class BlocksRepository {
      return <number>rows[0].blockTimestamp;
    } catch (e) {
      connection.release();
      logger.err('$oldestBlockTimestamp() error' + (e instanceof Error ? e.message : e));
      logger.err('Cannot get oldest indexed block timestamp. Reason: ' + (e instanceof Error ? e.message : e));
      throw e;
    }
  }
@@ -278,7 +278,7 @@ class BlocksRepository {
      return blocks;
    } catch (e) {
      connection.release();
      logger.err('$getBlocksByPool() error' + (e instanceof Error ? e.message : e));
      logger.err('Cannot get blocks for this pool. Reason: ' + (e instanceof Error ? e.message : e));
      throw e;
    }
  }
@@ -308,7 +308,7 @@ class BlocksRepository {
      return rows[0];
    } catch (e) {
      connection.release();
      logger.err('$getBlockByHeight() error' + (e instanceof Error ? e.message : e));
      logger.err(`Cannot get indexed block ${height}. Reason: ` + (e instanceof Error ? e.message : e));
      throw e;
    }
  }
@@ -365,7 +365,7 @@ class BlocksRepository {
      return rows;
    } catch (e) {
      connection.release();
      logger.err('$getBlocksDifficulty() error' + (e instanceof Error ? e.message : e));
      logger.err('Cannot generate difficulty history. Reason: ' + (e instanceof Error ? e.message : e));
      throw e;
    }
  }
@@ -392,7 +392,7 @@ class BlocksRepository {
      return rows[0];
    } catch (e) {
      connection.release();
      logger.err('$getBlockStats() error: ' + (e instanceof Error ? e.message : e));
      logger.err('Cannot generate reward stats. Reason: ' + (e instanceof Error ? e.message : e));
      throw e;
    }
  }
@@ -410,7 +410,7 @@ class BlocksRepository {

      for (let i = 0; i < lastBlocks.length - 1; ++i) {
        if (lastBlocks[i].previous_block_hash !== lastBlocks[i + 1].hash) {
          logger.notice(`Chain divergence detected at block ${lastBlocks[i].height}, re-indexing most recent data`);
          logger.warn(`Chain divergence detected at block ${lastBlocks[i].height}, re-indexing most recent data`);
          return false;
        }
      }
@@ -427,21 +427,21 @@ class BlocksRepository {
   * Delete $count blocks from the database
   */
  public async $deleteBlocks(count: number) {
    logger.info(`Delete ${count} most recent indexed blocks from the database`);
    let connection;

    try {
      connection = await DB.getConnection();
      logger.debug(`Delete ${count} most recent indexed blocks from the database`);
      await connection.query(`DELETE FROM blocks ORDER BY height DESC LIMIT ${count};`);
    } catch (e) {
      logger.err('$deleteBlocks() error' + (e instanceof Error ? e.message : e));
      logger.err('Cannot delete recent indexed blocks. Reason: ' + (e instanceof Error ? e.message : e));
    }

    connection.release();
  }

  /**
   * Get the historical averaged block reward and total fees
   * Get the historical averaged block fees
   */
  public async $getHistoricalBlockFees(div: number, interval: string | null): Promise<any> {
    let connection;
@@ -464,7 +464,7 @@ class BlocksRepository {
      return rows;
    } catch (e) {
      connection.release();
      logger.err('$getHistoricalBlockFees() error: ' + (e instanceof Error ? e.message : e));
      logger.err('Cannot generate block fees history. Reason: ' + (e instanceof Error ? e.message : e));
      throw e;
    }
  }
@@ -493,7 +493,7 @@ class BlocksRepository {
      return rows;
    } catch (e) {
      connection.release();
      logger.err('$getHistoricalBlockRewards() error: ' + (e instanceof Error ? e.message : e));
      logger.err('Cannot generate block rewards history. Reason: ' + (e instanceof Error ? e.message : e));
      throw e;
    }
  }

@@ -27,7 +27,7 @@ class HashratesRepository {
      connection.release();
    } catch (e: any) {
      connection.release();
      logger.err('$saveHashrateInDatabase() error' + (e instanceof Error ? e.message : e));
      logger.err('Cannot save indexed hashrate into db. Reason: ' + (e instanceof Error ? e.message : e));
      throw e;
    }
  }
@@ -56,7 +56,7 @@ class HashratesRepository {
      return rows;
    } catch (e) {
      connection.release();
      logger.err('$getNetworkDailyHashrate() error' + (e instanceof Error ? e.message : e));
      logger.err('Cannot fetch network hashrate history. Reason: ' + (e instanceof Error ? e.message : e));
      throw e;
    }
  }
@@ -76,7 +76,7 @@ class HashratesRepository {
      return rows.map(row => row.timestamp);
    } catch (e) {
      connection.release();
      logger.err('$getWeeklyHashrateTimestamps() error' + (e instanceof Error ? e.message : e));
      logger.err('Cannot retrieve indexed weekly hashrate timestamps. Reason: ' + (e instanceof Error ? e.message : e));
      throw e;
    }
  }
@@ -112,7 +112,7 @@ class HashratesRepository {
      return rows;
    } catch (e) {
      connection.release();
      logger.err('$getPoolsWeeklyHashrate() error' + (e instanceof Error ? e.message : e));
      logger.err('Cannot fetch weekly pools hashrate history. Reason: ' + (e instanceof Error ? e.message : e));
      throw e;
    }
  }
@@ -146,7 +146,7 @@ class HashratesRepository {
      connection.release();
    } catch (e) {
      connection.release();
      logger.err('$getPoolWeeklyHashrate() error' + (e instanceof Error ? e.message : e));
      logger.err('Cannot fetch hashrate start/end timestamps for this pool. Reason: ' + (e instanceof Error ? e.message : e));
    }

    // Get hashrates entries between boundaries
@@ -164,7 +164,7 @@ class HashratesRepository {
      return rows;
    } catch (e) {
      connection.release();
      logger.err('$getPoolWeeklyHashrate() error' + (e instanceof Error ? e.message : e));
      logger.err('Cannot fetch pool hashrate history for this pool. Reason: ' + (e instanceof Error ? e.message : e));
      throw e;
    }
  }
@@ -181,6 +181,8 @@ class HashratesRepository {
      connection.release();
    } catch (e) {
      connection.release();
      logger.err(`Cannot set last indexing timestamp for ${key}. Reason: ` + (e instanceof Error ? e.message : e));
      throw e;
    }
  }

@@ -201,7 +203,7 @@ class HashratesRepository {
      return rows[0]['number'];
    } catch (e) {
      connection.release();
      logger.err('$setLatestRunTimestamp() error' + (e instanceof Error ? e.message : e));
      logger.err(`Cannot retrieve last indexing timestamp for ${key}. Reason: ` + (e instanceof Error ? e.message : e));
      throw e;
    }
  }
@@ -210,7 +212,7 @@ class HashratesRepository {
   * Delete most recent data points for re-indexing
   */
  public async $deleteLastEntries() {
    logger.debug(`Delete latest hashrates data points from the database`);
    logger.info(`Delete latest hashrates data points from the database`);

    let connection;
    try {
@@ -223,7 +225,7 @@ class HashratesRepository {
      await this.$setLatestRunTimestamp('last_hashrates_indexing', 0);
      await this.$setLatestRunTimestamp('last_weekly_hashrates_indexing', 0);
    } catch (e) {
      logger.err('$deleteLastEntries() error' + (e instanceof Error ? e.message : e));
      logger.err('Cannot delete latest hashrates data points. Reason: ' + (e instanceof Error ? e.message : e));
    }

    connection.release();

@@ -42,7 +42,6 @@ class PoolsRepository {
    query += ` GROUP BY pool_id
      ORDER BY COUNT(height) DESC`;

    // logger.debug(query);
    const connection = await DB.getConnection();
    try {
      const [rows] = await connection.query(query);
@@ -51,7 +50,7 @@ class PoolsRepository {
      return <PoolInfo[]>rows;
    } catch (e) {
      connection.release();
      logger.err('$getPoolsInfo() error' + (e instanceof Error ? e.message : e));
      logger.err(`Cannot generate pools stats. Reason: ` + (e instanceof Error ? e.message : e));
      throw e;
    }
  }
@@ -73,7 +72,7 @@ class PoolsRepository {
      return <PoolInfo[]>rows;
    } catch (e) {
      connection.release();
      logger.err('$getPoolsInfoBetween() error' + (e instanceof Error ? e.message : e));
      logger.err('Cannot generate pools blocks count. Reason: ' + (e instanceof Error ? e.message : e));
      throw e;
    }
  }
@@ -95,7 +94,7 @@ class PoolsRepository {
      connection.release();

      if (rows.length < 1) {
        logger.debug(`$getPool(): slug does not match any known pool`);
        logger.debug(`This slug does not match any known pool`);
        return null;
      }

@@ -109,7 +108,7 @@ class PoolsRepository {
      return rows[0];
    } catch (e) {
      connection.release();
      logger.err('$getPool() error' + (e instanceof Error ? e.message : e));
      logger.err('Cannot get pool from db. Reason: ' + (e instanceof Error ? e.message : e));
      throw e;
    }
  }

@@ -48,7 +48,7 @@ class PoolsUpdater {

    } catch (e) {
      this.lastRun = now - (oneWeek - oneDay); // Try again in 24h instead of waiting next week
      logger.err('PoolsUpdater failed. Will try again in 24h. Error: ' + e);
      logger.err('PoolsUpdater failed. Will try again in 24h. Reason: ' + (e instanceof Error ? e.message : e));
    }
  }

@@ -71,7 +71,7 @@ class PoolsUpdater {
      await connection.query(`INSERT INTO state VALUES('pools_json_sha', NULL, '${githubSha}')`);
      connection.release();
    } catch (e) {
      logger.err('Unable save github pools.json sha into the DB, error: ' + e);
      logger.err('Cannot save github pools.json sha into the db. Reason: ' + (e instanceof Error ? e.message : e));
      connection.release();
      return undefined;
    }
@@ -88,7 +88,7 @@ class PoolsUpdater {
      connection.release();
      return (rows.length > 0 ? rows[0].string : undefined);
    } catch (e) {
      logger.err('Unable fetch pools.json sha from DB, error: ' + e);
      logger.err('Cannot fetch pools.json sha from db. Reason: ' + (e instanceof Error ? e.message : e));
      connection.release();
      return undefined;
    }
@@ -106,7 +106,7 @@ class PoolsUpdater {
      }
    }

    logger.err('Unable to find latest pools.json sha from github');
    logger.err('Cannot find latest pools.json sha from github api response');
    return undefined;
  }

@@ -138,7 +138,7 @@ class PoolsUpdater {
      });

      request.on('error', (error) => {
        logger.err('Query failed with error: ' + error);
        logger.err('Github API query failed. Reason: ' + error);
        reject(error);
      })
    });
