Cleanup some ops logs
This commit is contained in:
parent aa7f3ac69d
commit a507dfe2cb
@@ -280,7 +280,7 @@ class Blocks {

        currentBlockHeight -= chunkSize;
      }
-      logger.info(`Indexed ${newlyIndexed} blocks`);
+      logger.notice(`Block indexing completed: indexed ${newlyIndexed} blocks`);
      loadingIndicators.setProgress('block-indexing', 100);
    } catch (e) {
      logger.err('Block indexing failed. Trying again later. Reason: ' + (e instanceof Error ? e.message : e));
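The info → notice switch above (and in the Mining hunks further down) promotes one-off completion messages to a higher syslog severity so they stand out from routine progress logs. As a quick reference, a minimal TypeScript sketch of the RFC 5424 ordering behind method names like notice and err, assuming the backend logger follows the usual syslog conventions (this enum is illustrative, not the project's Logger class):

// RFC 5424 severities: lower number = more severe.
enum SyslogSeverity {
  Error = 3,    // logger.err -- something failed
  Warning = 4,  // logger.warn -- recoverable problem
  Notice = 5,   // logger.notice -- normal but significant one-off event, e.g. "indexing completed"
  Info = 6,     // logger.info -- routine progress messages
  Debug = 7,    // logger.debug -- verbose diagnostics
}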
@@ -7,8 +7,24 @@ class DatabaseMigration {
  private static currentVersion = 19;
  private queryTimeout = 120000;
  private statisticsAddedIndexed = false;
+  private uniqueLogs: string[] = [];
+
+  private blocksTruncatedMessage = `'blocks' table has been truncated. Re-indexing from scratch.`;
+  private hashratesTruncatedMessage = `'hashrates' table has been truncated. Re-indexing from scratch.`;

  constructor() { }
+
+  /**
+   * Avoid printing multiple time the same message
+   */
+  private uniqueLog(loggerFunction: any, msg: string) {
+    if (this.uniqueLogs.includes(msg)) {
+      return;
+    }
+    this.uniqueLogs.push(msg);
+    loggerFunction(msg);
+  }
+
  /**
   * Entry point
   */
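To see what the new uniqueLog helper buys, here is a small self-contained sketch of the same deduplication pattern; the DedupLogger class, LogFn type, and console-backed calls are illustrative assumptions, not the backend's actual logger plumbing:

type LogFn = (msg: string) => void;

class DedupLogger {
  private seen: string[] = [];

  // Log each distinct message at most once, mirroring DatabaseMigration.uniqueLog()
  uniqueLog(loggerFunction: LogFn, msg: string): void {
    if (this.seen.includes(msg)) {
      return;
    }
    this.seen.push(msg);
    loggerFunction(msg);
  }
}

const dedup = new DedupLogger();
const truncated = `'blocks' table has been truncated. Re-indexing from scratch.`;
dedup.uniqueLog(console.warn, truncated); // printed once
dedup.uniqueLog(console.warn, truncated); // skipped: already seen

Seeding the list up front, as the databaseSchemaVersion <= 2 branch in the next hunk does with this.uniqueLogs.push(...), suppresses a message entirely rather than merely deduplicating it.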
@@ -39,6 +55,16 @@ class DatabaseMigration {
      process.exit(-1);
    }

+    if (databaseSchemaVersion === 0) {
+      logger.info('Initializing database (first run, clean install)');
+    }
+
+    if (databaseSchemaVersion <= 2) {
+      // Disable some spam logs when they're not relevant
+      this.uniqueLogs.push(this.blocksTruncatedMessage);
+      this.uniqueLogs.push(this.hashratesTruncatedMessage);
+    }
+
    logger.debug('MIGRATIONS: Current state.schema_version ' + databaseSchemaVersion);
    logger.debug('MIGRATIONS: Latest DatabaseMigration.version is ' + DatabaseMigration.currentVersion);
    if (databaseSchemaVersion >= DatabaseMigration.currentVersion) {
@@ -56,10 +82,13 @@ class DatabaseMigration {
    }

    if (DatabaseMigration.currentVersion > databaseSchemaVersion) {
      logger.notice('MIGRATIONS: Upgrading database schema');
      try {
        await this.$migrateTableSchemaFromVersion(databaseSchemaVersion);
-        logger.notice(`MIGRATIONS: OK. Database schema have been migrated from version ${databaseSchemaVersion} to ${DatabaseMigration.currentVersion} (latest version)`);
+        if (databaseSchemaVersion === 0) {
+          logger.notice(`MIGRATIONS: OK. Database schema has been properly initialized to version ${DatabaseMigration.currentVersion} (latest version)`);
+        } else {
+          logger.notice(`MIGRATIONS: OK. Database schema have been migrated from version ${databaseSchemaVersion} to ${DatabaseMigration.currentVersion} (latest version)`);
+        }
      } catch (e) {
        logger.err('MIGRATIONS: Unable to migrate database, aborting. ' + e);
      }
@@ -89,13 +118,13 @@ class DatabaseMigration {
        await this.$executeQuery(this.getCreateBlocksTableQuery(), await this.$checkIfTableExists('blocks'));
      }
      if (databaseSchemaVersion < 5 && isBitcoin === true) {
-        logger.warn(`'blocks' table has been truncated. Re-indexing from scratch.`);
+        this.uniqueLog(logger.notice, this.blocksTruncatedMessage);
        await this.$executeQuery('TRUNCATE blocks;'); // Need to re-index
        await this.$executeQuery('ALTER TABLE blocks ADD `reward` double unsigned NOT NULL DEFAULT "0"');
      }

      if (databaseSchemaVersion < 6 && isBitcoin === true) {
-        logger.warn(`'blocks' table has been truncated. Re-indexing from scratch.`);
+        this.uniqueLog(logger.notice, this.blocksTruncatedMessage);
        await this.$executeQuery('TRUNCATE blocks;');  // Need to re-index
        // Cleanup original blocks fields type
        await this.$executeQuery('ALTER TABLE blocks MODIFY `height` integer unsigned NOT NULL DEFAULT "0"');
@@ -122,7 +151,7 @@ class DatabaseMigration {
      }

      if (databaseSchemaVersion < 8 && isBitcoin === true) {
-        logger.warn(`'hashrates' table has been truncated. Re-indexing from scratch.`);
+        this.uniqueLog(logger.notice, this.blocksTruncatedMessage);
        await this.$executeQuery('TRUNCATE hashrates;'); // Need to re-index
        await this.$executeQuery('ALTER TABLE `hashrates` DROP INDEX `PRIMARY`');
        await this.$executeQuery('ALTER TABLE `hashrates` ADD `id` int NOT NULL AUTO_INCREMENT PRIMARY KEY FIRST');
@@ -131,7 +160,7 @@ class DatabaseMigration {
      }

      if (databaseSchemaVersion < 9 && isBitcoin === true) {
-        logger.warn(`'hashrates' table has been truncated. Re-indexing from scratch.`);
+        this.uniqueLog(logger.notice, this.hashratesTruncatedMessage);
        await this.$executeQuery('TRUNCATE hashrates;'); // Need to re-index
        await this.$executeQuery('ALTER TABLE `state` CHANGE `name` `name` varchar(100)');
        await this.$executeQuery('ALTER TABLE `hashrates` ADD UNIQUE `hashrate_timestamp_pool_id` (`hashrate_timestamp`, `pool_id`)');
@@ -142,7 +171,7 @@ class DatabaseMigration {
      }

      if (databaseSchemaVersion < 11 && isBitcoin === true) {
-        logger.warn(`'blocks' table has been truncated. Re-indexing from scratch.`);
+        this.uniqueLog(logger.notice, this.blocksTruncatedMessage);
        await this.$executeQuery('TRUNCATE blocks;'); // Need to re-index
        await this.$executeQuery(`ALTER TABLE blocks
          ADD avg_fee INT UNSIGNED NULL,
@@ -166,14 +195,14 @@ class DatabaseMigration {
      }

      if (databaseSchemaVersion < 14 && isBitcoin === true) {
-        logger.warn(`'hashrates' table has been truncated. Re-indexing from scratch.`);
+        this.uniqueLog(logger.notice, this.hashratesTruncatedMessage);
        await this.$executeQuery('TRUNCATE hashrates;'); // Need to re-index
        await this.$executeQuery('ALTER TABLE `hashrates` DROP FOREIGN KEY `hashrates_ibfk_1`');
        await this.$executeQuery('ALTER TABLE `hashrates` MODIFY `pool_id` SMALLINT UNSIGNED NOT NULL DEFAULT "0"');
      }

      if (databaseSchemaVersion < 16 && isBitcoin === true) {
-        logger.warn(`'hashrates' table has been truncated. Re-indexing from scratch.`);
+        this.uniqueLog(logger.notice, this.hashratesTruncatedMessage);
        await this.$executeQuery('TRUNCATE hashrates;'); // Need to re-index because we changed timestamps
      }

@@ -282,6 +311,8 @@ class DatabaseMigration {
    for (const query of this.getMigrationQueriesFromVersion(version)) {
      transactionQueries.push(query);
    }
+
+    logger.notice(`MIGRATIONS: ${version > 0 ? 'Upgrading' : 'Initializing'} database schema version number to ${DatabaseMigration.currentVersion}`);
    transactionQueries.push(this.getUpdateToLatestSchemaVersionQuery());

    try {
@@ -305,6 +336,9 @@ class DatabaseMigration {

    if (version < 1) {
      if (config.MEMPOOL.NETWORK !== 'liquid' && config.MEMPOOL.NETWORK !== 'liquidtestnet') {
+        if (version > 0) {
+          logger.notice(`MIGRATIONS: Migrating (shifting) statistics table data`);
+        }
        queries.push(this.getShiftStatisticsQuery());
      }
    }
@@ -257,7 +257,7 @@ class Mining {
      }
      await HashratesRepository.$setLatestRun('last_weekly_hashrates_indexing', new Date().getUTCDate());
      if (newlyIndexed > 0) {
-        logger.info(`Indexed ${newlyIndexed} pools weekly hashrate`);
+        logger.notice(`Weekly mining pools hashrates indexing completed: indexed ${newlyIndexed}`);
      }
      loadingIndicators.setProgress('weekly-hashrate-indexing', 100);
    } catch (e) {
@@ -368,7 +368,7 @@ class Mining {

      await HashratesRepository.$setLatestRun('last_hashrates_indexing', new Date().getUTCDate());
      if (newlyIndexed > 0) {
-        logger.info(`Indexed ${newlyIndexed} day of network hashrate`);
+        logger.notice(`Daily network hashrate indexing completed: indexed ${newlyIndexed} days`);
      }
      loadingIndicators.setProgress('daily-hashrate-indexing', 100);
    } catch (e) {
@@ -13,6 +13,8 @@ import * as https from 'https';
class PoolsUpdater {
  lastRun: number = 0;
  currentSha: any = undefined;
+  poolsUrl: string = 'https://raw.githubusercontent.com/mempool/mining-pools/master/pools.json';
+  treeUrl: string = 'https://api.github.com/repos/mempool/mining-pools/git/trees/master';

  constructor() {
  }
@@ -32,11 +34,10 @@ class PoolsUpdater {

    this.lastRun = now;

-    logger.info('Updating latest mining pools from Github');
    if (config.SOCKS5PROXY.ENABLED) {
-      logger.info('List of public pools will be queried over the Tor network');
+      logger.info(`Updating latest mining pools from ${this.poolsUrl} over the Tor network`);
    } else {
-      logger.info('List of public pools will be queried over clearnet');
+      logger.info(`Updating latest mining pools from ${this.poolsUrl} over clearnet`);
    }

    try {
@@ -54,8 +55,12 @@ class PoolsUpdater {
        return;
      }

-      logger.warn('Pools.json is outdated, fetch latest from github');
-      const poolsJson = await this.query('https://raw.githubusercontent.com/mempool/mining-pools/master/pools.json');
+      if (this.currentSha === undefined) {
+        logger.info(`Downloading pools.json for the first time from ${this.poolsUrl}`);
+      } else {
+        logger.warn(`Pools.json is outdated, fetch latest from ${this.poolsUrl}`);
+      }
+      const poolsJson = await this.query(this.poolsUrl);
      if (poolsJson === undefined) {
        return;
      }
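The PoolsUpdater hunks above converge on one pattern: keep the remote URLs in class fields and reference those fields from both the log text and the actual request, so the two cannot drift apart. A rough sketch under that assumption (UpdaterSketch and its fetchJson parameter are made-up names for illustration, not the real class):

class UpdaterSketch {
  poolsUrl = 'https://raw.githubusercontent.com/mempool/mining-pools/master/pools.json';
  currentSha: string | undefined = undefined;

  async update(fetchJson: (url: string) => Promise<unknown>): Promise<void> {
    if (this.currentSha === undefined) {
      console.info(`Downloading pools.json for the first time from ${this.poolsUrl}`);
    } else {
      console.warn(`Pools.json is outdated, fetch latest from ${this.poolsUrl}`);
    }
    const poolsJson = await fetchJson(this.poolsUrl); // the same field drives the request
    if (poolsJson === undefined) {
      return;
    }
    // ...compare against currentSha and persist, as the real class does
  }
}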
@@ -101,7 +106,7 @@ class PoolsUpdater {
   * Fetch our latest pools.json sha from github
   */
  private async fetchPoolsSha(): Promise<string | undefined> {
-    const response = await this.query('https://api.github.com/repos/mempool/mining-pools/git/trees/master');
+    const response = await this.query(this.treeUrl);

    if (response !== undefined) {
      for (const file of response['tree']) {
@@ -111,7 +116,7 @@
      }
    }

-    logger.err('Cannot to find latest pools.json sha from github api response');
+    logger.err(`Cannot find "pools.json" in git tree (${this.treeUrl})`);
    return undefined;
  }

@@ -159,7 +164,7 @@

        const data: AxiosResponse = await axios.get(path, axiosOptions);
        if (data.statusText === 'error' || !data.data) {
-          throw new Error(`Could not fetch data from Github, Error: ${data.status}`);
+          throw new Error(`Could not fetch data from ${path}, Error: ${data.status}`);
        }
        return data.data;
      } catch (e) {
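For completeness, a self-contained sketch of the query pattern in this last hunk, where the thrown error now names the URL that failed; the fetchJson name and timeout value are assumptions, and the real method's axiosOptions and surrounding error handling are omitted:

import axios, { AxiosResponse } from 'axios';

// Fetch JSON over HTTP and surface the failing URL in the error message.
async function fetchJson(path: string): Promise<unknown> {
  const data: AxiosResponse = await axios.get(path, { timeout: 30000 });
  if (data.statusText === 'error' || !data.data) {
    throw new Error(`Could not fetch data from ${path}, Error: ${data.status}`);
  }
  return data.data;
}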