Merge pull request #1163 from nymkappa/feature/pools-migration
Mining dashboard (1/2) - Import mining pools into the database - Increment db schema to 3
This commit is contained in:
		
						commit
						6ad0344ea5
					
				@ -14,7 +14,9 @@
 | 
			
		||||
    "MEMPOOL_BLOCKS_AMOUNT": 8,
 | 
			
		||||
    "PRICE_FEED_UPDATE_INTERVAL": 3600,
 | 
			
		||||
    "USE_SECOND_NODE_FOR_MINFEE": false,
 | 
			
		||||
    "EXTERNAL_ASSETS": []
 | 
			
		||||
    "EXTERNAL_ASSETS": [
 | 
			
		||||
      "https://mempool.space/resources/pools.json"
 | 
			
		||||
    ]
 | 
			
		||||
  },
 | 
			
		||||
  "CORE_RPC": {
 | 
			
		||||
    "HOST": "127.0.0.1",
 | 
			
		||||
 | 
			
		||||
@ -3,10 +3,10 @@ import config from '../config';
 | 
			
		||||
import { DB } from '../database';
 | 
			
		||||
import logger from '../logger';
 | 
			
		||||
 | 
			
		||||
const sleep = (ms: number) => new Promise( res => setTimeout(res, ms));
 | 
			
		||||
const sleep = (ms: number) => new Promise(res => setTimeout(res, ms));
 | 
			
		||||
 | 
			
		||||
class DatabaseMigration {
 | 
			
		||||
  private static currentVersion = 2;
 | 
			
		||||
  private static currentVersion = 3;
 | 
			
		||||
  private queryTimeout = 120000;
 | 
			
		||||
  private statisticsAddedIndexed = false;
 | 
			
		||||
 | 
			
		||||
@ -83,6 +83,9 @@ class DatabaseMigration {
 | 
			
		||||
      if (databaseSchemaVersion < 2 && this.statisticsAddedIndexed === false) {
 | 
			
		||||
        await this.$executeQuery(connection, `CREATE INDEX added ON statistics (added);`);
 | 
			
		||||
      }
 | 
			
		||||
      if (databaseSchemaVersion < 3) {
 | 
			
		||||
        await this.$executeQuery(connection, this.getCreatePoolsTableQuery(), await this.$checkIfTableExists('pools'));
 | 
			
		||||
      }
 | 
			
		||||
      connection.release();
 | 
			
		||||
    } catch (e) {
 | 
			
		||||
      connection.release();
 | 
			
		||||
@ -335,6 +338,17 @@ class DatabaseMigration {
 | 
			
		||||
      final_tx int(11) NOT NULL
 | 
			
		||||
    ) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  /**
   * Build the DDL for the `pools` table (schema version 3).
   * `regexes` / `addresses` hold JSON-stringified arrays (see PoolsParser),
   * hence TEXT columns; utf8mb4 so pool names with non-BMP characters store safely.
   *
   * @returns idempotent CREATE TABLE statement (IF NOT EXISTS)
   */
  private getCreatePoolsTableQuery(): string {
    return `CREATE TABLE IF NOT EXISTS pools (
      id int(11) NOT NULL AUTO_INCREMENT,
      name varchar(50) NOT NULL,
      link varchar(255) NOT NULL,
      addresses text NOT NULL,
      regexes text NOT NULL,
      PRIMARY KEY (id)
    ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;`;
  }
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
export default new DatabaseMigration();
 | 
			
		||||
export default new DatabaseMigration();
 | 
			
		||||
							
								
								
									
										173
									
								
								backend/src/api/pools-parser.ts
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										173
									
								
								backend/src/api/pools-parser.ts
									
									
									
									
									
										Normal file
									
								
							@ -0,0 +1,173 @@
 | 
			
		||||
import { readFileSync } from 'fs';
 | 
			
		||||
import { DB } from '../database';
 | 
			
		||||
import logger from '../logger';
 | 
			
		||||
import config from '../config';
 | 
			
		||||
 | 
			
		||||
/**
 * One mining-pool record as parsed from pools.json and stored in the
 * `pools` table. In pools.json an entry carries either a coinbase-tag
 * regex or a payout address; entries are later consolidated by name.
 */
interface Pool {
  name: string;       // unique pool display name (consolidation key)
  link: string;       // pool homepage URL
  regexes: string[];  // coinbase-tag patterns identifying the pool
  addresses: string[]; // known payout addresses for the pool
}
 | 
			
		||||
 | 
			
		||||
class PoolsParser {
 | 
			
		||||
  /**
 | 
			
		||||
   * Parse the pools.json file, consolidate the data and dump it into the database
 | 
			
		||||
   */
 | 
			
		||||
  public async migratePoolsJson() {
 | 
			
		||||
    if (config.MEMPOOL.NETWORK !== 'mainnet') {
 | 
			
		||||
      return;
 | 
			
		||||
    }
 | 
			
		||||
 | 
			
		||||
    logger.debug('Importing pools.json to the database, open ./pools.json');
 | 
			
		||||
 | 
			
		||||
    let poolsJson: object = {};
 | 
			
		||||
    try {
 | 
			
		||||
      const fileContent: string = readFileSync('./pools.json', 'utf8');
 | 
			
		||||
      poolsJson = JSON.parse(fileContent);
 | 
			
		||||
    } catch (e) {
 | 
			
		||||
      logger.err('Unable to open ./pools.json, does the file exist?');
 | 
			
		||||
      await this.insertUnknownPool();
 | 
			
		||||
      return;
 | 
			
		||||
    }
 | 
			
		||||
 | 
			
		||||
    // First we save every entries without paying attention to pool duplication
 | 
			
		||||
    const poolsDuplicated: Pool[] = [];
 | 
			
		||||
 | 
			
		||||
    logger.debug('Parse coinbase_tags');
 | 
			
		||||
    const coinbaseTags = Object.entries(poolsJson['coinbase_tags']);
 | 
			
		||||
    for (let i = 0; i < coinbaseTags.length; ++i) {
 | 
			
		||||
      poolsDuplicated.push({
 | 
			
		||||
        'name': (<Pool>coinbaseTags[i][1]).name,
 | 
			
		||||
        'link': (<Pool>coinbaseTags[i][1]).link,
 | 
			
		||||
        'regexes': [coinbaseTags[i][0]],
 | 
			
		||||
        'addresses': [],
 | 
			
		||||
      });
 | 
			
		||||
    }
 | 
			
		||||
    logger.debug('Parse payout_addresses');
 | 
			
		||||
    const addressesTags = Object.entries(poolsJson['payout_addresses']);
 | 
			
		||||
    for (let i = 0; i < addressesTags.length; ++i) {
 | 
			
		||||
      poolsDuplicated.push({
 | 
			
		||||
        'name': (<Pool>addressesTags[i][1]).name,
 | 
			
		||||
        'link': (<Pool>addressesTags[i][1]).link,
 | 
			
		||||
        'regexes': [],
 | 
			
		||||
        'addresses': [addressesTags[i][0]],
 | 
			
		||||
      });
 | 
			
		||||
    }
 | 
			
		||||
 | 
			
		||||
    // Then, we find unique mining pool names
 | 
			
		||||
    logger.debug('Identify unique mining pools');
 | 
			
		||||
    const poolNames: string[] = [];
 | 
			
		||||
    for (let i = 0; i < poolsDuplicated.length; ++i) {
 | 
			
		||||
      if (poolNames.indexOf(poolsDuplicated[i].name) === -1) {
 | 
			
		||||
        poolNames.push(poolsDuplicated[i].name);
 | 
			
		||||
      }
 | 
			
		||||
    }
 | 
			
		||||
    logger.debug(`Found ${poolNames.length} unique mining pools`);
 | 
			
		||||
 | 
			
		||||
    // Get existing pools from the db
 | 
			
		||||
    const connection = await DB.pool.getConnection();
 | 
			
		||||
    let existingPools;
 | 
			
		||||
    try {
 | 
			
		||||
      [existingPools] = await connection.query<any>({ sql: 'SELECT * FROM pools;', timeout: 120000 });
 | 
			
		||||
    } catch (e) {
 | 
			
		||||
      logger.err('Unable to get existing pools from the database, skipping pools.json import');
 | 
			
		||||
      connection.release();
 | 
			
		||||
      return;
 | 
			
		||||
    }
 | 
			
		||||
 | 
			
		||||
    // Finally, we generate the final consolidated pools data
 | 
			
		||||
    const finalPoolDataAdd: Pool[] = [];
 | 
			
		||||
    const finalPoolDataUpdate: Pool[] = [];
 | 
			
		||||
    for (let i = 0; i < poolNames.length; ++i) {
 | 
			
		||||
      let allAddresses: string[] = [];
 | 
			
		||||
      let allRegexes: string[] = [];
 | 
			
		||||
      const match = poolsDuplicated.filter((pool: Pool) => pool.name === poolNames[i]);
 | 
			
		||||
 | 
			
		||||
      for (let y = 0; y < match.length; ++y) {
 | 
			
		||||
        allAddresses = allAddresses.concat(match[y].addresses);
 | 
			
		||||
        allRegexes = allRegexes.concat(match[y].regexes);
 | 
			
		||||
      }
 | 
			
		||||
 | 
			
		||||
      const finalPoolName = poolNames[i].replace(`'`, `''`); // To support single quote in names when doing db queries
 | 
			
		||||
 | 
			
		||||
      if (existingPools.find((pool) => pool.name === poolNames[i]) !== undefined) {
 | 
			
		||||
        logger.debug(`Update '${finalPoolName}' mining pool`);
 | 
			
		||||
        finalPoolDataUpdate.push({
 | 
			
		||||
          'name': finalPoolName,
 | 
			
		||||
          'link': match[0].link,
 | 
			
		||||
          'regexes': allRegexes,
 | 
			
		||||
          'addresses': allAddresses,
 | 
			
		||||
        });
 | 
			
		||||
      } else {
 | 
			
		||||
        logger.debug(`Add '${finalPoolName}' mining pool`);
 | 
			
		||||
        finalPoolDataAdd.push({
 | 
			
		||||
          'name': finalPoolName,
 | 
			
		||||
          'link': match[0].link,
 | 
			
		||||
          'regexes': allRegexes,
 | 
			
		||||
          'addresses': allAddresses,
 | 
			
		||||
        });
 | 
			
		||||
      }
 | 
			
		||||
    }
 | 
			
		||||
 | 
			
		||||
    logger.debug(`Update pools table now`);
 | 
			
		||||
 | 
			
		||||
    // Add new mining pools into the database
 | 
			
		||||
    let queryAdd: string = 'INSERT INTO pools(name, link, regexes, addresses) VALUES ';
 | 
			
		||||
    for (let i = 0; i < finalPoolDataAdd.length; ++i) {
 | 
			
		||||
      queryAdd += `('${finalPoolDataAdd[i].name}', '${finalPoolDataAdd[i].link}',
 | 
			
		||||
      '${JSON.stringify(finalPoolDataAdd[i].regexes)}', '${JSON.stringify(finalPoolDataAdd[i].addresses)}'),`;
 | 
			
		||||
    }
 | 
			
		||||
    queryAdd = queryAdd.slice(0, -1) + ';';
 | 
			
		||||
 | 
			
		||||
    // Add new mining pools into the database
 | 
			
		||||
    const updateQueries: string[] = [];
 | 
			
		||||
    for (let i = 0; i < finalPoolDataUpdate.length; ++i) {
 | 
			
		||||
      updateQueries.push(`
 | 
			
		||||
        UPDATE pools
 | 
			
		||||
        SET name='${finalPoolDataUpdate[i].name}', link='${finalPoolDataUpdate[i].link}',
 | 
			
		||||
        regexes='${JSON.stringify(finalPoolDataUpdate[i].regexes)}', addresses='${JSON.stringify(finalPoolDataUpdate[i].addresses)}'
 | 
			
		||||
        WHERE name='${finalPoolDataUpdate[i].name}'
 | 
			
		||||
      ;`);
 | 
			
		||||
    }
 | 
			
		||||
 | 
			
		||||
    try {
 | 
			
		||||
      if (finalPoolDataAdd.length > 0) {
 | 
			
		||||
        await connection.query<any>({ sql: queryAdd, timeout: 120000 });
 | 
			
		||||
      }
 | 
			
		||||
      for (const query of updateQueries) {
 | 
			
		||||
        await connection.query<any>({ sql: query, timeout: 120000 });
 | 
			
		||||
      }
 | 
			
		||||
      await this.insertUnknownPool();
 | 
			
		||||
      connection.release();
 | 
			
		||||
      logger.info('Mining pools.json import completed');
 | 
			
		||||
    } catch (e) {
 | 
			
		||||
      connection.release();
 | 
			
		||||
      logger.err(`Unable to import pools in the database!`);
 | 
			
		||||
      throw e;
 | 
			
		||||
    }
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  /**
 | 
			
		||||
   * Manually add the 'unknown pool'
 | 
			
		||||
   */
 | 
			
		||||
  private async insertUnknownPool() {
 | 
			
		||||
    const connection = await DB.pool.getConnection();
 | 
			
		||||
    try {
 | 
			
		||||
      const [rows]: any[] = await connection.query({ sql: 'SELECT name from pools where name="Unknown"', timeout: 120000 });
 | 
			
		||||
      if (rows.length === 0) {
 | 
			
		||||
        logger.debug('Manually inserting "Unknown" mining pool into the databse');
 | 
			
		||||
        await connection.query({
 | 
			
		||||
          sql: `INSERT INTO pools(name, link, regexes, addresses)
 | 
			
		||||
          VALUES("Unknown", "https://learnmeabitcoin.com/technical/coinbase-transaction", "[]", "[]");
 | 
			
		||||
        `});
 | 
			
		||||
      }
 | 
			
		||||
    } catch (e) {
 | 
			
		||||
      logger.err('Unable to insert "Unknown" mining pool');
 | 
			
		||||
    }
 | 
			
		||||
 | 
			
		||||
    connection.release();
 | 
			
		||||
  }
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
export default new PoolsParser();
 | 
			
		||||
@ -79,7 +79,9 @@ const defaults: IConfig = {
 | 
			
		||||
    'MEMPOOL_BLOCKS_AMOUNT': 8,
 | 
			
		||||
    'PRICE_FEED_UPDATE_INTERVAL': 3600,
 | 
			
		||||
    'USE_SECOND_NODE_FOR_MINFEE': false,
 | 
			
		||||
    'EXTERNAL_ASSETS': [],
 | 
			
		||||
    'EXTERNAL_ASSETS': [
 | 
			
		||||
      'https://mempool.space/resources/pools.json'
 | 
			
		||||
    ]
 | 
			
		||||
  },
 | 
			
		||||
  'ESPLORA': {
 | 
			
		||||
    'REST_API_URL': 'http://127.0.0.1:3000',
 | 
			
		||||
 | 
			
		||||
@ -22,6 +22,7 @@ import loadingIndicators from './api/loading-indicators';
 | 
			
		||||
import mempool from './api/mempool';
 | 
			
		||||
import elementsParser from './api/liquid/elements-parser';
 | 
			
		||||
import databaseMigration from './api/database-migration';
 | 
			
		||||
import poolsParser from './api/pools-parser';
 | 
			
		||||
import syncAssets from './sync-assets';
 | 
			
		||||
import icons from './api/liquid/icons';
 | 
			
		||||
import { Common } from './api/common';
 | 
			
		||||
@ -88,6 +89,7 @@ class Server {
 | 
			
		||||
      await checkDbConnection();
 | 
			
		||||
      try {
 | 
			
		||||
        await databaseMigration.$initializeOrMigrateDatabase();
 | 
			
		||||
        await poolsParser.migratePoolsJson();
 | 
			
		||||
      } catch (e) {
 | 
			
		||||
        throw new Error(e instanceof Error ? e.message : 'Error');
 | 
			
		||||
      }
 | 
			
		||||
 | 
			
		||||
		Loading…
	
	
			
			x
			
			
		
	
		Reference in New Issue
	
	Block a user