diff --git a/backend/src/api/pools-parser.ts b/backend/src/api/pools-parser.ts index 15a4fe7be..29d916376 100644 --- a/backend/src/api/pools-parser.ts +++ b/backend/src/api/pools-parser.ts @@ -1,6 +1,7 @@ import DB from '../database'; import logger from '../logger'; import config from '../config'; +import BlocksRepository from '../repositories/BlocksRepository'; interface Pool { name: string; @@ -32,7 +33,6 @@ class PoolsParser { // First we save every entries without paying attention to pool duplication const poolsDuplicated: Pool[] = []; - logger.debug('Parse coinbase_tags'); const coinbaseTags = Object.entries(poolsJson['coinbase_tags']); for (let i = 0; i < coinbaseTags.length; ++i) { poolsDuplicated.push({ @@ -43,7 +43,6 @@ class PoolsParser { 'slug': '' }); } - logger.debug('Parse payout_addresses'); const addressesTags = Object.entries(poolsJson['payout_addresses']); for (let i = 0; i < addressesTags.length; ++i) { poolsDuplicated.push({ @@ -56,7 +55,6 @@ class PoolsParser { } // Then, we find unique mining pool names - logger.debug('Identify unique mining pools'); const poolNames: string[] = []; for (let i = 0; i < poolsDuplicated.length; ++i) { if (poolNames.indexOf(poolsDuplicated[i].name) === -1) { @@ -119,8 +117,15 @@ class PoolsParser { 'slug': slug }; - if (existingPools.find((pool) => pool.name === poolNames[i]) !== undefined) { - finalPoolDataUpdate.push(poolObj); + const existingPool = existingPools.find((pool) => pool.name === poolNames[i]); + if (existingPool !== undefined) { + // Check if any data was actually updated + const equals = (a, b) => + a.length === b.length && + a.every((v, i) => v === b[i]); + if (!equals(JSON.parse(existingPool.addresses), poolObj.addresses) || !equals(JSON.parse(existingPool.regexes), poolObj.regexes)) { + finalPoolDataUpdate.push(poolObj); + } } else { logger.debug(`Add '${finalPoolName}' mining pool`); finalPoolDataAdd.push(poolObj); @@ -140,40 +145,51 @@ class PoolsParser { return; } - logger.debug(`Update pools 
table now`); + if (finalPoolDataAdd.length > 0 || finalPoolDataUpdate.length > 0) { + logger.debug(`Update pools table now`); - // Add new mining pools into the database - let queryAdd: string = 'INSERT INTO pools(name, link, regexes, addresses, slug) VALUES '; - for (let i = 0; i < finalPoolDataAdd.length; ++i) { - queryAdd += `('${finalPoolDataAdd[i].name}', '${finalPoolDataAdd[i].link}', - '${JSON.stringify(finalPoolDataAdd[i].regexes)}', '${JSON.stringify(finalPoolDataAdd[i].addresses)}', - ${JSON.stringify(finalPoolDataAdd[i].slug)}),`; - } - queryAdd = queryAdd.slice(0, -1) + ';'; + // Add new mining pools into the database + let queryAdd: string = 'INSERT INTO pools(name, link, regexes, addresses, slug) VALUES '; + for (let i = 0; i < finalPoolDataAdd.length; ++i) { + queryAdd += `('${finalPoolDataAdd[i].name}', '${finalPoolDataAdd[i].link}', + '${JSON.stringify(finalPoolDataAdd[i].regexes)}', '${JSON.stringify(finalPoolDataAdd[i].addresses)}', + ${JSON.stringify(finalPoolDataAdd[i].slug)}),`; + } + queryAdd = queryAdd.slice(0, -1) + ';'; - // Updated existing mining pools in the database - const updateQueries: string[] = []; - for (let i = 0; i < finalPoolDataUpdate.length; ++i) { - updateQueries.push(` - UPDATE pools - SET name='${finalPoolDataUpdate[i].name}', link='${finalPoolDataUpdate[i].link}', - regexes='${JSON.stringify(finalPoolDataUpdate[i].regexes)}', addresses='${JSON.stringify(finalPoolDataUpdate[i].addresses)}', - slug='${finalPoolDataUpdate[i].slug}' - WHERE name='${finalPoolDataUpdate[i].name}' - ;`); + // Updated existing mining pools in the database + const updateQueries: string[] = []; + for (let i = 0; i < finalPoolDataUpdate.length; ++i) { + updateQueries.push(` + UPDATE pools + SET name='${finalPoolDataUpdate[i].name}', link='${finalPoolDataUpdate[i].link}', + regexes='${JSON.stringify(finalPoolDataUpdate[i].regexes)}', addresses='${JSON.stringify(finalPoolDataUpdate[i].addresses)}', + slug='${finalPoolDataUpdate[i].slug}' + WHERE 
name='${finalPoolDataUpdate[i].name}' + ;`); + } + + try { + await this.$deleteBlocksToReindex(finalPoolDataUpdate); + + if (finalPoolDataAdd.length > 0) { + await DB.query({ sql: queryAdd, timeout: 120000 }); + } + for (const query of updateQueries) { + await DB.query({ sql: query, timeout: 120000 }); + } + await this.insertUnknownPool(); + logger.info('Mining pools.json import completed'); + } catch (e) { + logger.err(`Cannot import pools in the database`); + throw e; + } + } try { - if (finalPoolDataAdd.length > 0) { - await DB.query({ sql: queryAdd, timeout: 120000 }); - } - for (const query of updateQueries) { - await DB.query({ sql: query, timeout: 120000 }); - } await this.insertUnknownPool(); - logger.info('Mining pools.json import completed'); } catch (e) { - logger.err(`Cannot import pools in the database`); + logger.err(`Cannot insert unknown pool in the database`); throw e; } } @@ -201,6 +217,32 @@ class PoolsParser { logger.err('Unable to insert "Unknown" mining pool'); } } + + /** + * Delete blocks which need to be re-indexed + */ + private async $deleteBlocksToReindex(finalPoolDataUpdate: any[]) { + const blockCount = await BlocksRepository.$blockCount(null, null); + if (blockCount === 0) { + return; + } + + for (const updatedPool of finalPoolDataUpdate) { + const [pool]: any[] = await DB.query(`SELECT id, name from pools where slug = "${updatedPool.slug}"`); + if (pool.length > 0) { + logger.notice(`Deleting blocks from ${pool[0].name} mining pool for future re-indexing`); + await DB.query(`DELETE FROM blocks WHERE pool_id = ${pool[0].id}`); + } + } + + // Ignore early days of Bitcoin as there were no mining pools yet + logger.notice('Deleting blocks with unknown mining pool from height 130635 for future re-indexing'); + const [unknownPool] = await DB.query(`SELECT id from pools where slug = "unknown"`); + await DB.query(`DELETE FROM blocks WHERE pool_id = ${unknownPool[0].id} AND height > 130635`); + + logger.notice('Truncating hashrates for future 
re-indexing'); + await DB.query(`DELETE FROM hashrates`); + } } export default new PoolsParser();