Merge pull request #3125 from mempool/nymkappa/feature/update-mining-indexer-log
Update some mining indexer logs
commit aae61bcb45
@@ -1037,7 +1037,7 @@ class DatabaseMigration {
     await this.$executeQuery('DELETE FROM `pools`');
     await this.$executeQuery('ALTER TABLE pools AUTO_INCREMENT = 1');
     await this.$executeQuery(`UPDATE state SET string = NULL WHERE name = 'pools_json_sha'`);
-}
+  }

   private async $convertCompactCpfpTables(): Promise<void> {
     try {

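The hunks that follow mostly thread a tag argument through existing log calls: logger.tags.mining throughout the mining pipeline, and logger.tags.ln in the Lightning stats importer. A minimal sketch of the tagged-logger shape those calls assume; only the two tags and the method names are confirmed by this diff, the rest is invented for illustration:

  type LogTag = 'mining' | 'ln';

  class Logger {
    // Tags let log consumers filter output by subsystem.
    public tags: Record<LogTag, LogTag> = { mining: 'mining', ln: 'ln' };

    public debug(msg: string, tag?: LogTag): void { this.print('DEBUG', msg, tag); }
    public info(msg: string, tag?: LogTag): void { this.print('INFO', msg, tag); }
    public notice(msg: string, tag?: LogTag): void { this.print('NOTICE', msg, tag); }
    public warn(msg: string, tag?: LogTag): void { this.print('WARN', msg, tag); }
    public err(msg: string, tag?: LogTag): void { this.print('ERR', msg, tag); }

    private print(level: string, msg: string, tag?: LogTag): void {
      const prefix = tag ? `[${tag}] ` : '';
      console.log(`${level}: ${prefix}${msg}`);
    }
  }

  const logger = new Logger();
  logger.debug('Indexing weekly mining pool hashrate', logger.tags.mining);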
@@ -117,7 +117,7 @@ class Mining {
       poolsStatistics['lastEstimatedHashrate'] = await bitcoinClient.getNetworkHashPs(totalBlock24h);
     } catch (e) {
       poolsStatistics['lastEstimatedHashrate'] = 0;
-      logger.debug('Bitcoin Core is not available, using zeroed value for current hashrate');
+      logger.debug('Bitcoin Core is not available, using zeroed value for current hashrate', logger.tags.mining);
     }

     return poolsStatistics;
@@ -145,7 +145,7 @@ class Mining {
     try {
       currentEstimatedHashrate = await bitcoinClient.getNetworkHashPs(totalBlock24h);
     } catch (e) {
-      logger.debug('Bitcoin Core is not available, using zeroed value for current hashrate');
+      logger.debug('Bitcoin Core is not available, using zeroed value for current hashrate', logger.tags.mining);
     }

     return {
@@ -208,7 +208,7 @@ class Mining {
       const startedAt = new Date().getTime() / 1000;
       let timer = new Date().getTime() / 1000;

-      logger.debug(`Indexing weekly mining pool hashrate`);
+      logger.debug(`Indexing weekly mining pool hashrate`, logger.tags.mining);
       loadingIndicators.setProgress('weekly-hashrate-indexing', 0);

       while (toTimestamp > genesisTimestamp && toTimestamp > oldestConsecutiveBlockTimestamp) {
@@ -245,7 +245,7 @@ class Mining {
             });
           }

-          newlyIndexed += hashrates.length;
+          newlyIndexed += hashrates.length / Math.max(1, pools.length);
           await HashratesRepository.$saveHashrates(hashrates);
           hashrates.length = 0;
         }
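A note on the counter change just above: the weekly indexer pushes one hashrate entry per pool for every week it indexes, so hashrates.length counts pool entries rather than weeks. Dividing by pools.length, clamped to 1 to avoid dividing by zero, turns the counter into weeks, which is what the reworded "indexed ${newlyIndexed} weeks" message below reports. Rough arithmetic with invented numbers:

  const poolEntries = 250;                            // hypothetical batch of hashrate rows
  const poolCount = 10;                               // hypothetical pools.length
  const weeks = poolEntries / Math.max(1, poolCount); // => 25 weeks indexed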
@@ -256,7 +256,7 @@ class Mining {
           const weeksPerSeconds = Math.max(1, Math.round(indexedThisRun / elapsedSeconds));
           const progress = Math.round(totalIndexed / totalWeekIndexed * 10000) / 100;
           const formattedDate = new Date(fromTimestamp).toUTCString();
-          logger.debug(`Getting weekly pool hashrate for ${formattedDate} | ~${weeksPerSeconds.toFixed(2)} weeks/sec | total: ~${totalIndexed}/${Math.round(totalWeekIndexed)} (${progress}%) | elapsed: ${runningFor} seconds`);
+          logger.debug(`Getting weekly pool hashrate for ${formattedDate} | ~${weeksPerSeconds.toFixed(2)} weeks/sec | total: ~${totalIndexed}/${Math.round(totalWeekIndexed)} (${progress}%) | elapsed: ${runningFor} seconds`, logger.tags.mining);
           timer = new Date().getTime() / 1000;
           indexedThisRun = 0;
           loadingIndicators.setProgress('weekly-hashrate-indexing', progress, false);
@@ -268,14 +268,14 @@ class Mining {
       }
       this.lastWeeklyHashrateIndexingDate = new Date().getUTCDate();
       if (newlyIndexed > 0) {
-        logger.notice(`Weekly mining pools hashrates indexing completed: indexed ${newlyIndexed}`, logger.tags.mining);
+        logger.info(`Weekly mining pools hashrates indexing completed: indexed ${newlyIndexed} weeks`, logger.tags.mining);
       } else {
-        logger.debug(`Weekly mining pools hashrates indexing completed: indexed ${newlyIndexed}`, logger.tags.mining);
+        logger.debug(`Weekly mining pools hashrates indexing completed: indexed ${newlyIndexed} weeks`, logger.tags.mining);
       }
       loadingIndicators.setProgress('weekly-hashrate-indexing', 100);
     } catch (e) {
       loadingIndicators.setProgress('weekly-hashrate-indexing', 100);
-      logger.err(`Weekly mining pools hashrates indexing failed. Trying again in 10 seconds. Reason: ${(e instanceof Error ? e.message : e)}`);
+      logger.err(`Weekly mining pools hashrates indexing failed. Trying again in 10 seconds. Reason: ${(e instanceof Error ? e.message : e)}`, logger.tags.mining);
       throw e;
     }
   }
@@ -308,7 +308,7 @@ class Mining {
       const startedAt = new Date().getTime() / 1000;
       let timer = new Date().getTime() / 1000;

-      logger.debug(`Indexing daily network hashrate`);
+      logger.debug(`Indexing daily network hashrate`, logger.tags.mining);
       loadingIndicators.setProgress('daily-hashrate-indexing', 0);

       while (toTimestamp > genesisTimestamp && toTimestamp > oldestConsecutiveBlockTimestamp) {
@@ -346,7 +346,7 @@ class Mining {
           const daysPerSeconds = Math.max(1, Math.round(indexedThisRun / elapsedSeconds));
           const progress = Math.round(totalIndexed / totalDayIndexed * 10000) / 100;
           const formattedDate = new Date(fromTimestamp).toUTCString();
-          logger.debug(`Getting network daily hashrate for ${formattedDate} | ~${daysPerSeconds.toFixed(2)} days/sec | total: ~${totalIndexed}/${Math.round(totalDayIndexed)} (${progress}%) | elapsed: ${runningFor} seconds`);
+          logger.debug(`Getting network daily hashrate for ${formattedDate} | ~${daysPerSeconds.toFixed(2)} days/sec | total: ~${totalIndexed}/${Math.round(totalDayIndexed)} (${progress}%) | elapsed: ${runningFor} seconds`, logger.tags.mining);
           timer = new Date().getTime() / 1000;
           indexedThisRun = 0;
           loadingIndicators.setProgress('daily-hashrate-indexing', progress);
@@ -373,14 +373,14 @@ class Mining {

       this.lastHashrateIndexingDate = new Date().getUTCDate();
       if (newlyIndexed > 0) {
-        logger.notice(`Daily network hashrate indexing completed: indexed ${newlyIndexed} days`, logger.tags.mining);
+        logger.info(`Daily network hashrate indexing completed: indexed ${newlyIndexed} days`, logger.tags.mining);
       } else {
         logger.debug(`Daily network hashrate indexing completed: indexed ${newlyIndexed} days`, logger.tags.mining);
       }
       loadingIndicators.setProgress('daily-hashrate-indexing', 100);
     } catch (e) {
       loadingIndicators.setProgress('daily-hashrate-indexing', 100);
-      logger.err(`Daily network hashrate indexing failed. Trying again in 10 seconds. Reason: ${(e instanceof Error ? e.message : e)}`, logger.tags.mining);
+      logger.err(`Daily network hashrate indexing failed. Trying again later. Reason: ${(e instanceof Error ? e.message : e)}`, logger.tags.mining);
       throw e;
     }
   }
@@ -446,13 +446,13 @@ class Mining {
       const elapsedSeconds = Math.max(1, Math.round((new Date().getTime() / 1000) - timer));
       if (elapsedSeconds > 5) {
         const progress = Math.round(totalBlockChecked / blocks.length * 100);
-        logger.info(`Indexing difficulty adjustment at block #${block.height} | Progress: ${progress}%`);
+        logger.info(`Indexing difficulty adjustment at block #${block.height} | Progress: ${progress}%`, logger.tags.mining);
         timer = new Date().getTime() / 1000;
       }
     }

     if (totalIndexed > 0) {
-      logger.notice(`Indexed ${totalIndexed} difficulty adjustments`, logger.tags.mining);
+      logger.info(`Indexed ${totalIndexed} difficulty adjustments`, logger.tags.mining);
     } else {
       logger.debug(`Indexed ${totalIndexed} difficulty adjustments`, logger.tags.mining);
     }
@@ -499,7 +499,7 @@ class Mining {
           if (blocksWithoutPrices.length > 200000) {
             logStr += ` | Progress ${Math.round(totalInserted / blocksWithoutPrices.length * 100)}%`;
           }
-          logger.debug(logStr);
+          logger.debug(logStr, logger.tags.mining);
           await BlocksRepository.$saveBlockPrices(blocksPrices);
           blocksPrices.length = 0;
         }
@@ -511,7 +511,7 @@ class Mining {
         if (blocksWithoutPrices.length > 200000) {
           logStr += ` | Progress ${Math.round(totalInserted / blocksWithoutPrices.length * 100)}%`;
         }
-        logger.debug(logStr);
+        logger.debug(logStr, logger.tags.mining);
         await BlocksRepository.$saveBlockPrices(blocksPrices);
       }
     } catch (e) {

@@ -76,13 +76,13 @@ class Indexer {
       this.tasksRunning.push(task);
       const lastestPriceId = await PricesRepository.$getLatestPriceId();
       if (priceUpdater.historyInserted === false || lastestPriceId === null) {
-        logger.debug(`Blocks prices indexer is waiting for the price updater to complete`);
+        logger.debug(`Blocks prices indexer is waiting for the price updater to complete`, logger.tags.mining);
         setTimeout(() => {
           this.tasksRunning = this.tasksRunning.filter(runningTask => runningTask !== task);
           this.runSingleTask('blocksPrices');
         }, 10000);
       } else {
-        logger.debug(`Blocks prices indexer will run now`);
+        logger.debug(`Blocks prices indexer will run now`, logger.tags.mining);
         await mining.$indexBlockPrices();
         this.tasksRunning = this.tasksRunning.filter(runningTask => runningTask !== task);
       }
@@ -112,7 +112,7 @@ class Indexer {
     this.runIndexer = false;
     this.indexerRunning = true;

-    logger.info(`Running mining indexer`);
+    logger.debug(`Running mining indexer`);

     await this.checkAvailableCoreIndexes();

@@ -122,7 +122,7 @@ class Indexer {
       const chainValid = await blocks.$generateBlockDatabase();
       if (chainValid === false) {
         // Chain of block hash was invalid, so we need to reindex. Stop here and continue at the next iteration
-        logger.warn(`The chain of block hash is invalid, re-indexing invalid data in 10 seconds.`);
+        logger.warn(`The chain of block hash is invalid, re-indexing invalid data in 10 seconds.`, logger.tags.mining);
         setTimeout(() => this.reindex(), 10000);
         this.indexerRunning = false;
         return;

@@ -20,9 +20,9 @@ class DifficultyAdjustmentsRepository {
       await DB.query(query, params);
     } catch (e: any) {
       if (e.errno === 1062) { // ER_DUP_ENTRY - This scenario is possible upon node backend restart
-        logger.debug(`Cannot save difficulty adjustment at block ${adjustment.height}, already indexed, ignoring`);
+        logger.debug(`Cannot save difficulty adjustment at block ${adjustment.height}, already indexed, ignoring`, logger.tags.mining);
       } else {
-        logger.err(`Cannot save difficulty adjustment at block ${adjustment.height}. Reason: ${e instanceof Error ? e.message : e}`);
+        logger.err(`Cannot save difficulty adjustment at block ${adjustment.height}. Reason: ${e instanceof Error ? e.message : e}`, logger.tags.mining);
         throw e;
       }
     }
@@ -54,7 +54,7 @@ class DifficultyAdjustmentsRepository {
       const [rows] = await DB.query(query);
       return rows as IndexedDifficultyAdjustment[];
     } catch (e) {
-      logger.err(`Cannot get difficulty adjustments from the database. Reason: ` + (e instanceof Error ? e.message : e));
+      logger.err(`Cannot get difficulty adjustments from the database. Reason: ` + (e instanceof Error ? e.message : e), logger.tags.mining);
       throw e;
     }
   }
@@ -83,7 +83,7 @@ class DifficultyAdjustmentsRepository {
       const [rows] = await DB.query(query);
       return rows as IndexedDifficultyAdjustment[];
     } catch (e) {
-      logger.err(`Cannot get difficulty adjustments from the database. Reason: ` + (e instanceof Error ? e.message : e));
+      logger.err(`Cannot get difficulty adjustments from the database. Reason: ` + (e instanceof Error ? e.message : e), logger.tags.mining);
       throw e;
     }
   }
@@ -93,27 +93,27 @@ class DifficultyAdjustmentsRepository {
       const [rows]: any[] = await DB.query(`SELECT height FROM difficulty_adjustments`);
       return rows.map(block => block.height);
     } catch (e: any) {
-      logger.err(`Cannot get difficulty adjustment block heights. Reason: ${e instanceof Error ? e.message : e}`);
+      logger.err(`Cannot get difficulty adjustment block heights. Reason: ${e instanceof Error ? e.message : e}`, logger.tags.mining);
       throw e;
     }
   }

   public async $deleteAdjustementsFromHeight(height: number): Promise<void> {
     try {
-      logger.info(`Delete newer difficulty adjustments from height ${height} from the database`);
+      logger.info(`Delete newer difficulty adjustments from height ${height} from the database`, logger.tags.mining);
       await DB.query(`DELETE FROM difficulty_adjustments WHERE height >= ?`, [height]);
     } catch (e: any) {
-      logger.err(`Cannot delete difficulty adjustments from the database. Reason: ${e instanceof Error ? e.message : e}`);
+      logger.err(`Cannot delete difficulty adjustments from the database. Reason: ${e instanceof Error ? e.message : e}`, logger.tags.mining);
       throw e;
     }
   }

   public async $deleteLastAdjustment(): Promise<void> {
     try {
-      logger.info(`Delete last difficulty adjustment from the database`);
+      logger.info(`Delete last difficulty adjustment from the database`, logger.tags.mining);
       await DB.query(`DELETE FROM difficulty_adjustments ORDER BY time LIMIT 1`);
     } catch (e: any) {
-      logger.err(`Cannot delete last difficulty adjustment from the database. Reason: ${e instanceof Error ? e.message : e}`);
+      logger.err(`Cannot delete last difficulty adjustment from the database. Reason: ${e instanceof Error ? e.message : e}`, logger.tags.mining);
       throw e;
     }
   }

@@ -25,7 +25,7 @@ class HashratesRepository {
     try {
       await DB.query(query);
     } catch (e: any) {
-      logger.err('Cannot save indexed hashrate into db. Reason: ' + (e instanceof Error ? e.message : e));
+      logger.err('Cannot save indexed hashrate into db. Reason: ' + (e instanceof Error ? e.message : e), logger.tags.mining);
       throw e;
     }
   }
@@ -51,7 +51,7 @@ class HashratesRepository {
       const [rows]: any[] = await DB.query(query);
       return rows;
     } catch (e) {
-      logger.err('Cannot fetch network hashrate history. Reason: ' + (e instanceof Error ? e.message : e));
+      logger.err('Cannot fetch network hashrate history. Reason: ' + (e instanceof Error ? e.message : e), logger.tags.mining);
       throw e;
     }
   }
@@ -78,7 +78,7 @@ class HashratesRepository {
       const [rows]: any[] = await DB.query(query);
       return rows;
     } catch (e) {
-      logger.err('Cannot fetch network hashrate history. Reason: ' + (e instanceof Error ? e.message : e));
+      logger.err('Cannot fetch network hashrate history. Reason: ' + (e instanceof Error ? e.message : e), logger.tags.mining);
       throw e;
     }
   }
@@ -93,7 +93,7 @@ class HashratesRepository {
       const [rows]: any[] = await DB.query(query);
       return rows.map(row => row.timestamp);
     } catch (e) {
-      logger.err('Cannot retreive indexed weekly hashrate timestamps. Reason: ' + (e instanceof Error ? e.message : e));
+      logger.err('Cannot retreive indexed weekly hashrate timestamps. Reason: ' + (e instanceof Error ? e.message : e), logger.tags.mining);
       throw e;
     }
   }
@@ -128,7 +128,7 @@ class HashratesRepository {
       const [rows]: any[] = await DB.query(query);
       return rows;
     } catch (e) {
-      logger.err('Cannot fetch weekly pools hashrate history. Reason: ' + (e instanceof Error ? e.message : e));
+      logger.err('Cannot fetch weekly pools hashrate history. Reason: ' + (e instanceof Error ? e.message : e), logger.tags.mining);
       throw e;
     }
   }
@@ -158,7 +158,7 @@ class HashratesRepository {
       const [rows]: any[] = await DB.query(query, [pool.id]);
       boundaries = rows[0];
     } catch (e) {
-      logger.err('Cannot fetch hashrate start/end timestamps for this pool. Reason: ' + (e instanceof Error ? e.message : e));
+      logger.err('Cannot fetch hashrate start/end timestamps for this pool. Reason: ' + (e instanceof Error ? e.message : e), logger.tags.mining);
     }

     // Get hashrates entries between boundaries
@@ -173,7 +173,7 @@ class HashratesRepository {
       const [rows]: any[] = await DB.query(query, [boundaries.firstTimestamp, boundaries.lastTimestamp, pool.id]);
       return rows;
     } catch (e) {
-      logger.err('Cannot fetch pool hashrate history for this pool. Reason: ' + (e instanceof Error ? e.message : e));
+      logger.err('Cannot fetch pool hashrate history for this pool. Reason: ' + (e instanceof Error ? e.message : e), logger.tags.mining);
       throw e;
     }
   }
@@ -192,7 +192,7 @@ class HashratesRepository {
       }
       return rows[0]['number'];
     } catch (e) {
-      logger.err(`Cannot retrieve last indexing run for ${key}. Reason: ` + (e instanceof Error ? e.message : e));
+      logger.err(`Cannot retrieve last indexing run for ${key}. Reason: ` + (e instanceof Error ? e.message : e), logger.tags.mining);
       throw e;
     }
   }
@@ -201,7 +201,7 @@
    * Delete most recent data points for re-indexing
    */
   public async $deleteLastEntries() {
-    logger.info(`Delete latest hashrates data points from the database`);
+    logger.info(`Delete latest hashrates data points from the database`, logger.tags.mining);

     try {
       const [rows]: any[] = await DB.query(`SELECT MAX(hashrate_timestamp) as timestamp FROM hashrates GROUP BY type`);
@@ -212,7 +212,7 @@ class HashratesRepository {
       mining.lastHashrateIndexingDate = null;
       mining.lastWeeklyHashrateIndexingDate = null;
     } catch (e) {
-      logger.err('Cannot delete latest hashrates data points. Reason: ' + (e instanceof Error ? e.message : e));
+      logger.err('Cannot delete latest hashrates data points. Reason: ' + (e instanceof Error ? e.message : e), logger.tags.mining);
     }
   }

@@ -228,7 +228,7 @@ class HashratesRepository {
       mining.lastHashrateIndexingDate = null;
       mining.lastWeeklyHashrateIndexingDate = null;
     } catch (e) {
-      logger.err('Cannot delete latest hashrates data points. Reason: ' + (e instanceof Error ? e.message : e));
+      logger.err('Cannot delete latest hashrates data points. Reason: ' + (e instanceof Error ? e.message : e), logger.tags.mining);
     }
   }
 }

@@ -411,7 +411,7 @@ class LightningStatsImporter {
       }

       if (totalProcessed > 0) {
-        logger.notice(`Lightning network stats historical import completed`, logger.tags.ln);
+        logger.info(`Lightning network stats historical import completed`, logger.tags.ln);
       }
     } catch (e) {
       logger.err(`Lightning network stats historical failed. Reason: ${e instanceof Error ? e.message : e}`, logger.tags.ln);

@@ -82,7 +82,7 @@ class PoolsUpdater {
         logger.err(`Could not migrate mining pools, rolling back. Exception: ${JSON.stringify(e)}`, logger.tags.mining);
         await DB.query('ROLLBACK;');
       }
-      logger.notice('PoolsUpdater completed');
+      logger.info('PoolsUpdater completed');

     } catch (e) {
       this.lastRun = now - (oneWeek - oneDay); // Try again in 24h instead of waiting next week

@@ -98,7 +98,7 @@ class KrakenApi implements PriceFeed {
     }

     if (Object.keys(priceHistory).length > 0) {
-      logger.notice(`Inserted ${Object.keys(priceHistory).length} Kraken EUR, USD, GBP, JPY, CAD, CHF and AUD weekly price history into db`, logger.tags.mining);
+      logger.info(`Inserted ${Object.keys(priceHistory).length} Kraken EUR, USD, GBP, JPY, CAD, CHF and AUD weekly price history into db`, logger.tags.mining);
     }
   }
 }
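The recurring notice-to-info downgrades across these files line up with syslog-style severities, which the logger's method names (err, warn, notice, info, debug) mirror; mapping them onto RFC 5424 numbering is an assumption here, not something this diff shows:

  // RFC 5424 syslog severities: lower number = more severe (assumed mapping).
  enum Severity {
    Error = 3,         // logger.err
    Warning = 4,       // logger.warn
    Notice = 5,        // logger.notice: normal but significant condition
    Informational = 6, // logger.info: routine operational message
    Debug = 7,         // logger.debug: developer-level detail
  }

  // Routine "indexing completed" summaries read as operational rather than
  // significant events, hence the moves from notice (5) to info (6).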