Compare commits

mononaut/t...mononaut/f (14 commits)
| SHA1 |
|---|
| 28a10f2aaa |
| c454ef0655 |
| 639294d319 |
| 5905eebaa6 |
| f99aa8f1f0 |
| 3e99605870 |
| 05c4440680 |
| 238a2e75b1 |
| 667fc4ea18 |
| 05a8154db0 |
| 68642aeb5f |
| dcc8b81ca6 |
| db4bf52596 |
| efb48271f9 |
.github/workflows/on-tag.yml (vendored): 10 changed lines
@@ -31,7 +31,7 @@ jobs:
         run: |
           sudo swapoff /mnt/swapfile
           sudo rm -v /mnt/swapfile
-          sudo fallocate -l 10G /mnt/swapfile
+          sudo fallocate -l 13G /mnt/swapfile
           sudo chmod 600 /mnt/swapfile
           sudo mkswap /mnt/swapfile
           sudo swapon /mnt/swapfile
@@ -68,24 +68,24 @@ jobs:
         run: echo "${{ secrets.DOCKER_PASSWORD }}" | docker login -u "${{ secrets.DOCKER_USERNAME }}" --password-stdin

       - name: Checkout project
-        uses: actions/checkout@e2f20e631ae6d7dd3b768f56a5d2af784dd54791 # v2.5.0
+        uses: actions/checkout@v3

       - name: Init repo for Dockerization
         run: docker/init.sh "$TAG"

       - name: Set up QEMU
-        uses: docker/setup-qemu-action@e81a89b1732b9c48d79cd809d8d81d79c4647a18 # v2.1.0
+        uses: docker/setup-qemu-action@v2
         id: qemu

       - name: Setup Docker buildx action
-        uses: docker/setup-buildx-action@8c0edbc76e98fa90f69d9a2c020dcb50019dc325 # v2.2.1
+        uses: docker/setup-buildx-action@v2
         id: buildx

       - name: Available platforms
         run: echo ${{ steps.buildx.outputs.platforms }}

       - name: Cache Docker layers
-        uses: actions/cache@9b0c1fce7a93df8e3bb8926b0d6e9d89e92f20a7 # v3.0.11
+        uses: actions/cache@v3
         id: cache
         with:
           path: /tmp/.buildx-cache
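For context, the hunk above grows the CI runner's swapfile from 10G to 13G by recreating it. A quick sanity check on a runner (not part of this diff, just standard util-linux commands) might be:

    sudo swapon /mnt/swapfile
    swapon --show=NAME,SIZE   # expect /mnt/swapfile at ~13G
    free -h                   # swap total should reflect the new size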
@@ -27,7 +27,8 @@
     "POOLS_JSON_TREE_URL": "https://api.github.com/repos/mempool/mining-pools/git/trees/master",
     "ADVANCED_GBT_AUDIT": false,
     "ADVANCED_GBT_MEMPOOL": false,
-    "TRANSACTION_INDEXING": false
+    "TRANSACTION_INDEXING": false,
+    "FIRST_SEEN_INDEXING_DAYS": 0
   },
   "CORE_RPC": {
     "HOST": "127.0.0.1",
@@ -28,7 +28,8 @@
     "POOLS_JSON_URL": "__POOLS_JSON_URL__",
     "ADVANCED_GBT_AUDIT": "__ADVANCED_GBT_AUDIT__",
     "ADVANCED_GBT_MEMPOOL": "__ADVANCED_GBT_MEMPOOL__",
-    "TRANSACTION_INDEXING": "__TRANSACTION_INDEXING__"
+    "TRANSACTION_INDEXING": "__TRANSACTION_INDEXING__",
+    "FIRST_SEEN_INDEXING_DAYS": "__FIRST_SEEN_INDEXING_DAYS__"
   },
   "CORE_RPC": {
     "HOST": "__CORE_RPC_HOST__",
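Taken together, the two config hunks above thread the new FIRST_SEEN_INDEXING_DAYS option through both the sample backend config and the Docker template. A minimal sketch of a backend config enabling the feature (the value 30 is an arbitrary illustration, not taken from this diff; 0 keeps it disabled):

    {
      "MEMPOOL": {
        "TRANSACTION_INDEXING": true,
        "FIRST_SEEN_INDEXING_DAYS": 30
      }
    }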
@@ -41,6 +41,7 @@ describe('Mempool Backend Config', () => {
       ADVANCED_GBT_AUDIT: false,
       ADVANCED_GBT_MEMPOOL: false,
       TRANSACTION_INDEXING: false,
+      FIRST_SEEN_INDEXING_DAYS: 0,
     });

     expect(config.ELECTRUM).toStrictEqual({ HOST: '127.0.0.1', PORT: 3306, TLS_ENABLED: true });
@@ -25,6 +25,7 @@ class BitcoinRoutes {
       .get(config.MEMPOOL.API_URL_PREFIX + 'transaction-times', this.getTransactionTimes)
       .get(config.MEMPOOL.API_URL_PREFIX + 'outspends', this.$getBatchedOutspends)
       .get(config.MEMPOOL.API_URL_PREFIX + 'cpfp/:txId', this.$getCpfpInfo)
+      .get(config.MEMPOOL.API_URL_PREFIX + 'extras/:txId', this.$getTransactionExtras)
       .get(config.MEMPOOL.API_URL_PREFIX + 'difficulty-adjustment', this.getDifficultyChange)
      .get(config.MEMPOOL.API_URL_PREFIX + 'fees/recommended', this.getRecommendedFees)
       .get(config.MEMPOOL.API_URL_PREFIX + 'fees/mempool-blocks', this.getMempoolBlocks)
@@ -221,6 +222,42 @@ class BitcoinRoutes {
       res.status(404).send(`Transaction has no CPFP info available.`);
     }

+  private async $getTransactionExtras(req: Request, res: Response): Promise<void> {
+    if (!/^[a-fA-F0-9]{64}$/.test(req.params.txId)) {
+      res.status(501).send(`Invalid transaction ID.`);
+      return;
+    }
+
+    const tx = mempool.getMempool()[req.params.txId];
+    if (tx) {
+      if (tx?.cpfpChecked) {
+        res.json({
+          ancestors: tx.ancestors,
+          bestDescendant: tx.bestDescendant || null,
+          descendants: tx.descendants || null,
+          effectiveFeePerVsize: tx.effectiveFeePerVsize || null,
+          firstSeen: tx.firstSeen,
+        });
+        return;
+      }
+
+      const cpfpInfo = Common.setRelativesAndGetCpfpInfo(tx, mempool.getMempool());
+
+      res.json({
+        ...cpfpInfo,
+        firstSeen: tx.firstSeen,
+      });
+      return;
+    } else {
+      const extras = await transactionRepository.$getTransactionExtras(req.params.txId);
+      if (extras) {
+        res.json(extras);
+        return;
+      }
+    }
+    res.status(404).send(`Transaction has no extra info available.`);
+  }
+
   private getBackendInfo(req: Request, res: Response) {
     res.json(backendInfo.getBackendInfo());
   }
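The new handler serves first-seen and CPFP data for a txid, preferring the live mempool and falling back to the transactions table. A hypothetical request against it (the exact path depends on config.MEMPOOL.API_URL_PREFIX and the backend port; both values below are assumptions, not taken from this diff):

    curl -s http://127.0.0.1:8999/api/v1/extras/<txid>
    # → {"ancestors":[...],"effectiveFeePerVsize":12.3,"firstSeen":1669000000}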
@@ -296,7 +296,7 @@ class Blocks {
           const runningFor = Math.max(1, Math.round((new Date().getTime() / 1000) - startedAt));
           const blockPerSeconds = Math.max(1, indexedThisRun / elapsedSeconds);
           const progress = Math.round(totalIndexed / indexedBlocks.length * 10000) / 100;
-          logger.debug(`Indexing block summary for #${block.height} | ~${blockPerSeconds.toFixed(2)} blocks/sec | total: ${totalIndexed}/${indexedBlocks.length} (${progress}%) | elapsed: ${runningFor} seconds`);
+          logger.debug(`Indexing block summary for #${block.height} | ~${blockPerSeconds.toFixed(2)} blocks/sec | total: ${totalIndexed}/${indexedBlocks.length} (${progress}%) | elapsed: ${runningFor} seconds`, logger.tags.mining);
           timer = new Date().getTime() / 1000;
           indexedThisRun = 0;
         }
@@ -309,12 +309,12 @@ class Blocks {
         newlyIndexed++;
       }
       if (newlyIndexed > 0) {
-        logger.notice(`Blocks summaries indexing completed: indexed ${newlyIndexed} blocks`);
+        logger.notice(`Blocks summaries indexing completed: indexed ${newlyIndexed} blocks`, logger.tags.mining);
       } else {
-        logger.debug(`Blocks summaries indexing completed: indexed ${newlyIndexed} blocks`);
+        logger.debug(`Blocks summaries indexing completed: indexed ${newlyIndexed} blocks`, logger.tags.mining);
       }
     } catch (e) {
-      logger.err(`Blocks summaries indexing failed. Trying again in 10 seconds. Reason: ${(e instanceof Error ? e.message : e)}`);
+      logger.err(`Blocks summaries indexing failed. Trying again in 10 seconds. Reason: ${(e instanceof Error ? e.message : e)}`, logger.tags.mining);
       throw e;
     }
   }
@@ -385,7 +385,7 @@ class Blocks {

     const lastBlockToIndex = Math.max(0, currentBlockHeight - indexingBlockAmount + 1);

-    logger.debug(`Indexing blocks from #${currentBlockHeight} to #${lastBlockToIndex}`);
+    logger.debug(`Indexing blocks from #${currentBlockHeight} to #${lastBlockToIndex}`, logger.tags.mining);
     loadingIndicators.setProgress('block-indexing', 0);

     const chunkSize = 10000;
@@ -405,7 +405,7 @@ class Blocks {
         continue;
       }

-      logger.info(`Indexing ${missingBlockHeights.length} blocks from #${currentBlockHeight} to #${endBlock}`);
+      logger.info(`Indexing ${missingBlockHeights.length} blocks from #${currentBlockHeight} to #${endBlock}`, logger.tags.mining);

       for (const blockHeight of missingBlockHeights) {
         if (blockHeight < lastBlockToIndex) {
@@ -418,7 +418,7 @@ class Blocks {
           const runningFor = Math.max(1, Math.round((new Date().getTime() / 1000) - startedAt));
           const blockPerSeconds = Math.max(1, indexedThisRun / elapsedSeconds);
           const progress = Math.round(totalIndexed / indexingBlockAmount * 10000) / 100;
-          logger.debug(`Indexing block #${blockHeight} | ~${blockPerSeconds.toFixed(2)} blocks/sec | total: ${totalIndexed}/${indexingBlockAmount} (${progress}%) | elapsed: ${runningFor} seconds`);
+          logger.debug(`Indexing block #${blockHeight} | ~${blockPerSeconds.toFixed(2)} blocks/sec | total: ${totalIndexed}/${indexingBlockAmount} (${progress}%) | elapsed: ${runningFor} seconds`, logger.tags.mining);
           timer = new Date().getTime() / 1000;
           indexedThisRun = 0;
           loadingIndicators.setProgress('block-indexing', progress, false);
@@ -435,13 +435,13 @@ class Blocks {
        currentBlockHeight -= chunkSize;
      }
      if (newlyIndexed > 0) {
-       logger.notice(`Block indexing completed: indexed ${newlyIndexed} blocks`);
+       logger.notice(`Block indexing completed: indexed ${newlyIndexed} blocks`, logger.tags.mining);
      } else {
-       logger.debug(`Block indexing completed: indexed ${newlyIndexed} blocks`);
+       logger.debug(`Block indexing completed: indexed ${newlyIndexed} blocks`, logger.tags.mining);
      }
      loadingIndicators.setProgress('block-indexing', 100);
    } catch (e) {
-     logger.err('Block indexing failed. Trying again in 10 seconds. Reason: ' + (e instanceof Error ? e.message : e));
+     logger.err('Block indexing failed. Trying again in 10 seconds. Reason: ' + (e instanceof Error ? e.message : e), logger.tags.mining);
      loadingIndicators.setProgress('block-indexing', 100);
      throw e;
    }
@@ -537,7 +537,7 @@ class Blocks {
         priceId: lastestPriceId,
       }]);
     } else {
-      logger.info(`Cannot save block price for ${blockExtended.height} because the price updater hasnt completed yet. Trying again in 10 seconds.`)
+      logger.info(`Cannot save block price for ${blockExtended.height} because the price updater hasnt completed yet. Trying again in 10 seconds.`, logger.tags.mining);
       setTimeout(() => {
         indexer.runSingleTask('blocksPrices');
       }, 10000);
@@ -677,7 +677,7 @@ class Blocks {
   }

   public async $getBlocks(fromHeight?: number, limit: number = 15): Promise<BlockExtended[]> {
-    let currentHeight = fromHeight !== undefined ? fromHeight : await blocksRepository.$mostRecentBlockHeight();
+    let currentHeight = fromHeight !== undefined ? fromHeight : this.currentBlockHeight;
     const returnBlocks: BlockExtended[] = [];

     if (currentHeight < 0) {
@@ -194,6 +194,13 @@ export class Common {
     );
   }

+  static firstSeenIndexingEnabled(): boolean {
+    return (
+      Common.indexingEnabled() &&
+      config.MEMPOOL.FIRST_SEEN_INDEXING_DAYS !== 0
+    );
+  }
+
   static setDateMidnight(date: Date): void {
     date.setUTCHours(0);
     date.setUTCMinutes(0);
@@ -4,7 +4,7 @@ import logger from '../logger';
 import { Common } from './common';

 class DatabaseMigration {
-  private static currentVersion = 49;
+  private static currentVersion = 50;
   private queryTimeout = 3600_000;
   private statisticsAddedIndexed = false;
   private uniqueLogs: string[] = [];
@@ -442,6 +442,11 @@ class DatabaseMigration {
       await this.$executeQuery('TRUNCATE TABLE `blocks_audits`');
       await this.updateToSchemaVersion(49);
     }
+
+    if (databaseSchemaVersion < 50 && isBitcoin === true) {
+      await this.$executeQuery('ALTER TABLE `transactions` ADD first_seen datetime DEFAULT NULL, ADD INDEX (first_seen)');
+      await this.updateToSchemaVersion(50);
+    }
   }

   /**
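Migration 50 adds a nullable, indexed first_seen datetime column. A hypothetical spot-check after the migration runs (column and table names come from the diff; the query itself is just an illustration):

    SELECT txid, UNIX_TIMESTAMP(first_seen) AS firstSeen
    FROM transactions
    WHERE first_seen IS NOT NULL
    LIMIT 5;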
@@ -670,9 +670,7 @@ class ChannelsApi {
         AND status != 2
       `);
       if (result[0].changedRows ?? 0 > 0) {
-        logger.info(`Marked ${result[0].changedRows} channels as inactive because they are not in the graph`);
-      } else {
-        logger.debug(`Marked ${result[0].changedRows} channels as inactive because they are not in the graph`);
+        logger.debug(`Marked ${result[0].changedRows} channels as inactive because they are not in the graph`, logger.tags.ln);
       }
     } catch (e) {
       logger.err('$setChannelsInactive() error: ' + (e instanceof Error ? e.message : e));
@@ -685,9 +685,7 @@ class NodesApi {
         )
       `);
       if (result[0].changedRows ?? 0 > 0) {
-        logger.info(`Marked ${result[0].changedRows} nodes as inactive because they are not in the graph`);
-      } else {
-        logger.debug(`Marked ${result[0].changedRows} nodes as inactive because they are not in the graph`);
+        logger.debug(`Marked ${result[0].changedRows} nodes as inactive because they are not in the graph`, logger.tags.ln);
       }
     } catch (e) {
       logger.err('$setNodesInactive() error: ' + (e instanceof Error ? e.message : e));
@@ -141,13 +141,13 @@ export default class CLightningClient extends EventEmitter implements AbstractLi
       // main data directory provided, default to using the bitcoin mainnet subdirectory
       // to be removed in v0.2.0
       else if (fExists(rpcPath, 'bitcoin', 'lightning-rpc')) {
-        logger.warn(`[CLightningClient] ${rpcPath}/lightning-rpc is missing, using the bitcoin mainnet subdirectory at ${rpcPath}/bitcoin instead.`)
-        logger.warn(`[CLightningClient] specifying the main lightning data directory is deprecated, please specify the network directory explicitly.\n`)
+        logger.warn(`${rpcPath}/lightning-rpc is missing, using the bitcoin mainnet subdirectory at ${rpcPath}/bitcoin instead.`, logger.tags.ln)
+        logger.warn(`specifying the main lightning data directory is deprecated, please specify the network directory explicitly.\n`, logger.tags.ln)
         rpcPath = path.join(rpcPath, 'bitcoin', 'lightning-rpc')
       }
     }

-    logger.debug(`[CLightningClient] Connecting to ${rpcPath}`);
+    logger.debug(`Connecting to ${rpcPath}`, logger.tags.ln);

     super();
     this.rpcPath = rpcPath;
@@ -172,19 +172,19 @@ export default class CLightningClient extends EventEmitter implements AbstractLi

     this.clientConnectionPromise = new Promise<void>(resolve => {
       _self.client.on('connect', () => {
-        logger.info(`[CLightningClient] Lightning client connected`);
+        logger.info(`CLightning client connected`, logger.tags.ln);
         _self.reconnectWait = 1;
         resolve();
       });

       _self.client.on('end', () => {
-        logger.err('[CLightningClient] Lightning client connection closed, reconnecting');
+        logger.err(`CLightning client connection closed, reconnecting`, logger.tags.ln);
         _self.increaseWaitTime();
         _self.reconnect();
       });

       _self.client.on('error', error => {
-        logger.err(`[CLightningClient] Lightning client connection error: ${error}`);
+        logger.err(`CLightning client connection error: ${error}`, logger.tags.ln);
         _self.increaseWaitTime();
         _self.reconnect();
       });
@@ -196,7 +196,6 @@ export default class CLightningClient extends EventEmitter implements AbstractLi
         return;
       }
       const data = JSON.parse(line);
-      // logger.debug(`[CLightningClient] #${data.id} <-- ${JSON.stringify(data.error || data.result)}`);
       _self.emit('res:' + data.id, data);
     });
   }
@@ -217,7 +216,7 @@ export default class CLightningClient extends EventEmitter implements AbstractLi
     }

     this.reconnectTimeout = setTimeout(() => {
-      logger.debug('[CLightningClient] Trying to reconnect...');
+      logger.debug(`Trying to reconnect...`, logger.tags.ln);

       _self.client.connect(_self.rpcPath);
       _self.reconnectTimeout = null;
@@ -235,7 +234,6 @@ export default class CLightningClient extends EventEmitter implements AbstractLi
       id: '' + callInt
     };

-    // logger.debug(`[CLightningClient] #${callInt} --> ${method} ${args}`);

     // Wait for the client to connect
     return this.clientConnectionPromise
@@ -2,6 +2,7 @@ import { ILightningApi } from '../lightning-api.interface';
 import FundingTxFetcher from '../../../tasks/lightning/sync-tasks/funding-tx-fetcher';
 import logger from '../../../logger';
 import { Common } from '../../common';
+import config from '../../../config';

 /**
  * Convert a clightning "listnode" entry to a lnd node entry
@@ -40,7 +41,7 @@ export function convertNode(clNode: any): ILightningApi.Node {
  * Convert clightning "listchannels" response to lnd "describegraph.edges" format
  */
 export async function convertAndmergeBidirectionalChannels(clChannels: any[]): Promise<ILightningApi.Channel[]> {
-  logger.info('Converting clightning nodes and channels to lnd graph format');
+  logger.debug(`Converting clightning nodes and channels to lnd graph format`, logger.tags.ln);

   let loggerTimer = new Date().getTime() / 1000;
   let channelProcessed = 0;
@@ -62,8 +63,8 @@ export async function convertAndmergeBidirectionalChannels(clChannels: any[]): P
     }

     const elapsedSeconds = Math.round((new Date().getTime() / 1000) - loggerTimer);
-    if (elapsedSeconds > 10) {
-      logger.info(`Building complete channels from clightning output. Channels processed: ${channelProcessed + 1} of ${clChannels.length}`);
+    if (elapsedSeconds > config.LIGHTNING.LOGGER_UPDATE_INTERVAL) {
+      logger.info(`Building complete channels from clightning output. Channels processed: ${channelProcessed + 1} of ${clChannels.length}`, logger.tags.ln);
       loggerTimer = new Date().getTime() / 1000;
     }
@@ -76,7 +77,7 @@ export async function convertAndmergeBidirectionalChannels(clChannels: any[]): P
     consolidatedChannelList.push(await buildIncompleteChannel(clChannelsDict[short_channel_id]));

     const elapsedSeconds = Math.round((new Date().getTime() / 1000) - loggerTimer);
-    if (elapsedSeconds > 10) {
+    if (elapsedSeconds > config.LIGHTNING.LOGGER_UPDATE_INTERVAL) {
       logger.info(`Building partial channels from clightning output. Channels processed: ${channelProcessed + 1} of ${keys.length}`);
       loggerTimer = new Date().getTime() / 1000;
     }
@@ -9,6 +9,7 @@ import loadingIndicators from './loading-indicators';
 import bitcoinClient from './bitcoin/bitcoin-client';
 import bitcoinSecondClient from './bitcoin/bitcoin-second-client';
 import rbfCache from './rbf-cache';
+import transactionRepository from '../repositories/TransactionRepository';

 class Mempool {
   private static WEBSOCKET_REFRESH_RATE_MS = 10000;
@@ -217,6 +218,14 @@ class Mempool {
     }
   }

+  public async $saveTxFirstSeenTimes(transactions: TransactionExtended[], mempool: { [txid: string]: TransactionExtended }) {
+    for (const tx of transactions) {
+      if (mempool[tx.txid]) {
+        await transactionRepository.$saveTxFirstSeen(tx.txid, tx.firstSeen || Date.now());
+      }
+    }
+  }
+
   private updateTxPerSecond() {
     const nowMinusTimeSpan = new Date().getTime() - (1000 * config.STATISTICS.TX_PER_SECOND_SAMPLE_PERIOD);
     this.txPerSecondArray = this.txPerSecondArray.filter((unixTime) => unixTime > nowMinusTimeSpan);
@@ -265,9 +265,9 @@ class Mining {
       }
       await HashratesRepository.$setLatestRun('last_weekly_hashrates_indexing', new Date().getUTCDate());
       if (newlyIndexed > 0) {
-        logger.notice(`Weekly mining pools hashrates indexing completed: indexed ${newlyIndexed}`);
+        logger.notice(`Weekly mining pools hashrates indexing completed: indexed ${newlyIndexed}`, logger.tags.mining);
       } else {
-        logger.debug(`Weekly mining pools hashrates indexing completed: indexed ${newlyIndexed}`);
+        logger.debug(`Weekly mining pools hashrates indexing completed: indexed ${newlyIndexed}`, logger.tags.mining);
       }
       loadingIndicators.setProgress('weekly-hashrate-indexing', 100);
     } catch (e) {
@@ -370,14 +370,14 @@ class Mining {

       await HashratesRepository.$setLatestRun('last_hashrates_indexing', new Date().getUTCDate());
       if (newlyIndexed > 0) {
-        logger.notice(`Daily network hashrate indexing completed: indexed ${newlyIndexed} days`);
+        logger.notice(`Daily network hashrate indexing completed: indexed ${newlyIndexed} days`, logger.tags.mining);
       } else {
-        logger.debug(`Daily network hashrate indexing completed: indexed ${newlyIndexed} days`);
+        logger.debug(`Daily network hashrate indexing completed: indexed ${newlyIndexed} days`, logger.tags.mining);
       }
       loadingIndicators.setProgress('daily-hashrate-indexing', 100);
     } catch (e) {
       loadingIndicators.setProgress('daily-hashrate-indexing', 100);
-      logger.err(`Daily network hashrate indexing failed. Trying again in 10 seconds. Reason: ${(e instanceof Error ? e.message : e)}`);
+      logger.err(`Daily network hashrate indexing failed. Trying again in 10 seconds. Reason: ${(e instanceof Error ? e.message : e)}`, logger.tags.mining);
       throw e;
     }
   }
@@ -449,9 +449,9 @@ class Mining {
     }

     if (totalIndexed > 0) {
-      logger.notice(`Indexed ${totalIndexed} difficulty adjustments`);
+      logger.notice(`Indexed ${totalIndexed} difficulty adjustments`, logger.tags.mining);
     } else {
-      logger.debug(`Indexed ${totalIndexed} difficulty adjustments`);
+      logger.debug(`Indexed ${totalIndexed} difficulty adjustments`, logger.tags.mining);
     }
   }
@@ -61,7 +61,7 @@ class PoolsParser {
         poolNames.push(poolsDuplicated[i].name);
       }
     }
-    logger.debug(`Found ${poolNames.length} unique mining pools`);
+    logger.debug(`Found ${poolNames.length} unique mining pools`, logger.tags.mining);

     // Get existing pools from the db
     let existingPools;
@@ -72,7 +72,7 @@ class PoolsParser {
         existingPools = [];
       }
     } catch (e) {
-      logger.err('Cannot get existing pools from the database, skipping pools.json import');
+      logger.err('Cannot get existing pools from the database, skipping pools.json import', logger.tags.mining);
       return;
     }

@@ -99,7 +99,7 @@ class PoolsParser {
         slug = poolsJson['slugs'][poolNames[i]];
       } catch (e) {
         if (this.slugWarnFlag === false) {
-          logger.warn(`pools.json does not seem to contain the 'slugs' object`);
+          logger.warn(`pools.json does not seem to contain the 'slugs' object`, logger.tags.mining);
          this.slugWarnFlag = true;
         }
       }
@@ -107,7 +107,7 @@ class PoolsParser {
       if (slug === undefined) {
         // Only keep alphanumerical
         slug = poolNames[i].replace(/[^a-z0-9]/gi, '').toLowerCase();
-        logger.warn(`No slug found for '${poolNames[i]}', generating it => '${slug}'`);
+        logger.warn(`No slug found for '${poolNames[i]}', generating it => '${slug}'`, logger.tags.mining);
       }

       const poolObj = {
@@ -143,9 +143,9 @@ class PoolsParser {
           'addresses': allAddresses,
           'slug': slug
         });
-        logger.debug(`Rename '${poolToRename[0].name}' mining pool to ${poolObj.name}`);
+        logger.debug(`Rename '${poolToRename[0].name}' mining pool to ${poolObj.name}`, logger.tags.mining);
       } else {
-        logger.debug(`Add '${finalPoolName}' mining pool`);
+        logger.debug(`Add '${finalPoolName}' mining pool`, logger.tags.mining);
         finalPoolDataAdd.push(poolObj);
       }
     }
@@ -160,14 +160,14 @@ class PoolsParser {
     }

     if (config.DATABASE.ENABLED === false) { // Don't run db operations
-      logger.info('Mining pools.json import completed (no database)');
+      logger.info('Mining pools.json import completed (no database)', logger.tags.mining);
       return;
     }

     if (finalPoolDataAdd.length > 0 || finalPoolDataUpdate.length > 0 ||
       finalPoolDataRename.length > 0
     ) {
-      logger.debug(`Update pools table now`);
+      logger.debug(`Update pools table now`, logger.tags.mining);

       // Add new mining pools into the database
       let queryAdd: string = 'INSERT INTO pools(name, link, regexes, addresses, slug) VALUES ';
@@ -217,9 +217,9 @@ class PoolsParser {
         await DB.query({ sql: query, timeout: 120000 });
       }
       await this.insertUnknownPool();
-      logger.info('Mining pools.json import completed');
+      logger.info('Mining pools.json import completed', logger.tags.mining);
     } catch (e) {
-      logger.err(`Cannot import pools in the database`);
+      logger.err(`Cannot import pools in the database`, logger.tags.mining);
       throw e;
     }
   }
@@ -227,7 +227,7 @@ class PoolsParser {
     try {
       await this.insertUnknownPool();
     } catch (e) {
-      logger.err(`Cannot insert unknown pool in the database`);
+      logger.err(`Cannot insert unknown pool in the database`, logger.tags.mining);
       throw e;
     }
   }
@@ -252,7 +252,7 @@ class PoolsParser {
         `);
       }
     } catch (e) {
-      logger.err('Unable to insert "Unknown" mining pool');
+      logger.err('Unable to insert "Unknown" mining pool', logger.tags.mining);
     }
   }
@@ -272,17 +272,17 @@ class PoolsParser {
     for (const updatedPool of finalPoolDataUpdate) {
       const [pool]: any[] = await DB.query(`SELECT id, name from pools where slug = "${updatedPool.slug}"`);
       if (pool.length > 0) {
-        logger.notice(`Deleting blocks from ${pool[0].name} mining pool for future re-indexing`);
+        logger.notice(`Deleting blocks from ${pool[0].name} mining pool for future re-indexing`, logger.tags.mining);
         await DB.query(`DELETE FROM blocks WHERE pool_id = ${pool[0].id}`);
       }
     }

     // Ignore early days of Bitcoin as there were not mining pool yet
-    logger.notice('Deleting blocks with unknown mining pool from height 130635 for future re-indexing');
+    logger.notice(`Deleting blocks with unknown mining pool from height 130635 for future re-indexing`, logger.tags.mining);
     const [unknownPool] = await DB.query(`SELECT id from pools where slug = "unknown"`);
     await DB.query(`DELETE FROM blocks WHERE pool_id = ${unknownPool[0].id} AND height > 130635`);

-    logger.notice('Truncating hashrates for future re-indexing');
+    logger.notice(`Truncating hashrates for future re-indexing`, logger.tags.mining);
     await DB.query(`DELETE FROM hashrates`);
   }
 }
@@ -19,6 +19,7 @@ import feeApi from './fee-api';
 import BlocksAuditsRepository from '../repositories/BlocksAuditsRepository';
 import BlocksSummariesRepository from '../repositories/BlocksSummariesRepository';
 import Audit from './audit';
+import mempool from './mempool';

 class WebsocketHandler {
   private wss: WebSocket.Server | undefined;
@@ -462,6 +463,10 @@ class WebsocketHandler {
       }
     }

+    if (Common.firstSeenIndexingEnabled()) {
+      await mempool.$saveTxFirstSeenTimes(transactions, _memPool);
+    }
+
     const removed: string[] = [];
     // Update mempool to remove transactions included in the new block
     for (const txId of txIds) {
@@ -32,6 +32,7 @@ interface IConfig {
     ADVANCED_GBT_AUDIT: boolean;
     ADVANCED_GBT_MEMPOOL: boolean;
     TRANSACTION_INDEXING: boolean;
+    FIRST_SEEN_INDEXING_DAYS: number;
   };
   ESPLORA: {
     REST_API_URL: string;
@@ -153,6 +154,7 @@ const defaults: IConfig = {
     'ADVANCED_GBT_AUDIT': false,
     'ADVANCED_GBT_MEMPOOL': false,
     'TRANSACTION_INDEXING': false,
+    'FIRST_SEEN_INDEXING_DAYS': 0,
   },
   'ESPLORA': {
     'REST_API_URL': 'http://127.0.0.1:3000',
@@ -7,6 +7,7 @@ import HashratesRepository from './repositories/HashratesRepository';
 import bitcoinClient from './api/bitcoin/bitcoin-client';
 import priceUpdater from './tasks/price-updater';
 import PricesRepository from './repositories/PricesRepository';
+import TransactionRepository from './repositories/TransactionRepository';

 class Indexer {
   runIndexer = true;
@@ -78,6 +79,7 @@ class Indexer {
       await mining.$generatePoolHashrateHistory();
       await blocks.$generateBlocksSummariesDatabase();
       await blocks.$generateCPFPDatabase();
+      await TransactionRepository.$clearOldFirstSeen();
     } catch (e) {
       this.indexerRunning = false;
       logger.err(`Indexer failed, trying again in 10 seconds. Reason: ` + (e instanceof Error ? e.message : e));
@@ -32,22 +32,27 @@ class Logger {
     local7: 23
   };

+  public tags = {
+    mining: 'Mining',
+    ln: 'Lightning',
+  };
+
   // @ts-ignore
-  public emerg: ((msg: string) => void);
+  public emerg: ((msg: string, tag?: string) => void);
   // @ts-ignore
-  public alert: ((msg: string) => void);
+  public alert: ((msg: string, tag?: string) => void);
   // @ts-ignore
-  public crit: ((msg: string) => void);
+  public crit: ((msg: string, tag?: string) => void);
   // @ts-ignore
-  public err: ((msg: string) => void);
+  public err: ((msg: string, tag?: string) => void);
   // @ts-ignore
-  public warn: ((msg: string) => void);
+  public warn: ((msg: string, tag?: string) => void);
   // @ts-ignore
-  public notice: ((msg: string) => void);
+  public notice: ((msg: string, tag?: string) => void);
   // @ts-ignore
-  public info: ((msg: string) => void);
+  public info: ((msg: string, tag?: string) => void);
   // @ts-ignore
-  public debug: ((msg: string) => void);
+  public debug: ((msg: string, tag?: string) => void);

   private name = 'mempool';
   private client: dgram.Socket;
@@ -66,8 +71,8 @@ class Logger {

   private addprio(prio): void {
     this[prio] = (function(_this) {
-      return function(msg) {
-        return _this.msg(prio, msg);
+      return function(msg, tag?: string) {
+        return _this.msg(prio, msg, tag);
       };
     })(this);
   }
@@ -85,7 +90,7 @@ class Logger {
     return '';
   }

-  private msg(priority, msg) {
+  private msg(priority, msg, tag?: string) {
     let consolemsg, prionum, syslogmsg;
     if (typeof msg === 'string' && msg.length > 0) {
       while (msg[msg.length - 1].charCodeAt(0) === 10) {
@@ -94,10 +99,10 @@ class Logger {
     }
     const network = this.network ? ' <' + this.network + '>' : '';
     prionum = Logger.priorities[priority] || Logger.priorities.info;
-    consolemsg = `${this.ts()} [${process.pid}] ${priority.toUpperCase()}:${network} ${msg}`;
+    consolemsg = `${this.ts()} [${process.pid}] ${priority.toUpperCase()}:${network} ${tag ? '[' + tag + '] ' : ''}${msg}`;

     if (config.SYSLOG.ENABLED && Logger.priorities[priority] <= Logger.priorities[config.SYSLOG.MIN_PRIORITY]) {
-      syslogmsg = `<${(Logger.facilities[config.SYSLOG.FACILITY] * 8 + prionum)}> ${this.name}[${process.pid}]: ${priority.toUpperCase()}${network} ${msg}`;
+      syslogmsg = `<${(Logger.facilities[config.SYSLOG.FACILITY] * 8 + prionum)}> ${this.name}[${process.pid}]: ${priority.toUpperCase()}${network} ${tag ? '[' + tag + '] ' : ''}${msg}`;
       this.syslog(syslogmsg);
     }
     if (Logger.priorities[priority] > Logger.priorities[config.MEMPOOL.STDOUT_LOG_MIN_PRIORITY]) {
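With these changes every log method accepts an optional tag, rendered as a bracketed prefix in both console and syslog output. A minimal usage sketch (the output format follows the consolemsg template above; the timestamp and pid are illustrative):

    logger.info(`Indexed 42 blocks`, logger.tags.mining);
    // console: 2022-11-22T12:00:00 [1234] INFO: [Mining] Indexed 42 blocks

    logger.debug(`Updating channel 10/500`, logger.tags.ln);
    // console: 2022-11-22T12:00:00 [1234] DEBUG: [Lightning] Updating channel 10/500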
@@ -136,6 +136,10 @@ export interface CpfpInfo {
   effectiveFeePerVsize?: number;
 }

+export interface TransactionExtras extends CpfpInfo {
+  firstSeen?: number;
+}
+
 export interface TransactionStripped {
   txid: string;
   fee: number;
@@ -1,14 +1,16 @@
+import config from '../config';
 import DB from '../database';
 import logger from '../logger';
-import { Ancestor, CpfpInfo } from '../mempool.interfaces';
+import { Ancestor, CpfpInfo, TransactionExtras } from '../mempool.interfaces';

-interface CpfpSummary {
+interface TxInfo {
   txid: string;
   cluster: string;
   root: string;
   txs: Ancestor[];
   height: number;
   fee_rate: number;
+  firstSeen: number;
 }

 class TransactionRepository {
@@ -33,6 +35,46 @@ class TransactionRepository {
     }
   }

+  public async $saveTxFirstSeen(txid: string, seenAt: number) {
+    try {
+      await DB.query(
+        `
+          INSERT INTO transactions
+          (
+            txid,
+            first_seen
+          )
+          VALUE (?, FROM_UNIXTIME(?))
+          ON DUPLICATE KEY UPDATE
+            first_seen = FROM_UNIXTIME(?)
+        ;`,
+        [txid, seenAt, seenAt]
+      );
+    } catch (e: any) {
+      logger.err(`Cannot save transaction first seen time into db. Reason: ` + (e instanceof Error ? e.message : e));
+      throw e;
+    }
+  }
+
+  public async $getTransactionExtras(txid: string): Promise<TransactionExtras | void> {
+    try {
+      let query = `
+        SELECT *, UNIX_TIMESTAMP(first_seen) as firstSeen
+        FROM transactions
+        LEFT JOIN cpfp_clusters AS cluster ON cluster.root = transactions.cluster
+        WHERE transactions.txid = ?
+      `;
+      const [rows]: any = await DB.query(query, [txid]);
+      if (rows.length) {
+        rows[0].txs = JSON.parse(rows[0].txs) as Ancestor[];
+        return this.convertCpfp(rows[0]);
+      }
+    } catch (e) {
+      logger.err('Cannot get transaction cpfp info from db. Reason: ' + (e instanceof Error ? e.message : e));
+      throw e;
+    }
+  }
+
   public async $getCpfpInfo(txid: string): Promise<CpfpInfo | void> {
     try {
       let query = `
@@ -54,12 +96,34 @@ class TransactionRepository {
     }
   }

-  private convertCpfp(cpfp: CpfpSummary): CpfpInfo {
+  public async $clearOldFirstSeen() {
+    if (config.MEMPOOL.FIRST_SEEN_INDEXING_DAYS > 0) {
+      const cutoff = Math.floor(Date.now() / 1000) - (config.MEMPOOL.FIRST_SEEN_INDEXING_DAYS * 86400);
+      await this.$clearFirstSeenBefore(cutoff);
+    }
+  }
+
+  private async $clearFirstSeenBefore(cutoff: number) {
+    try {
+      const result = await DB.query(
+        `
+          DELETE FROM transactions
+          WHERE cluster is null AND first_seen < FROM_UNIXTIME(?)
+        ;`,
+        [cutoff]
+      );
+    } catch (e: any) {
+      logger.err(`Cannot clear old tx first seen times from db. Reason: ` + (e instanceof Error ? e.message : e));
+      throw e;
+    }
+  }
+
+  private convertCpfp(info: TxInfo): TransactionExtras {
     const descendants: Ancestor[] = [];
     const ancestors: Ancestor[] = [];
     let matched = false;
-    for (const tx of cpfp.txs) {
-      if (tx.txid === cpfp.txid) {
+    for (const tx of (info.txs || [])) {
+      if (tx.txid === info.txid) {
         matched = true;
       } else if (!matched) {
         descendants.push(tx);
@@ -68,9 +132,10 @@ class TransactionRepository {
       }
     }
     return {
-      descendants,
-      ancestors,
-      effectiveFeePerVsize: cpfp.fee_rate
+      descendants: descendants?.length ? descendants : undefined,
+      ancestors: ancestors,
+      effectiveFeePerVsize: info.fee_rate,
+      firstSeen: info.firstSeen || undefined,
     };
   }
 }
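The save/clear pair round-trips UNIX timestamps through MySQL's FROM_UNIXTIME/UNIX_TIMESTAMP, and the cleanup only deletes rows that carry no CPFP cluster. A hedged usage sketch of the new repository methods (txid is a placeholder):

    // Stamp a transaction's first-seen time; FROM_UNIXTIME expects seconds since epoch
    await transactionRepository.$saveTxFirstSeen(txid, Math.floor(Date.now() / 1000));

    // Later, during indexing, drop stamps older than FIRST_SEEN_INDEXING_DAYS
    await transactionRepository.$clearOldFirstSeen();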
@@ -23,7 +23,7 @@ class NetworkSyncService {
   constructor() {}

   public async $startService(): Promise<void> {
-    logger.info('Starting lightning network sync service');
+    logger.info(`Starting lightning network sync service`, logger.tags.ln);

     this.loggerTimer = new Date().getTime() / 1000;

@@ -33,11 +33,11 @@ class NetworkSyncService {
   private async $runTasks(): Promise<void> {
     const taskStartTime = Date.now();
     try {
-      logger.info(`Updating nodes and channels`);
+      logger.debug(`Updating nodes and channels`, logger.tags.ln);

       const networkGraph = await lightningApi.$getNetworkGraph();
       if (networkGraph.nodes.length === 0 || networkGraph.edges.length === 0) {
-        logger.info(`LN Network graph is empty, retrying in 10 seconds`);
+        logger.info(`LN Network graph is empty, retrying in 10 seconds`, logger.tags.ln);
         setTimeout(() => { this.$runTasks(); }, 10000);
         return;
       }
@@ -55,7 +55,7 @@ class NetworkSyncService {
     }

     } catch (e) {
-      logger.err('$runTasks() error: ' + (e instanceof Error ? e.message : e));
+      logger.err(`$runTasks() error: ${e instanceof Error ? e.message : e}`, logger.tags.ln);
     }

     setTimeout(() => { this.$runTasks(); }, Math.max(1, (1000 * config.LIGHTNING.GRAPH_REFRESH_INTERVAL) - (Date.now() - taskStartTime)));
@@ -79,8 +79,8 @@ class NetworkSyncService {
       ++progress;

       const elapsedSeconds = Math.round((new Date().getTime() / 1000) - this.loggerTimer);
-      if (elapsedSeconds > 10) {
-        logger.info(`Updating node ${progress}/${nodes.length}`);
+      if (elapsedSeconds > config.LIGHTNING.LOGGER_UPDATE_INTERVAL) {
+        logger.debug(`Updating node ${progress}/${nodes.length}`, logger.tags.ln);
         this.loggerTimer = new Date().getTime() / 1000;
       }

@@ -106,7 +106,7 @@ class NetworkSyncService {
         deletedRecords += await NodeRecordsRepository.$deleteUnusedRecords(node.pub_key, customRecordTypes);
       }
     }
-    logger.info(`${progress} nodes updated. ${deletedSockets} sockets deleted. ${deletedRecords} custom records deleted.`);
+    logger.debug(`${progress} nodes updated. ${deletedSockets} sockets deleted. ${deletedRecords} custom records deleted.`);

     // If a channel if not present in the graph, mark it as inactive
     await nodesApi.$setNodesInactive(graphNodesPubkeys);
@@ -138,18 +138,18 @@ class NetworkSyncService {
       ++progress;

       const elapsedSeconds = Math.round((new Date().getTime() / 1000) - this.loggerTimer);
-      if (elapsedSeconds > 10) {
-        logger.info(`Updating channel ${progress}/${channels.length}`);
+      if (elapsedSeconds > config.LIGHTNING.LOGGER_UPDATE_INTERVAL) {
+        logger.debug(`Updating channel ${progress}/${channels.length}`, logger.tags.ln);
         this.loggerTimer = new Date().getTime() / 1000;
       }
     }

-    logger.info(`${progress} channels updated`);
+    logger.debug(`${progress} channels updated`, logger.tags.ln);

     // If a channel if not present in the graph, mark it as inactive
     await channelsApi.$setChannelsInactive(graphChannelsIds);
   } catch (e) {
-    logger.err(`Cannot update channel list. Reason: ${(e instanceof Error ? e.message : e)}`);
+    logger.err(` Cannot update channel list. Reason: ${(e instanceof Error ? e.message : e)}`, logger.tags.ln);
   }
 }
@@ -184,26 +184,28 @@ class NetworkSyncService {
         if (lowest < node.first_seen) {
           const query = `UPDATE nodes SET first_seen = FROM_UNIXTIME(?) WHERE public_key = ?`;
           const params = [lowest, node.public_key];
+          ++updated;
           await DB.query(query, params);
         }
         ++progress;
         const elapsedSeconds = Math.round((new Date().getTime() / 1000) - this.loggerTimer);
-        if (elapsedSeconds > 10) {
-          logger.info(`Updating node first seen date ${progress}/${nodes.length}`);
+        if (elapsedSeconds > config.LIGHTNING.LOGGER_UPDATE_INTERVAL) {
+          logger.debug(`Updating node first seen date ${progress}/${nodes.length}`, logger.tags.ln);
           this.loggerTimer = new Date().getTime() / 1000;
-          ++updated;
         }
       }
-      logger.info(`Updated ${updated} node first seen dates`);
+      if (updated > 0) {
+        logger.debug(`Updated ${updated} node first seen dates`, logger.tags.ln);
+      }
     } catch (e) {
-      logger.err('$updateNodeFirstSeen() error: ' + (e instanceof Error ? e.message : e));
+      logger.err(`$updateNodeFirstSeen() error: ${e instanceof Error ? e.message : e}`, logger.tags.ln);
     }
   }

   private async $lookUpCreationDateFromChain(): Promise<void> {
     let progress = 0;

-    logger.info(`Running channel creation date lookup`);
+    logger.debug(`Running channel creation date lookup`, logger.tags.ln);
     try {
       const channels = await channelsApi.$getChannelsWithoutCreatedDate();
       for (const channel of channels) {
@@ -214,14 +216,17 @@ class NetworkSyncService {
         );
         ++progress;
         const elapsedSeconds = Math.round((new Date().getTime() / 1000) - this.loggerTimer);
-        if (elapsedSeconds > 10) {
-          logger.info(`Updating channel creation date ${progress}/${channels.length}`);
+        if (elapsedSeconds > config.LIGHTNING.LOGGER_UPDATE_INTERVAL) {
+          logger.debug(`Updating channel creation date ${progress}/${channels.length}`, logger.tags.ln);
           this.loggerTimer = new Date().getTime() / 1000;
         }
       }
-      logger.info(`Updated ${channels.length} channels' creation date`);
+
+      if (channels.length > 0) {
+        logger.debug(`Updated ${channels.length} channels' creation date`, logger.tags.ln);
+      }
     } catch (e) {
-      logger.err('$lookUpCreationDateFromChain() error: ' + (e instanceof Error ? e.message : e));
+      logger.err(`$lookUpCreationDateFromChain() error: ${e instanceof Error ? e.message : e}`, logger.tags.ln);
     }
   }
@@ -230,7 +235,7 @@ class NetworkSyncService {
    * mark that channel as inactive
    */
   private async $deactivateChannelsWithoutActiveNodes(): Promise<void> {
-    logger.info(`Find channels which nodes are offline`);
+    logger.debug(`Find channels which nodes are offline`, logger.tags.ln);

     try {
       const result = await DB.query<ResultSetHeader>(`
@@ -253,12 +258,10 @@ class NetworkSyncService {
       `);

       if (result[0].changedRows ?? 0 > 0) {
-        logger.info(`Marked ${result[0].changedRows} channels as inactive because they are not linked to any active node`);
-      } else {
-        logger.debug(`Marked ${result[0].changedRows} channels as inactive because they are not linked to any active node`);
+        logger.debug(`Marked ${result[0].changedRows} channels as inactive because they are not linked to any active node`, logger.tags.ln);
       }
     } catch (e) {
-      logger.err('$deactivateChannelsWithoutActiveNodes() error: ' + (e instanceof Error ? e.message : e));
+      logger.err(`$deactivateChannelsWithoutActiveNodes() error: ${e instanceof Error ? e.message : e}`, logger.tags.ln);
     }
   }
@@ -277,13 +280,13 @@ class NetworkSyncService {
       } else {
         log += ` for the first time`;
       }
-      logger.info(log);
+      logger.info(`${log}`, logger.tags.ln);

       const channels = await channelsApi.$getChannelsByStatus([0, 1]);
       for (const channel of channels) {
         const spendingTx = await bitcoinApi.$getOutspend(channel.transaction_id, channel.transaction_vout);
         if (spendingTx.spent === true && spendingTx.status?.confirmed === true) {
-          logger.debug('Marking channel: ' + channel.id + ' as closed.');
+          logger.debug(`Marking channel: ${channel.id} as closed.`, logger.tags.ln);
           await DB.query(`UPDATE channels SET status = 2, closing_date = FROM_UNIXTIME(?) WHERE id = ?`,
             [spendingTx.status.block_time, channel.id]);
           if (spendingTx.txid && !channel.closing_transaction_id) {
@@ -293,16 +296,16 @@ class NetworkSyncService {

         ++progress;
         const elapsedSeconds = Math.round((new Date().getTime() / 1000) - this.loggerTimer);
-        if (elapsedSeconds > 10) {
-          logger.info(`Checking if channel has been closed ${progress}/${channels.length}`);
+        if (elapsedSeconds > config.LIGHTNING.LOGGER_UPDATE_INTERVAL) {
+          logger.info(`Checking if channel has been closed ${progress}/${channels.length}`, logger.tags.ln);
           this.loggerTimer = new Date().getTime() / 1000;
         }
       }

       this.closedChannelsScanBlock = blocks.getCurrentBlockHeight();
-      logger.info(`Closed channels scan completed at block ${this.closedChannelsScanBlock}`);
+      logger.debug(`Closed channels scan completed at block ${this.closedChannelsScanBlock}`, logger.tags.ln);
     } catch (e) {
-      logger.err('$scanForClosedChannels() error: ' + (e instanceof Error ? e.message : e));
+      logger.err(`$scanForClosedChannels() error: ${e instanceof Error ? e.message : e}`, logger.tags.ln);
     }
   }
 }
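A recurring pattern in this file: progress logs are throttled by comparing elapsed seconds against the (apparently new) config.LIGHTNING.LOGGER_UPDATE_INTERVAL instead of a hard-coded 10. The idiom, as a standalone sketch:

    let loggerTimer = Date.now() / 1000;
    function maybeLogProgress(done: number, total: number, intervalSec: number): void {
      const elapsed = Math.round(Date.now() / 1000 - loggerTimer);
      if (elapsed > intervalSec) {               // emit at most one line per interval
        logger.debug(`Progress ${done}/${total}`, logger.tags.ln);
        loggerTimer = Date.now() / 1000;         // reset the window
      }
    }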
@@ -6,7 +6,7 @@ import { Common } from '../../api/common';

 class LightningStatsUpdater {
   public async $startService(): Promise<void> {
-    logger.info('Starting Lightning Stats service');
+    logger.info(`Starting Lightning Stats service`, logger.tags.ln);

     await this.$runTasks();
     LightningStatsImporter.$run();
@@ -27,7 +27,7 @@ class LightningStatsUpdater {
     const networkGraph = await lightningApi.$getNetworkGraph();
     await LightningStatsImporter.computeNetworkStats(date.getTime() / 1000, networkGraph);

-    logger.info(`Updated latest network stats`);
+    logger.debug(`Updated latest network stats`, logger.tags.ln);
   }
 }
@@ -21,10 +21,10 @@ class FundingTxFetcher {
     try {
       this.fundingTxCache = JSON.parse(await fsPromises.readFile(CACHE_FILE_NAME, 'utf-8'));
     } catch (e) {
-      logger.err(`Unable to parse channels funding txs disk cache. Starting from scratch`);
+      logger.err(`Unable to parse channels funding txs disk cache. Starting from scratch`, logger.tags.ln);
       this.fundingTxCache = {};
     }
-    logger.debug(`Imported ${Object.keys(this.fundingTxCache).length} funding tx amount from the disk cache`);
+    logger.debug(`Imported ${Object.keys(this.fundingTxCache).length} funding tx amount from the disk cache`, logger.tags.ln);
   }
 }
@@ -44,26 +44,27 @@ class FundingTxFetcher {
       ++channelProcessed;

       let elapsedSeconds = Math.round((new Date().getTime() / 1000) - loggerTimer);
-      if (elapsedSeconds > 10) {
+      if (elapsedSeconds > config.LIGHTNING.LOGGER_UPDATE_INTERVAL) {
         elapsedSeconds = Math.round((new Date().getTime() / 1000) - globalTimer);
         logger.info(`Indexing channels funding tx ${channelProcessed + 1} of ${channelIds.length} ` +
           `(${Math.floor(channelProcessed / channelIds.length * 10000) / 100}%) | ` +
-          `elapsed: ${elapsedSeconds} seconds`
+          `elapsed: ${elapsedSeconds} seconds`,
+          logger.tags.ln
         );
         loggerTimer = new Date().getTime() / 1000;
       }

       elapsedSeconds = Math.round((new Date().getTime() / 1000) - cacheTimer);
       if (elapsedSeconds > 60) {
-        logger.debug(`Saving ${Object.keys(this.fundingTxCache).length} funding txs cache into disk`);
+        logger.debug(`Saving ${Object.keys(this.fundingTxCache).length} funding txs cache into disk`, logger.tags.ln);
         fsPromises.writeFile(CACHE_FILE_NAME, JSON.stringify(this.fundingTxCache));
         cacheTimer = new Date().getTime() / 1000;
       }
     }

     if (this.channelNewlyProcessed > 0) {
-      logger.info(`Indexed ${this.channelNewlyProcessed} additional channels funding tx`);
-      logger.debug(`Saving ${Object.keys(this.fundingTxCache).length} funding txs cache into disk`);
+      logger.info(`Indexed ${this.channelNewlyProcessed} additional channels funding tx`, logger.tags.ln);
+      logger.debug(`Saving ${Object.keys(this.fundingTxCache).length} funding txs cache into disk`, logger.tags.ln);
       fsPromises.writeFile(CACHE_FILE_NAME, JSON.stringify(this.fundingTxCache));
     }
@@ -14,7 +14,7 @@ export async function $lookupNodeLocation(): Promise<void> {
   let nodesUpdated = 0;
   let geoNamesInserted = 0;

-  logger.info(`Running node location updater using Maxmind`);
+  logger.debug(`Running node location updater using Maxmind`, logger.tags.ln);
   try {
     const nodes = await nodesApi.$getAllNodes();
     const lookupCity = await maxmind.open<CityResponse>(config.MAXMIND.GEOLITE2_CITY);
@@ -152,8 +152,8 @@ export async function $lookupNodeLocation(): Promise<void> {

       ++progress;
       const elapsedSeconds = Math.round((new Date().getTime() / 1000) - loggerTimer);
-      if (elapsedSeconds > 10) {
-        logger.info(`Updating node location data ${progress}/${nodes.length}`);
+      if (elapsedSeconds > config.LIGHTNING.LOGGER_UPDATE_INTERVAL) {
+        logger.debug(`Updating node location data ${progress}/${nodes.length}`);
         loggerTimer = new Date().getTime() / 1000;
       }
     }
@@ -161,9 +161,7 @@ export async function $lookupNodeLocation(): Promise<void> {
     }

     if (nodesUpdated > 0) {
-      logger.info(`${nodesUpdated} nodes maxmind data updated, ${geoNamesInserted} geo names inserted`);
-    } else {
-      logger.debug(`${nodesUpdated} nodes maxmind data updated, ${geoNamesInserted} geo names inserted`);
+      logger.debug(`${nodesUpdated} nodes maxmind data updated, ${geoNamesInserted} geo names inserted`, logger.tags.ln);
     }
   } catch (e) {
     logger.err('$lookupNodeLocation() error: ' + (e instanceof Error ? e.message : e));
@@ -8,7 +8,6 @@ import { isIP } from 'net';
 import { Common } from '../../../api/common';
 import channelsApi from '../../../api/explorer/channels.api';
 import nodesApi from '../../../api/explorer/nodes.api';
-import { ResultSetHeader } from 'mysql2';

 const fsPromises = promises;

@@ -17,7 +16,7 @@ class LightningStatsImporter {

   async $run(): Promise<void> {
     const [channels]: any[] = await DB.query('SELECT short_id from channels;');
-    logger.info('Caching funding txs for currently existing channels');
+    logger.info(`Caching funding txs for currently existing channels`, logger.tags.ln);
     await fundingTxFetcher.$fetchChannelsFundingTxs(channels.map(channel => channel.short_id));

     if (config.MEMPOOL.NETWORK !== 'mainnet' || config.DATABASE.ENABLED === false) {
@@ -108,7 +107,7 @@ class LightningStatsImporter {

       const tx = await fundingTxFetcher.$fetchChannelOpenTx(short_id);
       if (!tx) {
-        logger.err(`Unable to fetch funding tx for channel ${short_id}. Capacity and creation date is unknown. Skipping channel.`);
+        logger.err(`Unable to fetch funding tx for channel ${short_id}. Capacity and creation date is unknown. Skipping channel.`, logger.tags.ln);
         continue;
       }

@@ -321,7 +320,7 @@ class LightningStatsImporter {
     try {
       fileList = await fsPromises.readdir(this.topologiesFolder);
     } catch (e) {
-      logger.err(`Unable to open topology folder at ${this.topologiesFolder}`);
+      logger.err(`Unable to open topology folder at ${this.topologiesFolder}`, logger.tags.ln);
       throw e;
     }
     // Insert history from the most recent to the oldest
@@ -359,7 +358,7 @@ class LightningStatsImporter {
         continue;
       }

-      logger.debug(`Reading ${this.topologiesFolder}/${filename}`);
+      logger.debug(`Reading ${this.topologiesFolder}/${filename}`, logger.tags.ln);
       let fileContent = '';
       try {
         fileContent = await fsPromises.readFile(`${this.topologiesFolder}/${filename}`, 'utf8');
@@ -368,7 +367,7 @@ class LightningStatsImporter {
           totalProcessed++;
           continue;
         }
-        logger.err(`Unable to open ${this.topologiesFolder}/${filename}`);
+        logger.err(`Unable to open ${this.topologiesFolder}/${filename}`, logger.tags.ln);
         totalProcessed++;
         continue;
       }
@@ -378,7 +377,7 @@ class LightningStatsImporter {
         graph = JSON.parse(fileContent);
         graph = await this.cleanupTopology(graph);
       } catch (e) {
-        logger.debug(`Invalid topology file ${this.topologiesFolder}/${filename}, cannot parse the content. Reason: ${e instanceof Error ? e.message : e}`);
+        logger.debug(`Invalid topology file ${this.topologiesFolder}/${filename}, cannot parse the content. Reason: ${e instanceof Error ? e.message : e}`, logger.tags.ln);
         totalProcessed++;
         continue;
       }
@@ -390,20 +389,20 @@ class LightningStatsImporter {
       }

       if (!logStarted) {
-        logger.info(`Founds a topology file that we did not import. Importing historical lightning stats now.`);
+        logger.info(`Founds a topology file that we did not import. Importing historical lightning stats now.`, logger.tags.ln);
         logStarted = true;
       }

       const datestr = `${new Date(timestamp * 1000).toUTCString()} (${timestamp})`;
-      logger.debug(`${datestr}: Found ${graph.nodes.length} nodes and ${graph.edges.length} channels`);
+      logger.debug(`${datestr}: Found ${graph.nodes.length} nodes and ${graph.edges.length} channels`, logger.tags.ln);

       totalProcessed++;

       if (processed > 10) {
-        logger.info(`Generating LN network stats for ${datestr}. Processed ${totalProcessed}/${fileList.length} files`);
+        logger.info(`Generating LN network stats for ${datestr}. Processed ${totalProcessed}/${fileList.length} files`, logger.tags.ln);
         processed = 0;
       } else {
-        logger.debug(`Generating LN network stats for ${datestr}. Processed ${totalProcessed}/${fileList.length} files`);
+        logger.debug(`Generating LN network stats for ${datestr}. Processed ${totalProcessed}/${fileList.length} files`, logger.tags.ln);
       }
       await fundingTxFetcher.$fetchChannelsFundingTxs(graph.edges.map(channel => channel.channel_id.slice(0, -2)));
       const stat = await this.computeNetworkStats(timestamp, graph, true);
@@ -412,10 +411,10 @@ class LightningStatsImporter {
     }

     if (totalProcessed > 0) {
-      logger.info(`Lightning network stats historical import completed`);
+      logger.notice(`Lightning network stats historical import completed`, logger.tags.ln);
     }
   } catch (e) {
-    logger.err(`Lightning network stats historical failed. Reason: ${e instanceof Error ? e.message : e}`);
+    logger.err(`Lightning network stats historical failed. Reason: ${e instanceof Error ? e.message : e}`, logger.tags.ln);
   }
 }
@@ -32,9 +32,9 @@ class PoolsUpdater
     this.lastRun = now;

     if (config.SOCKS5PROXY.ENABLED) {
-      logger.info(`Updating latest mining pools from ${this.poolsUrl} over the Tor network`);
+      logger.info(`Updating latest mining pools from ${this.poolsUrl} over the Tor network`, logger.tags.mining);
     } else {
-      logger.info(`Updating latest mining pools from ${this.poolsUrl} over clearnet`);
+      logger.info(`Updating latest mining pools from ${this.poolsUrl} over clearnet`, logger.tags.mining);
     }

     try {
@@ -53,9 +53,9 @@ class PoolsUpdater
       }

       if (this.currentSha === undefined) {
-        logger.info(`Downloading pools.json for the first time from ${this.poolsUrl}`);
+        logger.info(`Downloading pools.json for the first time from ${this.poolsUrl}`, logger.tags.mining);
       } else {
-        logger.warn(`Pools.json is outdated, fetch latest from ${this.poolsUrl}`);
+        logger.warn(`Pools.json is outdated, fetch latest from ${this.poolsUrl}`, logger.tags.mining);
       }
       const poolsJson = await this.query(this.poolsUrl);
       if (poolsJson === undefined) {
@@ -63,11 +63,11 @@ class PoolsUpdater
       }
       await poolsParser.migratePoolsJson(poolsJson);
       await this.updateDBSha(githubSha);
-      logger.notice('PoolsUpdater completed');
+      logger.notice(`PoolsUpdater completed`, logger.tags.mining);

     } catch (e) {
       this.lastRun = now - (oneWeek - oneDay); // Try again in 24h instead of waiting next week
-      logger.err('PoolsUpdater failed. Will try again in 24h. Reason: ' + (e instanceof Error ? e.message : e));
+      logger.err(`PoolsUpdater failed. Will try again in 24h. Reason: ${e instanceof Error ? e.message : e}`, logger.tags.mining);
     }
   }

@@ -81,7 +81,7 @@ class PoolsUpdater
       await DB.query('DELETE FROM state where name="pools_json_sha"');
       await DB.query(`INSERT INTO state VALUES('pools_json_sha', NULL, '${githubSha}')`);
     } catch (e) {
-      logger.err('Cannot save github pools.json sha into the db. Reason: ' + (e instanceof Error ? e.message : e));
+      logger.err('Cannot save github pools.json sha into the db. Reason: ' + (e instanceof Error ? e.message : e), logger.tags.mining);
     }
   }
 }
@@ -94,7 +94,7 @@ class PoolsUpdater
     const [rows]: any[] = await DB.query('SELECT string FROM state WHERE name="pools_json_sha"');
     return (rows.length > 0 ? rows[0].string : undefined);
   } catch (e) {
-    logger.err('Cannot fetch pools.json sha from db. Reason: ' + (e instanceof Error ? e.message : e));
+    logger.err('Cannot fetch pools.json sha from db. Reason: ' + (e instanceof Error ? e.message : e), logger.tags.mining);
     return undefined;
   }
 }
@@ -113,7 +113,7 @@ class PoolsUpdater
     }
   }

-  logger.err(`Cannot find "pools.json" in git tree (${this.treeUrl})`);
+  logger.err(`Cannot find "pools.json" in git tree (${this.treeUrl})`, logger.tags.mining);
   return undefined;
 }
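Two details in the PoolsUpdater hunks are easy to miss: on failure the job backdates `lastRun` by `oneWeek - oneDay` so the weekly run retries after roughly 24 hours, and the pools.json sha is cached in the `state` table with a delete-then-insert. A hedged sketch of that sha round-trip, shown with a parameterized insert instead of string interpolation; the `DB` helper import and table layout are assumptions based on the queries above:

```ts
import DB from '../database'; // assumed mysql2-backed query helper, as used above

// Sketch of the pools_json_sha round-trip against the `state` table.
// The (name, number, string) column layout is inferred from the queries above.
async function saveSha(githubSha: string): Promise<void> {
  await DB.query('DELETE FROM state WHERE name="pools_json_sha"');
  // Binding the sha as a parameter avoids interpolating it into the SQL string
  await DB.query('INSERT INTO state VALUES ("pools_json_sha", NULL, ?)', [githubSha]);
}

async function readSha(): Promise<string | undefined> {
  const [rows]: any[] = await DB.query('SELECT string FROM state WHERE name="pools_json_sha"');
  return rows.length > 0 ? rows[0].string : undefined;
}
```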
@@ -91,7 +91,7 @@ class KrakenApi implements PriceFeed {
       }

       if (Object.keys(priceHistory).length > 0) {
-        logger.notice(`Inserted ${Object.keys(priceHistory).length} Kraken EUR, USD, GBP, JPY, CAD, CHF and AUD weekly price history into db`);
+        logger.notice(`Inserted ${Object.keys(priceHistory).length} Kraken EUR, USD, GBP, JPY, CAD, CHF and AUD weekly price history into db`, logger.tags.mining);
       }
     }
   }
@@ -82,7 +82,7 @@ class PriceUpdater
         await this.$updatePrice();
       }
     } catch (e) {
-      logger.err(`Cannot save BTC prices in db. Reason: ${e instanceof Error ? e.message : e}`);
+      logger.err(`Cannot save BTC prices in db. Reason: ${e instanceof Error ? e.message : e}`, logger.tags.mining);
     }

     this.running = false;
@@ -115,14 +115,14 @@ class PriceUpdater
         if (price > 0) {
           prices.push(price);
         }
-        logger.debug(`${feed.name} BTC/${currency} price: ${price}`);
+        logger.debug(`${feed.name} BTC/${currency} price: ${price}`, logger.tags.mining);
       } catch (e) {
-        logger.debug(`Could not fetch BTC/${currency} price at ${feed.name}. Reason: ${(e instanceof Error ? e.message : e)}`);
+        logger.debug(`Could not fetch BTC/${currency} price at ${feed.name}. Reason: ${(e instanceof Error ? e.message : e)}`, logger.tags.mining);
       }
     }
   }
   if (prices.length === 1) {
-    logger.debug(`Only ${prices.length} feed available for BTC/${currency} price`);
+    logger.debug(`Only ${prices.length} feed available for BTC/${currency} price`, logger.tags.mining);
   }

   // Compute average price, non weighted
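The `// Compute average price, non weighted` comment marks where the per-feed quotes collected above collapse into a single value. A minimal sketch of such a non-weighted average; the rounding and the empty-list sentinel are assumptions:

```ts
// Non-weighted average of the per-feed quotes collected in the loop above.
// Rounding and the empty-list sentinel are assumptions for illustration.
function averagePrice(prices: number[]): number {
  if (prices.length === 0) {
    return -1; // no feed returned a usable quote
  }
  const sum = prices.reduce((total, price) => total + price, 0);
  return Math.round(sum / prices.length);
}

// averagePrice([16900, 17100]) -> 17000
```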
@@ -175,9 +175,9 @@ class PriceUpdater
       ++insertedCount;
     }
     if (insertedCount > 0) {
-      logger.notice(`Inserted ${insertedCount} MtGox USD weekly price history into db`);
+      logger.notice(`Inserted ${insertedCount} MtGox USD weekly price history into db`, logger.tags.mining);
     } else {
-      logger.debug(`Inserted ${insertedCount} MtGox USD weekly price history into db`);
+      logger.debug(`Inserted ${insertedCount} MtGox USD weekly price history into db`, logger.tags.mining);
     }

     // Insert Kraken weekly prices
@@ -198,7 +198,7 @@ class PriceUpdater
   private async $insertMissingRecentPrices(type: 'hour' | 'day'): Promise<void> {
     const existingPriceTimes = await PricesRepository.$getPricesTimes();

-    logger.info(`Fetching ${type === 'day' ? 'dai' : 'hour'}ly price history from exchanges and saving missing ones into the database, this may take a while`);
+    logger.info(`Fetching ${type === 'day' ? 'dai' : 'hour'}ly price history from exchanges and saving missing ones into the database`, logger.tags.mining);

     const historicalPrices: PriceHistory[] = [];

@@ -207,7 +207,7 @@ class PriceUpdater
       try {
         historicalPrices.push(await feed.$fetchRecentPrice(this.currencies, type));
       } catch (e) {
-        logger.err(`Cannot fetch hourly historical price from ${feed.name}. Ignoring this feed. Reason: ${e instanceof Error ? e.message : e}`);
+        logger.err(`Cannot fetch hourly historical price from ${feed.name}. Ignoring this feed. Reason: ${e instanceof Error ? e.message : e}`, logger.tags.mining);
       }
     }

@@ -252,9 +252,9 @@ class PriceUpdater
     }

     if (totalInserted > 0) {
-      logger.notice(`Inserted ${totalInserted} ${type === 'day' ? 'dai' : 'hour'}ly historical prices into the db`);
+      logger.notice(`Inserted ${totalInserted} ${type === 'day' ? 'dai' : 'hour'}ly historical prices into the db`, logger.tags.mining);
     } else {
-      logger.debug(`Inserted ${totalInserted} ${type === 'day' ? 'dai' : 'hour'}ly historical prices into the db`);
+      logger.debug(`Inserted ${totalInserted} ${type === 'day' ? 'dai' : 'hour'}ly historical prices into the db`, logger.tags.mining);
     }
   }
 }
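`$insertMissingRecentPrices` loads every timestamp already stored via `PricesRepository.$getPricesTimes()` and only inserts rows that are missing, which is why `totalInserted` can legitimately be zero. A rough sketch of that dedup step; the `PriceHistory` shape here is a guess for illustration:

```ts
// Assumed shape for illustration: unix timestamp -> price in one currency.
type PriceHistory = { [timestamp: number]: number };

// Dedup step: keep only timestamps not already present in the prices table.
function selectMissingTimes(existingPriceTimes: number[], fetched: PriceHistory[]): number[] {
  const known = new Set(existingPriceTimes);
  const missing: number[] = [];
  for (const history of fetched) {
    for (const time of Object.keys(history).map(Number)) {
      if (!known.has(time)) {
        known.add(time); // avoid inserting the same timestamp from two feeds
        missing.push(time);
      }
    }
  }
  return missing;
}
```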
@@ -31,6 +31,9 @@ __LIQUID_WEBSITE_URL__=${LIQUID_WEBSITE_URL:=https://liquid.network}
 __BISQ_WEBSITE_URL__=${BISQ_WEBSITE_URL:=https://bisq.markets}
 __MINING_DASHBOARD__=${MINING_DASHBOARD:=true}
 __LIGHTNING__=${LIGHTNING:=false}
+__MAINNET_BLOCK_AUDIT_START_HEIGHT__=${MAINNET_BLOCK_AUDIT_START_HEIGHT:=0}
+__TESTNET_BLOCK_AUDIT_START_HEIGHT__=${TESTNET_BLOCK_AUDIT_START_HEIGHT:=0}
+__SIGNET_BLOCK_AUDIT_START_HEIGHT__=${SIGNET_BLOCK_AUDIT_START_HEIGHT:=0}

 # Export as environment variables to be used by envsubst
 export __TESTNET_ENABLED__
@@ -52,6 +55,9 @@ export __LIQUID_WEBSITE_URL__
 export __BISQ_WEBSITE_URL__
 export __MINING_DASHBOARD__
 export __LIGHTNING__
+export __MAINNET_BLOCK_AUDIT_START_HEIGHT__
+export __TESTNET_BLOCK_AUDIT_START_HEIGHT__
+export __SIGNET_BLOCK_AUDIT_START_HEIGHT__

 folder=$(find /var/www/mempool -name "config.js" | xargs dirname)
 echo ${folder}
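These exports exist solely so a later envsubst pass can stamp the values into the frontend's config. As a rough TypeScript illustration of what that substitution does (the image runs the real GNU envsubst binary, and the `${__NAME__}` token syntax is an assumption about the template):

```ts
// Toy re-implementation of the envsubst pass, for illustration only; the
// Docker image invokes the real GNU envsubst binary. Assumes the config
// template marks values as ${__NAME__}, matching the exported names above.
function envsubst(template: string, env: Record<string, string | undefined>): string {
  // Replace ${NAME} tokens with the exported value, leaving unknown ones intact
  return template.replace(/\$\{(\w+)\}/g, (token, name) =>
    env[name] !== undefined ? String(env[name]) : token);
}

// envsubst('LIGHTNING: ${__LIGHTNING__}', { __LIGHTNING__: 'false' })
// -> 'LIGHTNING: false'
```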
@@ -53,9 +53,6 @@ export class TimeSpanComponent implements OnInit, OnChanges, OnDestroy {

   calculate() {
     const seconds = Math.floor(this.time);
-    if (seconds < 60) {
-      return $localize`:@@date-base.just-now:Just now`;
-    }
     let counter: number;
     for (const i in this.intervals) {
       if (this.intervals.hasOwnProperty(i)) {
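Removing the `seconds < 60` short-circuit means `calculate()` always falls through to the interval lookup, so a sub-minute span presumably renders as a count of seconds rather than a fixed "Just now" label, which reads better for a duration like the Confirmed row below. A sketch of that lookup; the interval values are assumptions, not the component's actual table:

```ts
// Assumed interval table; the component's actual values may differ.
const intervals: { [unit: string]: number } = {
  year: 31536000, month: 2592000, week: 604800,
  day: 86400, hour: 3600, minute: 60, second: 1,
};

// Pick the largest unit with a non-zero count, as the loop above does.
function largestUnit(seconds: number): { unit: string; counter: number } {
  for (const unit of Object.keys(intervals)) {
    const counter = Math.floor(seconds / intervals[unit]);
    if (counter > 0) {
      return { unit, counter };
    }
  }
  return { unit: 'second', counter: 0 };
}

// largestUnit(45) -> { unit: 'second', counter: 45 } (previously "Just now")
```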
@@ -54,6 +54,18 @@
       </div>
     </td>
   </tr>
+  <ng-template [ngIf]="transactionTime !== 0">
+    <tr *ngIf="transactionTime === -1; else firstSeenTmpl">
+      <td><span class="skeleton-loader"></span></td>
+      <td><span class="skeleton-loader"></span></td>
+    </tr>
+    <ng-template #firstSeenTmpl>
+      <tr>
+        <td i18n="transaction.first-seen|Transaction first seen">First seen</td>
+        <td><i><app-time-since [time]="transactionTime" [fastRender]="true"></app-time-since></i></td>
+      </tr>
+    </ng-template>
+  </ng-template>
   <tr *ngIf="latestBlock && tx.status.block_height <= latestBlock.height - 8">
     <td class="td-width" i18n="transaction.included-in-block|Transaction included in block">Included in block</td>
     <td>
@@ -63,10 +75,10 @@
   <ng-template [ngIf]="transactionTime > 0">
     <tr>
       <td i18n="transaction.confirmed|Transaction Confirmed state">Confirmed</td>
-      <td><app-time-span [time]="tx.status.block_time - transactionTime" [fastRender]="true"></app-time-span></td>
+      <td><app-time-span [time]="tx.status.block_time - transactionTime"></app-time-span></td>
     </tr>
   </ng-template>
-  <tr *ngIf="network !== 'liquid' && network !== 'liquidtestnet'">
+  <tr *ngIf="network !== 'liquid' && network !== 'liquidtestnet' && (cpfpInfo && (cpfpInfo?.bestDescendant || cpfpInfo?.descendants?.length || cpfpInfo?.ancestors?.length) || !(transactionTime > 0))">
     <td class="td-width" i18n="transaction.features|Transaction features">Features</td>
     <td>
       <app-tx-features [tx]="tx"></app-tx-features>
@@ -497,6 +509,12 @@
           <button type="button" class="btn btn-outline-info btn-sm btn-small-height float-right" (click)="showCpfpDetails = !showCpfpDetails">CPFP <fa-icon [icon]="['fas', 'info-circle']" [fixedWidth]="true"></fa-icon></button>
         </td>
       </tr>
+      <tr *ngIf="tx?.status?.confirmed && (!cpfpInfo || (!cpfpInfo?.bestDescendant && !cpfpInfo?.descendants?.length && !cpfpInfo?.ancestors?.length)) && transactionTime > 0 && network !== 'liquid' && network !== 'liquidtestnet'">
+        <td class="td-width" i18n="transaction.features|Transaction Features">Features</td>
+        <td>
+          <app-tx-features [tx]="tx"></app-tx-features>
+        </td>
+      </tr>
     </tbody>
   </table>
 </ng-template>
@@ -110,7 +110,7 @@ export class TransactionComponent implements OnInit, AfterViewInit, OnDestroy {
       .pipe(
         switchMap((txId) =>
           this.apiService
-            .getCpfpinfo$(txId)
+            .getTransactionExtras$(txId)
             .pipe(retryWhen((errors) => errors.pipe(
               mergeMap((error) => {
                 if (!this.tx?.status || this.tx.status.confirmed) {
@@ -156,6 +156,9 @@ export class TransactionComponent implements OnInit, AfterViewInit, OnDestroy {
             txFeePerVSize: this.tx.effectiveFeePerVsize,
           });
         }
+        if (cpfpInfo.firstSeen) {
+          this.transactionTime = cpfpInfo.firstSeen;
+        }
         this.cpfpInfo = cpfpInfo;
       });
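Taken together with the template change above, the component now requests the richer extras payload and, when the backend has indexed a first-seen time, backfills `transactionTime` so the "First seen" row can render even after the transaction has left the mempool. A condensed sketch of the resulting flow; the observable and subscription names are guesses, and the retry/error handling shown in the hunk is elided:

```ts
// Condensed sketch of the updated data flow; stream and subscription names
// are assumptions, not the component's actual field names.
this.fetchCpfpSubscription = this.fetchCpfp$
  .pipe(switchMap((txId) => this.apiService.getTransactionExtras$(txId)))
  .subscribe((cpfpInfo) => {
    if (cpfpInfo.firstSeen) {
      // Unix seconds, consumed by <app-time-since [time]="transactionTime">
      this.transactionTime = cpfpInfo.firstSeen;
    }
    this.cpfpInfo = cpfpInfo;
  });
```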
@@ -27,6 +27,10 @@ export interface CpfpInfo {
   effectiveFeePerVsize?: number;
 }

+export interface TransactionExtras extends CpfpInfo {
+  firstSeen?: number;
+}
+
 export interface DifficultyAdjustment {
   progressPercent: number;
   difficultyChange: number;
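Since `TransactionExtras` only adds an optional field on top of `CpfpInfo`, existing consumers compile unchanged and new code simply narrows the optional before use. A hypothetical helper showing that narrowing; timestamps are unix seconds, matching the `timestamp * 1000` conversions elsewhere in this diff:

```ts
// Hypothetical helper using the TransactionExtras interface added above.
function describeFirstSeen(extras: TransactionExtras): string {
  return extras.firstSeen !== undefined
    ? `first seen ${new Date(extras.firstSeen * 1000).toUTCString()}` // unix seconds -> ms
    : 'first-seen time not indexed for this transaction';
}
```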
@@ -1,7 +1,7 @@
 import { Injectable } from '@angular/core';
 import { HttpClient, HttpParams } from '@angular/common/http';
 import { CpfpInfo, OptimizedMempoolStats, AddressInformation, LiquidPegs, ITranslators,
-  PoolStat, BlockExtended, TransactionStripped, RewardStats, AuditScore } from '../interfaces/node-api.interface';
+  PoolStat, BlockExtended, TransactionStripped, RewardStats, AuditScore, TransactionExtras } from '../interfaces/node-api.interface';
 import { Observable } from 'rxjs';
 import { StateService } from './state.service';
 import { WebsocketResponse } from '../interfaces/websocket.interface';
@@ -115,6 +115,10 @@ export class ApiService {
     return this.httpClient.get<CpfpInfo>(this.apiBaseUrl + this.apiBasePath + '/api/v1/cpfp/' + txid);
   }

+  getTransactionExtras$(txid: string): Observable<TransactionExtras> {
+    return this.httpClient.get<TransactionExtras>(this.apiBaseUrl + this.apiBasePath + '/api/v1/extras/' + txid);
+  }
+
   validateAddress$(address: string): Observable<AddressInformation> {
     return this.httpClient.get<AddressInformation>(this.apiBaseUrl + this.apiBasePath + '/api/v1/validate-address/' + address);
   }
@@ -9,5 +9,6 @@
   "MEMPOOL_WEBSITE_URL": "https://mempool.space",
   "LIQUID_WEBSITE_URL": "https://liquid.network",
   "BISQ_WEBSITE_URL": "https://bisq.markets",
-  "ITEMS_PER_PAGE": 25
+  "ITEMS_PER_PAGE": 25,
+  "LIGHTNING": true
 }