Merge branch 'master' into nymkappa/bugfix/404-ftx-not-found
@@ -1,7 +1,9 @@
{
"MEMPOOL": {
"ENABLED": true,
"NETWORK": "__MEMPOOL_NETWORK__",
"BACKEND": "__MEMPOOL_BACKEND__",
"ENABLED": true,
"BLOCKS_SUMMARIES_INDEXING": true,
"HTTP_PORT": 1,
"SPAWN_CLUSTER_PROCS": 2,
@@ -23,7 +25,8 @@
"STDOUT_LOG_MIN_PRIORITY": "__MEMPOOL_STDOUT_LOG_MIN_PRIORITY__",
"INDEXING_BLOCKS_AMOUNT": 14,
"POOLS_JSON_TREE_URL": "__POOLS_JSON_TREE_URL__",
"POOLS_JSON_URL": "__POOLS_JSON_URL__"
"POOLS_JSON_URL": "__POOLS_JSON_URL__",
"ADVANCED_TRANSACTION_SELECTION": "__ADVANCED_TRANSACTION_SELECTION__"
},
"CORE_RPC": {
"HOST": "__CORE_RPC_HOST__",
@@ -13,6 +13,7 @@ describe('Mempool Backend Config', () => {
const config = jest.requireActual('../config').default;

expect(config.MEMPOOL).toStrictEqual({
ENABLED: true,
NETWORK: 'mainnet',
BACKEND: 'none',
BLOCKS_SUMMARIES_INDEXING: false,
@@ -36,7 +37,8 @@ describe('Mempool Backend Config', () => {
USER_AGENT: 'mempool',
STDOUT_LOG_MIN_PRIORITY: 'debug',
POOLS_JSON_TREE_URL: 'https://api.github.com/repos/mempool/mining-pools/git/trees/master',
POOLS_JSON_URL: 'https://raw.githubusercontent.com/mempool/mining-pools/master/pools.json'
POOLS_JSON_URL: 'https://raw.githubusercontent.com/mempool/mining-pools/master/pools.json',
ADVANCED_TRANSACTION_SELECTION: false,
});

expect(config.ELECTRUM).toStrictEqual({ HOST: '127.0.0.1', PORT: 3306, TLS_ENABLED: true });
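The diff threads a new ADVANCED_TRANSACTION_SELECTION flag (plus an explicit ENABLED flag) through the templated mempool-config.json, the backend defaults, and this unit test. A minimal sketch of how such a flag would typically be read from the parsed config, assuming the config singleton default-exported by '../config' as used in the test above:

import config from '../config';

// Sketch only: gate the worker-based template builder behind the new flag.
function useAdvancedSelection(): boolean {
  return config.MEMPOOL.ENABLED && config.MEMPOOL.ADVANCED_TRANSACTION_SELECTION === true;
}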
@@ -1,5 +1,10 @@
import logger from '../logger';
import { BlockExtended, TransactionExtended, MempoolBlockWithTransactions } from '../mempool.interfaces';
import config from '../config';
import bitcoinApi from './bitcoin/bitcoin-api-factory';
import { Common } from './common';
import { TransactionExtended, MempoolBlockWithTransactions, AuditScore } from '../mempool.interfaces';
import blocksRepository from '../repositories/BlocksRepository';
import blocksAuditsRepository from '../repositories/BlocksAuditsRepository';
import blocks from '../api/blocks';

const PROPAGATION_MARGIN = 180; // in seconds, time since a transaction is first seen after which it is assumed to have propagated to all miners

@@ -44,8 +49,6 @@ class Audit {

displacedWeight += (4000 - transactions[0].weight);

logger.warn(`${fresh.length} fresh, ${Object.keys(isCensored).length} possibly censored, ${displacedWeight} displaced weight`);

// we can expect an honest miner to include 'displaced' transactions in place of recent arrivals and censored txs
// these displaced transactions should occupy the first N weight units of the next projected block
let displacedWeightRemaining = displacedWeight;
@@ -73,6 +76,7 @@ class Audit {

// mark unexpected transactions in the mined block as 'added'
let overflowWeight = 0;
let totalWeight = 0;
for (const tx of transactions) {
if (inTemplate[tx.txid]) {
matches.push(tx.txid);
@@ -82,11 +86,13 @@ class Audit {
}
overflowWeight += tx.weight;
}
totalWeight += tx.weight;
}

// transactions missing from near the end of our template are probably not being censored
let overflowWeightRemaining = overflowWeight;
let lastOverflowRate = 1.00;
let overflowWeightRemaining = overflowWeight - (config.MEMPOOL.BLOCK_WEIGHT_UNITS - totalWeight);
let maxOverflowRate = 0;
let rateThreshold = 0;
index = projectedBlocks[0].transactionIds.length - 1;
while (index >= 0) {
const txid = projectedBlocks[0].transactionIds[index];
@@ -94,8 +100,11 @@ class Audit {
if (isCensored[txid]) {
delete isCensored[txid];
}
lastOverflowRate = mempool[txid].effectiveFeePerVsize;
} else if (Math.floor(mempool[txid].effectiveFeePerVsize * 100) <= Math.ceil(lastOverflowRate * 100)) { // tolerance of 0.01 sat/vb
if (mempool[txid].effectiveFeePerVsize > maxOverflowRate) {
maxOverflowRate = mempool[txid].effectiveFeePerVsize;
rateThreshold = (Math.ceil(maxOverflowRate * 100) / 100) + 0.005;
}
} else if (mempool[txid].effectiveFeePerVsize <= rateThreshold) { // tolerance of 0.01 sat/vb + rounding
if (isCensored[txid]) {
delete isCensored[txid];
}
@@ -113,6 +122,45 @@ class Audit {
score
};
}

public async $getBlockAuditScores(fromHeight?: number, limit: number = 15): Promise<AuditScore[]> {
let currentHeight = fromHeight !== undefined ? fromHeight : await blocksRepository.$mostRecentBlockHeight();
const returnScores: AuditScore[] = [];

if (currentHeight < 0) {
return returnScores;
}

for (let i = 0; i < limit && currentHeight >= 0; i++) {
const block = blocks.getBlocks().find((b) => b.height === currentHeight);
if (block?.extras?.matchRate != null) {
returnScores.push({
hash: block.id,
matchRate: block.extras.matchRate
});
} else {
let currentHash;
if (!currentHash && Common.indexingEnabled()) {
const dbBlock = await blocksRepository.$getBlockByHeight(currentHeight);
if (dbBlock && dbBlock['id']) {
currentHash = dbBlock['id'];
}
}
if (!currentHash) {
currentHash = await bitcoinApi.$getBlockHash(currentHeight);
}
if (currentHash) {
const auditScore = await blocksAuditsRepository.$getBlockAuditScore(currentHash);
returnScores.push({
hash: currentHash,
matchRate: auditScore?.matchRate
});
}
}
currentHeight--;
}
return returnScores;
}
}

export default new Audit();
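For reference, a hedged usage sketch of the new $getBlockAuditScores method added above: it walks back limit heights from fromHeight (or from the most recent indexed height), preferring the in-memory block list and falling back to the database or the Bitcoin API for block hashes. The height value and import path below are illustrative assumptions.

import audits from './api/audit'; // path depends on the caller's location

async function printRecentAuditScores(): Promise<void> {
  // 15 scores at and below an example height; matchRate may be undefined for unaudited blocks
  const scores = await audits.$getBlockAuditScores(750000, 15);
  scores.forEach((s) => console.log(s.hash, s.matchRate));
}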
@@ -34,6 +34,7 @@ class Blocks {
private lastDifficultyAdjustmentTime = 0;
private previousDifficultyRetarget = 0;
private newBlockCallbacks: ((block: BlockExtended, txIds: string[], transactions: TransactionExtended[]) => void)[] = [];
private newAsyncBlockCallbacks: ((block: BlockExtended, txIds: string[], transactions: TransactionExtended[]) => Promise<void>)[] = [];

constructor() { }

@@ -57,6 +58,10 @@ class Blocks {
this.newBlockCallbacks.push(fn);
}

public setNewAsyncBlockCallback(fn: (block: BlockExtended, txIds: string[], transactions: TransactionExtended[]) => Promise<void>) {
this.newAsyncBlockCallbacks.push(fn);
}

/**
* Return the list of transaction for a block
* @param blockHash
@@ -130,7 +135,7 @@ class Blocks {
const stripped = block.tx.map((tx) => {
return {
txid: tx.txid,
vsize: tx.vsize,
vsize: tx.weight / 4,
fee: tx.fee ? Math.round(tx.fee * 100000000) : 0,
value: Math.round(tx.vout.reduce((acc, vout) => acc + (vout.value ? vout.value : 0), 0) * 100000000)
};
@@ -195,9 +200,9 @@ class Blocks {
};
}

const auditSummary = await BlocksAuditsRepository.$getShortBlockAudit(block.id);
if (auditSummary) {
blockExtended.extras.matchRate = auditSummary.matchRate;
const auditScore = await BlocksAuditsRepository.$getBlockAuditScore(block.id);
if (auditScore != null) {
blockExtended.extras.matchRate = auditScore.matchRate;
}
}

@@ -444,6 +449,9 @@ class Blocks {
const blockExtended: BlockExtended = await this.$getBlockExtended(block, transactions);
const blockSummary: BlockSummary = this.summarizeBlock(verboseBlock);

// start async callbacks
const callbackPromises = this.newAsyncBlockCallbacks.map((cb) => cb(blockExtended, txIds, transactions));

if (Common.indexingEnabled()) {
if (!fastForwarded) {
const lastBlock = await blocksRepository.$getBlockByHeight(blockExtended.height - 1);
@@ -514,6 +522,9 @@ class Blocks {
if (!memPool.hasPriority()) {
diskCache.$saveCacheToDisk();
}

// wait for pending async callbacks to finish
await Promise.all(callbackPromises);
}
}
@@ -4,8 +4,8 @@ import logger from '../logger';
import { Common } from './common';

class DatabaseMigration {
private static currentVersion = 41;
private queryTimeout = 120000;
private static currentVersion = 44;
private queryTimeout = 900_000;
private statisticsAddedIndexed = false;
private uniqueLogs: string[] = [];

@@ -352,6 +352,19 @@ class DatabaseMigration {
if (databaseSchemaVersion < 41 && isBitcoin === true) {
await this.$executeQuery('UPDATE channels SET closing_reason = NULL WHERE closing_reason = 1');
}

if (databaseSchemaVersion < 42 && isBitcoin === true) {
await this.$executeQuery('ALTER TABLE `channels` ADD closing_resolved tinyint(1) DEFAULT 0');
}

if (databaseSchemaVersion < 43 && isBitcoin === true) {
await this.$executeQuery(this.getCreateLNNodeRecordsTableQuery(), await this.$checkIfTableExists('nodes_records'));
}

if (databaseSchemaVersion < 44 && isBitcoin === true) {
await this.$executeQuery('TRUNCATE TABLE `blocks_audits`');
await this.$executeQuery('UPDATE blocks_summaries SET template = NULL');
}
}

/**
@@ -787,6 +800,19 @@ class DatabaseMigration {
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
}

private getCreateLNNodeRecordsTableQuery(): string {
return `CREATE TABLE IF NOT EXISTS nodes_records (
public_key varchar(66) NOT NULL,
type int(10) unsigned NOT NULL,
payload blob NOT NULL,
UNIQUE KEY public_key_type (public_key, type),
INDEX (public_key),
FOREIGN KEY (public_key)
REFERENCES nodes (public_key)
ON DELETE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
}

public async $truncateIndexedData(tables: string[]) {
const allowedTables = ['blocks', 'hashrates', 'prices'];
@@ -117,6 +117,17 @@ class ChannelsApi {
}
}

public async $getUnresolvedClosedChannels(): Promise<any[]> {
try {
const query = `SELECT * FROM channels WHERE status = 2 AND closing_reason = 2 AND closing_resolved = 0 AND closing_transaction_id != ''`;
const [rows]: any = await DB.query(query);
return rows;
} catch (e) {
logger.err('$getUnresolvedClosedChannels error: ' + (e instanceof Error ? e.message : e));
throw e;
}
}

public async $getChannelsWithoutCreatedDate(): Promise<any[]> {
try {
const query = `SELECT * FROM channels WHERE created IS NULL`;
@@ -105,6 +105,18 @@ class NodesApi {
node.closed_channel_count = rows[0].closed_channel_count;
}

// Custom records
query = `
SELECT type, payload
FROM nodes_records
WHERE public_key = ?
`;
[rows] = await DB.query(query, [public_key]);
node.custom_records = {};
for (const record of rows) {
node.custom_records[record.type] = Buffer.from(record.payload, 'binary').toString('hex');
}

return node;
} catch (e) {
logger.err(`Cannot get node information for ${public_key}. Reason: ${(e instanceof Error ? e.message : e)}`);
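The new nodes_records table stores raw TLV payloads as BLOBs, and the loop above converts each payload into a hex string keyed by its TLV type. A standalone sketch of that conversion with a fabricated row value (not real data):

// 'binary' (latin1) round-trips the raw bytes coming back from MySQL
const payloadFromDb = Buffer.from('0203e8', 'hex').toString('binary'); // fabricated payload
const customRecords: { [type: number]: string } = {};
customRecords[1] = Buffer.from(payloadFromDb, 'binary').toString('hex');
// customRecords[1] === '0203e8'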
@@ -512,7 +524,37 @@ class NodesApi {

public async $getNodesPerISP(ISPId: string) {
try {
const query = `
let query = `
SELECT channels.node1_public_key AS node1PublicKey, isp1.id as isp1ID,
channels.node2_public_key AS node2PublicKey, isp2.id as isp2ID
FROM channels
JOIN nodes node1 ON node1.public_key = channels.node1_public_key
JOIN nodes node2 ON node2.public_key = channels.node2_public_key
JOIN geo_names isp1 ON isp1.id = node1.as_number
JOIN geo_names isp2 ON isp2.id = node2.as_number
WHERE channels.status = 1 AND (node1.as_number IN (?) OR node2.as_number IN (?))
ORDER BY short_id DESC
`;

const IPSIds = ISPId.split(',');
const [rows]: any = await DB.query(query, [IPSIds, IPSIds]);
const nodes = {};

const intISPIds: number[] = [];
for (const ispId of IPSIds) {
intISPIds.push(parseInt(ispId, 10));
}

for (const channel of rows) {
if (intISPIds.includes(channel.isp1ID)) {
nodes[channel.node1PublicKey] = true;
}
if (intISPIds.includes(channel.isp2ID)) {
nodes[channel.node2PublicKey] = true;
}
}

query = `
SELECT nodes.public_key, CAST(COALESCE(nodes.capacity, 0) as INT) as capacity, CAST(COALESCE(nodes.channels, 0) as INT) as channels,
nodes.alias, UNIX_TIMESTAMP(nodes.first_seen) as first_seen, UNIX_TIMESTAMP(nodes.updated_at) as updated_at,
geo_names_city.names as city, geo_names_country.names as country,
@@ -523,17 +565,18 @@ class NodesApi {
LEFT JOIN geo_names geo_names_city ON geo_names_city.id = nodes.city_id AND geo_names_city.type = 'city'
LEFT JOIN geo_names geo_names_iso ON geo_names_iso.id = nodes.country_id AND geo_names_iso.type = 'country_iso_code'
LEFT JOIN geo_names geo_names_subdivision on geo_names_subdivision.id = nodes.subdivision_id AND geo_names_subdivision.type = 'division'
WHERE nodes.as_number IN (?)
WHERE nodes.public_key IN (?)
ORDER BY capacity DESC
`;

const [rows]: any = await DB.query(query, [ISPId.split(',')]);
for (let i = 0; i < rows.length; ++i) {
rows[i].country = JSON.parse(rows[i].country);
rows[i].city = JSON.parse(rows[i].city);
rows[i].subdivision = JSON.parse(rows[i].subdivision);
const [rows2]: any = await DB.query(query, [Object.keys(nodes)]);
for (let i = 0; i < rows2.length; ++i) {
rows2[i].country = JSON.parse(rows2[i].country);
rows2[i].city = JSON.parse(rows2[i].city);
rows2[i].subdivision = JSON.parse(rows2[i].subdivision);
}
return rows;
return rows2;

} catch (e) {
logger.err(`Cannot get nodes for ISP id ${ISPId}. Reason: ${e instanceof Error ? e.message : e}`);
throw e;
@@ -7,6 +7,15 @@ import { Common } from '../../common';
* Convert a clightning "listnode" entry to a lnd node entry
*/
export function convertNode(clNode: any): ILightningApi.Node {
let custom_records: { [type: number]: string } | undefined = undefined;
if (clNode.option_will_fund) {
try {
custom_records = { '1': Buffer.from(clNode.option_will_fund.compact_lease || '', 'hex').toString('base64') };
} catch (e) {
logger.err(`Cannot decode option_will_fund compact_lease for ${clNode.nodeid}). Reason: ` + (e instanceof Error ? e.message : e));
custom_records = undefined;
}
}
return {
alias: clNode.alias ?? '',
color: `#${clNode.color ?? ''}`,
@@ -23,6 +32,7 @@ export function convertNode(clNode: any): ILightningApi.Node {
};
}) ?? [],
last_update: clNode?.last_timestamp ?? 0,
custom_records
};
}
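convertNode above maps Core Lightning's option_will_fund.compact_lease (a hex string) into custom record type 1 as base64, mirroring the lnd-style node representation. A standalone sketch of that conversion with a made-up lease value:

const compactLeaseHex = '029a00640064000000644c4b40'; // fabricated value, illustration only
const asBase64 = Buffer.from(compactLeaseHex, 'hex').toString('base64');
// custom_records would then be { 1: asBase64 }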
@@ -49,6 +49,7 @@ export namespace ILightningApi {
}[];
color: string;
features: { [key: number]: Feature };
custom_records?: { [type: number]: string };
}

export interface Info {
@@ -1,12 +1,17 @@
import logger from '../logger';
import { MempoolBlock, TransactionExtended, AuditTransaction, TransactionStripped, MempoolBlockWithTransactions, MempoolBlockDelta, Ancestor } from '../mempool.interfaces';
import { MempoolBlock, TransactionExtended, TransactionStripped, MempoolBlockWithTransactions, MempoolBlockDelta } from '../mempool.interfaces';
import { Common } from './common';
import config from '../config';
import { PairingHeap } from '../utils/pairing-heap';
import { StaticPool } from 'node-worker-threads-pool';
import path from 'path';

class MempoolBlocks {
private mempoolBlocks: MempoolBlockWithTransactions[] = [];
private mempoolBlockDeltas: MempoolBlockDelta[] = [];
private makeTemplatesPool = new StaticPool({
size: 1,
task: path.resolve(__dirname, './tx-selection-worker.js'),
});

constructor() {}

@@ -72,16 +77,15 @@ class MempoolBlocks {
const time = end - start;
logger.debug('Mempool blocks calculated in ' + time / 1000 + ' seconds');

const { blocks, deltas } = this.calculateMempoolBlocks(memPoolArray, this.mempoolBlocks);
const blocks = this.calculateMempoolBlocks(memPoolArray, this.mempoolBlocks);
const deltas = this.calculateMempoolDeltas(this.mempoolBlocks, blocks);

this.mempoolBlocks = blocks;
this.mempoolBlockDeltas = deltas;
}

private calculateMempoolBlocks(transactionsSorted: TransactionExtended[], prevBlocks: MempoolBlockWithTransactions[]):
{ blocks: MempoolBlockWithTransactions[], deltas: MempoolBlockDelta[] } {
private calculateMempoolBlocks(transactionsSorted: TransactionExtended[], prevBlocks: MempoolBlockWithTransactions[]): MempoolBlockWithTransactions[] {
const mempoolBlocks: MempoolBlockWithTransactions[] = [];
const mempoolBlockDeltas: MempoolBlockDelta[] = [];
let blockWeight = 0;
let blockSize = 0;
let transactions: TransactionExtended[] = [];
@@ -102,7 +106,11 @@ class MempoolBlocks {
mempoolBlocks.push(this.dataToMempoolBlocks(transactions, blockSize, blockWeight, mempoolBlocks.length));
}

// Calculate change from previous block states
return mempoolBlocks;
}

private calculateMempoolDeltas(prevBlocks: MempoolBlockWithTransactions[], mempoolBlocks: MempoolBlockWithTransactions[]): MempoolBlockDelta[] {
const mempoolBlockDeltas: MempoolBlockDelta[] = [];
for (let i = 0; i < Math.max(mempoolBlocks.length, prevBlocks.length); i++) {
let added: TransactionStripped[] = [];
let removed: string[] = [];
@@ -135,284 +143,25 @@ class MempoolBlocks {
removed
});
}

return {
blocks: mempoolBlocks,
deltas: mempoolBlockDeltas
};
return mempoolBlockDeltas;
}
/*
* Build projected mempool blocks using an approximation of the transaction selection algorithm from Bitcoin Core
* (see BlockAssembler in https://github.com/bitcoin/bitcoin/blob/master/src/node/miner.cpp)
*
* blockLimit: number of blocks to build in total.
* weightLimit: maximum weight of transactions to consider using the selection algorithm.
* if weightLimit is significantly lower than the mempool size, results may start to diverge from getBlockTemplate
* condenseRest: whether to ignore excess transactions or append them to the final block.
*/
public makeBlockTemplates(mempool: { [txid: string]: TransactionExtended }, blockLimit: number, weightLimit: number | null = null, condenseRest = false): MempoolBlockWithTransactions[] {
const start = Date.now();
const auditPool: { [txid: string]: AuditTransaction } = {};
const mempoolArray: AuditTransaction[] = [];
const restOfArray: TransactionExtended[] = [];

let weight = 0;
const maxWeight = weightLimit ? Math.max(4_000_000 * blockLimit, weightLimit) : Infinity;
// grab the top feerate txs up to maxWeight
Object.values(mempool).sort((a, b) => b.feePerVsize - a.feePerVsize).forEach(tx => {
weight += tx.weight;
if (weight >= maxWeight) {
restOfArray.push(tx);
return;
}
// initializing everything up front helps V8 optimize property access later
auditPool[tx.txid] = {
txid: tx.txid,
fee: tx.fee,
size: tx.size,
weight: tx.weight,
feePerVsize: tx.feePerVsize,
vin: tx.vin,
relativesSet: false,
ancestorMap: new Map<string, AuditTransaction>(),
children: new Set<AuditTransaction>(),
ancestorFee: 0,
ancestorWeight: 0,
score: 0,
used: false,
modified: false,
modifiedNode: null,
}
mempoolArray.push(auditPool[tx.txid]);
})
public async makeBlockTemplates(newMempool: { [txid: string]: TransactionExtended }, blockLimit: number, weightLimit: number | null = null, condenseRest = false): Promise<void> {
const { mempool, blocks } = await this.makeTemplatesPool.exec({ mempool: newMempool, blockLimit, weightLimit, condenseRest });
const deltas = this.calculateMempoolDeltas(this.mempoolBlocks, blocks);

// Build relatives graph & calculate ancestor scores
for (const tx of mempoolArray) {
if (!tx.relativesSet) {
this.setRelatives(tx, auditPool);
}
}

// Sort by descending ancestor score
mempoolArray.sort((a, b) => (b.score || 0) - (a.score || 0));

// Build blocks by greedily choosing the highest feerate package
// (i.e. the package rooted in the transaction with the best ancestor score)
const blocks: MempoolBlockWithTransactions[] = [];
let blockWeight = 4000;
let blockSize = 0;
let transactions: AuditTransaction[] = [];
const modified: PairingHeap<AuditTransaction> = new PairingHeap((a, b): boolean => (a.score || 0) > (b.score || 0));
let overflow: AuditTransaction[] = [];
let failures = 0;
let top = 0;
while ((top < mempoolArray.length || !modified.isEmpty()) && (condenseRest || blocks.length < blockLimit)) {
// skip invalid transactions
while (top < mempoolArray.length && (mempoolArray[top].used || mempoolArray[top].modified)) {
top++;
}

// Select best next package
let nextTx;
const nextPoolTx = mempoolArray[top];
const nextModifiedTx = modified.peek();
if (nextPoolTx && (!nextModifiedTx || (nextPoolTx.score || 0) > (nextModifiedTx.score || 0))) {
nextTx = nextPoolTx;
top++;
} else {
modified.pop();
if (nextModifiedTx) {
nextTx = nextModifiedTx;
nextTx.modifiedNode = undefined;
}
}
if (nextTx && !nextTx?.used) {
// Check if the package fits into this block
if (blockWeight + nextTx.ancestorWeight < config.MEMPOOL.BLOCK_WEIGHT_UNITS) {
blockWeight += nextTx.ancestorWeight;
const ancestors: AuditTransaction[] = Array.from(nextTx.ancestorMap.values());
// sort ancestors by dependency graph (equivalent to sorting by ascending ancestor count)
const sortedTxSet = [...ancestors.sort((a, b) => { return (a.ancestorMap.size || 0) - (b.ancestorMap.size || 0); }), nextTx];
const effectiveFeeRate = nextTx.ancestorFee / (nextTx.ancestorWeight / 4);
sortedTxSet.forEach((ancestor, i, arr) => {
const mempoolTx = mempool[ancestor.txid];
if (ancestor && !ancestor?.used) {
ancestor.used = true;
// update original copy of this tx with effective fee rate & relatives data
mempoolTx.effectiveFeePerVsize = effectiveFeeRate;
mempoolTx.ancestors = (Array.from(ancestor.ancestorMap?.values()) as AuditTransaction[]).map((a) => {
return {
txid: a.txid,
fee: a.fee,
weight: a.weight,
}
})
if (i < arr.length - 1) {
mempoolTx.bestDescendant = {
txid: arr[arr.length - 1].txid,
fee: arr[arr.length - 1].fee,
weight: arr[arr.length - 1].weight,
};
}
transactions.push(ancestor);
blockSize += ancestor.size;
}
});

// remove these as valid package ancestors for any descendants remaining in the mempool
if (sortedTxSet.length) {
sortedTxSet.forEach(tx => {
this.updateDescendants(tx, auditPool, modified);
});
}

failures = 0;
} else {
// hold this package in an overflow list while we check for smaller options
overflow.push(nextTx);
failures++;
}
}

// this block is full
const exceededPackageTries = failures > 1000 && blockWeight > (config.MEMPOOL.BLOCK_WEIGHT_UNITS - 4000);
if (exceededPackageTries && (!condenseRest || blocks.length < blockLimit - 1)) {
// construct this block
if (transactions.length) {
blocks.push(this.dataToMempoolBlocks(transactions.map(t => mempool[t.txid]), blockSize, blockWeight, blocks.length));
}
// reset for the next block
transactions = [];
blockSize = 0;
blockWeight = 4000;

// 'overflow' packages didn't fit in this block, but are valid candidates for the next
for (const overflowTx of overflow.reverse()) {
if (overflowTx.modified) {
overflowTx.modifiedNode = modified.add(overflowTx);
} else {
top--;
mempoolArray[top] = overflowTx;
}
}
overflow = [];
}
}
if (condenseRest) {
// pack any leftover transactions into the last block
for (const tx of overflow) {
if (!tx || tx?.used) {
continue;
}
blockWeight += tx.weight;
blockSize += tx.size;
transactions.push(tx);
tx.used = true;
}
const blockTransactions = transactions.map(t => mempool[t.txid])
restOfArray.forEach(tx => {
blockWeight += tx.weight;
blockSize += tx.size;
blockTransactions.push(tx);
});
if (blockTransactions.length) {
blocks.push(this.dataToMempoolBlocks(blockTransactions, blockSize, blockWeight, blocks.length));
}
transactions = [];
} else if (transactions.length) {
blocks.push(this.dataToMempoolBlocks(transactions.map(t => mempool[t.txid]), blockSize, blockWeight, blocks.length));
}

const end = Date.now();
const time = end - start;
logger.debug('Mempool templates calculated in ' + time / 1000 + ' seconds');

return blocks;
}
// traverse in-mempool ancestors
// recursion unavoidable, but should be limited to depth < 25 by mempool policy
public setRelatives(
tx: AuditTransaction,
mempool: { [txid: string]: AuditTransaction },
): void {
for (const parent of tx.vin) {
const parentTx = mempool[parent.txid];
if (parentTx && !tx.ancestorMap!.has(parent.txid)) {
tx.ancestorMap.set(parent.txid, parentTx);
parentTx.children.add(tx);
// visit each node only once
if (!parentTx.relativesSet) {
this.setRelatives(parentTx, mempool);
}
parentTx.ancestorMap.forEach((ancestor) => {
tx.ancestorMap.set(ancestor.txid, ancestor);
});
}
};
tx.ancestorFee = tx.fee || 0;
tx.ancestorWeight = tx.weight || 0;
tx.ancestorMap.forEach((ancestor) => {
tx.ancestorFee += ancestor.fee;
tx.ancestorWeight += ancestor.weight;
});
tx.score = tx.ancestorFee / (tx.ancestorWeight || 1);
tx.relativesSet = true;
}

// iterate over remaining descendants, removing the root as a valid ancestor & updating the ancestor score
// avoids recursion to limit call stack depth
private updateDescendants(
rootTx: AuditTransaction,
mempool: { [txid: string]: AuditTransaction },
modified: PairingHeap<AuditTransaction>,
): void {
const descendantSet: Set<AuditTransaction> = new Set();
// stack of nodes left to visit
const descendants: AuditTransaction[] = [];
let descendantTx;
let ancestorIndex;
let tmpScore;
rootTx.children.forEach(childTx => {
if (!descendantSet.has(childTx)) {
descendants.push(childTx);
descendantSet.add(childTx);
// copy CPFP info across to main thread's mempool
Object.keys(newMempool).forEach((txid) => {
if (newMempool[txid] && mempool[txid]) {
newMempool[txid].effectiveFeePerVsize = mempool[txid].effectiveFeePerVsize;
newMempool[txid].ancestors = mempool[txid].ancestors;
newMempool[txid].bestDescendant = mempool[txid].bestDescendant;
newMempool[txid].cpfpChecked = mempool[txid].cpfpChecked;
}
});
while (descendants.length) {
descendantTx = descendants.pop();
if (descendantTx && descendantTx.ancestorMap && descendantTx.ancestorMap.has(rootTx.txid)) {
// remove tx as ancestor
descendantTx.ancestorMap.delete(rootTx.txid);
descendantTx.ancestorFee -= rootTx.fee;
descendantTx.ancestorWeight -= rootTx.weight;
tmpScore = descendantTx.score;
descendantTx.score = descendantTx.ancestorFee / descendantTx.ancestorWeight;

if (!descendantTx.modifiedNode) {
descendantTx.modified = true;
descendantTx.modifiedNode = modified.add(descendantTx);
} else {
// rebalance modified heap if score has changed
if (descendantTx.score < tmpScore) {
modified.decreasePriority(descendantTx.modifiedNode);
} else if (descendantTx.score > tmpScore) {
modified.increasePriority(descendantTx.modifiedNode);
}
}

// add this node's children to the stack
descendantTx.children.forEach(childTx => {
// visit each node only once
if (!descendantSet.has(childTx)) {
descendants.push(childTx);
descendantSet.add(childTx);
}
});
}
}
this.mempoolBlocks = blocks;
this.mempoolBlockDeltas = deltas;
}

private dataToMempoolBlocks(transactions: TransactionExtended[],
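With this refactor, makeBlockTemplates offloads the CPU-heavy selection to tx-selection-worker.js through a single-worker StaticPool from node-worker-threads-pool and derives the deltas on the main thread afterwards. A condensed sketch of that round trip, assuming the same exec() request/response shape used in the diff:

import { StaticPool } from 'node-worker-threads-pool';
import path from 'path';

const pool = new StaticPool({ size: 1, task: path.resolve(__dirname, './tx-selection-worker.js') });

async function buildTemplates(newMempool: { [txid: string]: any }): Promise<any[]> {
  // The worker replies with the annotated mempool and the projected blocks.
  const { mempool, blocks } = await pool.exec({ mempool: newMempool, blockLimit: 8, weightLimit: null, condenseRest: true });
  return blocks;
}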
@@ -20,6 +20,8 @@ class Mempool {
maxmempool: 300000000, mempoolminfee: 0.00001000, minrelaytxfee: 0.00001000 };
private mempoolChangedCallback: ((newMempool: {[txId: string]: TransactionExtended; }, newTransactions: TransactionExtended[],
deletedTransactions: TransactionExtended[]) => void) | undefined;
private asyncMempoolChangedCallback: ((newMempool: {[txId: string]: TransactionExtended; }, newTransactions: TransactionExtended[],
deletedTransactions: TransactionExtended[]) => void) | undefined;

private txPerSecondArray: number[] = [];
private txPerSecond: number = 0;
@@ -63,6 +65,11 @@ class Mempool {
this.mempoolChangedCallback = fn;
}

public setAsyncMempoolChangedCallback(fn: (newMempool: { [txId: string]: TransactionExtended; },
newTransactions: TransactionExtended[], deletedTransactions: TransactionExtended[]) => Promise<void>) {
this.asyncMempoolChangedCallback = fn;
}

public getMempool(): { [txid: string]: TransactionExtended } {
return this.mempoolCache;
}
@@ -72,6 +79,9 @@ class Mempool {
if (this.mempoolChangedCallback) {
this.mempoolChangedCallback(this.mempoolCache, [], []);
}
if (this.asyncMempoolChangedCallback) {
this.asyncMempoolChangedCallback(this.mempoolCache, [], []);
}
}

public async $updateMemPoolInfo() {
@@ -103,12 +113,11 @@ class Mempool {
return txTimes;
}

public async $updateMempool() {
logger.debug('Updating mempool');
public async $updateMempool(): Promise<void> {
logger.debug(`Updating mempool...`);
const start = new Date().getTime();
let hasChange: boolean = false;
const currentMempoolSize = Object.keys(this.mempoolCache).length;
let txCount = 0;
const transactions = await bitcoinApi.$getRawMempool();
const diff = transactions.length - currentMempoolSize;
const newTransactions: TransactionExtended[] = [];
@@ -124,7 +133,6 @@ class Mempool {
try {
const transaction = await transactionUtils.$getTransactionExtended(txid);
this.mempoolCache[txid] = transaction;
txCount++;
if (this.inSync) {
this.txPerSecondArray.push(new Date().getTime());
this.vBytesPerSecondArray.push({
@@ -133,14 +141,9 @@ class Mempool {
});
}
hasChange = true;
if (diff > 0) {
logger.debug('Fetched transaction ' + txCount + ' / ' + diff);
} else {
logger.debug('Fetched transaction ' + txCount);
}
newTransactions.push(transaction);
} catch (e) {
logger.debug('Error finding transaction in mempool: ' + (e instanceof Error ? e.message : e));
logger.debug(`Error finding transaction '${txid}' in the mempool: ` + (e instanceof Error ? e.message : e));
}
}

@@ -194,11 +197,13 @@ class Mempool {
if (this.mempoolChangedCallback && (hasChange || deletedTransactions.length)) {
this.mempoolChangedCallback(this.mempoolCache, newTransactions, deletedTransactions);
}
if (this.asyncMempoolChangedCallback && (hasChange || deletedTransactions.length)) {
await this.asyncMempoolChangedCallback(this.mempoolCache, newTransactions, deletedTransactions);
}

const end = new Date().getTime();
const time = end - start;
logger.debug(`New mempool size: ${Object.keys(this.mempoolCache).length} Change: ${diff}`);
logger.debug('Mempool updated in ' + time / 1000 + ' seconds');
logger.debug(`Mempool updated in ${time / 1000} seconds. New size: ${Object.keys(this.mempoolCache).length} (${diff > 0 ? '+' + diff : diff})`);
}

public handleRbfTransactions(rbfTransactions: { [txid: string]: TransactionExtended; }) {
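The new async callback lets $updateMempool await downstream work (such as worker-based template building) before an update completes. A minimal wiring sketch, mirroring how the existing synchronous callback is registered; the import paths and the exact call site are assumptions, not part of this diff:

import memPool from './api/mempool';                      // assumed path
import websocketHandler from './api/websocket-handler';   // assumed path

memPool.setAsyncMempoolChangedCallback(websocketHandler.handleMempoolChange.bind(websocketHandler));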
@@ -1,6 +1,7 @@
import { Application, Request, Response } from 'express';
import config from "../../config";
import logger from '../../logger';
import audits from '../audit';
import BlocksAuditsRepository from '../../repositories/BlocksAuditsRepository';
import BlocksRepository from '../../repositories/BlocksRepository';
import DifficultyAdjustmentsRepository from '../../repositories/DifficultyAdjustmentsRepository';
@@ -26,7 +27,11 @@ class MiningRoutes {
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/sizes-weights/:interval', this.$getHistoricalBlockSizeAndWeight)
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/difficulty-adjustments/:interval', this.$getDifficultyAdjustments)
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/predictions/:interval', this.$getHistoricalBlockPrediction)
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/audit/scores', this.$getBlockAuditScores)
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/audit/scores/:height', this.$getBlockAuditScores)
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/audit/score/:hash', this.$getBlockAuditScore)
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/audit/:hash', this.$getBlockAudit)
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/timestamp/:timestamp', this.$getHeightFromTimestamp)
;
}

@@ -252,6 +257,52 @@ class MiningRoutes {
res.status(500).send(e instanceof Error ? e.message : e);
}
}

private async $getHeightFromTimestamp(req: Request, res: Response) {
try {
const timestamp = parseInt(req.params.timestamp, 10);
// This will prevent people from entering milliseconds etc.
// Block timestamps are allowed to be up to 2 hours off, so 24 hours
// will never put the maximum value before the most recent block
const nowPlus1day = Math.floor(Date.now() / 1000) + 60 * 60 * 24;
// Prevent non-integers that are not seconds
if (!/^[1-9][0-9]*$/.test(req.params.timestamp) || timestamp > nowPlus1day) {
throw new Error(`Invalid timestamp, value must be Unix seconds`);
}
const result = await BlocksRepository.$getBlockHeightFromTimestamp(
timestamp,
);
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 1000 * 300).toUTCString());
res.json(result);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}

private async $getBlockAuditScores(req: Request, res: Response) {
try {
const height = req.params.height === undefined ? undefined : parseInt(req.params.height, 10);
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
res.json(await audits.$getBlockAuditScores(height, 15));
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}

public async $getBlockAuditScore(req: Request, res: Response) {
try {
const audit = await BlocksAuditsRepository.$getBlockAuditScore(req.params.hash);

res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 1000 * 3600 * 24).toUTCString());
res.json(audit || 'null');
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
}

export default new MiningRoutes();
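The new routes expose audit scores over HTTP beneath config.MEMPOOL.API_URL_PREFIX. A hedged client-side example; the host and the /api/v1/ prefix are assumptions about a typical deployment, not something this diff defines:

// Run inside an async context; host and prefix are illustrative.
const res = await fetch('https://mempool.example/api/v1/mining/blocks/audit/scores/750000');
const scores: { hash: string; matchRate?: number }[] = await res.json();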
@@ -14,10 +14,10 @@ interface Pool {
class PoolsParser {
miningPools: any[] = [];
unknownPool: any = {
'name': "Unknown",
'link': "https://learnmeabitcoin.com/technical/coinbase-transaction",
'regexes': "[]",
'addresses': "[]",
'name': 'Unknown',
'link': 'https://learnmeabitcoin.com/technical/coinbase-transaction',
'regexes': '[]',
'addresses': '[]',
'slug': 'unknown'
};
slugWarnFlag = false;
@@ -25,7 +25,7 @@ class PoolsParser {
/**
* Parse the pools.json file, consolidate the data and dump it into the database
*/
public async migratePoolsJson(poolsJson: object) {
public async migratePoolsJson(poolsJson: object): Promise<void> {
if (['mainnet', 'testnet', 'signet'].includes(config.MEMPOOL.NETWORK) === false) {
return;
}
@@ -81,6 +81,7 @@ class PoolsParser {
// Finally, we generate the final consolidated pools data
const finalPoolDataAdd: Pool[] = [];
const finalPoolDataUpdate: Pool[] = [];
const finalPoolDataRename: Pool[] = [];
for (let i = 0; i < poolNames.length; ++i) {
let allAddresses: string[] = [];
let allRegexes: string[] = [];
@@ -127,8 +128,26 @@ class PoolsParser {
finalPoolDataUpdate.push(poolObj);
}
} else {
logger.debug(`Add '${finalPoolName}' mining pool`);
finalPoolDataAdd.push(poolObj);
// Double check that if we're not just renaming a pool (same address same regex)
const [poolToRename]: any[] = await DB.query(`
SELECT * FROM pools
WHERE addresses = ? OR regexes = ?`,
[JSON.stringify(poolObj.addresses), JSON.stringify(poolObj.regexes)]
);
if (poolToRename && poolToRename.length > 0) {
// We're actually renaming an existing pool
finalPoolDataRename.push({
'name': poolObj.name,
'link': poolObj.link,
'regexes': allRegexes,
'addresses': allAddresses,
'slug': slug
});
logger.debug(`Rename '${poolToRename[0].name}' mining pool to ${poolObj.name}`);
} else {
logger.debug(`Add '${finalPoolName}' mining pool`);
finalPoolDataAdd.push(poolObj);
}
}

this.miningPools.push({
@@ -145,7 +164,9 @@ class PoolsParser {
return;
}

if (finalPoolDataAdd.length > 0 || finalPoolDataUpdate.length > 0) {
if (finalPoolDataAdd.length > 0 || finalPoolDataUpdate.length > 0 ||
finalPoolDataRename.length > 0
) {
logger.debug(`Update pools table now`);

// Add new mining pools into the database
@@ -169,8 +190,22 @@ class PoolsParser {
;`);
}

// Rename mining pools
const renameQueries: string[] = [];
for (let i = 0; i < finalPoolDataRename.length; ++i) {
renameQueries.push(`
UPDATE pools
SET name='${finalPoolDataRename[i].name}', link='${finalPoolDataRename[i].link}',
slug='${finalPoolDataRename[i].slug}'
WHERE regexes='${JSON.stringify(finalPoolDataRename[i].regexes)}'
AND addresses='${JSON.stringify(finalPoolDataRename[i].addresses)}'
;`);
}

try {
await this.$deleteBlocskToReindex(finalPoolDataUpdate);
if (finalPoolDataAdd.length > 0 || updateQueries.length > 0) {
await this.$deleteBlocskToReindex(finalPoolDataUpdate);
}

if (finalPoolDataAdd.length > 0) {
await DB.query({ sql: queryAdd, timeout: 120000 });
@@ -178,6 +213,9 @@ class PoolsParser {
for (const query of updateQueries) {
await DB.query({ sql: query, timeout: 120000 });
}
for (const query of renameQueries) {
await DB.query({ sql: query, timeout: 120000 });
}
await this.insertUnknownPool();
logger.info('Mining pools.json import completed');
} catch (e) {
backend/src/api/tx-selection-worker.ts (new file, 336 lines)
@@ -0,0 +1,336 @@
import config from '../config';
import logger from '../logger';
import { TransactionExtended, MempoolBlockWithTransactions, AuditTransaction } from '../mempool.interfaces';
import { PairingHeap } from '../utils/pairing-heap';
import { Common } from './common';
import { parentPort } from 'worker_threads';

if (parentPort) {
parentPort.on('message', (params: { mempool: { [txid: string]: TransactionExtended }, blockLimit: number, weightLimit: number | null, condenseRest: boolean}) => {
const { mempool, blocks } = makeBlockTemplates(params);

// return the result to main thread.
if (parentPort) {
parentPort.postMessage({ mempool, blocks });
}
});
}

/*
* Build projected mempool blocks using an approximation of the transaction selection algorithm from Bitcoin Core
* (see BlockAssembler in https://github.com/bitcoin/bitcoin/blob/master/src/node/miner.cpp)
*
* blockLimit: number of blocks to build in total.
* weightLimit: maximum weight of transactions to consider using the selection algorithm.
* if weightLimit is significantly lower than the mempool size, results may start to diverge from getBlockTemplate
* condenseRest: whether to ignore excess transactions or append them to the final block.
*/
function makeBlockTemplates({ mempool, blockLimit, weightLimit, condenseRest }: { mempool: { [txid: string]: TransactionExtended }, blockLimit: number, weightLimit?: number | null, condenseRest?: boolean | null })
: { mempool: { [txid: string]: TransactionExtended }, blocks: MempoolBlockWithTransactions[] } {
const start = Date.now();
const auditPool: { [txid: string]: AuditTransaction } = {};
const mempoolArray: AuditTransaction[] = [];
const restOfArray: TransactionExtended[] = [];

let weight = 0;
const maxWeight = weightLimit ? Math.max(4_000_000 * blockLimit, weightLimit) : Infinity;
// grab the top feerate txs up to maxWeight
Object.values(mempool).sort((a, b) => b.feePerVsize - a.feePerVsize).forEach(tx => {
weight += tx.weight;
if (weight >= maxWeight) {
restOfArray.push(tx);
return;
}
// initializing everything up front helps V8 optimize property access later
auditPool[tx.txid] = {
txid: tx.txid,
fee: tx.fee,
size: tx.size,
weight: tx.weight,
feePerVsize: tx.feePerVsize,
vin: tx.vin,
relativesSet: false,
ancestorMap: new Map<string, AuditTransaction>(),
children: new Set<AuditTransaction>(),
ancestorFee: 0,
ancestorWeight: 0,
score: 0,
used: false,
modified: false,
modifiedNode: null,
};
mempoolArray.push(auditPool[tx.txid]);
});

// Build relatives graph & calculate ancestor scores
for (const tx of mempoolArray) {
if (!tx.relativesSet) {
setRelatives(tx, auditPool);
}
}

// Sort by descending ancestor score
mempoolArray.sort((a, b) => (b.score || 0) - (a.score || 0));

// Build blocks by greedily choosing the highest feerate package
// (i.e. the package rooted in the transaction with the best ancestor score)
const blocks: MempoolBlockWithTransactions[] = [];
let blockWeight = 4000;
let blockSize = 0;
let transactions: AuditTransaction[] = [];
const modified: PairingHeap<AuditTransaction> = new PairingHeap((a, b): boolean => (a.score || 0) > (b.score || 0));
let overflow: AuditTransaction[] = [];
let failures = 0;
let top = 0;
while ((top < mempoolArray.length || !modified.isEmpty()) && (condenseRest || blocks.length < blockLimit)) {
// skip invalid transactions
while (top < mempoolArray.length && (mempoolArray[top].used || mempoolArray[top].modified)) {
top++;
}
// Select best next package
let nextTx;
const nextPoolTx = mempoolArray[top];
const nextModifiedTx = modified.peek();
if (nextPoolTx && (!nextModifiedTx || (nextPoolTx.score || 0) > (nextModifiedTx.score || 0))) {
nextTx = nextPoolTx;
top++;
} else {
modified.pop();
if (nextModifiedTx) {
nextTx = nextModifiedTx;
nextTx.modifiedNode = undefined;
}
}

if (nextTx && !nextTx?.used) {
// Check if the package fits into this block
if (blockWeight + nextTx.ancestorWeight < config.MEMPOOL.BLOCK_WEIGHT_UNITS) {
blockWeight += nextTx.ancestorWeight;
const ancestors: AuditTransaction[] = Array.from(nextTx.ancestorMap.values());
// sort ancestors by dependency graph (equivalent to sorting by ascending ancestor count)
const sortedTxSet = [...ancestors.sort((a, b) => { return (a.ancestorMap.size || 0) - (b.ancestorMap.size || 0); }), nextTx];
const effectiveFeeRate = nextTx.ancestorFee / (nextTx.ancestorWeight / 4);
sortedTxSet.forEach((ancestor, i, arr) => {
const mempoolTx = mempool[ancestor.txid];
if (ancestor && !ancestor?.used) {
ancestor.used = true;
// update original copy of this tx with effective fee rate & relatives data
mempoolTx.effectiveFeePerVsize = effectiveFeeRate;
mempoolTx.ancestors = (Array.from(ancestor.ancestorMap?.values()) as AuditTransaction[]).map((a) => {
return {
txid: a.txid,
fee: a.fee,
weight: a.weight,
};
});
mempoolTx.cpfpChecked = true;
if (i < arr.length - 1) {
mempoolTx.bestDescendant = {
txid: arr[arr.length - 1].txid,
fee: arr[arr.length - 1].fee,
weight: arr[arr.length - 1].weight,
};
} else {
mempoolTx.bestDescendant = null;
}
transactions.push(ancestor);
blockSize += ancestor.size;
}
});

// remove these as valid package ancestors for any descendants remaining in the mempool
if (sortedTxSet.length) {
sortedTxSet.forEach(tx => {
updateDescendants(tx, auditPool, modified);
});
}

failures = 0;
} else {
// hold this package in an overflow list while we check for smaller options
overflow.push(nextTx);
failures++;
}
}

// this block is full
const exceededPackageTries = failures > 1000 && blockWeight > (config.MEMPOOL.BLOCK_WEIGHT_UNITS - 4000);
const queueEmpty = top >= mempoolArray.length && modified.isEmpty();
if ((exceededPackageTries || queueEmpty) && (!condenseRest || blocks.length < blockLimit - 1)) {
// construct this block
if (transactions.length) {
blocks.push(dataToMempoolBlocks(transactions.map(t => mempool[t.txid]), blockSize, blockWeight, blocks.length));
}
// reset for the next block
transactions = [];
blockSize = 0;
blockWeight = 4000;

// 'overflow' packages didn't fit in this block, but are valid candidates for the next
for (const overflowTx of overflow.reverse()) {
if (overflowTx.modified) {
overflowTx.modifiedNode = modified.add(overflowTx);
} else {
top--;
mempoolArray[top] = overflowTx;
}
}
overflow = [];
}
}
if (condenseRest) {
// pack any leftover transactions into the last block
for (const tx of overflow) {
if (!tx || tx?.used) {
continue;
}
blockWeight += tx.weight;
blockSize += tx.size;
const mempoolTx = mempool[tx.txid];
// update original copy of this tx with effective fee rate & relatives data
mempoolTx.effectiveFeePerVsize = tx.score;
mempoolTx.ancestors = (Array.from(tx.ancestorMap?.values()) as AuditTransaction[]).map((a) => {
return {
txid: a.txid,
fee: a.fee,
weight: a.weight,
};
});
mempoolTx.bestDescendant = null;
mempoolTx.cpfpChecked = true;
transactions.push(tx);
tx.used = true;
}
const blockTransactions = transactions.map(t => mempool[t.txid]);
restOfArray.forEach(tx => {
blockWeight += tx.weight;
blockSize += tx.size;
tx.effectiveFeePerVsize = tx.feePerVsize;
tx.cpfpChecked = false;
tx.ancestors = [];
tx.bestDescendant = null;
blockTransactions.push(tx);
});
if (blockTransactions.length) {
blocks.push(dataToMempoolBlocks(blockTransactions, blockSize, blockWeight, blocks.length));
}
transactions = [];
} else if (transactions.length) {
blocks.push(dataToMempoolBlocks(transactions.map(t => mempool[t.txid]), blockSize, blockWeight, blocks.length));
}
const end = Date.now();
const time = end - start;
logger.debug('Mempool templates calculated in ' + time / 1000 + ' seconds');

return {
mempool,
blocks
};
}

// traverse in-mempool ancestors
// recursion unavoidable, but should be limited to depth < 25 by mempool policy
function setRelatives(
tx: AuditTransaction,
mempool: { [txid: string]: AuditTransaction },
): void {
for (const parent of tx.vin) {
const parentTx = mempool[parent.txid];
if (parentTx && !tx.ancestorMap?.has(parent.txid)) {
tx.ancestorMap.set(parent.txid, parentTx);
parentTx.children.add(tx);
// visit each node only once
if (!parentTx.relativesSet) {
setRelatives(parentTx, mempool);
}
parentTx.ancestorMap.forEach((ancestor) => {
tx.ancestorMap.set(ancestor.txid, ancestor);
});
}
};
tx.ancestorFee = tx.fee || 0;
tx.ancestorWeight = tx.weight || 0;
tx.ancestorMap.forEach((ancestor) => {
tx.ancestorFee += ancestor.fee;
tx.ancestorWeight += ancestor.weight;
});
tx.score = tx.ancestorFee / ((tx.ancestorWeight / 4) || 1);
tx.relativesSet = true;
}
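setRelatives scores each package as total ancestor fee divided by ancestor vsize (weight / 4), i.e. an effective sat/vB for the whole ancestor set, which is what the selection loop sorts on. A small worked example with made-up numbers:

// Fabricated package: a 1,000 sat / 800 WU child with one 100 sat / 400 WU unconfirmed parent.
const ancestorFee = 1000 + 100;                           // 1,100 sats
const ancestorWeight = 800 + 400;                         // 1,200 WU -> 300 vbytes
const score = ancestorFee / ((ancestorWeight / 4) || 1);  // ≈ 3.67 sat/vB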
// iterate over remaining descendants, removing the root as a valid ancestor & updating the ancestor score
// avoids recursion to limit call stack depth
function updateDescendants(
rootTx: AuditTransaction,
mempool: { [txid: string]: AuditTransaction },
modified: PairingHeap<AuditTransaction>,
): void {
const descendantSet: Set<AuditTransaction> = new Set();
// stack of nodes left to visit
const descendants: AuditTransaction[] = [];
let descendantTx;
let tmpScore;
rootTx.children.forEach(childTx => {
if (!descendantSet.has(childTx)) {
descendants.push(childTx);
descendantSet.add(childTx);
}
});
while (descendants.length) {
descendantTx = descendants.pop();
if (descendantTx && descendantTx.ancestorMap && descendantTx.ancestorMap.has(rootTx.txid)) {
// remove tx as ancestor
descendantTx.ancestorMap.delete(rootTx.txid);
descendantTx.ancestorFee -= rootTx.fee;
descendantTx.ancestorWeight -= rootTx.weight;
tmpScore = descendantTx.score;
descendantTx.score = descendantTx.ancestorFee / (descendantTx.ancestorWeight / 4);

if (!descendantTx.modifiedNode) {
descendantTx.modified = true;
descendantTx.modifiedNode = modified.add(descendantTx);
} else {
// rebalance modified heap if score has changed
if (descendantTx.score < tmpScore) {
modified.decreasePriority(descendantTx.modifiedNode);
} else if (descendantTx.score > tmpScore) {
modified.increasePriority(descendantTx.modifiedNode);
}
}

// add this node's children to the stack
descendantTx.children.forEach(childTx => {
// visit each node only once
if (!descendantSet.has(childTx)) {
descendants.push(childTx);
descendantSet.add(childTx);
}
});
}
}
}

function dataToMempoolBlocks(transactions: TransactionExtended[],
blockSize: number, blockWeight: number, blocksIndex: number): MempoolBlockWithTransactions {
let rangeLength = 4;
if (blocksIndex === 0) {
rangeLength = 8;
}
if (transactions.length > 4000) {
rangeLength = 6;
} else if (transactions.length > 10000) {
rangeLength = 8;
}
return {
blockSize: blockSize,
blockVSize: blockWeight / 4,
nTx: transactions.length,
totalFees: transactions.reduce((acc, cur) => acc + cur.fee, 0),
medianFee: Common.percentile(transactions.map((tx) => tx.effectiveFeePerVsize), config.MEMPOOL.RECOMMENDED_FEE_PERCENTILE),
feeRange: Common.getFeesInRange(transactions, rangeLength),
transactionIds: transactions.map((tx) => tx.txid),
transactions: transactions.map((tx) => Common.stripTransaction(tx)),
};
}
@@ -244,13 +244,18 @@ class WebsocketHandler {
|
||||
});
|
||||
}
|
||||
|
||||
handleMempoolChange(newMempool: { [txid: string]: TransactionExtended },
|
||||
newTransactions: TransactionExtended[], deletedTransactions: TransactionExtended[]) {
|
||||
async handleMempoolChange(newMempool: { [txid: string]: TransactionExtended },
|
||||
newTransactions: TransactionExtended[], deletedTransactions: TransactionExtended[]): Promise<void> {
|
||||
if (!this.wss) {
|
||||
throw new Error('WebSocket.Server is not set');
|
||||
}
|
||||
|
||||
mempoolBlocks.updateMempoolBlocks(newMempool);
|
||||
if (config.MEMPOOL.ADVANCED_TRANSACTION_SELECTION) {
|
||||
await mempoolBlocks.makeBlockTemplates(newMempool, 8, null, true);
|
||||
}
|
||||
else {
|
||||
mempoolBlocks.updateMempoolBlocks(newMempool);
|
||||
}
|
||||
const mBlocks = mempoolBlocks.getMempoolBlocks();
|
||||
const mBlockDeltas = mempoolBlocks.getMempoolBlockDeltas();
|
||||
const mempoolInfo = memPool.getMempoolInfo();
|
||||
@@ -405,22 +410,25 @@ class WebsocketHandler {
      }
    });
  }

  handleNewBlock(block: BlockExtended, txIds: string[], transactions: TransactionExtended[]): void {
  async handleNewBlock(block: BlockExtended, txIds: string[], transactions: TransactionExtended[]): Promise<void> {
    if (!this.wss) {
      throw new Error('WebSocket.Server is not set');
    }

    let mBlocks: undefined | MempoolBlock[];
    let mBlockDeltas: undefined | MempoolBlockDelta[];
    let matchRate;
    const _memPool = memPool.getMempool();
    let matchRate;

    if (Common.indexingEnabled()) {
      const mempoolCopy = cloneMempool(_memPool);
      const projectedBlocks = mempoolBlocks.makeBlockTemplates(mempoolCopy, 2);
    if (config.MEMPOOL.ADVANCED_TRANSACTION_SELECTION) {
      await mempoolBlocks.makeBlockTemplates(_memPool, 2);
    } else {
      mempoolBlocks.updateMempoolBlocks(_memPool);
    }

      const { censored, added, score } = Audit.auditBlock(transactions, projectedBlocks, mempoolCopy);
    if (Common.indexingEnabled() && memPool.isInSync()) {
      const projectedBlocks = mempoolBlocks.getMempoolBlocksWithTransactions();

      const { censored, added, score } = Audit.auditBlock(transactions, projectedBlocks, _memPool);
      matchRate = Math.round(score * 100 * 100) / 100;

      const stripped = projectedBlocks[0]?.transactions ? projectedBlocks[0].transactions.map((tx) => {
@@ -459,9 +467,13 @@ class WebsocketHandler {
      delete _memPool[txId];
    }

    mempoolBlocks.updateMempoolBlocks(_memPool);
    mBlocks = mempoolBlocks.getMempoolBlocks();
    mBlockDeltas = mempoolBlocks.getMempoolBlockDeltas();
    if (config.MEMPOOL.ADVANCED_TRANSACTION_SELECTION) {
      await mempoolBlocks.makeBlockTemplates(_memPool, 2);
    } else {
      mempoolBlocks.updateMempoolBlocks(_memPool);
    }
    const mBlocks = mempoolBlocks.getMempoolBlocks();
    const mBlockDeltas = mempoolBlocks.getMempoolBlockDeltas();

    const da = difficultyAdjustment.getDifficultyAdjustment();
    const fees = feeApi.getRecommendedFee();
@@ -569,14 +581,4 @@ class WebsocketHandler {
    }
  }

function cloneMempool(mempool: { [txid: string]: TransactionExtended }): { [txid: string]: TransactionExtended } {
  const cloned = {};
  Object.keys(mempool).forEach(id => {
    cloned[id] = {
      ...mempool[id]
    };
  });
  return cloned;
}

export default new WebsocketHandler();

@@ -4,6 +4,7 @@ const configFromFile = require(

interface IConfig {
  MEMPOOL: {
    ENABLED: boolean;
    NETWORK: 'mainnet' | 'testnet' | 'signet' | 'liquid' | 'liquidtestnet';
    BACKEND: 'esplora' | 'electrum' | 'none';
    HTTP_PORT: number;
@@ -28,6 +29,7 @@ interface IConfig {
    AUTOMATIC_BLOCK_REINDEXING: boolean;
    POOLS_JSON_URL: string,
    POOLS_JSON_TREE_URL: string,
    ADVANCED_TRANSACTION_SELECTION: boolean;
  };
  ESPLORA: {
    REST_API_URL: string;
@@ -119,6 +121,7 @@ interface IConfig {

const defaults: IConfig = {
  'MEMPOOL': {
    'ENABLED': true,
    'NETWORK': 'mainnet',
    'BACKEND': 'none',
    'HTTP_PORT': 8999,
@@ -143,6 +146,7 @@ const defaults: IConfig = {
    'AUTOMATIC_BLOCK_REINDEXING': false,
    'POOLS_JSON_URL': 'https://raw.githubusercontent.com/mempool/mining-pools/master/pools.json',
    'POOLS_JSON_TREE_URL': 'https://api.github.com/repos/mempool/mining-pools/git/trees/master',
    'ADVANCED_TRANSACTION_SELECTION': false,
  },
  'ESPLORA': {
    'REST_API_URL': 'http://127.0.0.1:3000',
@@ -224,11 +228,11 @@ const defaults: IConfig = {
    'BISQ_URL': 'https://bisq.markets/api',
    'BISQ_ONION': 'http://bisqmktse2cabavbr2xjq7xw3h6g5ottemo5rolfcwt6aly6tp5fdryd.onion/api'
  },
  "MAXMIND": {
  'MAXMIND': {
    'ENABLED': false,
    "GEOLITE2_CITY": "/usr/local/share/GeoIP/GeoLite2-City.mmdb",
    "GEOLITE2_ASN": "/usr/local/share/GeoIP/GeoLite2-ASN.mmdb",
    "GEOIP2_ISP": "/usr/local/share/GeoIP/GeoIP2-ISP.mmdb"
    'GEOLITE2_CITY': '/usr/local/share/GeoIP/GeoLite2-City.mmdb',
    'GEOLITE2_ASN': '/usr/local/share/GeoIP/GeoLite2-ASN.mmdb',
    'GEOIP2_ISP': '/usr/local/share/GeoIP/GeoIP2-ISP.mmdb'
  },
};
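configFromFile is loaded at the top of this module; how it overrides these defaults is outside the lines shown here. As a simplified sketch of the override idea only (assumed shape, not the actual constructor), file values win and the defaults fill the gaps, so setting ADVANCED_TRANSACTION_SELECTION to true in mempool-config.json enables the new template-based projection while every other MEMPOOL default is kept:

// Simplified sketch, assuming configFromFile mirrors the IConfig sections.
const mempoolSettings = { ...defaults.MEMPOOL, ...(configFromFile.MEMPOOL || {}) };
// mempoolSettings.ADVANCED_TRANSACTION_SELECTION stays false unless the file sets it.
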
@@ -1,4 +1,4 @@
import express from "express";
import express from 'express';
import { Application, Request, Response, NextFunction } from 'express';
import * as http from 'http';
import * as WebSocket from 'ws';
@@ -34,7 +34,7 @@ import miningRoutes from './api/mining/mining-routes';
import bisqRoutes from './api/bisq/bisq.routes';
import liquidRoutes from './api/liquid/liquid.routes';
import bitcoinRoutes from './api/bitcoin/bitcoin.routes';
import fundingTxFetcher from "./tasks/lightning/sync-tasks/funding-tx-fetcher";
import fundingTxFetcher from './tasks/lightning/sync-tasks/funding-tx-fetcher';

class Server {
  private wss: WebSocket.Server | undefined;
@@ -74,7 +74,7 @@ class Server {
    }
  }

  async startServer(worker = false) {
  async startServer(worker = false): Promise<void> {
    logger.notice(`Starting Mempool Server${worker ? ' (worker)' : ''}... (${backendInfo.getShortCommitHash()})`);

    this.app
@@ -92,7 +92,9 @@ class Server {
    this.setUpWebsocketHandling();

    await syncAssets.syncAssets$();
    diskCache.loadMempoolCache();
    if (config.MEMPOOL.ENABLED) {
      diskCache.loadMempoolCache();
    }

    if (config.DATABASE.ENABLED) {
      await DB.checkDbConnection();
@@ -127,7 +129,10 @@ class Server {
    fiatConversion.startService();

    this.setUpHttpApiRoutes();
    this.runMainUpdateLoop();

    if (config.MEMPOOL.ENABLED) {
      this.runMainUpdateLoop();
    }

    if (config.BISQ.ENABLED) {
      bisq.startBisqService();
@@ -149,7 +154,7 @@ class Server {
    });
  }

  async runMainUpdateLoop() {
  async runMainUpdateLoop(): Promise<void> {
    try {
    try {
      await memPool.$updateMemPoolInfo();
@@ -183,7 +188,7 @@ class Server {
    }
  }

  async $runLightningBackend() {
  async $runLightningBackend(): Promise<void> {
    try {
      await fundingTxFetcher.$init();
      await networkSyncService.$startService();
@@ -195,7 +200,7 @@ class Server {
    };
  }

  setUpWebsocketHandling() {
  setUpWebsocketHandling(): void {
    if (this.wss) {
      websocketHandler.setWebsocketServer(this.wss);
    }
@@ -209,19 +214,21 @@ class Server {
      });
    }
    websocketHandler.setupConnectionHandling();
    statistics.setNewStatisticsEntryCallback(websocketHandler.handleNewStatistic.bind(websocketHandler));
    blocks.setNewBlockCallback(websocketHandler.handleNewBlock.bind(websocketHandler));
    memPool.setMempoolChangedCallback(websocketHandler.handleMempoolChange.bind(websocketHandler));
    if (config.MEMPOOL.ENABLED) {
      statistics.setNewStatisticsEntryCallback(websocketHandler.handleNewStatistic.bind(websocketHandler));
      memPool.setAsyncMempoolChangedCallback(websocketHandler.handleMempoolChange.bind(websocketHandler));
      blocks.setNewAsyncBlockCallback(websocketHandler.handleNewBlock.bind(websocketHandler));
    }
    fiatConversion.setProgressChangedCallback(websocketHandler.handleNewConversionRates.bind(websocketHandler));
    loadingIndicators.setProgressChangedCallback(websocketHandler.handleLoadingChanged.bind(websocketHandler));
  }

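With config.MEMPOOL.ENABLED gating the registrations, the mempool and block callbacks are now registered through async variants, matching the Promise-returning handleMempoolChange and handleNewBlock above. A hedged sketch of the callback shape this implies (the real setter lives in the mempool module and is not shown in this diff):

// Sketch of the signature implied by setAsyncMempoolChangedCallback: the
// caller can await the websocket handler before starting its next poll cycle.
type AsyncMempoolChangedCallback = (
  newMempool: { [txid: string]: TransactionExtended },
  newTransactions: TransactionExtended[],
  deletedTransactions: TransactionExtended[],
) => Promise<void>;
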
  setUpHttpApiRoutes() {
  setUpHttpApiRoutes(): void {
    bitcoinRoutes.initRoutes(this.app);
    if (config.STATISTICS.ENABLED && config.DATABASE.ENABLED) {
    if (config.STATISTICS.ENABLED && config.DATABASE.ENABLED && config.MEMPOOL.ENABLED) {
      statisticsRoutes.initRoutes(this.app);
    }
    if (Common.indexingEnabled()) {
    if (Common.indexingEnabled() && config.MEMPOOL.ENABLED) {
      miningRoutes.initRoutes(this.app);
    }
    if (config.BISQ.ENABLED) {
@@ -238,4 +245,4 @@ class Server {
  }
}

const server = new Server();
((): Server => new Server())();

@@ -32,6 +32,11 @@ export interface BlockAudit {
  matchRate: number,
}

export interface AuditScore {
  hash: string,
  matchRate?: number,
}

export interface MempoolBlock {
  blockSize: number;
  blockVSize: number;

@@ -1,6 +1,6 @@
import DB from '../database';
import logger from '../logger';
import { BlockAudit } from '../mempool.interfaces';
import { BlockAudit, AuditScore } from '../mempool.interfaces';

class BlocksAuditRepositories {
  public async $saveAudit(audit: BlockAudit): Promise<void> {
@@ -72,10 +72,10 @@ class BlocksAuditRepositories {
    }
  }

  public async $getShortBlockAudit(hash: string): Promise<any> {
  public async $getBlockAuditScore(hash: string): Promise<AuditScore> {
    try {
      const [rows]: any[] = await DB.query(
        `SELECT hash as id, match_rate as matchRate
        `SELECT hash, match_rate as matchRate
        FROM blocks_audits
        WHERE blocks_audits.hash = "${hash}"
      `);
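The renamed $getBlockAuditScore now returns rows shaped like the AuditScore interface added above (hash plus an optional matchRate). A hypothetical caller, assuming the repository instance is imported as blocksAuditsRepository:

// Hypothetical usage sketch, not part of the diff.
async function logAuditScore(blockHash: string): Promise<void> {
  const audit: AuditScore = await blocksAuditsRepository.$getBlockAuditScore(blockHash);
  console.log(`block ${audit.hash} matched its projected template at ${audit.matchRate ?? 'n/a'}%`);
}
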
@@ -392,6 +392,36 @@ class BlocksRepository {
    }
  }

  /**
   * Get the most recent block at or directly before a given timestamp
   * @param timestamp number unix time in seconds
   * @returns The height and timestamp of a block (timestamp might vary from given timestamp)
   */
  public async $getBlockHeightFromTimestamp(
    timestamp: number,
  ): Promise<{ height: number; hash: string; timestamp: number }> {
    try {
      // Get the most recent block at or before the given timestamp
      const query = `SELECT height, hash, blockTimestamp as timestamp FROM blocks
        WHERE blockTimestamp <= FROM_UNIXTIME(?)
        ORDER BY blockTimestamp DESC
        LIMIT 1`;
      const params = [timestamp];
      const [rows]: any[][] = await DB.query(query, params);
      if (rows.length === 0) {
        throw new Error(`No block was found before timestamp ${timestamp}`);
      }

      return rows[0];
    } catch (e) {
      logger.err(
        'Cannot get block height from timestamp from the db. Reason: ' +
          (e instanceof Error ? e.message : e),
      );
      throw e;
    }
  }

  /**
   * Return blocks height
   */

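A hypothetical caller of the new lookup, assuming the repository instance is imported as blocksRepository; note that the returned timestamp belongs to the matched block, so it can differ from the input:

// Hypothetical usage sketch, not part of the diff.
async function printBlockAt(unixTime: number): Promise<void> {
  const { height, hash, timestamp } = await blocksRepository.$getBlockHeightFromTimestamp(unixTime);
  console.log(`block #${height} (${hash}) was mined at ${timestamp}`);
}
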
backend/src/repositories/NodeRecordsRepository.ts (new file, 67 lines)
@@ -0,0 +1,67 @@
import { ResultSetHeader, RowDataPacket } from 'mysql2';
import DB from '../database';
import logger from '../logger';

export interface NodeRecord {
  publicKey: string; // node public key
  type: number; // TLV extension record type
  payload: string; // base64 record payload
}

class NodesRecordsRepository {
  public async $saveRecord(record: NodeRecord): Promise<void> {
    try {
      const payloadBytes = Buffer.from(record.payload, 'base64');
      await DB.query(`
        INSERT INTO nodes_records(public_key, type, payload)
        VALUE (?, ?, ?)
        ON DUPLICATE KEY UPDATE
          payload = ?
      `, [record.publicKey, record.type, payloadBytes, payloadBytes]);
    } catch (e: any) {
      if (e.errno !== 1062) { // ER_DUP_ENTRY - Not an issue, just ignore this
        logger.err(`Cannot save node record (${[record.publicKey, record.type, record.payload]}) into db. Reason: ` + (e instanceof Error ? e.message : e));
        // We don't throw, not a critical issue if we miss some nodes records
      }
    }
  }

  public async $getRecordTypes(publicKey: string): Promise<any> {
    try {
      const query = `
        SELECT type FROM nodes_records
        WHERE public_key = ?
      `;
      const [rows] = await DB.query<RowDataPacket[][]>(query, [publicKey]);
      return rows.map(row => row['type']);
    } catch (e) {
      logger.err(`Cannot retrieve custom records for ${publicKey} from db. Reason: ` + (e instanceof Error ? e.message : e));
      return [];
    }
  }

  public async $deleteUnusedRecords(publicKey: string, recordTypes: number[]): Promise<number> {
    try {
      let query;
      if (recordTypes.length) {
        query = `
          DELETE FROM nodes_records
          WHERE public_key = ?
          AND type NOT IN (${recordTypes.map(type => `${type}`).join(',')})
        `;
      } else {
        query = `
          DELETE FROM nodes_records
          WHERE public_key = ?
        `;
      }
      const [result] = await DB.query<ResultSetHeader>(query, [publicKey]);
      return result.affectedRows;
    } catch (e) {
      logger.err(`Cannot delete unused custom records for ${publicKey} from db. Reason: ` + (e instanceof Error ? e.message : e));
      return 0;
    }
  }
}

export default new NodesRecordsRepository();
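A hypothetical usage sketch of the new repository (the record type and payload below are made-up example values, not taken from the diff): upsert the records a node currently announces, then prune any types it no longer advertises:

// Hypothetical usage sketch, not part of the diff.
async function syncExampleRecords(pubKey: string): Promise<void> {
  const announcedType = 34349334; // example TLV type only
  await NodeRecordsRepository.$saveRecord({ publicKey: pubKey, type: announcedType, payload: 'aGVsbG8=' });
  const deleted = await NodeRecordsRepository.$deleteUnusedRecords(pubKey, [announcedType]);
  console.log(`${deleted} stale records removed`);
}
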
@@ -13,6 +13,7 @@ import fundingTxFetcher from './sync-tasks/funding-tx-fetcher';
import NodesSocketsRepository from '../../repositories/NodesSocketsRepository';
import { Common } from '../../api/common';
import blocks from '../../api/blocks';
import NodeRecordsRepository from '../../repositories/NodeRecordsRepository';

class NetworkSyncService {
  loggerTimer = 0;
@@ -63,6 +64,7 @@ class NetworkSyncService {
    let progress = 0;

    let deletedSockets = 0;
    let deletedRecords = 0;
    const graphNodesPubkeys: string[] = [];
    for (const node of nodes) {
      const latestUpdated = await channelsApi.$getLatestChannelUpdateForNode(node.pub_key);
@@ -84,8 +86,23 @@ class NetworkSyncService {
        addresses.push(socket.addr);
      }
      deletedSockets += await NodesSocketsRepository.$deleteUnusedSockets(node.pub_key, addresses);

      const oldRecordTypes = await NodeRecordsRepository.$getRecordTypes(node.pub_key);
      const customRecordTypes: number[] = [];
      for (const [type, payload] of Object.entries(node.custom_records || {})) {
        const numericalType = parseInt(type);
        await NodeRecordsRepository.$saveRecord({
          publicKey: node.pub_key,
          type: numericalType,
          payload,
        });
        customRecordTypes.push(numericalType);
      }
      if (oldRecordTypes.reduce((changed, type) => changed || customRecordTypes.indexOf(type) === -1, false)) {
        deletedRecords += await NodeRecordsRepository.$deleteUnusedRecords(node.pub_key, customRecordTypes);
      }
    }
    logger.info(`${progress} nodes updated. ${deletedSockets} sockets deleted`);
    logger.info(`${progress} nodes updated. ${deletedSockets} sockets deleted. ${deletedRecords} custom records deleted.`);

    // If a channel is not present in the graph, mark it as inactive
    await nodesApi.$setNodesInactive(graphNodesPubkeys);
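The reduce over oldRecordTypes above is just a stale-record check; an equivalent and arguably clearer formulation (illustration only, not part of the diff):

// True when any previously stored record type is no longer announced.
function hasStaleRecords(oldTypes: number[], currentTypes: number[]): boolean {
  return oldTypes.some((type) => !currentTypes.includes(type));
}
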
@@ -309,7 +326,7 @@ class NetworkSyncService {
      └──────────────────┘
  */

  private async $runClosedChannelsForensics(): Promise<void> {
  private async $runClosedChannelsForensics(skipUnresolved: boolean = false): Promise<void> {
    if (!config.ESPLORA.REST_API_URL) {
      return;
    }
@@ -318,9 +335,18 @@ class NetworkSyncService {

    try {
      logger.info(`Started running closed channel forensics...`);
      const channels = await channelsApi.$getClosedChannelsWithoutReason();
      let channels;
      const closedChannels = await channelsApi.$getClosedChannelsWithoutReason();
      if (skipUnresolved) {
        channels = closedChannels;
      } else {
        const unresolvedChannels = await channelsApi.$getUnresolvedClosedChannels();
        channels = [...closedChannels, ...unresolvedChannels];
      }

      for (const channel of channels) {
        let reason = 0;
        let resolvedForceClose = false;
        // Only Esplora backend can retrieve spent transaction outputs
        try {
          let outspends: IEsploraApi.Outspend[] | undefined;
@@ -350,6 +376,7 @@ class NetworkSyncService {
            reason = 3;
          } else {
            reason = 2;
            resolvedForceClose = true;
          }
        } else {
          /*
@@ -374,6 +401,9 @@ class NetworkSyncService {
        if (reason) {
          logger.debug('Setting closing reason ' + reason + ' for channel: ' + channel.id + '.');
          await DB.query(`UPDATE channels SET closing_reason = ? WHERE id = ?`, [reason, channel.id]);
          if (reason === 2 && resolvedForceClose) {
            await DB.query(`UPDATE channels SET closing_resolved = ? WHERE id = ?`, [true, channel.id]);
          }
        }
      } catch (e) {
        logger.err(`$runClosedChannelsForensics() failed for channel ${channel.short_id}. Reason: ${e instanceof Error ? e.message : e}`);

@@ -6,6 +6,7 @@ import DB from '../../../database';
import logger from '../../../logger';
import { ResultSetHeader } from 'mysql2';
import * as IPCheck from '../../../utils/ipcheck.js';
import { Reader } from 'mmdb-lib';

export async function $lookupNodeLocation(): Promise<void> {
  let loggerTimer = new Date().getTime() / 1000;
@@ -18,7 +19,10 @@ export async function $lookupNodeLocation(): Promise<void> {
  const nodes = await nodesApi.$getAllNodes();
  const lookupCity = await maxmind.open<CityResponse>(config.MAXMIND.GEOLITE2_CITY);
  const lookupAsn = await maxmind.open<AsnResponse>(config.MAXMIND.GEOLITE2_ASN);
  const lookupIsp = await maxmind.open<IspResponse>(config.MAXMIND.GEOIP2_ISP);
  let lookupIsp: Reader<IspResponse> | null = null;
  try {
    lookupIsp = await maxmind.open<IspResponse>(config.MAXMIND.GEOIP2_ISP);
  } catch (e) { }

  for (const node of nodes) {
    const sockets: string[] = node.sockets.split(',');
@@ -29,7 +33,10 @@ export async function $lookupNodeLocation(): Promise<void> {
    if (hasClearnet && ip !== '127.0.1.1' && ip !== '127.0.0.1') {
      const city = lookupCity.get(ip);
      const asn = lookupAsn.get(ip);
      const isp = lookupIsp.get(ip);
      let isp: IspResponse | null = null;
      if (lookupIsp) {
        isp = lookupIsp.get(ip);
      }

      let asOverwrite: any | undefined;
      if (asn && (IPCheck.match(ip, '170.75.160.0/20') || IPCheck.match(ip, '172.81.176.0/21'))) {