diff --git a/backend/src/api/audit.ts b/backend/src/api/audit.ts
index 6efb50938..2d9fbc430 100644
--- a/backend/src/api/audit.ts
+++ b/backend/src/api/audit.ts
@@ -1,3 +1,4 @@
+import logger from '../logger';
 import { BlockExtended, TransactionExtended, MempoolBlockWithTransactions } from '../mempool.interfaces';
 
 const PROPAGATION_MARGIN = 180; // in seconds, time since a transaction is first seen after which it is assumed to have propagated to all miners
diff --git a/backend/src/api/mempool-blocks.ts b/backend/src/api/mempool-blocks.ts
index 2fb524b11..d0c2a4f63 100644
--- a/backend/src/api/mempool-blocks.ts
+++ b/backend/src/api/mempool-blocks.ts
@@ -1,7 +1,8 @@
 import logger from '../logger';
-import { MempoolBlock, TransactionExtended, TransactionStripped, MempoolBlockWithTransactions, MempoolBlockDelta, TransactionSet, Ancestor } from '../mempool.interfaces';
+import { MempoolBlock, TransactionExtended, AuditTransaction, TransactionStripped, MempoolBlockWithTransactions, MempoolBlockDelta, Ancestor } from '../mempool.interfaces';
 import { Common } from './common';
 import config from '../config';
+import { PairingHeap } from '../utils/pairing-heap';
 
 class MempoolBlocks {
   private mempoolBlocks: MempoolBlockWithTransactions[] = [];
@@ -72,6 +73,7 @@ class MempoolBlocks {
     logger.debug('Mempool blocks calculated in ' + time / 1000 + ' seconds');
 
     const { blocks, deltas } = this.calculateMempoolBlocks(memPoolArray, this.mempoolBlocks);
+
     this.mempoolBlocks = blocks;
     this.mempoolBlockDeltas = deltas;
   }
@@ -144,226 +146,273 @@ class MempoolBlocks {
    * Build projected mempool blocks using an approximation of the transaction selection algorithm from Bitcoin Core
    * (see BlockAssembler in https://github.com/bitcoin/bitcoin/blob/master/src/node/miner.cpp)
    *
-   * templateLimit: number of blocks to build using the full algo,
-   *                remaining blocks up to blockLimit will skip the expensive updateDescendants step
-   *
-   * blockLimit: number of blocks to build in total. Excess transactions will be ignored.
+   * blockLimit: number of blocks to build in total.
+   * weightLimit: maximum weight of transactions to consider using the selection algorithm.
+   *              if weightLimit is significantly lower than the mempool size, results may start to diverge from getBlockTemplate
+   * condenseRest: whether to ignore excess transactions or append them to the final block.
    */
-  public makeBlockTemplates(mempool: { [txid: string]: TransactionExtended }, templateLimit: number = Infinity, blockLimit: number = Infinity): MempoolBlockWithTransactions[] {
-    const start = new Date().getTime();
-    const txSets: { [txid: string]: TransactionSet } = {};
-    const mempoolArray: TransactionExtended[] = Object.values(mempool);
-
-    mempoolArray.forEach((tx) => {
-      tx.bestDescendant = null;
-      tx.ancestors = [];
-      tx.cpfpChecked = false;
-      tx.effectiveFeePerVsize = tx.feePerVsize;
-      txSets[tx.txid] = {
-        fee: 0,
-        weight: 1,
+  public makeBlockTemplates(mempool: { [txid: string]: TransactionExtended }, blockLimit: number, weightLimit: number | null = null, condenseRest = false): MempoolBlockWithTransactions[] {
+    const start = Date.now();
+    const auditPool: { [txid: string]: AuditTransaction } = {};
+    const mempoolArray: AuditTransaction[] = [];
+    const restOfArray: TransactionExtended[] = [];
+
+    let weight = 0;
+    const maxWeight = weightLimit ? Math.max(4_000_000 * blockLimit, weightLimit) : Infinity;
+    // grab the top feerate txs up to maxWeight
+    Object.values(mempool).sort((a, b) => b.feePerVsize - a.feePerVsize).forEach(tx => {
+      weight += tx.weight;
+      if (weight >= maxWeight) {
+        restOfArray.push(tx);
+        return;
+      }
+      // initializing everything up front helps V8 optimize property access later
+      auditPool[tx.txid] = {
+        txid: tx.txid,
+        fee: tx.fee,
+        size: tx.size,
+        weight: tx.weight,
+        feePerVsize: tx.feePerVsize,
+        vin: tx.vin,
+        relativesSet: false,
+        ancestorMap: new Map(),
+        children: new Set(),
+        ancestorFee: 0,
+        ancestorWeight: 0,
         score: 0,
-        children: [],
-        available: true,
+        used: false,
         modified: false,
-      };
-    });
+        modifiedNode: null,
+      }
+      mempoolArray.push(auditPool[tx.txid]);
+    })
 
     // Build relatives graph & calculate ancestor scores
-    mempoolArray.forEach((tx) => {
-      this.setRelatives(tx, mempool, txSets);
-    });
+    for (const tx of mempoolArray) {
+      if (!tx.relativesSet) {
+        this.setRelatives(tx, auditPool);
+      }
+    }
 
     // Sort by descending ancestor score
-    const byAncestor = (a, b): number => this.sortByAncestorScore(a, b, txSets);
-    mempoolArray.sort(byAncestor);
+    mempoolArray.sort((a, b) => (b.score || 0) - (a.score || 0));
 
     // Build blocks by greedily choosing the highest feerate package
     // (i.e. the package rooted in the transaction with the best ancestor score)
     const blocks: MempoolBlockWithTransactions[] = [];
     let blockWeight = 4000;
     let blockSize = 0;
-    let transactions: TransactionExtended[] = [];
-    let modified: TransactionExtended[] = [];
-    let overflow: TransactionExtended[] = [];
+    let transactions: AuditTransaction[] = [];
+    const modified: PairingHeap<AuditTransaction> = new PairingHeap((a, b): boolean => (a.score || 0) > (b.score || 0));
+    let overflow: AuditTransaction[] = [];
     let failures = 0;
-    while ((mempoolArray.length || modified.length) && blocks.length < blockLimit) {
-      const simpleMode = blocks.length >= templateLimit;
-      let anyModified = false;
-      // Select best next package
-      let nextTx;
-      if (mempoolArray.length && (!modified.length || txSets[mempoolArray[0].txid]?.score > txSets[modified[0].txid]?.score)) {
-        nextTx = mempoolArray.shift();
-        if (txSets[nextTx?.txid]?.modified) {
-          nextTx = null;
-        }
-      } else {
-        nextTx = modified.shift();
+    let top = 0;
+    while ((top < mempoolArray.length || !modified.isEmpty()) && (condenseRest || blocks.length < blockLimit)) {
+      // skip invalid transactions
+      while (top < mempoolArray.length && (mempoolArray[top].used || mempoolArray[top].modified)) {
+        top++;
       }
 
-      if (nextTx && txSets[nextTx.txid]?.available) {
-        const nextTxSet = txSets[nextTx.txid];
+      // Select best next package
+      let nextTx;
+      const nextPoolTx = mempoolArray[top];
+      const nextModifiedTx = modified.peek();
+      if (nextPoolTx && (!nextModifiedTx || (nextPoolTx.score || 0) > (nextModifiedTx.score || 0))) {
+        nextTx = nextPoolTx;
+        top++;
+      } else {
+        modified.pop();
+        if (nextModifiedTx) {
+          nextTx = nextModifiedTx;
+          nextTx.modifiedNode = undefined;
+        }
+      }
+
+      if (nextTx && !nextTx?.used) {
         // Check if the package fits into this block
-        if (nextTxSet && blockWeight + nextTxSet.weight < config.MEMPOOL.BLOCK_WEIGHT_UNITS) {
-          blockWeight += nextTxSet.weight;
-          // sort txSet by dependency graph (equivalent to sorting by ascending ancestor count)
-          const sortedTxSet = [...nextTx.ancestors.sort((a, b) => {
-            return (mempool[a.txid]?.ancestors?.length || 0) - (mempool[b.txid]?.ancestors?.length || 0);
-          }), nextTx];
+        if (blockWeight + nextTx.ancestorWeight < config.MEMPOOL.BLOCK_WEIGHT_UNITS) {
+          blockWeight += nextTx.ancestorWeight;
+          const ancestors: AuditTransaction[] = Array.from(nextTx.ancestorMap.values());
+          // sort ancestors by dependency graph (equivalent to sorting by ascending ancestor count)
+          const sortedTxSet = [...ancestors.sort((a, b) => { return (a.ancestorMap.size || 0) - (b.ancestorMap.size || 0); }), nextTx];
+          const effectiveFeeRate = nextTx.ancestorFee / (nextTx.ancestorWeight / 4);
           sortedTxSet.forEach((ancestor, i, arr) => {
-            const tx = mempool[ancestor.txid];
-            const txSet = txSets[ancestor.txid];
-            if (txSet.available) {
-              txSet.available = false;
-              tx.effectiveFeePerVsize = nextTxSet.fee / (nextTxSet.weight / 4);
-              tx.cpfpChecked = true;
+            const mempoolTx = mempool[ancestor.txid];
+            if (ancestor && !ancestor?.used) {
+              ancestor.used = true;
+              // update original copy of this tx with effective fee rate & relatives data
+              mempoolTx.effectiveFeePerVsize = effectiveFeeRate;
+              mempoolTx.ancestors = (Array.from(ancestor.ancestorMap?.values()) as AuditTransaction[]).map((a) => {
+                return {
+                  txid: a.txid,
+                  fee: a.fee,
+                  weight: a.weight,
+                }
+              })
               if (i < arr.length - 1) {
-                tx.bestDescendant = {
-                  txid: arr[i + 1].txid,
-                  fee: arr[i + 1].fee,
-                  weight: arr[i + 1].weight,
+                mempoolTx.bestDescendant = {
+                  txid: arr[arr.length - 1].txid,
+                  fee: arr[arr.length - 1].fee,
+                  weight: arr[arr.length - 1].weight,
                 };
               }
-              transactions.push(tx);
-              blockSize += tx.size;
+              transactions.push(ancestor);
+              blockSize += ancestor.size;
             }
           });
-          // remove these as valid package ancestors for any remaining descendants
-          if (!simpleMode) {
+          // remove these as valid package ancestors for any descendants remaining in the mempool
+          if (sortedTxSet.length) {
             sortedTxSet.forEach(tx => {
-              anyModified = this.updateDescendants(tx, tx, mempool, txSets, modified);
+              this.updateDescendants(tx, auditPool, modified);
             });
           }
           failures = 0;
         } else {
           // hold this package in an overflow list while we check for smaller options
-          txSets[nextTx.txid].modified = true;
           overflow.push(nextTx);
           failures++;
         }
       }
 
       // this block is full
-      const outOfTransactions = !mempoolArray.length && !modified.length;
       const exceededPackageTries = failures > 1000 && blockWeight > (config.MEMPOOL.BLOCK_WEIGHT_UNITS - 4000);
-      const exceededSimpleTries = failures > 0 && simpleMode;
-      if (outOfTransactions || exceededPackageTries || exceededSimpleTries) {
+      if (exceededPackageTries && (!condenseRest || blocks.length < blockLimit - 1)) {
         // construct this block
-        blocks.push(this.dataToMempoolBlocks(transactions, blockSize, blockWeight, blocks.length));
+        if (transactions.length) {
+          blocks.push(this.dataToMempoolBlocks(transactions.map(t => mempool[t.txid]), blockSize, blockWeight, blocks.length));
+        }
         // reset for the next block
         transactions = [];
         blockSize = 0;
         blockWeight = 4000;
 
         // 'overflow' packages didn't fit in this block, but are valid candidates for the next
-        if (overflow.length) {
-          modified = modified.concat(overflow);
-          overflow = [];
-          anyModified = true;
+        for (const overflowTx of overflow.reverse()) {
+          if (overflowTx.modified) {
+            overflowTx.modifiedNode = modified.add(overflowTx);
+          } else {
+            top--;
+            mempoolArray[top] = overflowTx;
+          }
         }
-      }
-
-      // re-sort modified list if necessary
-      if (anyModified) {
-        modified = modified.filter(tx => txSets[tx.txid]?.available).sort(byAncestor);
+        overflow = [];
       }
     }
+    if (condenseRest) {
+      // pack any leftover transactions into the last block
+      for (const tx of overflow) {
+        if (!tx || tx?.used) {
+          continue;
+        }
+        blockWeight += tx.weight;
+        blockSize += tx.size;
+        transactions.push(tx);
+        tx.used = true;
+      }
+      const blockTransactions = transactions.map(t => mempool[t.txid])
+      restOfArray.forEach(tx => {
+        blockWeight += tx.weight;
+        blockSize += tx.size;
+        blockTransactions.push(tx);
+      });
+      if (blockTransactions.length) {
+        blocks.push(this.dataToMempoolBlocks(blockTransactions, blockSize, blockWeight, blocks.length));
+      }
+      transactions = [];
+    } else if (transactions.length) {
+      blocks.push(this.dataToMempoolBlocks(transactions.map(t => mempool[t.txid]), blockSize, blockWeight, blocks.length));
+    }
 
-    const end = new Date().getTime();
+    const end = Date.now();
     const time = end - start;
     logger.debug('Mempool templates calculated in ' + time / 1000 + ' seconds');
 
     return blocks;
   }
 
-  private sortByAncestorScore(a, b, txSets): number {
-    return txSets[b.txid]?.score - txSets[a.txid]?.score;
+  // traverse in-mempool ancestors
+  // recursion unavoidable, but should be limited to depth < 25 by mempool policy
+  public setRelatives(
+    tx: AuditTransaction,
+    mempool: { [txid: string]: AuditTransaction },
+  ): void {
+    for (const parent of tx.vin) {
+      const parentTx = mempool[parent.txid];
+      if (parentTx && !tx.ancestorMap!.has(parent.txid)) {
+        tx.ancestorMap.set(parent.txid, parentTx);
+        parentTx.children.add(tx);
+        // visit each node only once
+        if (!parentTx.relativesSet) {
+          this.setRelatives(parentTx, mempool);
+        }
+        parentTx.ancestorMap.forEach((ancestor) => {
+          tx.ancestorMap.set(ancestor.txid, ancestor);
+        });
+      }
+    };
+    tx.ancestorFee = tx.fee || 0;
+    tx.ancestorWeight = tx.weight || 0;
+    tx.ancestorMap.forEach((ancestor) => {
+      tx.ancestorFee += ancestor.fee;
+      tx.ancestorWeight += ancestor.weight;
+    });
+    tx.score = tx.ancestorFee / (tx.ancestorWeight || 1);
+    tx.relativesSet = true;
   }
 
-  private setRelatives(tx: TransactionExtended, mempool: { [txid: string]: TransactionExtended }, txSets: { [txid: string]: TransactionSet }): { [txid: string]: Ancestor } {
-    let ancestors: { [txid: string]: Ancestor } = {};
-    tx.vin.forEach((parent) => {
-      const parentTx = mempool[parent.txid];
-      const parentTxSet = txSets[parent.txid];
-      if (parentTx && parentTxSet) {
-        ancestors[parentTx.txid] = parentTx;
-        if (!parentTxSet.children) {
-          parentTxSet.children = [tx.txid];
-        } else {
-          parentTxSet.children.push(tx.txid);
-        }
-        if (!parentTxSet.score) {
-          ancestors = {
-            ...ancestors,
-            ...this.setRelatives(parentTx, mempool, txSets),
-          };
-        }
+  // iterate over remaining descendants, removing the root as a valid ancestor & updating the ancestor score
+  // avoids recursion to limit call stack depth
+  private updateDescendants(
+    rootTx: AuditTransaction,
+    mempool: { [txid: string]: AuditTransaction },
+    modified: PairingHeap<AuditTransaction>,
+  ): void {
+    const descendantSet: Set<AuditTransaction> = new Set();
+    // stack of nodes left to visit
+    const descendants: AuditTransaction[] = [];
+    let descendantTx;
+    let ancestorIndex;
+    let tmpScore;
+    rootTx.children.forEach(childTx => {
+      if (!descendantSet.has(childTx)) {
+        descendants.push(childTx);
+        descendantSet.add(childTx);
       }
     });
-    tx.ancestors = Object.values(ancestors).map(ancestor => {
-      return {
-        txid: ancestor.txid,
-        fee: ancestor.fee,
-        weight: ancestor.weight
-      };
-    });
-    let totalFees = tx.fee;
-    let totalWeight = tx.weight;
-    tx.ancestors.forEach(ancestor => {
-      totalFees += ancestor.fee;
-      totalWeight += ancestor.weight;
-    });
-    txSets[tx.txid].fee = totalFees;
-    txSets[tx.txid].weight = totalWeight;
-    txSets[tx.txid].score = this.calcAncestorScore(tx, totalFees, totalWeight);
+    while (descendants.length) {
+      descendantTx = descendants.pop();
+      if (descendantTx && descendantTx.ancestorMap && descendantTx.ancestorMap.has(rootTx.txid)) {
+        // remove tx as ancestor
+        descendantTx.ancestorMap.delete(rootTx.txid);
+        descendantTx.ancestorFee -= rootTx.fee;
+        descendantTx.ancestorWeight -= rootTx.weight;
+        tmpScore = descendantTx.score;
+        descendantTx.score = descendantTx.ancestorFee / descendantTx.ancestorWeight;
 
-    return ancestors;
-  }
-
-  private calcAncestorScore(tx: TransactionExtended, ancestorFees: number, ancestorWeight: number): number {
-    return Math.min(tx.fee / tx.weight, ancestorFees / ancestorWeight);
-  }
-
-  // walk over remaining descendants, removing the root as a valid ancestor & updating the ancestor score
-  // returns whether any descendants were modified
-  private updateDescendants(
-    root: TransactionExtended,
-    tx: TransactionExtended,
-    mempool: { [txid: string]: TransactionExtended },
-    txSets: { [txid: string]: TransactionSet },
-    modified: TransactionExtended[],
-  ): boolean {
-    let anyModified = false;
-    const txSet = txSets[tx.txid];
-    if (txSet.children) {
-      txSet.children.forEach(childId => {
-        const child = mempool[childId];
-        if (child && child.ancestors) {
-          const ancestorIndex = child.ancestors.findIndex(a => a.txid === root.txid);
-          if (ancestorIndex > -1) {
-            // remove tx as ancestor
-            child.ancestors.splice(ancestorIndex, 1);
-            const childTxSet = txSets[childId];
-            childTxSet.fee -= root.fee;
-            childTxSet.weight -= root.weight;
-            childTxSet.score = this.calcAncestorScore(child, childTxSet.fee, childTxSet.weight);
-            anyModified = true;
-
-            if (!childTxSet.modified) {
-              childTxSet.modified = true;
-              modified.push(child);
-            }
+        if (!descendantTx.modifiedNode) {
+          descendantTx.modified = true;
+          descendantTx.modifiedNode = modified.add(descendantTx);
+        } else {
+          // rebalance modified heap if score has changed
+          if (descendantTx.score < tmpScore) {
+            modified.decreasePriority(descendantTx.modifiedNode);
+          } else if (descendantTx.score > tmpScore) {
+            modified.increasePriority(descendantTx.modifiedNode);
           }
         }
-        // recursively update grandchildren
-        if (child) {
-          anyModified = this.updateDescendants(root, child, mempool, txSets, modified) || anyModified;
-        }
-      });
+
+        // add this node's children to the stack
+        descendantTx.children.forEach(childTx => {
+          // visit each node only once
+          if (!descendantSet.has(childTx)) {
+            descendants.push(childTx);
+            descendantSet.add(childTx);
+          }
+        });
+      }
     }
-    return anyModified;
   }
 
   private dataToMempoolBlocks(transactions: TransactionExtended[],
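The block-building pass above scores every candidate package by its ancestor score (package fee divided by package weight) and, when a package is selected, writes the package fee rate back onto the mempool copy as effectiveFeePerVsize. A minimal sketch of that arithmetic for a single hypothetical CPFP pair follows; the PackageTx shape, the names, and the numbers are illustrative only and not part of the patch:

// Illustration only: the ancestor-score / effective-fee-rate arithmetic from makeBlockTemplates,
// applied to one invented parent/child (CPFP) pair.
type PackageTx = { txid: string; fee: number; weight: number };

const parent: PackageTx = { txid: 'parent-txid', fee: 200, weight: 800 };   // 1 sat/vB on its own
const child: PackageTx = { txid: 'child-txid', fee: 4000, weight: 800 };    // 20 sat/vB on its own

// the parent has no in-mempool ancestors, so its score is just fee / weight
const parentScore = parent.fee / parent.weight;                   // 0.25 sat/WU (1 sat/vB)

// the child's ancestor set contains the parent, so it is scored on package totals
const ancestorFee = child.fee + parent.fee;                       // 4200 sats
const ancestorWeight = child.weight + parent.weight;              // 1600 WU
const childScore = ancestorFee / ancestorWeight;                  // 2.625 sat/WU (10.5 sat/vB)

// the same totals produce the effective fee rate written back to the mempool copy
const effectiveFeePerVsize = ancestorFee / (ancestorWeight / 4);  // 10.5 sat/vB

console.log({ parentScore, childScore, effectiveFeePerVsize });

Because the package score (2.625 sat/WU) far exceeds the parent's standalone rate (0.25 sat/WU), the parent is pulled into the projected block alongside its child, and both report the shared 10.5 sat/vB effective rate.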
diff --git a/backend/src/api/websocket-handler.ts b/backend/src/api/websocket-handler.ts
index c3d4dad9b..9daad3161 100644
--- a/backend/src/api/websocket-handler.ts
+++ b/backend/src/api/websocket-handler.ts
@@ -250,6 +250,8 @@ class WebsocketHandler {
       throw new Error('WebSocket.Server is not set');
     }
 
+    logger.debug("mempool changed!");
+
     mempoolBlocks.updateMempoolBlocks(newMempool);
     const mBlocks = mempoolBlocks.getMempoolBlocks();
     const mBlockDeltas = mempoolBlocks.getMempoolBlockDeltas();
@@ -417,7 +419,7 @@ class WebsocketHandler {
     const _memPool = memPool.getMempool();
     const mempoolCopy = cloneMempool(_memPool);
 
-    const projectedBlocks = mempoolBlocks.makeBlockTemplates(mempoolCopy, 2, 2);
+    const projectedBlocks = mempoolBlocks.makeBlockTemplates(mempoolCopy, 2);
 
     if (projectedBlocks[0]) {
       const { censored, added, score } = Audit.auditBlock(block, txIds, transactions, projectedBlocks, mempoolCopy);
diff --git a/backend/src/mempool.interfaces.ts b/backend/src/mempool.interfaces.ts
index fec26c0f3..32d87f3dc 100644
--- a/backend/src/mempool.interfaces.ts
+++ b/backend/src/mempool.interfaces.ts
@@ -1,4 +1,5 @@
 import { IEsploraApi } from './api/bitcoin/esplora-api.interface';
+import { HeapNode } from "./utils/pairing-heap";
 
 export interface PoolTag {
   id: number; // mysql row id
@@ -70,6 +71,24 @@ export interface TransactionExtended extends IEsploraApi.Transaction {
   deleteAfter?: number;
 }
 
+export interface AuditTransaction {
+  txid: string;
+  fee: number;
+  size: number;
+  weight: number;
+  feePerVsize: number;
+  vin: IEsploraApi.Vin[];
+  relativesSet: boolean;
+  ancestorMap: Map<string, AuditTransaction>;
+  children: Set<AuditTransaction>;
+  ancestorFee: number;
+  ancestorWeight: number;
+  score: number;
+  used: boolean;
+  modified: boolean;
+  modifiedNode: HeapNode<AuditTransaction>;
+}
+
 export interface Ancestor {
   txid: string;
   weight: number;
@@ -80,9 +99,10 @@ export interface TransactionSet {
   fee: number;
   weight: number;
   score: number;
-  children?: string[];
+  children?: Set<string>;
   available?: boolean;
   modified?: boolean;
+  modifiedNode?: HeapNode<TransactionExtended>;
 }
 
 interface BestDescendant {
diff --git a/backend/src/utils/pairing-heap.ts b/backend/src/utils/pairing-heap.ts
new file mode 100644
index 000000000..876e056c4
--- /dev/null
+++ b/backend/src/utils/pairing-heap.ts
@@ -0,0 +1,174 @@
+export type HeapNode<T> = {
+  element: T
+  child?: HeapNode<T>
+  next?: HeapNode<T>
+  prev?: HeapNode<T>
+} | null | undefined;
+
+// minimal pairing heap priority queue implementation
+export class PairingHeap<T> {
+  private root: HeapNode<T> = null;
+  private comparator: (a: T, b: T) => boolean;
+
+  // comparator function should return 'true' if a is higher priority than b
+  constructor(comparator: (a: T, b: T) => boolean) {
+    this.comparator = comparator;
+  }
+
+  isEmpty(): boolean {
+    return !this.root;
+  }
+
+  add(element: T): HeapNode<T> {
+    const node: HeapNode<T> = {
+      element
+    };
+
+    this.root = this.meld(this.root, node);
+
+    return node;
+  }
+
+  // returns the top priority element without modifying the queue
+  peek(): T | void {
+    return this.root?.element;
+  }
+
+  // removes and returns the top priority element
+  pop(): T | void {
+    let element;
+    if (this.root) {
+      const node = this.root;
+      element = node.element;
+      this.root = this.mergePairs(node.child);
+    }
+    return element;
+  }
+
+  deleteNode(node: HeapNode<T>): void {
+    if (!node) {
+      return;
+    }
+
+    if (node === this.root) {
+      this.root = this.mergePairs(node.child);
+    }
+    else {
+      if (node.prev) {
+        if (node.prev.child === node) {
+          node.prev.child = node.next;
+        }
+        else {
+          node.prev.next = node.next;
+        }
+      }
+      if (node.next) {
+        node.next.prev = node.prev;
+      }
+      this.root = this.meld(this.root, this.mergePairs(node.child));
+    }
+
+    node.child = null;
+    node.prev = null;
+    node.next = null;
+  }
+
+  // fix the heap after increasing the priority of a given node
+  increasePriority(node: HeapNode<T>): void {
+    // already the top priority element
+    if (!node || node === this.root) {
+      return;
+    }
+    // extract from siblings
+    if (node.prev) {
+      if (node.prev?.child === node) {
+        if (this.comparator(node.prev.element, node.element)) {
+          // already in a valid position
+          return;
+        }
+        node.prev.child = node.next;
+      }
+      else {
+        node.prev.next = node.next;
+      }
+    }
+    if (node.next) {
+      node.next.prev = node.prev;
+    }
+
+    this.root = this.meld(this.root, node);
+  }
+
+  decreasePriority(node: HeapNode<T>): void {
+    this.deleteNode(node);
+    this.root = this.meld(this.root, node);
+  }
+
+  meld(a: HeapNode<T>, b: HeapNode<T>): HeapNode<T> {
+    if (!a) {
+      return b;
+    }
+    if (!b || a === b) {
+      return a;
+    }
+
+    let parent: HeapNode<T> = b;
+    let child: HeapNode<T> = a;
+    if (this.comparator(a.element, b.element)) {
+      parent = a;
+      child = b;
+    }
+
+    child.next = parent.child;
+    if (parent.child) {
+      parent.child.prev = child;
+    }
+    child.prev = parent;
+    parent.child = child;
+
+    parent.next = null;
+    parent.prev = null;
+
+    return parent;
+  }
+
+  mergePairs(node: HeapNode<T>): HeapNode<T> {
+    if (!node) {
+      return null;
+    }
+
+    let current: HeapNode<T> = node;
+    let next: HeapNode<T>;
+    let nextCurrent: HeapNode<T>;
+    let pairs: HeapNode<T>;
+    let melded: HeapNode<T>;
+    while (current) {
+      next = current.next;
+      if (next) {
+        nextCurrent = next.next;
+        melded = this.meld(current, next);
+        if (melded) {
+          melded.prev = pairs;
+        }
+        pairs = melded;
+      }
+      else {
+        nextCurrent = null;
+        current.prev = pairs;
+        pairs = current;
+        break;
+      }
+      current = nextCurrent;
+    }
+
+    melded = null;
+    let prev: HeapNode<T>;
+    while (pairs) {
+      prev = pairs.prev;
+      melded = this.meld(melded, pairs);
+      pairs = prev;
+    }
+
+    return melded;
+  }
+}
\ No newline at end of file
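For reference, a small usage sketch of the PairingHeap added above, exercising it the way updateDescendants does. Scored is a made-up stand-in for AuditTransaction, and the import path assumes the file sits at backend/src/utils/pairing-heap.ts as in this diff:

// Illustration only: driving the PairingHeap API with invented data.
import { PairingHeap, HeapNode } from './utils/pairing-heap';

type Scored = { txid: string; score: number; modifiedNode?: HeapNode<Scored> };

// the comparator returns true when `a` should come out before `b` (higher score first)
const modified = new PairingHeap<Scored>((a, b) => (a.score || 0) > (b.score || 0));

const txA: Scored = { txid: 'aaa', score: 3 };
const txB: Scored = { txid: 'bbb', score: 5 };

// add() returns a node handle; keeping it on the element is what lets
// increasePriority / decreasePriority rebalance later without searching the heap
txA.modifiedNode = modified.add(txA);
txB.modifiedNode = modified.add(txB);

console.log(modified.peek()); // txB, the highest score so far

// pretend one of txA's ancestors was just selected into a block, so its package score changed
txA.score = 8;
modified.increasePriority(txA.modifiedNode);

console.log(modified.pop());     // txA now comes out first
console.log(modified.pop());     // then txB
console.log(modified.isEmpty()); // true

The handle returned by add() is exactly what makeBlockTemplates stores as modifiedNode on each AuditTransaction, so a score change in updateDescendants can be pushed into the heap directly instead of rebuilding or re-sorting a modified list.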