Compare commits

..

3 Commits

Author SHA1 Message Date
junderw
ebf43bd074 Use p-limit to limit concurrent requests 2023-08-18 23:45:03 -07:00
Mononaut
e4fcadf39b More verbose comments on $getMempoolTransactionsExtended 2023-08-19 04:47:19 +09:00
Mononaut
1b2122cd35 Don't overload core with mempool tx requests 2023-08-19 01:02:27 +09:00
23 changed files with 1358 additions and 2734 deletions

View File

@@ -27,8 +27,8 @@ jobs:
node-version: ${{ matrix.node }}
registry-url: "https://registry.npmjs.org"
- name: Install 1.63.x Rust toolchain
uses: dtolnay/rust-toolchain@1.63
- name: Install 1.70.x Rust toolchain
uses: dtolnay/rust-toolchain@1.70
- name: Install
if: ${{ matrix.flavor == 'dev'}}

View File

@@ -6,8 +6,6 @@ authors = ["mononaut"]
edition = "2021"
publish = false
[workspace]
[lib]
crate-type = ["cdylib"]

View File

@@ -335,15 +335,13 @@ fn set_relatives(txid: u32, audit_pool: &mut AuditPool) {
let mut total_sigops: u32 = 0;
for ancestor_id in &ancestors {
if let Some(ancestor) = audit_pool
let Some(ancestor) = audit_pool
.get(*ancestor_id as usize)
.expect("audit_pool contains all ancestors")
{
total_fee += ancestor.fee;
total_sigop_adjusted_weight += ancestor.sigop_adjusted_weight;
total_sigop_adjusted_vsize += ancestor.sigop_adjusted_vsize;
total_sigops += ancestor.sigops;
} else { todo!() };
.expect("audit_pool contains all ancestors") else { todo!() };
total_fee += ancestor.fee;
total_sigop_adjusted_weight += ancestor.sigop_adjusted_weight;
total_sigop_adjusted_vsize += ancestor.sigop_adjusted_vsize;
total_sigops += ancestor.sigops;
}
if let Some(Some(tx)) = audit_pool.get_mut(txid as usize) {

View File

@@ -29,10 +29,6 @@ import websocketHandler from './websocket-handler';
import redisCache from './redis-cache';
import rbfCache from './rbf-cache';
import { calcBitsDifference } from './difficulty-adjustment';
import os from 'os';
import { Worker } from 'worker_threads';
import path from 'path';
class Blocks {
private blocks: BlockExtended[] = [];
@@ -410,8 +406,6 @@ class Blocks {
return;
}
const workerPool: Worker[] = [];
try {
// Get all indexed block hash
const indexedBlocks = await blocksRepository.$getIndexedBlocks();
@@ -426,67 +420,39 @@ class Blocks {
let newlyIndexed = 0;
let totalIndexed = indexedBlockSummariesHashesArray.length;
let indexedThisRun = 0;
let timer = Date.now();
const startedAt = Date.now();
let timer = Date.now() / 1000;
const startedAt = Date.now() / 1000;
const blocksToIndex = indexedBlocks.filter(block => !indexedBlockSummariesHashes[block.hash]);
if (!blocksToIndex.length) {
return;
}
const numWorkers = Math.max(1, os.cpus().length - 1);
for (let i = 0; i < numWorkers; i++) {
workerPool.push(new Worker(path.resolve(__dirname, '../index-workers/block-summary-worker.js')));
}
const promises: Promise<void>[] = [];
// This function assigns a task to a worker
const assignTask = (worker: Worker): boolean => {
if (blocksToIndex.length === 0) {
return false;
} else {
worker.postMessage(blocksToIndex.shift());
return true;
for (const block of indexedBlocks) {
if (indexedBlockSummariesHashes[block.hash] === true) {
continue;
}
};
const handleResult = (height: number): void => {
// Logging
const elapsedSeconds = (Date.now() / 1000) - timer;
if (elapsedSeconds > 5) {
const runningFor = (Date.now() / 1000) - startedAt;
const blockPerSeconds = indexedThisRun / elapsedSeconds;
const progress = Math.round(totalIndexed / indexedBlocks.length * 10000) / 100;
logger.debug(`Indexing block summary for #${block.height} | ~${blockPerSeconds.toFixed(2)} blocks/sec | total: ${totalIndexed}/${indexedBlocks.length} (${progress}%) | elapsed: ${runningFor.toFixed(2)} seconds`, logger.tags.mining);
timer = Date.now() / 1000;
indexedThisRun = 0;
}
if (config.MEMPOOL.BACKEND === 'esplora') {
const txs = (await bitcoinApi.$getTxsForBlock(block.hash)).map(tx => transactionUtils.extendTransaction(tx));
const cpfpSummary = await this.$indexCPFP(block.hash, block.height, txs);
await this.$getStrippedBlockTransactions(block.hash, true, true, cpfpSummary, block.height); // This will index the block summary
} else {
await this.$getStrippedBlockTransactions(block.hash, true, true); // This will index the block summary
}
// Logging
indexedThisRun++;
totalIndexed++;
newlyIndexed++;
const elapsed = Date.now() - timer;
if (elapsed > 5000) {
const runningFor = Date.now() - startedAt;
const blockPerSeconds = indexedThisRun / (elapsed / 1000);
const progress = Math.round(totalIndexed / indexedBlocks.length * 10000) / 100;
logger.debug(`Indexing block summary for #${height} | ~${blockPerSeconds.toFixed(2)} blocks/sec | total: ${totalIndexed}/${indexedBlocks.length} (${progress}%) | elapsed: ${(runningFor / 1000).toFixed(2)} seconds`, logger.tags.mining);
timer = Date.now();
indexedThisRun = 0;
}
};
// Start a task on each worker
for (const worker of workerPool) {
promises.push(new Promise((resolve, reject) => {
worker.removeAllListeners();
worker.on('message', (result) => {
// Handle the result, then assign a new task to the worker
handleResult(result);
if (!assignTask(worker)) {
resolve();
};
});
worker.on('error', reject);
if (!assignTask(worker)) {
resolve();
}
}));
}
await Promise.all(promises);
if (newlyIndexed > 0) {
logger.notice(`Blocks summaries indexing completed: indexed ${newlyIndexed} blocks`, logger.tags.mining);
} else {
@@ -495,14 +461,6 @@ class Blocks {
} catch (e) {
logger.err(`Blocks summaries indexing failed. Trying again in 10 seconds. Reason: ${(e instanceof Error ? e.message : e)}`, logger.tags.mining);
throw e;
} finally {
for (const worker of workerPool) {
if (worker) {
// clean up the workers
worker.removeAllListeners();
worker.terminate();
}
}
}
}
@@ -599,7 +557,6 @@ class Blocks {
* [INDEXING] Index all blocks metadata for the mining dashboard
*/
public async $generateBlockDatabase(): Promise<boolean> {
const workerPool: Worker[] = [];
try {
const blockchainInfo = await bitcoinClient.getBlockchainInfo();
let currentBlockHeight = blockchainInfo.blocks;
@@ -618,18 +575,12 @@ class Blocks {
let totalIndexed = await blocksRepository.$blockCountBetweenHeight(currentBlockHeight, lastBlockToIndex);
let indexedThisRun = 0;
let newlyIndexed = 0;
const startedAt = Date.now();
let timer = Date.now();
if (currentBlockHeight >= lastBlockToIndex) {
const numWorkers = Math.max(1, os.cpus().length - 1);
for (let i = 0; i < numWorkers; i++) {
workerPool.push(new Worker(path.resolve(__dirname, '../index-workers/block-worker.js')));
}
}
const startedAt = Date.now() / 1000;
let timer = Date.now() / 1000;
while (currentBlockHeight >= lastBlockToIndex) {
const endBlock = Math.max(0, lastBlockToIndex, currentBlockHeight - chunkSize + 1);
const missingBlockHeights: number[] = await blocksRepository.$getMissingBlocksBetweenHeights(
currentBlockHeight, endBlock);
if (missingBlockHeights.length <= 0) {
@@ -639,65 +590,33 @@ class Blocks {
logger.info(`Indexing ${missingBlockHeights.length} blocks from #${currentBlockHeight} to #${endBlock}`, logger.tags.mining);
const promises: Promise<void>[] = [];
// This function assigns a task to a worker
const assignTask = (worker: Worker): boolean => {
if (missingBlockHeights.length === 0) {
return false;
} else {
worker.postMessage({ height: missingBlockHeights.shift() });
return true;
for (const blockHeight of missingBlockHeights) {
if (blockHeight < lastBlockToIndex) {
break;
}
};
const handleResult = (height: number): void => {
indexedThisRun++;
totalIndexed++;
newlyIndexed++;
const elapsed = Date.now() - timer;
if (elapsed > 5000 || height === lastBlockToIndex) {
const runningFor = Date.now() - startedAt;
const blockPerSeconds = indexedThisRun / (elapsed / 1000);
++indexedThisRun;
++totalIndexed;
const elapsedSeconds = (Date.now() / 1000) - timer;
if (elapsedSeconds > 5 || blockHeight === lastBlockToIndex) {
const runningFor = (Date.now() / 1000) - startedAt;
const blockPerSeconds = indexedThisRun / elapsedSeconds;
const progress = Math.round(totalIndexed / indexingBlockAmount * 10000) / 100;
logger.debug(`Indexing block #${height} | ~${blockPerSeconds.toFixed(2)} blocks/sec | total: ${totalIndexed}/${indexingBlockAmount} (${progress.toFixed(2)}%) | elapsed: ${(runningFor / 1000).toFixed(2)} seconds`, logger.tags.mining);
timer = Date.now();
logger.debug(`Indexing block #${blockHeight} | ~${blockPerSeconds.toFixed(2)} blocks/sec | total: ${totalIndexed}/${indexingBlockAmount} (${progress.toFixed(2)}%) | elapsed: ${runningFor.toFixed(2)} seconds`, logger.tags.mining);
timer = Date.now() / 1000;
indexedThisRun = 0;
loadingIndicators.setProgress('block-indexing', progress, false);
}
};
const blockHash = await bitcoinApi.$getBlockHash(blockHeight);
const block: IEsploraApi.Block = await bitcoinApi.$getBlock(blockHash);
const transactions = await this.$getTransactionsExtended(blockHash, block.height, true, null, true);
const blockExtended = await this.$getBlockExtended(block, transactions);
// Start a task on each worker
for (const worker of workerPool) {
promises.push(new Promise((resolve, reject) => {
worker.removeAllListeners();
worker.on('message', (result) => {
// Handle the result, then assign a new task to the worker
handleResult(result);
if (!assignTask(worker)) {
resolve();
};
});
worker.on('error', reject);
if (!assignTask(worker)) {
resolve();
}
}));
newlyIndexed++;
await blocksRepository.$saveBlockInDatabase(blockExtended);
}
await Promise.all(promises);
currentBlockHeight -= chunkSize;
}
for (const worker of workerPool) {
if (worker) {
// clean up the workers
worker.removeAllListeners();
worker.terminate();
}
}
if (newlyIndexed > 0) {
logger.notice(`Block indexing completed: indexed ${newlyIndexed} blocks`, logger.tags.mining);
} else {
@@ -708,14 +627,6 @@ class Blocks {
logger.err('Block indexing failed. Trying again in 10 seconds. Reason: ' + (e instanceof Error ? e.message : e), logger.tags.mining);
loadingIndicators.setProgress('block-indexing', 100);
throw e;
} finally {
for (const worker of workerPool) {
if (worker) {
// clean up the workers
worker.removeAllListeners();
worker.terminate();
}
}
}
return await BlocksRepository.$validateChain();

View File

@@ -53,7 +53,7 @@ class RbfCache {
private expiring: Map<string, number> = new Map();
private cacheQueue: CacheEvent[] = [];
public init(): void {
constructor() {
setInterval(this.cleanup.bind(this), 1000 * 60 * 10);
}

View File

@@ -23,21 +23,24 @@ class RedisCache {
private cacheQueue: MempoolTransactionExtended[] = [];
private txFlushLimit: number = 10000;
constructor() {
if (config.REDIS.ENABLED) {
const redisConfig = {
socket: {
path: config.REDIS.UNIX_SOCKET_PATH
},
database: NetworkDB[config.MEMPOOL.NETWORK],
};
this.client = createClient(redisConfig);
this.client.on('error', (e) => {
logger.err(`Error in Redis client: ${e instanceof Error ? e.message : e}`);
});
this.$ensureConnected();
}
}
private async $ensureConnected(): Promise<void> {
if (!this.connected && config.REDIS.ENABLED) {
if (!this.client) {
const redisConfig = {
socket: {
path: config.REDIS.UNIX_SOCKET_PATH
},
database: NetworkDB[config.MEMPOOL.NETWORK],
};
this.client = createClient(redisConfig);
this.client.on('error', (e) => {
logger.err(`Error in Redis client: ${e instanceof Error ? e.message : e}`);
});
}
return this.client.connect().then(async () => {
this.connected = true;
logger.info(`Redis client connected`);

View File

@@ -5,6 +5,7 @@ import bitcoinApi, { bitcoinCoreApi } from './bitcoin/bitcoin-api-factory';
import * as bitcoinjs from 'bitcoinjs-lib';
import logger from '../logger';
import config from '../config';
import pLimit from '../utils/p-limit';
class TransactionUtils {
constructor() { }
@@ -74,8 +75,12 @@ class TransactionUtils {
public async $getMempoolTransactionsExtended(txids: string[], addPrevouts = false, lazyPrevouts = false, forceCore = false): Promise<MempoolTransactionExtended[]> {
if (forceCore || config.MEMPOOL.BACKEND !== 'esplora') {
const results = await Promise.allSettled(txids.map(txid => this.$getTransactionExtended(txid, addPrevouts, lazyPrevouts, forceCore, true)));
return (results.filter(r => r.status === 'fulfilled') as PromiseFulfilledResult<MempoolTransactionExtended>[]).map(r => r.value);
const limiter = pLimit(32); // Run 32 requests at a time
const results = await Promise.allSettled(txids.map(
txid => limiter(() => this.$getMempoolTransactionExtended(txid, addPrevouts, lazyPrevouts, forceCore))
));
return results.filter(reply => reply.status === 'fulfilled')
.map(r => (r as PromiseFulfilledResult<MempoolTransactionExtended>).value);
} else {
const transactions = await bitcoinApi.$getMempoolTransactions(txids);
return transactions.map(transaction => {

View File

@@ -1,33 +0,0 @@
import { parentPort } from 'worker_threads';
import bitcoinApi from '../api/bitcoin/bitcoin-api-factory';
import blocks from '../api/blocks';
import config from '../config';
import transactionUtils from '../api/transaction-utils';
import bitcoinClient from '../api/bitcoin/bitcoin-client';
// Worker-thread entry point: the main thread posts { hash, height } pairs and
// expects the indexed height to be posted back when the work completes.
if (parentPort) {
  parentPort.on('message', async ({ hash, height }) => {
    if (hash != null && height != null) {
      await indexBlockSummary(hash, height);
    }
    // Always reply with the height — even when no indexing happened — so the
    // main thread can assign the next task to this worker.
    if (parentPort) {
      parentPort.postMessage(height);
    }
  });
}
async function indexBlockSummary(hash: string, height: number): Promise<void> {
const block = await bitcoinClient.getBlock(hash, 2);
const txs = block.tx.map(tx => {
tx.fee = Math.round(tx.fee * 100_000_000);
tx.vout.forEach((vout) => {
vout.value = Math.round(vout.value * 100000000);
});
tx.vsize = Math.round(tx.weight / 4); // required for backwards compatibility
return tx;
});
const cpfpSummary = await blocks.$indexCPFP(hash, height, txs);
await blocks.$getStrippedBlockTransactions(hash, true, true, cpfpSummary, height); // This will index the block summary
}

View File

@@ -1,25 +0,0 @@
import { parentPort } from 'worker_threads';
import bitcoinApi from '../api/bitcoin/bitcoin-api-factory';
import blocksRepository from '../repositories/BlocksRepository';
import blocks from '../api/blocks';
import { IEsploraApi } from '../api/bitcoin/esplora-api.interface';
// Worker-thread entry point: receives { height } messages from the main
// thread, indexes that block, and posts the height back so the main thread
// can schedule the next task for this worker.
if (parentPort) {
  parentPort.on('message', async (params) => {
    if (params.height != null) {
      await indexBlock(params.height);
    }
    // Reply unconditionally so the dispatcher never stalls on this worker.
    if (parentPort) {
      parentPort.postMessage(params.height);
    }
  });
}
/**
 * Index a single block: resolve its hash from the height, fetch the block and
 * its extended transactions, build the extended block record, and persist it.
 * Bracket access is used to reach private methods on the `blocks` singleton.
 */
async function indexBlock(blockHeight: number): Promise<void> {
  const hash = await bitcoinApi.$getBlockHash(blockHeight);
  const block: IEsploraApi.Block = await bitcoinApi.$getBlock(hash);
  const txs = await blocks['$getTransactionsExtended'](hash, block.height, true, null, true);
  const extended = await blocks['$getBlockExtended'](block, txs);
  await blocksRepository.$saveBlockInDatabase(extended);
}

View File

@@ -43,7 +43,6 @@ import { AxiosError } from 'axios';
import v8 from 'v8';
import { formatBytes, getBytesUnit } from './utils/format';
import redisCache from './api/redis-cache';
import rbfCache from './api/rbf-cache';
class Server {
private wss: WebSocket.Server | undefined;
@@ -108,8 +107,6 @@ class Server {
}
}
rbfCache.init();
this.app
.use((req: Request, res: Response, next: NextFunction) => {
res.setHeader('Access-Control-Allow-Origin', '*');

View File

@@ -0,0 +1,179 @@
/*
MIT License
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (https://sindresorhus.com)
Permission is hereby granted, free of charge, to any person obtaining a copy of this
software and associated documentation files (the "Software"), to deal in the Software
without restriction, including without limitation the rights to use, copy, modify,
merge, publish, distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be included in all copies
or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE
OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
/*
How it works:
`this._head` is an instance of `Node` which keeps track of its current value and nests
another instance of `Node` that keeps the value that comes after it. When a value is
provided to `.enqueue()`, the code needs to iterate through `this._head`, going deeper
and deeper to find the last value. However, iterating through every single item is slow.
This problem is solved by saving a reference to the last value as `this._tail` so that
it can reference it to add a new value.
*/
/** Any callable accepted by the limiter. */
type AnyFunction = (...args: unknown[]) => unknown;

/** Singly-linked-list node holding one queued value. */
class Node<T> {
  value: T;
  next: Node<T> | undefined;

  constructor(value: T) {
    this.value = value;
  }
}

/**
 * Minimal FIFO queue backed by a singly linked list.
 * `enqueue`/`dequeue` are O(1); iteration yields values front-to-back.
 */
class Queue<T> {
  private _head: Node<T> | undefined;
  private _tail: Node<T> | undefined;
  private _size;

  constructor() {
    this.clear();
  }

  /** Append a value at the tail. */
  enqueue(value: T): void {
    const node = new Node(value);

    if (this._head && this._tail) {
      this._tail.next = node;
      this._tail = node;
    } else {
      this._head = node;
      this._tail = node;
    }

    this._size++;
  }

  /** Remove and return the head value, or `undefined` when empty. */
  dequeue(): T | undefined {
    const current = this._head;
    if (!current) {
      return;
    }

    this._head = current.next;
    this._size--;
    return current.value;
  }

  /** Drop all queued values. */
  clear(): void {
    this._head = undefined;
    this._tail = undefined;
    this._size = 0;
  }

  get size(): number {
    return this._size;
  }

  *[Symbol.iterator](): IterableIterator<T> {
    let current = this._head;

    while (current) {
      yield current.value;
      current = current.next;
    }
  }
}

interface LimitFunction {
  readonly activeCount: number;
  readonly pendingCount: number;
  clearQueue: () => void;
  <Arguments extends unknown[], ReturnType>(
    fn: (...args: Arguments) => PromiseLike<ReturnType> | ReturnType,
    ...args: Arguments
  ): Promise<ReturnType>;
}

/**
 * Create a function that limits how many promise-returning tasks run at once.
 * Vendored from sindresorhus/p-limit (MIT, see header).
 *
 * @param concurrency - Maximum number of tasks running concurrently
 *                      (a positive integer, or `Infinity` for no limit).
 * @returns A limiter: `limit(fn, ...args)` resolves with `fn(...args)`,
 *          running at most `concurrency` tasks at a time.
 * @throws TypeError when `concurrency` is not a positive integer or Infinity.
 */
export default function pLimit(concurrency: number): LimitFunction {
  if (
    !(
      (Number.isInteger(concurrency) ||
        concurrency === Number.POSITIVE_INFINITY) &&
      concurrency > 0
    )
  ) {
    throw new TypeError('Expected `concurrency` to be a number from 1 and up');
  }

  const queue = new Queue<() => void>();
  let activeCount = 0;

  // Called after a task settles: frees the slot and starts the next queued task.
  const next = (): void => {
    activeCount--;

    if (queue.size > 0) {
      const task = queue.dequeue();
      if (task) {
        task();
      }
    }
  };

  // Start `fn` immediately, resolve the caller's promise with its result
  // (rejections propagate to the caller via `resolve(result)`), and always
  // release the slot afterwards.
  const run = async (
    fn: AnyFunction,
    resolve: (value: unknown) => void,
    args: unknown[]
  ): Promise<void> => {
    activeCount++;

    const result = (async () => fn(...args))();

    resolve(result);

    try {
      await result;
    } catch {} // errors are surfaced through `result`; the slot must still free

    next();
  };

  const enqueue = (
    fn: AnyFunction,
    resolve: (value: unknown) => void,
    args: unknown[]
  ): void => {
    queue.enqueue(run.bind(undefined, fn, resolve, args));

    (async () => {
      // This function needs to wait until the next microtask before comparing
      // `activeCount` to `concurrency`, because `activeCount` is updated asynchronously
      // when the run function is dequeued and called. The comparison in the if-statement
      // needs to happen asynchronously as well to get an up-to-date value for `activeCount`.
      await Promise.resolve();

      if (activeCount < concurrency && queue.size > 0) {
        const task = queue.dequeue();
        if (task) {
          task();
        }
      }
    })();
  };

  const generator = (fn: AnyFunction, ...args: unknown[]) =>
    new Promise((resolve) => {
      enqueue(fn, resolve, args);
    });

  Object.defineProperties(generator, {
    activeCount: {
      get: () => activeCount,
    },
    pendingCount: {
      get: () => queue.size,
    },
    clearQueue: {
      value: () => {
        queue.clear();
      },
    },
  });

  // The defineProperties calls above add the LimitFunction members at runtime;
  // the double cast tells the compiler about them without using `any`.
  return generator as unknown as LimitFunction;
}

View File

@@ -1,3 +0,0 @@
I hereby accept the terms of the Contributor License Agreement in the CONTRIBUTING.md file with sha256 hash c80c5ee4c71c5a76a1f6cd35339bd0c45b25b491933ea7b02a66470e9f43a6fd.
Signed: TheBlueMatt

File diff suppressed because it is too large Load Diff

View File

@@ -61,18 +61,18 @@
"cypress:run:ci:staging": "node update-config.js TESTNET_ENABLED=true SIGNET_ENABLED=true LIQUID_ENABLED=true BISQ_ENABLED=true ITEMS_PER_PAGE=25 && npm run generate-config && start-server-and-test serve:local-staging 4200 cypress:run:record"
},
"dependencies": {
"@angular-devkit/build-angular": "^16.2.0",
"@angular/animations": "^16.2.2",
"@angular/cli": "^16.2.0",
"@angular/common": "^16.2.2",
"@angular/compiler": "^16.2.2",
"@angular/core": "^16.2.2",
"@angular/forms": "^16.2.2",
"@angular/localize": "^16.2.2",
"@angular/platform-browser": "^16.2.2",
"@angular/platform-browser-dynamic": "^16.2.2",
"@angular/platform-server": "^16.2.2",
"@angular/router": "^16.2.2",
"@angular-devkit/build-angular": "^16.1.4",
"@angular/animations": "^16.1.5",
"@angular/cli": "^16.1.4",
"@angular/common": "^16.1.5",
"@angular/compiler": "^16.1.5",
"@angular/core": "^16.1.5",
"@angular/forms": "^16.1.5",
"@angular/localize": "^16.1.5",
"@angular/platform-browser": "^16.1.5",
"@angular/platform-browser-dynamic": "^16.1.5",
"@angular/platform-server": "^16.1.5",
"@angular/router": "^16.1.5",
"@fortawesome/angular-fontawesome": "~0.13.0",
"@fortawesome/fontawesome-common-types": "~6.4.0",
"@fortawesome/fontawesome-svg-core": "~6.4.0",

View File

@@ -70,11 +70,9 @@ export class BlockOverviewGraphComponent implements AfterViewInit, OnDestroy, On
this.canvas.nativeElement.addEventListener('webglcontextlost', this.handleContextLost, false);
this.canvas.nativeElement.addEventListener('webglcontextrestored', this.handleContextRestored, false);
this.gl = this.canvas.nativeElement.getContext('webgl');
this.initCanvas();
if (this.gl) {
this.initCanvas();
this.resizeCanvas();
}
this.resizeCanvas();
}
ngOnChanges(changes): void {
@@ -197,16 +195,10 @@ export class BlockOverviewGraphComponent implements AfterViewInit, OnDestroy, On
cancelAnimationFrame(this.animationFrameRequest);
this.animationFrameRequest = null;
this.running = false;
this.gl = null;
}
handleContextRestored(event): void {
if (this.canvas?.nativeElement) {
this.gl = this.canvas.nativeElement.getContext('webgl');
if (this.gl) {
this.initCanvas();
}
}
this.initCanvas();
}
@HostListener('window:resize', ['$event'])
@@ -232,9 +224,6 @@ export class BlockOverviewGraphComponent implements AfterViewInit, OnDestroy, On
}
compileShader(src, type): WebGLShader {
if (!this.gl) {
return;
}
const shader = this.gl.createShader(type);
this.gl.shaderSource(shader, src);
@@ -248,9 +237,6 @@ export class BlockOverviewGraphComponent implements AfterViewInit, OnDestroy, On
}
buildShaderProgram(shaderInfo): WebGLProgram {
if (!this.gl) {
return;
}
const program = this.gl.createProgram();
shaderInfo.forEach((desc) => {
@@ -287,7 +273,7 @@ export class BlockOverviewGraphComponent implements AfterViewInit, OnDestroy, On
now = performance.now();
}
// skip re-render if there's no change to the scene
if (this.scene && this.gl) {
if (this.scene) {
/* SET UP SHADER UNIFORMS */
// screen dimensions
this.gl.uniform2f(this.gl.getUniformLocation(this.shaderProgram, 'screenSize'), this.displayWidth, this.displayHeight);

View File

@@ -90,7 +90,7 @@ export const download = (href, name) => {
export function detectWebGL(): boolean {
const canvas = document.createElement('canvas');
const gl = canvas.getContext('webgl');
const gl = canvas.getContext('webgl') || canvas.getContext('experimental-webgl');
return !!(gl && gl instanceof WebGLRenderingContext);
}

View File

@@ -530,7 +530,6 @@ osCertbotDryRun()
zfsCreateFilesystems()
{
zfs create -o "mountpoint=/backup" "${ZPOOL}/backup"
zfs create -o "mountpoint=/var/cache/nginx" "${ZPOOL}/cache"
zfs create -o "mountpoint=${ELEMENTS_HOME}" "${ZPOOL}/elements"
zfs create -o "mountpoint=${BITCOIN_HOME}" "${ZPOOL}/bitcoin"
@@ -1853,6 +1852,8 @@ chown "${MEMPOOL_USER}:${MEMPOOL_GROUP}" "${MEMPOOL_MYSQL_CREDENTIALS}"
echo "[*] Adding Nginx configuration"
osSudo "${ROOT_USER}" install -c -o "${ROOT_USER}" -g "${ROOT_GROUP}" -m 644 "${MEMPOOL_HOME}/${MEMPOOL_REPO_NAME}/production/nginx/nginx.conf" "${NGINX_CONFIGURATION}"
mkdir -p /var/cache/nginx/services /var/cache/nginx/api
chown "${NGINX_USER}:${NGINX_GROUP}" /var/cache/nginx/services /var/cache/nginx/api
ln -s "${MEMPOOL_HOME}/mempool" "${NGINX_ETC_FOLDER}/mempool"
osSudo "${ROOT_USER}" sed -i.orig "s!__NGINX_USER__!${NGINX_USER}!" "${NGINX_CONFIGURATION}"
osSudo "${ROOT_USER}" sed -i.orig "s!__NGINX_ETC_FOLDER__!${NGINX_ETC_FOLDER}!" "${NGINX_CONFIGURATION}"

View File

@@ -1,7 +1,7 @@
# proxy cache
proxy_cache_path /var/cache/nginx/api keys_zone=api:20m levels=1:2 inactive=365d max_size=2000m;
proxy_cache_path /var/cache/nginx/unfurler keys_zone=unfurler:20m levels=1:2 inactive=365d max_size=2000m;
proxy_cache_path /var/cache/nginx/slurper keys_zone=slurper:20m levels=1:2 inactive=365d max_size=5000m;
proxy_cache_path /var/cache/nginx/services keys_zone=services:20m levels=1:2 inactive=365d max_size=100m;
proxy_cache_path /var/cache/nginx/markets keys_zone=markets:20m levels=1:2 inactive=365d max_size=100m;
types_hash_max_size 4096;
proxy_cache_path /var/cache/nginx/api keys_zone=api:20m levels=1:2 inactive=600s max_size=200m;
proxy_cache_path /var/cache/nginx/services keys_zone=services:20m levels=1:2 inactive=600s max_size=200m;
proxy_cache_path /var/cache/nginx/markets keys_zone=markets:20m levels=1:2 inactive=600s max_size=200m;
proxy_cache_path /var/cache/nginx/unfurler keys_zone=unfurler:20m levels=1:2 inactive=600s max_size=200m;
proxy_cache_path /var/cache/nginx/slurper keys_zone=slurper:20m levels=1:2 inactive=600s max_size=200m;
types_hash_max_size 2048;

View File

@@ -97,14 +97,6 @@ location ~* ^/.+\..+\.(js|css)$ {
expires 1y;
}
# old stuff is gone
location /explorer/ {
return 410;
}
location /sitemap/ {
return 410;
}
# unfurl preview
location /preview {
try_files /$lang/$uri $uri /en-US/$uri /en-US/index.html =404;
@@ -113,6 +105,7 @@ location /preview {
# unfurl renderer
location ^~ /render {
try_files /dev/null @mempool-space-unfurler;
expires 10m;
}
# unfurl handler
location /unfurl/ {
@@ -143,10 +136,8 @@ location @mempool-space-unfurler {
proxy_cache_background_update on;
proxy_cache_use_stale updating;
proxy_cache unfurler;
proxy_cache_valid 200 1h; # will re-render page if older than this
proxy_cache_valid 200 10m;
proxy_redirect off;
expires 1d;
}
location @mempool-space-slurper {
@@ -160,8 +151,6 @@ location @mempool-space-slurper {
proxy_cache_background_update on;
proxy_cache_use_stale updating;
proxy_cache slurper;
proxy_cache_valid 200 1h; # will re-render page if older than this
proxy_cache_valid 200 10m;
proxy_redirect off;
expires 10d;
}

View File

@@ -11,13 +11,12 @@ const BROWSER_TIMEOUT = 8000;
const maxAgeMs = (config.PUPPETEER.MAX_PAGE_AGE || (24 * 60 * 60)) * 1000;
const maxConcurrency = config.PUPPETEER.CLUSTER_SIZE;
export interface RepairablePage extends puppeteer.Page {
interface RepairablePage extends puppeteer.Page {
repairRequested?: boolean;
language?: string | null;
createdAt?: number;
free?: boolean;
index?: number;
clusterGroup?: string;
}
interface ResourceData {
@@ -77,7 +76,7 @@ export default class ReusablePage extends ConcurrencyImplementation {
for (let i = 0; i < maxConcurrency; i++) {
const newPage = await this.initPage();
newPage.index = this.pages.length;
logger.info(`initialized page ${newPage.clusterGroup}:${newPage.index}`);
logger.info(`initialized page ${newPage.index}`);
this.pages.push(newPage);
}
}
@@ -88,7 +87,6 @@ export default class ReusablePage extends ConcurrencyImplementation {
protected async initPage(): Promise<RepairablePage> {
const page = await (this.browser as puppeteer.Browser).newPage() as RepairablePage;
page.clusterGroup = 'unfurler';
page.language = null;
page.createdAt = Date.now();
let defaultUrl
@@ -110,7 +108,7 @@ export default class ReusablePage extends ConcurrencyImplementation {
page.waitForSelector('meta[property="og:preview:fail"]', { timeout: config.PUPPETEER.RENDER_TIMEOUT || 3000 }).then(() => false)
])
} catch (e) {
logger.err(`failed to load frontend during page initialization ${page.clusterGroup}:${page.index}: ` + (e instanceof Error ? e.message : `${e}`));
logger.err(`failed to load frontend during page initialization: ` + (e instanceof Error ? e.message : `${e}`));
page.repairRequested = true;
}
}
@@ -131,7 +129,6 @@ export default class ReusablePage extends ConcurrencyImplementation {
protected async repairPage(page) {
// create a new page
logger.info(`Repairing page ${page.clusterGroup}:${page.index}`);
const newPage = await this.initPage();
newPage.free = true;
// replace the old page
@@ -141,10 +138,9 @@ export default class ReusablePage extends ConcurrencyImplementation {
try {
await page.goto('about:blank', {timeout: 200}); // prevents memory leak (maybe?)
} catch (e) {
logger.err(`unexpected page repair error ${page.clusterGroup}:${page.index}`);
} finally {
await page.close();
logger.err('unexpected page repair error');
}
await page.close();
return newPage;
}

View File

@@ -2,11 +2,19 @@ import * as puppeteer from 'puppeteer';
import { timeoutExecute } from 'puppeteer-cluster/dist/util';
import logger from '../logger';
import config from '../config';
import ReusablePage, { RepairablePage } from './ReusablePage';
import ReusablePage from './ReusablePage';
const mempoolHost = config.MEMPOOL.HTTP_HOST + (config.MEMPOOL.HTTP_PORT ? ':' + config.MEMPOOL.HTTP_PORT : '');
const mockImageBuffer = Buffer.from("iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAQAAAC1HAwCAAAAC0lEQVQYV2NgYAAAAAMAAWgmWQ0AAAAASUVORK5CYII=", 'base64');
interface RepairablePage extends puppeteer.Page {
repairRequested?: boolean;
language?: string | null;
createdAt?: number;
free?: boolean;
index?: number;
}
export default class ReusableSSRPage extends ReusablePage {
public constructor(options: puppeteer.LaunchOptions, puppeteer: any) {
@@ -19,37 +27,36 @@ export default class ReusableSSRPage extends ReusablePage {
protected async initPage(): Promise<RepairablePage> {
const page = await (this.browser as puppeteer.Browser).newPage() as RepairablePage;
page.clusterGroup = 'slurper';
page.language = null;
page.createdAt = Date.now();
const defaultUrl = mempoolHost + '/preview/block/1';
const defaultUrl = mempoolHost + '/about';
page.on('pageerror', (err) => {
console.log(err);
page.repairRequested = true;
// page.repairRequested = true;
});
await page.setRequestInterception(true);
page.on('request', req => {
if (req.isInterceptResolutionHandled()) {
return req.continue();
}
if (req.resourceType() === 'image') {
return req.respond({
contentType: 'image/png',
headers: {"Access-Control-Allow-Origin": "*"},
body: mockImageBuffer
});
} else if (req.resourceType() === 'media') {
return req.abort();
} else {
return req.continue();
}
});
page.on('request', req => {
if (req.isInterceptResolutionHandled()) {
return req.continue();
}
if (req.resourceType() === 'image') {
return req.respond({
contentType: 'image/png',
headers: {"Access-Control-Allow-Origin": "*"},
body: mockImageBuffer
});
} else if (!['document', 'script', 'xhr', 'fetch'].includes(req.resourceType())) {
return req.abort();
} else {
return req.continue();
}
});
try {
await page.goto(defaultUrl, { waitUntil: "networkidle0" });
await page.waitForSelector('meta[property="og:meta:ready"]', { timeout: config.PUPPETEER.RENDER_TIMEOUT || 3000 });
} catch (e) {
logger.err(`failed to load frontend during ssr page initialization ${page.clusterGroup}:${page.index}: ` + (e instanceof Error ? e.message : `${e}`));
logger.err(`failed to load frontend during ssr page initialization: ` + (e instanceof Error ? e.message : `${e}`));
page.repairRequested = true;
}
page.free = true;

View File

@@ -5,7 +5,7 @@ import * as https from 'https';
import config from './config';
import { Cluster } from 'puppeteer-cluster';
import ReusablePage from './concurrency/ReusablePage';
import ReusableSSRPage from './concurrency/ReusableSSRPage';
import ReusableSSRPage from './concurrency/ReusablePage';
import { parseLanguageUrl } from './language/lang';
import { matchRoute } from './routes';
import nodejsPath from 'path';
@@ -28,18 +28,13 @@ class Server {
mempoolUrl: URL;
network: string;
secureHost = true;
secureMempoolHost = true;
canonicalHost: string;
seoQueueLength: number = 0;
unfurlQueueLength: number = 0;
constructor() {
this.app = express();
this.mempoolHost = config.MEMPOOL.HTTP_HOST + (config.MEMPOOL.HTTP_PORT ? ':' + config.MEMPOOL.HTTP_PORT : '');
this.mempoolUrl = new URL(this.mempoolHost);
this.secureHost = config.SERVER.HOST.startsWith('https');
this.secureMempoolHost = config.MEMPOOL.HTTP_HOST.startsWith('https');
this.network = config.MEMPOOL.NETWORK || 'bitcoin';
let canonical;
@@ -125,10 +120,8 @@ class Server {
this.app.get('*', (req, res) => { return this.renderHTML(req, res, false) })
}
async clusterTask({ page, data: { url, path, action, reqUrl } }) {
const start = Date.now();
async clusterTask({ page, data: { url, path, action } }) {
try {
logger.info(`rendering "${reqUrl}" on tab ${page.clusterGroup}:${page.index}`);
const urlParts = parseLanguageUrl(path);
if (page.language !== urlParts.lang) {
// switch language
@@ -161,30 +154,27 @@ class Server {
captureBeyondViewport: false,
clip: { width: 1200, height: 600, x: 0, y: 0, scale: 1 },
});
logger.info(`rendered unfurl img in ${Date.now() - start}ms for "${reqUrl}" on tab ${page.clusterGroup}:${page.index}`);
return screenshot;
} else if (success === false) {
logger.warn(`failed to render ${reqUrl} for ${action} due to client-side error, e.g. requested an invalid txid`);
logger.warn(`failed to render ${path} for ${action} due to client-side error, e.g. requested an invalid txid`);
page.repairRequested = true;
} else {
logger.warn(`failed to render ${reqUrl} for ${action} due to puppeteer timeout`);
logger.warn(`failed to render ${path} for ${action} due to puppeteer timeout`);
page.repairRequested = true;
}
} catch (e) {
logger.err(`failed to render ${reqUrl} for ${action}: ` + (e instanceof Error ? e.message : `${e}`));
logger.err(`failed to render ${path} for ${action}: ` + (e instanceof Error ? e.message : `${e}`));
page.repairRequested = true;
}
}
async ssrClusterTask({ page, data: { url, path, action, reqUrl } }) {
const start = Date.now();
async ssrClusterTask({ page, data: { url, path, action } }) {
try {
logger.info(`slurping "${reqUrl}" on tab ${page.clusterGroup}:${page.index}`);
const urlParts = parseLanguageUrl(path);
if (page.language !== urlParts.lang) {
// switch language
page.language = urlParts.lang;
const localizedUrl = urlParts.lang ? `${this.mempoolHost}/${urlParts.lang}${urlParts.path}` : `${this.mempoolHost}${urlParts.path}`;
const localizedUrl = urlParts.lang ? `${this.mempoolHost}/${urlParts.lang}${urlParts.path}` : `${this.mempoolHost}${urlParts.path}` ;
await page.goto(localizedUrl, { waitUntil: "load" });
} else {
const loaded = await page.evaluate(async (path) => {
@@ -207,20 +197,17 @@ class Server {
return !!window['soft404'];
});
if (is404) {
logger.info(`slurp 404 in ${Date.now() - start}ms for "${reqUrl}" on tab ${page.clusterGroup}:${page.index}`);
return '404';
} else {
let html = await page.content();
logger.info(`rendered slurp in ${Date.now() - start}ms for "${reqUrl}" on tab ${page.clusterGroup}:${page.index}`);
return html;
}
} catch (e) {
if (e instanceof TimeoutError) {
let html = await page.content();
logger.info(`rendered partial slurp in ${Date.now() - start}ms for "${reqUrl}" on tab ${page.clusterGroup}:${page.index}`);
return html;
} else {
logger.err(`failed to render ${reqUrl} for ${action}: ` + (e instanceof Error ? e.message : `${e}`));
logger.err(`failed to render ${path} for ${action}: ` + (e instanceof Error ? e.message : `${e}`));
page.repairRequested = true;
}
}
@@ -232,8 +219,6 @@ class Server {
async renderPreview(req, res) {
try {
this.unfurlQueueLength++;
const start = Date.now();
const rawPath = req.params[0];
let img = null;
@@ -243,15 +228,12 @@ class Server {
// don't bother unless the route is definitely renderable
if (rawPath.includes('/preview/') && matchedRoute.render) {
img = await this.cluster?.execute({ url: this.mempoolHost + rawPath, path: rawPath, action: 'screenshot', reqUrl: req.url });
logger.info(`unfurl returned "${req.url}" in ${Date.now() - start}ms | ${this.unfurlQueueLength - 1} tasks in queue`);
} else {
logger.info('rendering not enabled for page "' + req.url + '"');
img = await this.cluster?.execute({ url: this.mempoolHost + rawPath, path: rawPath, action: 'screenshot' });
}
if (!img) {
// send local fallback image file
res.sendFile(nodejsPath.join(__dirname, matchedRoute.fallbackFile));
// proxy fallback image from the frontend
res.sendFile(nodejsPath.join(__dirname, matchedRoute.fallbackImg));
} else {
res.contentType('image/png');
res.send(img);
@@ -259,8 +241,6 @@ class Server {
} catch (e) {
logger.err(e instanceof Error ? e.message : `${e} ${req.params[0]}`);
res.status(500).send(e instanceof Error ? e.message : e);
} finally {
this.unfurlQueueLength--;
}
}
@@ -278,17 +258,10 @@ class Server {
res.status(404).send();
return;
} else {
logger.info('proxying resource "' + req.url + '"');
if (this.secureMempoolHost) {
https.get(this.mempoolHost + rawPath, { headers: { 'user-agent': 'mempoolunfurl' }}, (got) => {
res.writeHead(got.statusCode, got.headers);
return got.pipe(res);
});
if (this.secureHost) {
https.get(config.SERVER.HOST + rawPath, { headers: { 'user-agent': 'mempoolunfurl' }}, (got) => got.pipe(res));
} else {
http.get(this.mempoolHost + rawPath, { headers: { 'user-agent': 'mempoolunfurl' }}, (got) => {
res.writeHead(got.statusCode, got.headers);
return got.pipe(res);
});
http.get(config.SERVER.HOST + rawPath, { headers: { 'user-agent': 'mempoolunfurl' }}, (got) => got.pipe(res));
}
return;
}
@@ -297,13 +270,9 @@ class Server {
let result = '';
try {
if (unfurl) {
logger.info('unfurling "' + req.url + '"');
result = await this.renderUnfurlMeta(rawPath);
} else {
this.seoQueueLength++;
const start = Date.now();
result = await this.renderSEOPage(rawPath, req.url);
logger.info(`slurp returned "${req.url}" in ${Date.now() - start}ms | ${this.seoQueueLength - 1} tasks in queue`);
result = await this.renderSEOPage(rawPath);
}
if (result && result.length) {
if (result === '404') {
@@ -317,10 +286,6 @@ class Server {
} catch (e) {
logger.err(e instanceof Error ? e.message : `${e} ${req.params[0]}`);
res.status(500).send(e instanceof Error ? e.message : e);
} finally {
if (!unfurl) {
this.seoQueueLength--;
}
}
}
@@ -361,8 +326,8 @@ class Server {
</html>`;
}
async renderSEOPage(rawPath: string, reqUrl: string): Promise<string> {
let html = await this.ssrCluster?.execute({ url: this.mempoolHost + rawPath, path: rawPath, action: 'ssr', reqUrl });
async renderSEOPage(rawPath: string): Promise<string> {
let html = await this.ssrCluster?.execute({ url: this.mempoolHost + rawPath, path: rawPath, action: 'ssr' });
// remove javascript to prevent double hydration
if (html && html.length) {
html = html.replaceAll(/<script.*<\/script>/g, "");

View File

@@ -2,7 +2,6 @@ interface Match {
render: boolean;
title: string;
fallbackImg: string;
fallbackFile: string;
staticImg?: string;
networkMode: string;
}
@@ -31,8 +30,7 @@ const routes = {
},
lightning: {
title: "Lightning",
fallbackImg: '/resources/previews/lightning.png',
fallbackFile: '/resources/img/lightning.png',
fallbackImg: '/resources/img/lightning.png',
routes: {
node: {
render: true,
@@ -70,8 +68,7 @@ const routes = {
},
mining: {
title: "Mining",
fallbackImg: '/resources/previews/mining.png',
fallbackFile: '/resources/img/mining.png',
fallbackImg: '/resources/img/mining.png',
routes: {
pool: {
render: true,
@@ -86,15 +83,13 @@ const routes = {
const networks = {
bitcoin: {
fallbackImg: '/resources/previews/dashboard.png',
fallbackFile: '/resources/img/dashboard.png',
fallbackImg: '/resources/img/dashboard.png',
routes: {
...routes // all routes supported
}
},
liquid: {
fallbackImg: '/resources/liquid/liquid-network-preview.png',
fallbackFile: '/resources/img/liquid',
fallbackImg: '/resources/img/liquid.png',
routes: { // only block, address & tx routes supported
block: routes.block,
address: routes.address,
@@ -102,8 +97,7 @@ const networks = {
}
},
bisq: {
fallbackImg: '/resources/bisq/bisq-markets-preview.png',
fallbackFile: '/resources/img/bisq.png',
fallbackImg: '/resources/img/bisq.png',
routes: {} // no routes supported
}
};
@@ -113,7 +107,6 @@ export function matchRoute(network: string, path: string): Match {
render: false,
title: '',
fallbackImg: '',
fallbackFile: '',
networkMode: 'mainnet'
}
@@ -128,7 +121,6 @@ export function matchRoute(network: string, path: string): Match {
let route = networks[network] || networks.bitcoin;
match.fallbackImg = route.fallbackImg;
match.fallbackFile = route.fallbackFile;
// traverse the route tree until we run out of route or tree, or hit a renderable match
while (!route.render && route.routes && parts.length && route.routes[parts[0]]) {
@@ -136,7 +128,6 @@ export function matchRoute(network: string, path: string): Match {
parts.shift();
if (route.fallbackImg) {
match.fallbackImg = route.fallbackImg;
match.fallbackFile = route.fallbackFile;
}
}