Merge branch 'master' into mononaut/attitude-adjustment

nymkappa 2023-08-26 11:48:21 +02:00 committed by GitHub
commit b9838fda8d
18 changed files with 2569 additions and 1145 deletions

@ -27,8 +27,8 @@ jobs:
node-version: ${{ matrix.node }}
registry-url: "https://registry.npmjs.org"
- - name: Install 1.70.x Rust toolchain
- uses: dtolnay/rust-toolchain@1.70
+ - name: Install 1.63.x Rust toolchain
+ uses: dtolnay/rust-toolchain@1.63
- name: Install
if: ${{ matrix.flavor == 'dev'}}

@ -6,6 +6,8 @@ authors = ["mononaut"]
edition = "2021"
publish = false
[workspace]
[lib]
crate-type = ["cdylib"]

@ -335,13 +335,15 @@ fn set_relatives(txid: u32, audit_pool: &mut AuditPool) {
let mut total_sigops: u32 = 0;
for ancestor_id in &ancestors {
- let Some(ancestor) = audit_pool
+ if let Some(ancestor) = audit_pool
.get(*ancestor_id as usize)
- .expect("audit_pool contains all ancestors") else { todo!() };
+ .expect("audit_pool contains all ancestors")
+ {
total_fee += ancestor.fee;
total_sigop_adjusted_weight += ancestor.sigop_adjusted_weight;
total_sigop_adjusted_vsize += ancestor.sigop_adjusted_vsize;
total_sigops += ancestor.sigops;
+ } else { todo!() };
}
if let Some(Some(tx)) = audit_pool.get_mut(txid as usize) {
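Note: the workflow change above swaps the pinned Rust toolchain from 1.70 to 1.63, and this hunk rewrites a `let ... else` binding as an `if let ... else` block. The two changes go together: `let ... else` only stabilized in Rust 1.65, so it cannot compile on a 1.63 toolchain. A minimal sketch of the two equivalent forms; the names (sum_fees, pool, ids) are hypothetical and not taken from the diff above:

    // Hypothetical illustration of the pattern, not the actual code in this commit.
    fn sum_fees(pool: &[Option<u64>], ids: &[usize]) -> u64 {
        let mut total = 0;
        for &id in ids {
            // Rust 1.65+ only: `let ... else` must diverge in the else arm.
            // let Some(fee) = pool.get(id).expect("id in range") else { todo!() };
            // total += fee;

            // Rust 1.63-compatible: the same control flow written with `if let`.
            if let Some(fee) = pool.get(id).expect("id in range") {
                total += fee;
            } else {
                todo!()
            }
        }
        total
    }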

@ -0,0 +1,3 @@
I hereby accept the terms of the Contributor License Agreement in the CONTRIBUTING.md file with sha256 hash c80c5ee4c71c5a76a1f6cd35339bd0c45b25b491933ea7b02a66470e9f43a6fd.
Signed: TheBlueMatt

File diff suppressed because it is too large

@ -61,18 +61,18 @@
"cypress:run:ci:staging": "node update-config.js TESTNET_ENABLED=true SIGNET_ENABLED=true LIQUID_ENABLED=true BISQ_ENABLED=true ITEMS_PER_PAGE=25 && npm run generate-config && start-server-and-test serve:local-staging 4200 cypress:run:record"
},
"dependencies": {
"@angular-devkit/build-angular": "^16.1.4",
"@angular/animations": "^16.1.5",
"@angular/cli": "^16.1.4",
"@angular/common": "^16.1.5",
"@angular/compiler": "^16.1.5",
"@angular/core": "^16.1.5",
"@angular/forms": "^16.1.5",
"@angular/localize": "^16.1.5",
"@angular/platform-browser": "^16.1.5",
"@angular/platform-browser-dynamic": "^16.1.5",
"@angular/platform-server": "^16.1.5",
"@angular/router": "^16.1.5",
"@angular-devkit/build-angular": "^16.2.0",
"@angular/animations": "^16.2.2",
"@angular/cli": "^16.2.0",
"@angular/common": "^16.2.2",
"@angular/compiler": "^16.2.2",
"@angular/core": "^16.2.2",
"@angular/forms": "^16.2.2",
"@angular/localize": "^16.2.2",
"@angular/platform-browser": "^16.2.2",
"@angular/platform-browser-dynamic": "^16.2.2",
"@angular/platform-server": "^16.2.2",
"@angular/router": "^16.2.2",
"@fortawesome/angular-fontawesome": "~0.13.0",
"@fortawesome/fontawesome-common-types": "~6.4.0",
"@fortawesome/fontawesome-svg-core": "~6.4.0",

@ -70,10 +70,12 @@ export class BlockOverviewGraphComponent implements AfterViewInit, OnDestroy, On
this.canvas.nativeElement.addEventListener('webglcontextlost', this.handleContextLost, false);
this.canvas.nativeElement.addEventListener('webglcontextrestored', this.handleContextRestored, false);
this.gl = this.canvas.nativeElement.getContext('webgl');
- this.initCanvas();
+ if (this.gl) {
+ this.initCanvas();
+ this.resizeCanvas();
+ }
}
ngOnChanges(changes): void {
if (changes.orientation || changes.flip) {
@ -195,11 +197,17 @@ export class BlockOverviewGraphComponent implements AfterViewInit, OnDestroy, On
cancelAnimationFrame(this.animationFrameRequest);
this.animationFrameRequest = null;
this.running = false;
this.gl = null;
}
handleContextRestored(event): void {
if (this.canvas?.nativeElement) {
this.gl = this.canvas.nativeElement.getContext('webgl');
if (this.gl) {
this.initCanvas();
}
}
}
@HostListener('window:resize', ['$event'])
resizeCanvas(): void {
@ -224,6 +232,9 @@ export class BlockOverviewGraphComponent implements AfterViewInit, OnDestroy, On
}
compileShader(src, type): WebGLShader {
if (!this.gl) {
return;
}
const shader = this.gl.createShader(type);
this.gl.shaderSource(shader, src);
@ -237,6 +248,9 @@ export class BlockOverviewGraphComponent implements AfterViewInit, OnDestroy, On
}
buildShaderProgram(shaderInfo): WebGLProgram {
if (!this.gl) {
return;
}
const program = this.gl.createProgram();
shaderInfo.forEach((desc) => {
@ -273,7 +287,7 @@ export class BlockOverviewGraphComponent implements AfterViewInit, OnDestroy, On
now = performance.now();
}
// skip re-render if there's no change to the scene
- if (this.scene) {
+ if (this.scene && this.gl) {
/* SET UP SHADER UNIFORMS */
// screen dimensions
this.gl.uniform2f(this.gl.getUniformLocation(this.shaderProgram, 'screenSize'), this.displayWidth, this.displayHeight);

@ -90,7 +90,7 @@ export const download = (href, name) => {
export function detectWebGL(): boolean {
const canvas = document.createElement('canvas');
- const gl = canvas.getContext('webgl') || canvas.getContext('experimental-webgl');
+ const gl = canvas.getContext('webgl');
return !!(gl && gl instanceof WebGLRenderingContext);
}

@ -48,7 +48,7 @@ load_rc_config ${name}
: ${bitcoin_syslog_facility:="local0"}
: ${bitcoin_syslog_priority:="info"}
: ${bitcoin_syslog_tag:="bitcoin"}
- : ${bitcoin_kill_after:="300"}
+ : ${bitcoin_kill_after:="600"}
: ${bitcoinlimits_args:="-e -U ${bitcoin_user}"}
# set up dependant variables

@ -530,6 +530,7 @@ osCertbotDryRun()
zfsCreateFilesystems()
{
zfs create -o "mountpoint=/backup" "${ZPOOL}/backup"
zfs create -o "mountpoint=/var/cache/nginx" "${ZPOOL}/cache"
zfs create -o "mountpoint=${ELEMENTS_HOME}" "${ZPOOL}/elements"
zfs create -o "mountpoint=${BITCOIN_HOME}" "${ZPOOL}/bitcoin"
@ -1044,11 +1045,9 @@ osSudo "${ROOT_USER}" crontab -u "${MEMPOOL_USER}" "${MEMPOOL_HOME}/${MEMPOOL_RE
echo "[*] Installing nvm.sh from GitHub"
osSudo "${MEMPOOL_USER}" sh -c 'curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.1/install.sh | zsh'
echo "[*] Building NodeJS v20.4.0 via nvm.sh"
osSudo "${MEMPOOL_USER}" zsh -c 'source ~/.zshrc ; nvm install v20.4.0 --shared-zlib'
echo "[*] Building NodeJS v18.16.1 via nvm.sh"
osSudo "${MEMPOOL_USER}" zsh -c 'source ~/.zshrc ; nvm install v18.16.1 --shared-zlib'
osSudo "${MEMPOOL_USER}" zsh -c 'source ~/.zshrc ; nvm alias default 18.16.1'
echo "[*] Building NodeJS v20.5.1 via nvm.sh"
osSudo "${MEMPOOL_USER}" zsh -c 'source ~/.zshrc ; nvm install v20.5.1 --shared-zlib'
osSudo "${MEMPOOL_USER}" zsh -c 'source ~/.zshrc ; nvm alias default 20.5.1'
####################
# Tor installation #
@ -1852,28 +1851,26 @@ chown "${MEMPOOL_USER}:${MEMPOOL_GROUP}" "${MEMPOOL_MYSQL_CREDENTIALS}"
echo "[*] Adding Nginx configuration"
osSudo "${ROOT_USER}" install -c -o "${ROOT_USER}" -g "${ROOT_GROUP}" -m 644 "${MEMPOOL_HOME}/${MEMPOOL_REPO_NAME}/production/nginx/nginx.conf" "${NGINX_CONFIGURATION}"
mkdir -p /var/cache/nginx/services /var/cache/nginx/api
chown "${NGINX_USER}:${NGINX_GROUP}" /var/cache/nginx/services /var/cache/nginx/api
ln -s "${MEMPOOL_HOME}/mempool" "${NGINX_ETC_FOLDER}/mempool"
osSudo "${ROOT_USER}" sed -i.orig "s!__NGINX_USER__!${NGINX_USER}!" "${NGINX_CONFIGURATION}"
osSudo "${ROOT_USER}" sed -i.orig "s!__NGINX_ETC_FOLDER__!${NGINX_ETC_FOLDER}!" "${NGINX_CONFIGURATION}"
if [ "${TOR_INSTALL}" = ON ];then
echo "[*] Read tor v3 onion hostnames"
NGINX_MEMPOOL_ONION=$(cat "${TOR_RESOURCES}/mempool/hostname")
osSudo "${ROOT_USER}" sed -i.orig "s!__NGINX_MEMPOOL_ONION__!${NGINX_MEMPOOL_ONION%.onion}!" "${NGINX_CONFIGURATION}"
if [ "${ELEMENTS_LIQUID_ENABLE}" = "ON" ];then
NGINX_LIQUID_ONION=$(cat "${TOR_RESOURCES}/liquid/hostname")
osSudo "${ROOT_USER}" sed -i.orig "s!__NGINX_LIQUID_ONION__!${NGINX_LIQUID_ONIONi%.onion}!" "${NGINX_CONFIGURATION}"
fi
if [ "${BISQ_MAINNET_ENABLE}" = "ON" ];then
NGINX_BISQ_ONION=$(cat "${TOR_RESOURCES}/bisq/hostname")
osSudo "${ROOT_USER}" sed -i.orig "s!__NGINX_BISQ_ONION__!${NGINX_BISQ_ONION%.onion}!" "${NGINX_CONFIGURATION}"
fi
fi
#if [ "${TOR_INSTALL}" = ON ];then
# echo "[*] Read tor v3 onion hostnames"
#
# NGINX_MEMPOOL_ONION=$(cat "${TOR_RESOURCES}/mempool/hostname")
# osSudo "${ROOT_USER}" sed -i.orig "s!__NGINX_MEMPOOL_ONION__!${NGINX_MEMPOOL_ONION%.onion}!" "${NGINX_CONFIGURATION}"
#
# if [ "${ELEMENTS_LIQUID_ENABLE}" = "ON" ];then
# NGINX_LIQUID_ONION=$(cat "${TOR_RESOURCES}/liquid/hostname")
# osSudo "${ROOT_USER}" sed -i.orig "s!__NGINX_LIQUID_ONION__!${NGINX_LIQUID_ONIONi%.onion}!" "${NGINX_CONFIGURATION}"
# fi
#
# if [ "${BISQ_MAINNET_ENABLE}" = "ON" ];then
# NGINX_BISQ_ONION=$(cat "${TOR_RESOURCES}/bisq/hostname")
# osSudo "${ROOT_USER}" sed -i.orig "s!__NGINX_BISQ_ONION__!${NGINX_BISQ_ONION%.onion}!" "${NGINX_CONFIGURATION}"
# fi
#fi
##### OS systemd
@ -1897,13 +1894,26 @@ echo "[*] Updating system startup configuration"
case $OS in
FreeBSD)
echo 'nginx_enable="YES"' >> /etc/rc.conf
echo 'bitcoin_enable="YES"' >> /etc/rc.conf
echo 'tor_enable="YES"' >> /etc/rc.conf
echo 'postfix_enable="YES"' >> /etc/rc.conf
echo 'mysql_enable="YES"' >> /etc/rc.conf
echo 'mysql_dbdir="/mysql"' >> /etc/rc.conf
echo 'tor_enable="YES"' >> /etc/rc.conf
cat >> /etc/rc.conf <<EOF
moused_nondefault_enable="NO"
nginx_enable="YES"
nginx_profiles="mempool"
nginx_mempool_flags="-p /mempool"
nginx_mempool_configfile="/mempool/mempool/nginx/nginx.conf"
mysql_enable="YES"
mysql_dbdir="/mysql"
mysql_args="--innodb-buffer-pool-size=8G --bind-address 127.0.0.1"
kld_list="nvidia"
nvidia_xorg_enable="YES"
dbus_enable="YES"
tor_enable="YES"
bitcoin_enable="YES"
postfix_enable="YES"
EOF
;;
Debian)
@ -2039,12 +2049,12 @@ osSudo "${MEMPOOL_USER}" sh -c "cd ${MEMPOOL_HOME} && ./upgrade" || true
##### finish
if [ "${TOR_INSTALL}" = ON ];then
echo "Your auto-generated Tor addresses are:"
echo "${NGINX_MEMPOOL_ONION}"
echo "${NGINX_BISQ_ONION}"
echo "${NGINX_LIQUID_ONION}"
fi
#if [ "${TOR_INSTALL}" = ON ];then
# echo "Your auto-generated Tor addresses are:"
# echo "${NGINX_MEMPOOL_ONION}"
# echo "${NGINX_BISQ_ONION}"
# echo "${NGINX_LIQUID_ONION}"
#fi
echo
echo 'Please reboot to start all the services.'

@ -1,7 +1,7 @@
#!/usr/bin/env zsh
export NVM_DIR="$HOME/.nvm"
source "$NVM_DIR/nvm.sh"
- nvm use v20.4.0
+ nvm use v20.5.1
# start all mempool backends that exist
for site in mainnet mainnet-lightning testnet testnet-lightning signet signet-lightning bisq liquid liquidtestnet;do

@ -1,7 +1,7 @@
# proxy cache
- proxy_cache_path /var/cache/nginx/api keys_zone=api:20m levels=1:2 inactive=600s max_size=200m;
- proxy_cache_path /var/cache/nginx/services keys_zone=services:20m levels=1:2 inactive=600s max_size=200m;
- proxy_cache_path /var/cache/nginx/markets keys_zone=markets:20m levels=1:2 inactive=600s max_size=200m;
- proxy_cache_path /var/cache/nginx/unfurler keys_zone=unfurler:20m levels=1:2 inactive=600s max_size=200m;
- proxy_cache_path /var/cache/nginx/slurper keys_zone=slurper:20m levels=1:2 inactive=600s max_size=200m;
- types_hash_max_size 2048;
+ proxy_cache_path /var/cache/nginx/api keys_zone=api:20m levels=1:2 inactive=365d max_size=2000m;
+ proxy_cache_path /var/cache/nginx/unfurler keys_zone=unfurler:20m levels=1:2 inactive=365d max_size=2000m;
+ proxy_cache_path /var/cache/nginx/slurper keys_zone=slurper:20m levels=1:2 inactive=365d max_size=5000m;
+ proxy_cache_path /var/cache/nginx/services keys_zone=services:20m levels=1:2 inactive=365d max_size=100m;
+ proxy_cache_path /var/cache/nginx/markets keys_zone=markets:20m levels=1:2 inactive=365d max_size=100m;
+ types_hash_max_size 4096;

@ -97,6 +97,14 @@ location ~* ^/.+\..+\.(js|css)$ {
expires 1y;
}
# old stuff is gone
location /explorer/ {
return 410;
}
location /sitemap/ {
return 410;
}
# unfurl preview
location /preview {
try_files /$lang/$uri $uri /en-US/$uri /en-US/index.html =404;
@ -105,7 +113,6 @@ location /preview {
# unfurl renderer
location ^~ /render {
try_files /dev/null @mempool-space-unfurler;
- expires 10m;
}
# unfurl handler
location /unfurl/ {
@ -136,8 +143,10 @@ location @mempool-space-unfurler {
proxy_cache_background_update on;
proxy_cache_use_stale updating;
proxy_cache unfurler;
- proxy_cache_valid 200 10m;
+ proxy_cache_valid 200 1h; # will re-render page if older than this
proxy_redirect off;
expires 1d;
}
location @mempool-space-slurper {
@ -151,6 +160,8 @@ location @mempool-space-slurper {
proxy_cache_background_update on;
proxy_cache_use_stale updating;
proxy_cache slurper;
- proxy_cache_valid 200 10m;
+ proxy_cache_valid 200 1h; # will re-render page if older than this
proxy_redirect off;
expires 10d;
}

@ -42,6 +42,6 @@
"--use-mock-keychain",
"--ignore-gpu-blacklist",
"--ignore-gpu-blocklist",
"--use-gl=egl"
"--use-angle=default"
]
}

@ -11,12 +11,13 @@ const BROWSER_TIMEOUT = 8000;
const maxAgeMs = (config.PUPPETEER.MAX_PAGE_AGE || (24 * 60 * 60)) * 1000;
const maxConcurrency = config.PUPPETEER.CLUSTER_SIZE;
- interface RepairablePage extends puppeteer.Page {
+ export interface RepairablePage extends puppeteer.Page {
repairRequested?: boolean;
language?: string | null;
createdAt?: number;
free?: boolean;
index?: number;
clusterGroup?: string;
}
interface ResourceData {
@ -76,7 +77,7 @@ export default class ReusablePage extends ConcurrencyImplementation {
for (let i = 0; i < maxConcurrency; i++) {
const newPage = await this.initPage();
newPage.index = this.pages.length;
- logger.info(`initialized page ${newPage.index}`);
+ logger.info(`initialized page ${newPage.clusterGroup}:${newPage.index}`);
this.pages.push(newPage);
}
}
@ -87,6 +88,7 @@ export default class ReusablePage extends ConcurrencyImplementation {
protected async initPage(): Promise<RepairablePage> {
const page = await (this.browser as puppeteer.Browser).newPage() as RepairablePage;
page.clusterGroup = 'unfurler';
page.language = null;
page.createdAt = Date.now();
let defaultUrl
@ -108,7 +110,7 @@ export default class ReusablePage extends ConcurrencyImplementation {
page.waitForSelector('meta[property="og:preview:fail"]', { timeout: config.PUPPETEER.RENDER_TIMEOUT || 3000 }).then(() => false)
])
} catch (e) {
- logger.err(`failed to load frontend during page initialization: ` + (e instanceof Error ? e.message : `${e}`));
+ logger.err(`failed to load frontend during page initialization ${page.clusterGroup}:${page.index}: ` + (e instanceof Error ? e.message : `${e}`));
page.repairRequested = true;
}
}
@ -129,6 +131,7 @@ export default class ReusablePage extends ConcurrencyImplementation {
protected async repairPage(page) {
// create a new page
logger.info(`Repairing page ${page.clusterGroup}:${page.index}`);
const newPage = await this.initPage();
newPage.free = true;
// replace the old page
@ -138,9 +141,10 @@ export default class ReusablePage extends ConcurrencyImplementation {
try {
await page.goto('about:blank', {timeout: 200}); // prevents memory leak (maybe?)
} catch (e) {
logger.err('unexpected page repair error');
}
logger.err(`unexpected page repair error ${page.clusterGroup}:${page.index}`);
} finally {
await page.close();
}
return newPage;
}

@ -2,19 +2,11 @@ import * as puppeteer from 'puppeteer';
import { timeoutExecute } from 'puppeteer-cluster/dist/util';
import logger from '../logger';
import config from '../config';
- import ReusablePage from './ReusablePage';
+ import ReusablePage, { RepairablePage } from './ReusablePage';
const mempoolHost = config.MEMPOOL.HTTP_HOST + (config.MEMPOOL.HTTP_PORT ? ':' + config.MEMPOOL.HTTP_PORT : '');
const mockImageBuffer = Buffer.from("iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAQAAAC1HAwCAAAAC0lEQVQYV2NgYAAAAAMAAWgmWQ0AAAAASUVORK5CYII=", 'base64');
- interface RepairablePage extends puppeteer.Page {
- repairRequested?: boolean;
- language?: string | null;
- createdAt?: number;
- free?: boolean;
- index?: number;
- }
export default class ReusableSSRPage extends ReusablePage {
public constructor(options: puppeteer.LaunchOptions, puppeteer: any) {
@ -27,13 +19,14 @@ export default class ReusableSSRPage extends ReusablePage {
protected async initPage(): Promise<RepairablePage> {
const page = await (this.browser as puppeteer.Browser).newPage() as RepairablePage;
page.clusterGroup = 'slurper';
page.language = null;
page.createdAt = Date.now();
- const defaultUrl = mempoolHost + '/about';
+ const defaultUrl = mempoolHost + '/preview/block/1';
page.on('pageerror', (err) => {
console.log(err);
- // page.repairRequested = true;
+ page.repairRequested = true;
});
await page.setRequestInterception(true);
page.on('request', req => {
@ -46,7 +39,7 @@ export default class ReusableSSRPage extends ReusablePage {
headers: {"Access-Control-Allow-Origin": "*"},
body: mockImageBuffer
});
- } else if (!['document', 'script', 'xhr', 'fetch'].includes(req.resourceType())) {
+ } else if (req.resourceType() === 'media') {
return req.abort();
} else {
return req.continue();
@ -56,7 +49,7 @@ export default class ReusableSSRPage extends ReusablePage {
await page.goto(defaultUrl, { waitUntil: "networkidle0" });
await page.waitForSelector('meta[property="og:meta:ready"]', { timeout: config.PUPPETEER.RENDER_TIMEOUT || 3000 });
} catch (e) {
- logger.err(`failed to load frontend during ssr page initialization: ` + (e instanceof Error ? e.message : `${e}`));
+ logger.err(`failed to load frontend during ssr page initialization ${page.clusterGroup}:${page.index}: ` + (e instanceof Error ? e.message : `${e}`));
page.repairRequested = true;
}
page.free = true;

@ -5,7 +5,7 @@ import * as https from 'https';
import config from './config';
import { Cluster } from 'puppeteer-cluster';
import ReusablePage from './concurrency/ReusablePage';
- import ReusableSSRPage from './concurrency/ReusablePage';
+ import ReusableSSRPage from './concurrency/ReusableSSRPage';
import { parseLanguageUrl } from './language/lang';
import { matchRoute } from './routes';
import nodejsPath from 'path';
@ -28,13 +28,18 @@ class Server {
mempoolUrl: URL;
network: string;
secureHost = true;
secureMempoolHost = true;
canonicalHost: string;
seoQueueLength: number = 0;
unfurlQueueLength: number = 0;
constructor() {
this.app = express();
this.mempoolHost = config.MEMPOOL.HTTP_HOST + (config.MEMPOOL.HTTP_PORT ? ':' + config.MEMPOOL.HTTP_PORT : '');
this.mempoolUrl = new URL(this.mempoolHost);
this.secureHost = config.SERVER.HOST.startsWith('https');
this.secureMempoolHost = config.MEMPOOL.HTTP_HOST.startsWith('https');
this.network = config.MEMPOOL.NETWORK || 'bitcoin';
let canonical;
@ -120,8 +125,10 @@ class Server {
this.app.get('*', (req, res) => { return this.renderHTML(req, res, false) })
}
- async clusterTask({ page, data: { url, path, action } }) {
+ async clusterTask({ page, data: { url, path, action, reqUrl } }) {
const start = Date.now();
try {
logger.info(`rendering "${reqUrl}" on tab ${page.clusterGroup}:${page.index}`);
const urlParts = parseLanguageUrl(path);
if (page.language !== urlParts.lang) {
// switch language
@ -154,22 +161,25 @@ class Server {
captureBeyondViewport: false,
clip: { width: 1200, height: 600, x: 0, y: 0, scale: 1 },
});
logger.info(`rendered unfurl img in ${Date.now() - start}ms for "${reqUrl}" on tab ${page.clusterGroup}:${page.index}`);
return screenshot;
} else if (success === false) {
- logger.warn(`failed to render ${path} for ${action} due to client-side error, e.g. requested an invalid txid`);
+ logger.warn(`failed to render ${reqUrl} for ${action} due to client-side error, e.g. requested an invalid txid`);
page.repairRequested = true;
} else {
- logger.warn(`failed to render ${path} for ${action} due to puppeteer timeout`);
+ logger.warn(`failed to render ${reqUrl} for ${action} due to puppeteer timeout`);
page.repairRequested = true;
}
} catch (e) {
- logger.err(`failed to render ${path} for ${action}: ` + (e instanceof Error ? e.message : `${e}`));
+ logger.err(`failed to render ${reqUrl} for ${action}: ` + (e instanceof Error ? e.message : `${e}`));
page.repairRequested = true;
}
}
- async ssrClusterTask({ page, data: { url, path, action } }) {
+ async ssrClusterTask({ page, data: { url, path, action, reqUrl } }) {
const start = Date.now();
try {
logger.info(`slurping "${reqUrl}" on tab ${page.clusterGroup}:${page.index}`);
const urlParts = parseLanguageUrl(path);
if (page.language !== urlParts.lang) {
// switch language
@ -197,17 +207,20 @@ class Server {
return !!window['soft404'];
});
if (is404) {
logger.info(`slurp 404 in ${Date.now() - start}ms for "${reqUrl}" on tab ${page.clusterGroup}:${page.index}`);
return '404';
} else {
let html = await page.content();
logger.info(`rendered slurp in ${Date.now() - start}ms for "${reqUrl}" on tab ${page.clusterGroup}:${page.index}`);
return html;
}
} catch (e) {
if (e instanceof TimeoutError) {
let html = await page.content();
logger.info(`rendered partial slurp in ${Date.now() - start}ms for "${reqUrl}" on tab ${page.clusterGroup}:${page.index}`);
return html;
} else {
- logger.err(`failed to render ${path} for ${action}: ` + (e instanceof Error ? e.message : `${e}`));
+ logger.err(`failed to render ${reqUrl} for ${action}: ` + (e instanceof Error ? e.message : `${e}`));
page.repairRequested = true;
}
}
@ -219,6 +232,8 @@ class Server {
async renderPreview(req, res) {
try {
this.unfurlQueueLength++;
const start = Date.now();
const rawPath = req.params[0];
let img = null;
@ -228,12 +243,15 @@ class Server {
// don't bother unless the route is definitely renderable
if (rawPath.includes('/preview/') && matchedRoute.render) {
- img = await this.cluster?.execute({ url: this.mempoolHost + rawPath, path: rawPath, action: 'screenshot' });
+ img = await this.cluster?.execute({ url: this.mempoolHost + rawPath, path: rawPath, action: 'screenshot', reqUrl: req.url });
+ logger.info(`unfurl returned "${req.url}" in ${Date.now() - start}ms | ${this.unfurlQueueLength - 1} tasks in queue`);
} else {
logger.info('rendering not enabled for page "' + req.url + '"');
}
if (!img) {
- // proxy fallback image from the frontend
- res.sendFile(nodejsPath.join(__dirname, matchedRoute.fallbackImg));
+ // send local fallback image file
+ res.sendFile(nodejsPath.join(__dirname, matchedRoute.fallbackFile));
} else {
res.contentType('image/png');
res.send(img);
@ -241,6 +259,8 @@ class Server {
} catch (e) {
logger.err(e instanceof Error ? e.message : `${e} ${req.params[0]}`);
res.status(500).send(e instanceof Error ? e.message : e);
} finally {
this.unfurlQueueLength--;
}
}
@ -258,10 +278,17 @@ class Server {
res.status(404).send();
return;
} else {
- if (this.secureHost) {
- https.get(config.SERVER.HOST + rawPath, { headers: { 'user-agent': 'mempoolunfurl' }}, (got) => got.pipe(res));
+ logger.info('proxying resource "' + req.url + '"');
+ if (this.secureMempoolHost) {
+ https.get(this.mempoolHost + rawPath, { headers: { 'user-agent': 'mempoolunfurl' }}, (got) => {
+ res.writeHead(got.statusCode, got.headers);
+ return got.pipe(res);
+ });
} else {
- http.get(config.SERVER.HOST + rawPath, { headers: { 'user-agent': 'mempoolunfurl' }}, (got) => got.pipe(res));
+ http.get(this.mempoolHost + rawPath, { headers: { 'user-agent': 'mempoolunfurl' }}, (got) => {
+ res.writeHead(got.statusCode, got.headers);
+ return got.pipe(res);
+ });
}
return;
}
@ -270,9 +297,13 @@ class Server {
let result = '';
try {
if (unfurl) {
logger.info('unfurling "' + req.url + '"');
result = await this.renderUnfurlMeta(rawPath);
} else {
- result = await this.renderSEOPage(rawPath);
+ this.seoQueueLength++;
+ const start = Date.now();
+ result = await this.renderSEOPage(rawPath, req.url);
+ logger.info(`slurp returned "${req.url}" in ${Date.now() - start}ms | ${this.seoQueueLength - 1} tasks in queue`);
}
if (result && result.length) {
if (result === '404') {
@ -286,6 +317,10 @@ class Server {
} catch (e) {
logger.err(e instanceof Error ? e.message : `${e} ${req.params[0]}`);
res.status(500).send(e instanceof Error ? e.message : e);
} finally {
if (!unfurl) {
this.seoQueueLength--;
}
}
}
@ -326,8 +361,8 @@ class Server {
</html>`;
}
- async renderSEOPage(rawPath: string): Promise<string> {
- let html = await this.ssrCluster?.execute({ url: this.mempoolHost + rawPath, path: rawPath, action: 'ssr' });
+ async renderSEOPage(rawPath: string, reqUrl: string): Promise<string> {
+ let html = await this.ssrCluster?.execute({ url: this.mempoolHost + rawPath, path: rawPath, action: 'ssr', reqUrl });
// remove javascript to prevent double hydration
if (html && html.length) {
html = html.replaceAll(/<script.*<\/script>/g, "");

@ -2,6 +2,7 @@ interface Match {
render: boolean;
title: string;
fallbackImg: string;
fallbackFile: string;
staticImg?: string;
networkMode: string;
}
@ -30,7 +31,8 @@ const routes = {
},
lightning: {
title: "Lightning",
- fallbackImg: '/resources/img/lightning.png',
+ fallbackImg: '/resources/previews/lightning.png',
+ fallbackFile: '/resources/img/lightning.png',
routes: {
node: {
render: true,
@ -68,7 +70,8 @@ const routes = {
},
mining: {
title: "Mining",
- fallbackImg: '/resources/img/mining.png',
+ fallbackImg: '/resources/previews/mining.png',
+ fallbackFile: '/resources/img/mining.png',
routes: {
pool: {
render: true,
@ -83,13 +86,15 @@ const routes = {
const networks = {
bitcoin: {
- fallbackImg: '/resources/img/dashboard.png',
+ fallbackImg: '/resources/previews/dashboard.png',
+ fallbackFile: '/resources/img/dashboard.png',
routes: {
...routes // all routes supported
}
},
liquid: {
- fallbackImg: '/resources/img/liquid.png',
+ fallbackImg: '/resources/liquid/liquid-network-preview.png',
+ fallbackFile: '/resources/img/liquid',
routes: { // only block, address & tx routes supported
block: routes.block,
address: routes.address,
@ -97,7 +102,8 @@ const networks = {
}
},
bisq: {
- fallbackImg: '/resources/img/bisq.png',
+ fallbackImg: '/resources/bisq/bisq-markets-preview.png',
+ fallbackFile: '/resources/img/bisq.png',
routes: {} // no routes supported
}
};
@ -107,6 +113,7 @@ export function matchRoute(network: string, path: string): Match {
render: false,
title: '',
fallbackImg: '',
fallbackFile: '',
networkMode: 'mainnet'
}
@ -121,6 +128,7 @@ export function matchRoute(network: string, path: string): Match {
let route = networks[network] || networks.bitcoin;
match.fallbackImg = route.fallbackImg;
match.fallbackFile = route.fallbackFile;
// traverse the route tree until we run out of route or tree, or hit a renderable match
while (!route.render && route.routes && parts.length && route.routes[parts[0]]) {
@ -128,6 +136,7 @@ export function matchRoute(network: string, path: string): Match {
parts.shift();
if (route.fallbackImg) {
match.fallbackImg = route.fallbackImg;
match.fallbackFile = route.fallbackFile;
}
}