Merge branch 'master' into nymkappa/bugfix/disable-ln-import-testsignet
commit 1cd0796428
@@ -8,6 +8,7 @@ import { isIP } from 'net';
 import { Common } from '../../../api/common';
 import channelsApi from '../../../api/explorer/channels.api';
 import nodesApi from '../../../api/explorer/nodes.api';
+import { ResultSetHeader } from 'mysql2';

 const fsPromises = promises;

@@ -24,6 +25,7 @@ class LightningStatsImporter {
     }

     await this.$importHistoricalLightningStats();
+    await this.$cleanupIncorrectSnapshot();
   }

   /**
@@ -371,6 +373,12 @@ class LightningStatsImporter {
         continue;
       }

+      if (this.isIncorrectSnapshot(timestamp, graph)) {
+        logger.debug(`Ignoring ${this.topologiesFolder}/${filename}, because we defined it as an incorrect snapshot`);
+        ++totalProcessed;
+        continue;
+      }
+
       if (!logStarted) {
         logger.info(`Found a topology file that we did not import. Importing historical lightning stats now.`);
         logStarted = true;
@@ -401,7 +409,7 @@ class LightningStatsImporter {
     }
   }

-  async cleanupTopology(graph) {
+  cleanupTopology(graph): ILightningApi.NetworkGraph {
     const newGraph = {
       nodes: <ILightningApi.Node[]>[],
       edges: <ILightningApi.Channel[]>[],
@@ -460,6 +468,69 @@ class LightningStatsImporter {

     return newGraph;
   }

+  private isIncorrectSnapshot(timestamp, graph): boolean {
+    if (timestamp >= 1549065600 /* 2019-02-02 */ && timestamp <= 1550620800 /* 2019-02-20 */ && graph.nodes.length < 2600) {
+      return true;
+    }
+    if (timestamp >= 1552953600 /* 2019-03-19 */ && timestamp <= 1556323200 /* 2019-04-27 */ && graph.nodes.length < 4000) {
+      return true;
+    }
+    if (timestamp >= 1557446400 /* 2019-05-10 */ && timestamp <= 1560470400 /* 2019-06-14 */ && graph.nodes.length < 4000) {
+      return true;
+    }
+    if (timestamp >= 1561680000 /* 2019-06-28 */ && timestamp <= 1563148800 /* 2019-07-15 */ && graph.nodes.length < 4000) {
+      return true;
+    }
+    if (timestamp >= 1571270400 /* 2019-10-17 */ && timestamp <= 1580601600 /* 2020-02-02 */ && graph.nodes.length < 4500) {
+      return true;
+    }
+    if (timestamp >= 1591142400 /* 2020-06-03 */ && timestamp <= 1592006400 /* 2020-06-13 */ && graph.nodes.length < 5500) {
+      return true;
+    }
+    if (timestamp >= 1632787200 /* 2021-09-28 */ && timestamp <= 1633564800 /* 2021-10-07 */ && graph.nodes.length < 13000) {
+      return true;
+    }
+    if (timestamp >= 1634256000 /* 2021-10-15 */ && timestamp <= 1645401600 /* 2022-02-21 */ && graph.nodes.length < 17000) {
+      return true;
+    }
+    if (timestamp >= 1654992000 /* 2022-06-12 */ && timestamp <= 1661472000 /* 2022-08-26 */ && graph.nodes.length < 14000) {
+      return true;
+    }
+
+    return false;
+  }
+
+  private async $cleanupIncorrectSnapshot(): Promise<void> {
+    // We do not run these queries automatically because those stats are not
+    // supposed to be inserted in the first place; they are kept here as a
+    // reminder that we run them manually.
+
+    // DELETE FROM lightning_stats
+    // WHERE (
+    //   UNIX_TIMESTAMP(added) >= 1549065600 AND UNIX_TIMESTAMP(added) <= 1550620800 AND node_count < 2600 OR
+    //   UNIX_TIMESTAMP(added) >= 1552953600 AND UNIX_TIMESTAMP(added) <= 1556323200 AND node_count < 4000 OR
+    //   UNIX_TIMESTAMP(added) >= 1557446400 AND UNIX_TIMESTAMP(added) <= 1560470400 AND node_count < 4000 OR
+    //   UNIX_TIMESTAMP(added) >= 1561680000 AND UNIX_TIMESTAMP(added) <= 1563148800 AND node_count < 4000 OR
+    //   UNIX_TIMESTAMP(added) >= 1571270400 AND UNIX_TIMESTAMP(added) <= 1580601600 AND node_count < 4500 OR
+    //   UNIX_TIMESTAMP(added) >= 1591142400 AND UNIX_TIMESTAMP(added) <= 1592006400 AND node_count < 5500 OR
+    //   UNIX_TIMESTAMP(added) >= 1632787200 AND UNIX_TIMESTAMP(added) <= 1633564800 AND node_count < 13000 OR
+    //   UNIX_TIMESTAMP(added) >= 1634256000 AND UNIX_TIMESTAMP(added) <= 1645401600 AND node_count < 17000 OR
+    //   UNIX_TIMESTAMP(added) >= 1654992000 AND UNIX_TIMESTAMP(added) <= 1661472000 AND node_count < 14000
+    // )
+
+    // DELETE FROM node_stats
+    // WHERE (
+    //   UNIX_TIMESTAMP(added) >= 1549065600 AND UNIX_TIMESTAMP(added) <= 1550620800 OR
+    //   UNIX_TIMESTAMP(added) >= 1552953600 AND UNIX_TIMESTAMP(added) <= 1556323200 OR
+    //   UNIX_TIMESTAMP(added) >= 1557446400 AND UNIX_TIMESTAMP(added) <= 1560470400 OR
+    //   UNIX_TIMESTAMP(added) >= 1561680000 AND UNIX_TIMESTAMP(added) <= 1563148800 OR
+    //   UNIX_TIMESTAMP(added) >= 1571270400 AND UNIX_TIMESTAMP(added) <= 1580601600 OR
+    //   UNIX_TIMESTAMP(added) >= 1591142400 AND UNIX_TIMESTAMP(added) <= 1592006400 OR
+    //   UNIX_TIMESTAMP(added) >= 1632787200 AND UNIX_TIMESTAMP(added) <= 1633564800 OR
+    //   UNIX_TIMESTAMP(added) >= 1634256000 AND UNIX_TIMESTAMP(added) <= 1645401600 OR
+    //   UNIX_TIMESTAMP(added) >= 1654992000 AND UNIX_TIMESTAMP(added) <= 1661472000
+    // )
+  }
 }

 export default new LightningStatsImporter;
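The isIncorrectSnapshot() guard above pairs raw epoch seconds with hand-written date comments, which are easy to get wrong. As a standalone sanity check (not part of this commit), any boundary can be converted on the shell; BSD date takes -r, GNU date takes -d @:

    # BSD date (the production hosts appear to be FreeBSD, per the pciconf check further down)
    date -u -r 1549065600 +%Y-%m-%d    # 2019-02-02
    date -u -r 1571270400 +%Y-%m-%d    # 2019-10-17
    # GNU date equivalent
    date -u -d @1549065600 +%Y-%m-%d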
@@ -63,6 +63,19 @@ build_frontend()
     npm run build || exit 1
 }

+build_unfurler()
+{
+    local site="$1"
+    echo "[*] Building unfurler for ${site}"
+    [ -z "${HASH}" ] && exit 1
+    cd "$HOME/${site}/unfurler" || exit 1
+    if [ ! -e "config.json" ];then
+        cp "${HOME}/mempool/production/unfurler-config.${site}.json" "config.json"
+    fi
+    PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=true npm install || exit 1
+    npm run build || exit 1
+}
+
 build_backend()
 {
     local site="$1"
@@ -128,6 +141,11 @@ for repo in $backend_repos;do
     update_repo "${repo}"
 done

+# build unfurlers
+for repo in mainnet liquid;do
+    build_unfurler "${repo}"
+done
+
 # build backends
 for repo in $backend_repos;do
     build_backend "${repo}"
@@ -1,2 +1,8 @@
 #!/usr/bin/env zsh
-killall sh node
+killall sh
+killall node
+killall chrome
+killall xinit
+for pid in `ps uaxww|grep warmer|grep zsh|awk '{print $2}'`;do
+    kill $pid
+done
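The new ps|grep|awk loop tears down the detached zsh cache-warmer wrappers by PID, since killall matches process names rather than command lines. On hosts with pkill, one possible equivalent (a sketch, not what the script does) is:

    # match against the full command line of the warmer's zsh wrapper
    pkill -f 'zsh.*warmer'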
@@ -2,7 +2,29 @@
 export NVM_DIR="$HOME/.nvm"
 source "$NVM_DIR/nvm.sh"

+# start all mempool backends that exist
 for site in mainnet mainnet-lightning testnet testnet-lightning signet signet-lightning bisq liquid liquidtestnet;do
     cd "${HOME}/${site}/backend/" && \
+    echo "starting mempool backend: ${site}" && \
     screen -dmS "${site}" sh -c 'while true;do npm run start-production;sleep 1;done'
 done
+
+# only start unfurler if GPU present
+if pciconf -lv|grep -i nvidia >/dev/null 2>&1;then
+    export DISPLAY=:0
+    screen -dmS x startx
+    sleep 3
+    for site in mainnet liquid;do
+        cd "$HOME/${site}/unfurler" && \
+        echo "starting mempool unfurler: ${site}" && \
+        screen -dmS "unfurler-${site}" sh -c 'while true;do npm run unfurler;sleep 2;done'
+    done
+fi
+
+# start nginx warm cacher
+for site in mainnet;do
+    echo "starting mempool cache warmer: ${site}"
+    screen -dmS "warmer-${site}" $HOME/mempool/production/nginx-cache-warmer
+done
+
+exit 0
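Everything above runs inside detached screen sessions named after the service (mainnet, unfurler-mainnet, warmer-mainnet, x, and so on), each wrapped in a while-true loop so a crashed process restarts after a short sleep. Standard screen usage for operating these hosts, not part of the commit:

    screen -ls                   # list the detached sessions started above
    screen -r unfurler-mainnet   # reattach to one; Ctrl-A then D detaches again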
@@ -2,6 +2,12 @@
 hostname=$(hostname)
 slugs=(`curl -sSL https://raw.githubusercontent.com/mempool/mining-pools/master/pools.json | jq -r '.slugs[]'`)

+warm()
+{
+    echo "$1"
+    curl -i -s "$1" | head -1
+}
+
 while true
 do for url in / \
     '/api/v1/blocks' \
@@ -81,14 +87,14 @@ do for url in / \
     '/api/v1/lightning/channels-geo?style=graph' \

 do
-    curl -s "https://${hostname}${url}" >/dev/null
+    warm "https://${hostname}${url}"
 done

 for slug in $slugs
 do
-    curl -s "https://${hostname}/api/v1/mining/pool/${slug}" >/dev/null
-    curl -s "https://${hostname}/api/v1/mining/pool/${slug}/hashrate" >/dev/null
-    curl -s "https://${hostname}/api/v1/mining/pool/${slug}/blocks" >/dev/null
+    warm "https://${hostname}/api/v1/mining/pool/${slug}"
+    warm "https://${hostname}/api/v1/mining/pool/${slug}/hashrate"
+    warm "https://${hostname}/api/v1/mining/pool/${slug}/blocks"
 done

 sleep 10
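Because warm() runs curl -i -s "$1" | head -1, each request now logs the URL followed by the HTTP status line instead of being fully silent. Illustrative output (the actual status depends on the server):

    $ warm "https://mempool.space/api/v1/blocks"
    https://mempool.space/api/v1/blocks
    HTTP/2 200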
@@ -1,62 +0,0 @@
-#!/usr/bin/env zsh
-PATH=/sbin:/bin:/usr/sbin:/usr/bin:/usr/local/sbin:/usr/local/bin:$HOME/bin
-HOSTNAME=$(hostname)
-LOCATION=$(hostname|cut -d . -f2)
-LOCKFILE="${HOME}/lock"
-REF=$(echo "${1:=origin/master}"|sed -e 's!:!/!')
-
-if [ -f "${LOCKFILE}" ];then
-    echo "upgrade already running? check lockfile ${LOCKFILE}"
-    exit 1
-fi
-
-# on exit, remove lockfile but preserve exit code
-trap "rv=\$?; rm -f "${LOCKFILE}"; exit \$rv" INT TERM EXIT
-
-# create lockfile
-touch "${LOCKFILE}"
-
-# notify logged in users
-echo "Upgrading unfurler to ${REF}" | wall
-
-update_repo()
-{
-    echo "[*] Upgrading unfurler to ${REF}"
-    cd "$HOME/unfurl/unfurler" || exit 1
-
-    git fetch origin || exit 1
-    for remote in origin;do
-        git remote add "${remote}" "https://github.com/${remote}/mempool" >/dev/null 2>&1
-        git fetch "${remote}" || exit 1
-    done
-
-    if [ $(git tag -l "${REF}") ];then
-        git reset --hard "tags/${REF}" || exit 1
-    elif [ $(git branch -r -l "origin/${REF}") ];then
-        git reset --hard "origin/${REF}" || exit 1
-    else
-        git reset --hard "${REF}" || exit 1
-    fi
-    export HASH=$(git rev-parse HEAD)
-}
-
-build_backend()
-{
-    echo "[*] Building backend for unfurler"
-    [ -z "${HASH}" ] && exit 1
-    cd "$HOME/unfurl/unfurler" || exit 1
-    if [ ! -e "config.json" ];then
-        cp "${HOME}/unfurl/production/mempool-config.unfurl.json" "config.json"
-    fi
-    npm install || exit 1
-    npm run build || exit 1
-}
-
-update_repo
-build_backend
-
-# notify everyone
-echo "${HOSTNAME} unfurl updated to \`${REF}\` @ \`${HASH}\`" | /usr/local/bin/keybase chat send --nonblock --channel general mempool.dev
-echo "${HOSTNAME} unfurl updated to \`${REF}\` @ \`${HASH}\`" | /usr/local/bin/keybase chat send --nonblock --channel general "mempool.ops.${LOCATION}"
-
-exit 0
@@ -1,2 +0,0 @@
-#!/usr/bin/env zsh
-killall sh node
@@ -1,6 +0,0 @@
-#!/usr/bin/env zsh
-export NVM_DIR="$HOME/.nvm"
-source "$NVM_DIR/nvm.sh"
-
-cd "${HOME}/unfurl/unfurler/" && \
-screen -dmS "unfurl" sh -c 'while true;do npm run start-production;sleep 1;done'
production/unfurler-config.liquid.json (new file)
@@ -0,0 +1,17 @@
+{
+  "SERVER": {
+    "HOST": "https://liquid.network",
+    "HTTP_PORT": 8002
+  },
+  "MEMPOOL": {
+    "HTTP_HOST": "https://liquid.network",
+    "HTTP_PORT": 443,
+    "NETWORK": "liquid"
+  },
+  "PUPPETEER": {
+    "CLUSTER_SIZE": 8,
+    "EXEC_PATH": "/usr/local/bin/chrome",
+    "MAX_PAGE_AGE": 86400,
+    "RENDER_TIMEOUT": 3000
+  }
+}
production/unfurler-config.mainnet.json (new file)
@@ -0,0 +1,17 @@
+{
+  "SERVER": {
+    "HOST": "https://mempool.space",
+    "HTTP_PORT": 8001
+  },
+  "MEMPOOL": {
+    "HTTP_HOST": "https://mempool.space",
+    "HTTP_PORT": 443,
+    "NETWORK": "bitcoin"
+  },
+  "PUPPETEER": {
+    "CLUSTER_SIZE": 8,
+    "EXEC_PATH": "/usr/local/bin/chrome",
+    "MAX_PAGE_AGE": 86400,
+    "RENDER_TIMEOUT": 3000
+  }
+}
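Both configs point Puppeteer at a system Chrome binary (hence the PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=true install in build_unfurler) and bind the unfurler to a local port: 8001 for mainnet, 8002 for liquid. A minimal post-deploy smoke test, assuming the service is up and listening on the configured port:

    curl -s -o /dev/null -w '%{http_code}\n' "http://127.0.0.1:8001/"   # mainnet unfurler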
@@ -11,7 +11,7 @@
     "tsc": "./node_modules/typescript/bin/tsc",
     "build": "npm run tsc",
     "start": "node --max-old-space-size=2048 dist/index.js",
-    "start-production": "node --max-old-space-size=4096 dist/index.js",
+    "unfurler": "node --max-old-space-size=4096 dist/index.js",
     "lint": "./node_modules/.bin/eslint . --ext .ts",
     "lint:fix": "./node_modules/.bin/eslint . --ext .ts --fix",
     "prettier": "./node_modules/.bin/prettier --write \"src/**/*.{js,ts}\""
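With start-production renamed to unfurler, the npm script name now matches what the restart loop in the start script invokes. In effect, each unfurler session runs:

    cd "$HOME/mainnet/unfurler" && npm run unfurler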
@@ -41,6 +41,6 @@
     "--use-mock-keychain",
     "--ignore-gpu-blacklist",
     "--ignore-gpu-blocklist",
-    "--use-gl=swiftshader"
+    "--use-gl=egl"
   ]
 }